Merge changes Iebd26509,I57a69f05 into main
* changes:
APM: Add a test to prevent regressions like the one from aosp/3020124
Fix parsing of legacy engine config XML files
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index 96fb3e3..af82982 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -29,6 +29,13 @@
}
flag {
+ name: "codec_availability_support"
+ namespace: "codec_fwk"
+ description: "Feature flag for codec availability HAL API implementation"
+ bug: "363282971"
+}
+
+flag {
name: "codec_buffer_state_cleanup"
namespace: "codec_fwk"
description: "Bugfix flag for more buffer state cleanup in MediaCodec"
diff --git a/media/aconfig/swcodec_flags.aconfig b/media/aconfig/swcodec_flags.aconfig
index a435a43..9dd1fdd 100644
--- a/media/aconfig/swcodec_flags.aconfig
+++ b/media/aconfig/swcodec_flags.aconfig
@@ -12,3 +12,12 @@
description: "Feature flag for APV Software C2 codec"
bug: "376770121"
}
+
+flag {
+ name: "mpeg2_keep_threads_active"
+ is_exported: true
+ is_fixed_read_only: true
+ namespace: "codec_fwk"
+ description: "Enable keep_threads_active in mpeg2 decoder"
+ bug: "343793479"
+}
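
For reference, a minimal sketch (not part of the patch) of how native code reads the new flag once `android.media.swcodec.flags-aconfig-cc` is linked in; the header and accessor below match the usage added to C2SoftMpeg2Dec.cpp later in this change:

    // Sketch: query the aconfig flag declared above from C++.
    #include <android_media_swcodec_flags.h>

    bool keepThreadsActive() {
        // Generated accessor; returns the flag's current value.
        return android::media::swcodec::flags::mpeg2_keep_threads_active();
    }
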
diff --git a/media/codec2/components/apv/C2SoftApvEnc.cpp b/media/codec2/components/apv/C2SoftApvEnc.cpp
index 9c5e0b2..9d84bc7 100644
--- a/media/codec2/components/apv/C2SoftApvEnc.cpp
+++ b/media/codec2/components/apv/C2SoftApvEnc.cpp
@@ -222,6 +222,7 @@
.build());
std::vector<uint32_t> pixelFormats = {
HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ HAL_PIXEL_FORMAT_YCBCR_420_888,
};
if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
@@ -231,7 +232,7 @@
}
addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
.withDefault(new C2StreamPixelFormatInfo::input(
- 0u, HAL_PIXEL_FORMAT_YCBCR_P010))
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
.withFields({C2F(mPixelFormat, value).oneOf({pixelFormats})})
.withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
.build());
@@ -272,6 +273,13 @@
if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
me.set().level = LEVEL_APV_1_BAND_0;
}
+
+ int32_t bandIdc = me.v.level <= LEVEL_APV_7_1_BAND_0 ? 0 :
+ me.v.level <= LEVEL_APV_7_1_BAND_1 ? 1 :
+ me.v.level <= LEVEL_APV_7_1_BAND_2 ? 2 : 3;
+
+ me.set().level = decisionApvLevel(size.v.width, size.v.height, frameRate.v.value,
+ (uint64_t)bitrate.v.value, bandIdc);
return C2R::Ok();
}
@@ -302,6 +310,119 @@
return C2R::Ok();
}
+ static C2Config::level_t decisionApvLevel(int32_t width, int32_t height, int32_t fps,
+ uint64_t bitrate, int32_t band) {
+ C2Config::level_t level = C2Config::LEVEL_APV_1_BAND_0;
+ struct LevelLimits {
+ C2Config::level_t level;
+ uint64_t samplesPerSec;
+ uint64_t kbpsOfBand;
+ };
+
+ constexpr LevelLimits kLimitsBand0[] = {
+ {LEVEL_APV_1_BAND_0, 3'041'280, 7'000},
+ {LEVEL_APV_1_1_BAND_0, 6'082'560, 14'000},
+ {LEVEL_APV_2_BAND_0, 15'667'200, 36'000},
+ {LEVEL_APV_2_1_BAND_0, 31'334'400, 71'000},
+ {LEVEL_APV_3_BAND_0, 66'846'720, 101'000},
+ {LEVEL_APV_3_1_BAND_0, 133'693'440, 201'000},
+ {LEVEL_APV_4_BAND_0, 265'420'800, 401'000},
+ {LEVEL_APV_4_1_BAND_0, 530'841'600, 780'000},
+ {LEVEL_APV_5_BAND_0, 1'061'683'200, 1'560'000},
+ {LEVEL_APV_5_1_BAND_0, 2'123'366'400, 3'324'000},
+ {LEVEL_APV_6_BAND_0, 4'777'574'400, 6'648'000},
+ {LEVEL_APV_6_1_BAND_0, 8'493'465'600, 13'296'000},
+ {LEVEL_APV_7_BAND_0, 16'986'931'200, 26'592'000},
+ {LEVEL_APV_7_1_BAND_0, 33'973'862'400, 53'184'000},
+ };
+
+ constexpr LevelLimits kLimitsBand1[] = {
+ {LEVEL_APV_1_BAND_1, 3'041'280, 11'000},
+ {LEVEL_APV_1_1_BAND_1, 6'082'560, 21'000},
+ {LEVEL_APV_2_BAND_1, 15'667'200, 53'000},
+ {LEVEL_APV_2_1_BAND_1, 31'334'400, 106'000},
+ {LEVEL_APV_3_BAND_1, 66'846'720, 151'000},
+ {LEVEL_APV_3_1_BAND_1, 133'693'440, 301'000},
+ {LEVEL_APV_4_BAND_1, 265'420'800, 602'000},
+ {LEVEL_APV_4_1_BAND_1, 530'841'600, 1'170'000},
+ {LEVEL_APV_5_BAND_1, 1'061'683'200, 2'340'000},
+ {LEVEL_APV_5_1_BAND_1, 2'123'366'400, 4'986'000},
+ {LEVEL_APV_6_BAND_1, 4'777'574'400, 9'972'000},
+ {LEVEL_APV_6_1_BAND_1, 8'493'465'600, 19'944'000},
+ {LEVEL_APV_7_BAND_1, 16'986'931'200, 39'888'000},
+ {LEVEL_APV_7_1_BAND_1, 33'973'862'400, 79'776'000},
+ };
+
+ constexpr LevelLimits kLimitsBand2[] = {
+ {LEVEL_APV_1_BAND_2, 3'041'280, 14'000},
+ {LEVEL_APV_1_1_BAND_2, 6'082'560, 28'000},
+ {LEVEL_APV_2_BAND_2, 15'667'200, 71'000},
+ {LEVEL_APV_2_1_BAND_2, 31'334'400, 141'000},
+ {LEVEL_APV_3_BAND_2, 66'846'720, 201'000},
+ {LEVEL_APV_3_1_BAND_2, 133'693'440, 401'000},
+ {LEVEL_APV_4_BAND_2, 265'420'800, 780'000},
+ {LEVEL_APV_4_1_BAND_2, 530'841'600, 1'560'000},
+ {LEVEL_APV_5_BAND_2, 1'061'683'200, 3'324'000},
+ {LEVEL_APV_5_1_BAND_2, 2'123'366'400, 6'648'000},
+ {LEVEL_APV_6_BAND_2, 4'777'574'400, 13'296'000},
+ {LEVEL_APV_6_1_BAND_2, 8'493'465'600, 26'592'000},
+ {LEVEL_APV_7_BAND_2, 16'986'931'200, 53'184'000},
+ {LEVEL_APV_7_1_BAND_2, 33'973'862'400, 106'368'000},
+ };
+
+ constexpr LevelLimits kLimitsBand3[] = {
+ {LEVEL_APV_1_BAND_3, 3'041'280, 21'000},
+ {LEVEL_APV_1_1_BAND_3, 6'082'560, 42'000},
+ {LEVEL_APV_2_BAND_3, 15'667'200, 106'000},
+ {LEVEL_APV_2_1_BAND_3, 31'334'400, 212'000},
+ {LEVEL_APV_3_BAND_3, 66'846'720, 301'000},
+ {LEVEL_APV_3_1_BAND_3, 133'693'440, 602'000},
+ {LEVEL_APV_4_BAND_3, 265'420'800, 1'170'000},
+ {LEVEL_APV_4_1_BAND_3, 530'841'600, 2'340'000},
+ {LEVEL_APV_5_BAND_3, 1'061'683'200, 4'986'000},
+ {LEVEL_APV_5_1_BAND_3, 2'123'366'400, 9'972'000},
+ {LEVEL_APV_6_BAND_3, 4'777'574'400, 19'944'000},
+ {LEVEL_APV_6_1_BAND_3, 8'493'465'600, 39'888'000},
+ {LEVEL_APV_7_BAND_3, 16'986'931'200, 79'776'000},
+ {LEVEL_APV_7_1_BAND_3, 33'973'862'400, 159'552'000},
+ };
+
+ uint64_t samplesPerSec = (uint64_t)width * height * fps;
+ if (band == 0) {
+ for (const LevelLimits& limit : kLimitsBand0) {
+ if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.kbpsOfBand * 1000) {
+ level = limit.level;
+ break;
+ }
+ }
+ } else if (band == 1) {
+ for (const LevelLimits& limit : kLimitsBand1) {
+ if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.kbpsOfBand * 1000) {
+ level = limit.level;
+ break;
+ }
+ }
+ } else if (band == 2) {
+ for (const LevelLimits& limit : kLimitsBand2) {
+ if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.kbpsOfBand * 1000) {
+ level = limit.level;
+ break;
+ }
+ }
+ } else if (band == 3) {
+ for (const LevelLimits& limit : kLimitsBand3) {
+ if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.kbpsOfBand * 1000) {
+ level = limit.level;
+ break;
+ }
+ }
+ } else {
+ ALOGE("Invalid band_idc on calculte level");
+ }
+
+ return level;
+ }
+
uint32_t getProfile_l() const {
int32_t profile = PROFILE_UNUSED;
@@ -328,7 +449,7 @@
profile = 99;
break;
default:
- ALOGD("Unrecognized profile: %x", mProfileLevel->profile);
+ ALOGW("Unrecognized profile: %x", mProfileLevel->profile);
}
return profile;
}
@@ -339,54 +460,264 @@
// TODO: Add Band settings
switch (mProfileLevel->level) {
case C2Config::LEVEL_APV_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_1_BAND_3:
level = 10;
break;
case C2Config::LEVEL_APV_1_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_1_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_1_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_1_1_BAND_3:
level = 11;
break;
case C2Config::LEVEL_APV_2_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_BAND_3:
level = 20;
break;
case C2Config::LEVEL_APV_2_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_1_BAND_3:
level = 21;
break;
case C2Config::LEVEL_APV_3_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_BAND_3:
level = 30;
break;
case C2Config::LEVEL_APV_3_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_1_BAND_3:
level = 31;
break;
case C2Config::LEVEL_APV_4_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_BAND_3:
level = 40;
break;
case C2Config::LEVEL_APV_4_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_1_BAND_3:
level = 41;
break;
case C2Config::LEVEL_APV_5_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_BAND_3:
level = 50;
break;
case C2Config::LEVEL_APV_5_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_1_BAND_3:
level = 51;
break;
case C2Config::LEVEL_APV_6_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_BAND_3:
level = 60;
break;
case C2Config::LEVEL_APV_6_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_1_BAND_3:
level = 61;
break;
case C2Config::LEVEL_APV_7_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_BAND_3:
level = 70;
break;
case C2Config::LEVEL_APV_7_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_1_BAND_3:
level = 71;
break;
default:
- ALOGD("Unrecognized level: %x", mProfileLevel->level);
+ ALOGW("Unrecognized level: %x", mProfileLevel->level);
}
// Convert to APV level_idc according to APV spec
return level * 3;
}
+ uint32_t getBandIdc_l() const {
+ uint32_t bandIdc = 0;
+
+ switch (mProfileLevel->level) {
+ case C2Config::LEVEL_APV_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_1_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_1_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_BAND_0:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_1_BAND_0:
+ bandIdc = 0;
+ break;
+ case C2Config::LEVEL_APV_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_1_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_1_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_BAND_1:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_1_BAND_1:
+ bandIdc = 1;
+ break;
+ case C2Config::LEVEL_APV_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_1_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_1_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_BAND_2:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_1_BAND_2:
+ bandIdc = 2;
+ break;
+ case C2Config::LEVEL_APV_1_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_1_1_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_2_1_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_3_1_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_4_1_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_5_1_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_6_1_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_BAND_3:
+ [[fallthrough]];
+ case C2Config::LEVEL_APV_7_1_BAND_3:
+ bandIdc = 3;
+ break;
+ default:
+ ALOGW("Unrecognized bandIdc through level: %x", mProfileLevel->level);
+ }
+ return bandIdc;
+ }
+
int32_t getBitrateMode_l() const {
int32_t bitrateMode = C2Config::BITRATE_CONST;
@@ -636,7 +967,7 @@
param.h = mSize->height;
param.fps_num = (int)(mFrameRate->value * 100);
param.fps_den = 100;
- param.bitrate = mBitrate->value / 1000;
+ param.bitrate = (int)(mBitrate->value / 1000);
param.rc_type = mIntf->getBitrateMode_l();
int ApvQP = kApvDefaultQP;
@@ -646,14 +977,8 @@
mQuality->value, ApvQP);
}
param.qp = ApvQP;
- param.band_idc = 0; // TODO: Get from the Level setting
+ param.band_idc = mIntf->getBandIdc_l();
param.profile_idc = mIntf->getProfile_l();
- C2Config::level_t level = decisionApvLevel(
- param.w, param.h, (int)(param.fps_num / param.fps_den), param.bitrate, param.band_idc);
- if (mProfileLevel->level != level) {
- mProfileLevel->level = level;
- ALOGI("Need to update level to %d", mIntf->getLevel_l());
- }
param.level_idc = mIntf->getLevel_l();
}
@@ -750,120 +1075,6 @@
return C2_OK;
}
-C2Config::level_t C2SoftApvEnc::decisionApvLevel(int32_t width, int32_t height, int32_t fps,
- int32_t bitrate, int32_t band) {
- C2Config::level_t level = C2Config::LEVEL_APV_1_BAND_0;
-
- struct LevelLimits {
- C2Config::level_t level;
- uint64_t samplesPerSec;
- uint32_t bitratesOfBand;
- };
-
- constexpr LevelLimits kLimitsBand0[] = {
- {LEVEL_APV_1_BAND_0, 3'041'280, 7'000},
- {LEVEL_APV_1_1_BAND_0, 6'082'560, 14'000},
- {LEVEL_APV_2_BAND_0, 15'667'200, 36'000},
- {LEVEL_APV_2_1_BAND_0, 31'334'400, 71'000},
- {LEVEL_APV_3_BAND_0, 66'846'720, 101'000},
- {LEVEL_APV_3_1_BAND_0, 133'693'440, 201'000},
- {LEVEL_APV_4_BAND_0, 265'420'800, 401'000},
- {LEVEL_APV_4_1_BAND_0, 530'841'600, 780'000},
- {LEVEL_APV_5_BAND_0, 1'061'683'200, 1'560'000},
- {LEVEL_APV_5_1_BAND_0, 2'123'366'400, 3'324'000},
- {LEVEL_APV_6_BAND_0, 4'777'574'400, 6'648'000},
- {LEVEL_APV_6_1_BAND_0, 8'493'465'600, 13'296'000},
- {LEVEL_APV_7_BAND_0, 16'986'931'200, 26'592'000},
- {LEVEL_APV_7_1_BAND_0, 33'973'862'400, 53'184'000},
- };
-
- constexpr LevelLimits kLimitsBand1[] = {
- {LEVEL_APV_1_BAND_1, 3'041'280, 11'000},
- {LEVEL_APV_1_1_BAND_1, 6'082'560, 21'000},
- {LEVEL_APV_2_BAND_1, 15'667'200, 53'000},
- {LEVEL_APV_2_1_BAND_1, 31'334'400, 106'00},
- {LEVEL_APV_3_BAND_1, 66'846'720, 151'000},
- {LEVEL_APV_3_1_BAND_1, 133'693'440, 301'000},
- {LEVEL_APV_4_BAND_1, 265'420'800, 602'000},
- {LEVEL_APV_4_1_BAND_1, 530'841'600, 1'170'000},
- {LEVEL_APV_5_BAND_1, 1'061'683'200, 2'340'000},
- {LEVEL_APV_5_1_BAND_1, 2'123'366'400, 4'986'000},
- {LEVEL_APV_6_BAND_1, 4'777'574'400, 9'972'000},
- {LEVEL_APV_6_1_BAND_1, 8'493'465'600, 19'944'000},
- {LEVEL_APV_7_BAND_1, 16'986'931'200, 39'888'000},
- {LEVEL_APV_7_1_BAND_1, 33'973'862'400, 79'776'000},
- };
-
- constexpr LevelLimits kLimitsBand2[] = {
- {LEVEL_APV_1_BAND_2, 3'041'280, 14'000},
- {LEVEL_APV_1_1_BAND_2, 6'082'560, 28'000},
- {LEVEL_APV_2_BAND_2, 15'667'200, 71'000},
- {LEVEL_APV_2_1_BAND_2, 31'334'400, 141'000},
- {LEVEL_APV_3_BAND_2, 66'846'720, 201'000},
- {LEVEL_APV_3_1_BAND_2, 133'693'440, 401'000},
- {LEVEL_APV_4_BAND_2, 265'420'800, 780'000},
- {LEVEL_APV_4_1_BAND_2, 530'841'600, 1'560'000},
- {LEVEL_APV_5_BAND_2, 1'061'683'200, 3'324'000},
- {LEVEL_APV_5_1_BAND_2, 2'123'366'400, 6'648'000},
- {LEVEL_APV_6_BAND_2, 4'777'574'400, 13'296'000},
- {LEVEL_APV_6_1_BAND_2, 8'493'465'600, 26'592'000},
- {LEVEL_APV_7_BAND_2, 16'986'931'200, 53'184'000},
- {LEVEL_APV_7_1_BAND_2, 33'973'862'400, 106'368'000},
- };
-
- constexpr LevelLimits kLimitsBand3[] = {
- {LEVEL_APV_1_BAND_3, 3'041'280, 21'000},
- {LEVEL_APV_1_1_BAND_3, 6'082'560, 42'000},
- {LEVEL_APV_2_BAND_3, 15'667'200, 106'000},
- {LEVEL_APV_2_1_BAND_3, 31'334'400, 212'000},
- {LEVEL_APV_3_BAND_3, 66'846'720, 301'000},
- {LEVEL_APV_3_1_BAND_3, 133'693'440, 602'000},
- {LEVEL_APV_4_BAND_3, 265'420'800, 1'170'000},
- {LEVEL_APV_4_1_BAND_3, 530'841'600, 2'340'000},
- {LEVEL_APV_5_BAND_3, 1'061'683'200, 4'986'000},
- {LEVEL_APV_5_1_BAND_3, 2'123'366'400, 9'972'000},
- {LEVEL_APV_6_BAND_3, 4'777'574'400, 19'944'000},
- {LEVEL_APV_6_1_BAND_3, 8'493'465'600, 39'888'000},
- {LEVEL_APV_7_BAND_3, 16'986'931'200, 79'776'000},
- {LEVEL_APV_7_1_BAND_3, 33'973'862'400, 159'552'000},
- };
-
- uint64_t samplesPerSec = width * height * fps;
- if (band == 0) {
- for (const LevelLimits& limit : kLimitsBand0) {
- if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
- level = limit.level;
- break;
- }
- }
- } else if (band == 1) {
- for (const LevelLimits& limit : kLimitsBand1) {
- if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
- level = limit.level;
- break;
- }
- }
- } else if (band == 2) {
- for (const LevelLimits& limit : kLimitsBand2) {
- if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
- level = limit.level;
- break;
- }
- }
- } else if (band == 3) {
- for (const LevelLimits& limit : kLimitsBand3) {
- if (samplesPerSec <= limit.samplesPerSec && bitrate <= limit.bitratesOfBand) {
- level = limit.level;
- break;
- }
- }
- } else {
- ALOGE("Invalid band_idc on calculte level");
- }
-
- return level;
-}
-
void C2SoftApvEnc::ColorConvertP010ToYUV422P10le(const C2GraphicView* const input,
oapv_imgb_t* imgb) {
uint32_t width = input->width();
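
A worked example (illustration only, not part of the patch) of the level decision now performed by the static decisionApvLevel() above: a 3840x2160 stream at 60 fps yields 3840 * 2160 * 60 = 497,664,000 samples per second; in band 0 the first table entry that also admits a 400,000,000 bps target is LEVEL_APV_4_1_BAND_0 (530,841,600 samples/s, 780,000 kbps), which getLevel_l() then encodes as level_idc 41 * 3 = 123.

    // Standalone re-creation of the band-0 lookup for the case above.
    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint64_t samplesPerSec = 3840ull * 2160 * 60;  // 497,664,000
        const uint64_t bitrateBps = 400'000'000;
        // Two adjacent rows of the band-0 table from C2SoftApvEnc.cpp (samples/s, kbps).
        const struct { const char* name; uint64_t samples; uint64_t kbps; } kBand0[] = {
            {"LEVEL_APV_4_BAND_0",   265'420'800, 401'000},
            {"LEVEL_APV_4_1_BAND_0", 530'841'600, 780'000},
        };
        for (const auto& row : kBand0) {
            if (samplesPerSec <= row.samples && bitrateBps <= row.kbps * 1000) {
                std::printf("selected %s\n", row.name);  // prints LEVEL_APV_4_1_BAND_0
                break;
            }
        }
        return 0;
    }
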
diff --git a/media/codec2/components/apv/C2SoftApvEnc.h b/media/codec2/components/apv/C2SoftApvEnc.h
index fc4ad7d..f281052 100644
--- a/media/codec2/components/apv/C2SoftApvEnc.h
+++ b/media/codec2/components/apv/C2SoftApvEnc.h
@@ -60,8 +60,6 @@
const std::unique_ptr<C2Work>& work);
void setParams(oapve_param_t& param);
int32_t getQpFromQuality(int quality);
- C2Config::level_t decisionApvLevel(int32_t width, int32_t height, int32_t fps, int32_t bitrate,
- int32_t band);
void showEncoderParams(oapve_cdesc_t* cdsc);
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index e644ee3..ed711ee 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -14,11 +14,10 @@
"libcodec2_soft_sanitize_signed-defaults",
],
- cflags: [
- "-DKEEP_THREADS_ACTIVE=0",
- ],
-
srcs: ["C2SoftMpeg2Dec.cpp"],
- static_libs: ["libmpeg2dec"],
+ static_libs: [
+ "libmpeg2dec",
+ "android.media.swcodec.flags-aconfig-cc",
+ ],
}
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 52920c2..64e4bf0 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -16,11 +16,10 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftMpeg2Dec"
-#ifndef KEEP_THREADS_ACTIVE
-#define KEEP_THREADS_ACTIVE 0
-#endif
#include <log/log.h>
+#include <android_media_swcodec_flags.h>
+
#include <media/stagefright/foundation/MediaDefs.h>
#include <C2Debug.h>
@@ -320,14 +319,7 @@
c2_node_id_t id,
const std::shared_ptr<IntfImpl> &intfImpl)
: SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
- mIntf(intfImpl),
- mDecHandle(nullptr),
- mMemRecords(nullptr),
- mOutBufferDrain(nullptr),
- mIvColorformat(IV_YUV_420P),
- mWidth(320),
- mHeight(240),
- mOutIndex(0u) {
+ mIntf(intfImpl) {
// If input dump is enabled, then open create an empty file
GENERATE_FILE_NAMES();
CREATE_DUMP_FILE(mInFile);
@@ -436,7 +428,7 @@
s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
s_fill_mem_ip.u4_share_disp_buf = 0;
- s_fill_mem_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
+ s_fill_mem_ip.u4_keep_threads_active = mKeepThreadsActive;
s_fill_mem_ip.e_output_format = mIvColorformat;
s_fill_mem_ip.u4_deinterlace = 1;
s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
@@ -478,7 +470,7 @@
s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
s_init_ip.u4_share_disp_buf = 0;
s_init_ip.u4_deinterlace = 1;
- s_init_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
+ s_init_ip.u4_keep_threads_active = mKeepThreadsActive;
s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
@@ -571,6 +563,7 @@
status_t ret = getNumMemRecords();
if (OK != ret) return ret;
+ mKeepThreadsActive = android::media::swcodec::flags::mpeg2_keep_threads_active();
ret = fillMemRecords();
if (OK != ret) return ret;
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
index 3965bcc..6d09694 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
@@ -144,21 +144,22 @@
};
std::shared_ptr<IntfImpl> mIntf;
- iv_obj_t *mDecHandle;
- iv_mem_rec_t *mMemRecords;
- size_t mNumMemRecords;
+ iv_obj_t *mDecHandle = nullptr;
+ iv_mem_rec_t *mMemRecords = nullptr;
+ size_t mNumMemRecords = 0;
std::shared_ptr<C2GraphicBlock> mOutBlock;
- uint8_t *mOutBufferDrain;
+ uint8_t *mOutBufferDrain = nullptr;
- size_t mNumCores;
- IV_COLOR_FORMAT_T mIvColorformat;
+ size_t mNumCores = 1;
+ IV_COLOR_FORMAT_T mIvColorformat = IV_YUV_420P;
- uint32_t mWidth;
- uint32_t mHeight;
- uint32_t mStride;
- bool mSignalledOutputEos;
- bool mSignalledError;
- std::atomic_uint64_t mOutIndex;
+ uint32_t mWidth = 320;
+ uint32_t mHeight = 240;
+ uint32_t mStride = 0;
+ bool mSignalledOutputEos = false;
+ bool mSignalledError = false;
+ bool mKeepThreadsActive = false;
+ std::atomic_uint64_t mOutIndex = 0;
// Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
// converting them to C2 values for each frame
diff --git a/media/codec2/hal/client/Android.bp b/media/codec2/hal/client/Android.bp
index 864eeb8..029044f 100644
--- a/media/codec2/hal/client/Android.bp
+++ b/media/codec2/hal/client/Android.bp
@@ -23,6 +23,7 @@
name: "libcodec2_client",
srcs: [
+ "ApexCodecsLazy.cpp",
"GraphicBufferAllocator.cpp",
"GraphicsTracker.cpp",
"client.cpp",
@@ -41,17 +42,18 @@
cpp_std: "gnu++20",
header_libs: [
+ "libapexcodecs-header",
"libcodec2_internal", // private
],
shared_libs: [
"android.hardware.graphics.bufferqueue@1.0",
+ "android.hardware.media.bufferpool2-V2-ndk",
"android.hardware.media.bufferpool@2.0",
+ "android.hardware.media.c2-V1-ndk",
"android.hardware.media.c2@1.0",
"android.hardware.media.c2@1.1",
"android.hardware.media.c2@1.2",
- "android.hardware.media.bufferpool2-V2-ndk",
- "android.hardware.media.c2-V1-ndk",
"libbase",
"libbinder",
"libbinder_ndk",
@@ -79,6 +81,10 @@
"include",
],
+ export_header_lib_headers: [
+ "libapexcodecs-header",
+ ],
+
export_shared_lib_headers: [
"android.hardware.media.c2@1.0",
"android.hardware.media.c2@1.1",
@@ -89,5 +95,4 @@
"libcodec2_hidl_client@1.2",
"libcodec2_vndk",
],
-
}
diff --git a/media/codec2/hal/client/ApexCodecsLazy.cpp b/media/codec2/hal/client/ApexCodecsLazy.cpp
new file mode 100644
index 0000000..cd7953e
--- /dev/null
+++ b/media/codec2/hal/client/ApexCodecsLazy.cpp
@@ -0,0 +1,295 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ApexCodecsLazy"
+#include <log/log.h>
+
+#include <mutex>
+
+#include <dlfcn.h>
+
+#include <android-base/no_destructor.h>
+#include <apex/ApexCodecs.h>
+#include <utils/RWLock.h>
+
+using android::RWLock;
+
+namespace {
+
+// This file provides a lazy interface to libapexcodecs.so to address early boot dependencies.
+
+// Method pointers to libapexcodecs methods are held in an array, which simplifies
+// checking that all pointers are initialized.
+enum MethodIndex {
+ k_ApexCodec_Component_create,
+ k_ApexCodec_Component_destroy,
+ k_ApexCodec_Component_flush,
+ k_ApexCodec_Component_getConfigurable,
+ k_ApexCodec_Component_process,
+ k_ApexCodec_Component_start,
+ k_ApexCodec_Component_reset,
+ k_ApexCodec_Configurable_config,
+ k_ApexCodec_Configurable_query,
+ k_ApexCodec_Configurable_querySupportedParams,
+ k_ApexCodec_Configurable_querySupportedValues,
+ k_ApexCodec_GetComponentStore,
+ k_ApexCodec_ParamDescriptors_getDescriptor,
+ k_ApexCodec_ParamDescriptors_getIndices,
+ k_ApexCodec_ParamDescriptors_release,
+ k_ApexCodec_SettingResults_getResultAtIndex,
+ k_ApexCodec_SettingResults_release,
+ k_ApexCodec_SupportedValues_getTypeAndValues,
+ k_ApexCodec_SupportedValues_release,
+ k_ApexCodec_Traits_get,
+
+ // Marker for count of methods
+ k_MethodCount
+};
+
+class ApexCodecsLazyLoader {
+public:
+ ApexCodecsLazyLoader() = default;
+
+ static ApexCodecsLazyLoader &Get() {
+ static ::android::base::NoDestructor<ApexCodecsLazyLoader> sLoader;
+ return *sLoader;
+ }
+
+ void *getMethodAt(enum MethodIndex index) {
+ RWLock::AutoRLock l(mLock);
+ if (mInit) {
+ return mMethods[index];
+ } else {
+ mLock.unlock();
+ if (!init()) {
+ return nullptr;
+ }
+ mLock.readLock();
+ return mMethods[index];
+ }
+ }
+
+private:
+ static void* LoadLibapexcodecs(int dlopen_flags) {
+ return dlopen("libapexcodecs.so", dlopen_flags);
+ }
+
+ // Initialization and symbol binding.
+ void bindSymbol_l(void* handle, const char* name, enum MethodIndex index) {
+ void* symbol = dlsym(handle, name);
+ ALOGI_IF(symbol == nullptr, "Failed to find symbol '%s' in libapexcodecs.so: %s",
+ name, dlerror());
+ mMethods[index] = symbol;
+ }
+
+ bool init() {
+ {
+ RWLock::AutoRLock l(mLock);
+ if (mInit) {
+ return true;
+ }
+ }
+ void* handle = LoadLibapexcodecs(RTLD_NOW);
+ if (handle == nullptr) {
+ ALOGI("Failed to load libapexcodecs.so: %s", dlerror());
+ return false;
+ }
+
+ RWLock::AutoWLock l(mLock);
+#undef BIND_SYMBOL
+#define BIND_SYMBOL(name) bindSymbol_l(handle, #name, k_##name);
+ BIND_SYMBOL(ApexCodec_Component_create);
+ BIND_SYMBOL(ApexCodec_Component_destroy);
+ BIND_SYMBOL(ApexCodec_Component_flush);
+ BIND_SYMBOL(ApexCodec_Component_getConfigurable);
+ BIND_SYMBOL(ApexCodec_Component_process);
+ BIND_SYMBOL(ApexCodec_Component_start);
+ BIND_SYMBOL(ApexCodec_Component_reset);
+ BIND_SYMBOL(ApexCodec_Configurable_config);
+ BIND_SYMBOL(ApexCodec_Configurable_query);
+ BIND_SYMBOL(ApexCodec_Configurable_querySupportedParams);
+ BIND_SYMBOL(ApexCodec_Configurable_querySupportedValues);
+ BIND_SYMBOL(ApexCodec_GetComponentStore);
+ BIND_SYMBOL(ApexCodec_ParamDescriptors_getDescriptor);
+ BIND_SYMBOL(ApexCodec_ParamDescriptors_getIndices);
+ BIND_SYMBOL(ApexCodec_ParamDescriptors_release);
+ BIND_SYMBOL(ApexCodec_SettingResults_getResultAtIndex);
+ BIND_SYMBOL(ApexCodec_SettingResults_release);
+ BIND_SYMBOL(ApexCodec_SupportedValues_getTypeAndValues);
+ BIND_SYMBOL(ApexCodec_SupportedValues_release);
+ BIND_SYMBOL(ApexCodec_Traits_get);
+#undef BIND_SYMBOL
+
+ // Check every symbol is bound.
+ for (int i = 0; i < k_MethodCount; ++i) {
+ if (mMethods[i] == nullptr) {
+ ALOGI("Uninitialized method in libapexcodecs_lazy at index: %d", i);
+ return false;
+ }
+ }
+ mInit = true;
+ return true;
+ }
+
+ RWLock mLock;
+ // Table of method pointers for the libapexcodecs APIs.
+ void* mMethods[k_MethodCount];
+ bool mInit{false};
+};
+
+} // anonymous namespace
+
+#define INVOKE_METHOD(name, returnIfNull, args...) \
+ do { \
+ void* method = ApexCodecsLazyLoader::Get().getMethodAt(k_##name); \
+ if (!method) return (returnIfNull); \
+ return reinterpret_cast<decltype(&name)>(method)(args); \
+ } while (0)
+
+//
+// Forwarding for methods in ApexCodecs.h.
+//
+
+ApexCodec_ComponentStore *ApexCodec_GetComponentStore() {
+ INVOKE_METHOD(ApexCodec_GetComponentStore, nullptr);
+}
+
+ApexCodec_ComponentTraits *ApexCodec_Traits_get(
+ ApexCodec_ComponentStore *store, size_t index) {
+ INVOKE_METHOD(ApexCodec_Traits_get, nullptr, store, index);
+}
+
+ApexCodec_Status ApexCodec_Component_create(
+ ApexCodec_ComponentStore *store, const char *name, ApexCodec_Component **comp) {
+ INVOKE_METHOD(ApexCodec_Component_create, APEXCODEC_STATUS_OMITTED, store, name, comp);
+}
+
+void ApexCodec_Component_destroy(ApexCodec_Component *comp) {
+ INVOKE_METHOD(ApexCodec_Component_destroy, void(), comp);
+}
+
+ApexCodec_Status ApexCodec_Component_start(ApexCodec_Component *comp) {
+ INVOKE_METHOD(ApexCodec_Component_start, APEXCODEC_STATUS_OMITTED, comp);
+}
+
+ApexCodec_Status ApexCodec_Component_flush(ApexCodec_Component *comp) {
+ INVOKE_METHOD(ApexCodec_Component_flush, APEXCODEC_STATUS_OMITTED, comp);
+}
+
+ApexCodec_Status ApexCodec_Component_reset(ApexCodec_Component *comp) {
+ INVOKE_METHOD(ApexCodec_Component_reset, APEXCODEC_STATUS_OMITTED, comp);
+}
+
+ApexCodec_Configurable *ApexCodec_Component_getConfigurable(
+ ApexCodec_Component *comp) {
+ INVOKE_METHOD(ApexCodec_Component_getConfigurable, nullptr, comp);
+}
+
+ApexCodec_Status ApexCodec_SupportedValues_getTypeAndValues(
+ ApexCodec_SupportedValues *supportedValues,
+ ApexCodec_SupportedValuesType *type,
+ ApexCodec_SupportedValuesNumberType *numberType,
+ ApexCodec_Value **values,
+ uint32_t *numValues) {
+ INVOKE_METHOD(ApexCodec_SupportedValues_getTypeAndValues, APEXCODEC_STATUS_OMITTED,
+ supportedValues, type, numberType, values, numValues);
+}
+
+void ApexCodec_SupportedValues_release(ApexCodec_SupportedValues *values) {
+ INVOKE_METHOD(ApexCodec_SupportedValues_release, void(), values);
+}
+
+ApexCodec_Status ApexCodec_SettingResults_getResultAtIndex(
+ ApexCodec_SettingResults *results,
+ size_t index,
+ ApexCodec_SettingResultFailure *failure,
+ ApexCodec_ParamFieldValues *field,
+ ApexCodec_ParamFieldValues **conflicts,
+ size_t *numConflicts) {
+ INVOKE_METHOD(ApexCodec_SettingResults_getResultAtIndex, APEXCODEC_STATUS_OMITTED,
+ results, index, failure, field, conflicts, numConflicts);
+}
+
+void ApexCodec_SettingResults_release(ApexCodec_SettingResults *results) {
+ INVOKE_METHOD(ApexCodec_SettingResults_release, void(), results);
+}
+
+ApexCodec_Status ApexCodec_Component_process(
+ ApexCodec_Component *comp,
+ const ApexCodec_Buffer *input,
+ ApexCodec_Buffer *output,
+ size_t *consumed,
+ size_t *produced) {
+ INVOKE_METHOD(ApexCodec_Component_process, APEXCODEC_STATUS_OMITTED,
+ comp, input, output, consumed, produced);
+}
+
+ApexCodec_Status ApexCodec_Configurable_config(
+ ApexCodec_Configurable *comp,
+ ApexCodec_LinearBuffer *config,
+ ApexCodec_SettingResults **results) {
+ INVOKE_METHOD(ApexCodec_Configurable_config, APEXCODEC_STATUS_OMITTED, comp, config, results);
+}
+
+ApexCodec_Status ApexCodec_Configurable_query(
+ ApexCodec_Configurable *comp,
+ uint32_t indices[],
+ size_t numIndices,
+ ApexCodec_LinearBuffer *config,
+ size_t *writtenOrRequested) {
+ INVOKE_METHOD(ApexCodec_Configurable_query, APEXCODEC_STATUS_OMITTED,
+ comp, indices, numIndices, config, writtenOrRequested);
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getIndices(
+ ApexCodec_ParamDescriptors *descriptors,
+ uint32_t **indices,
+ size_t *numIndices) {
+ INVOKE_METHOD(ApexCodec_ParamDescriptors_getIndices, APEXCODEC_STATUS_OMITTED,
+ descriptors, indices, numIndices);
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_getDescriptor(
+ ApexCodec_ParamDescriptors *descriptors,
+ uint32_t index,
+ ApexCodec_ParamAttribute *attr,
+ const char **name,
+ uint32_t **dependencies,
+ size_t *numDependencies) {
+ INVOKE_METHOD(ApexCodec_ParamDescriptors_getDescriptor, APEXCODEC_STATUS_OMITTED,
+ descriptors, index, attr, name, dependencies, numDependencies);
+}
+
+ApexCodec_Status ApexCodec_ParamDescriptors_release(
+ ApexCodec_ParamDescriptors *descriptors) {
+ INVOKE_METHOD(ApexCodec_ParamDescriptors_release, APEXCODEC_STATUS_OMITTED, descriptors);
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedParams(
+ ApexCodec_Configurable *comp,
+ ApexCodec_ParamDescriptors **descriptors) {
+ INVOKE_METHOD(ApexCodec_Configurable_querySupportedParams, APEXCODEC_STATUS_OMITTED,
+ comp, descriptors);
+}
+
+ApexCodec_Status ApexCodec_Configurable_querySupportedValues(
+ ApexCodec_Configurable *comp,
+ ApexCodec_SupportedValuesQuery *queries,
+ size_t numQueries) {
+ INVOKE_METHOD(ApexCodec_Configurable_querySupportedValues, APEXCODEC_STATUS_OMITTED,
+ comp, queries, numQueries);
+}
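
To make the INVOKE_METHOD forwarding above concrete, here is a rough hand expansion of one wrapper (illustration only; the macro in ApexCodecsLazy.cpp generates the equivalent code): look up the lazily bound symbol, return the fallback value if libapexcodecs.so could not be loaded or the symbol is missing, otherwise cast and call it.

    // Approximate expansion of INVOKE_METHOD(ApexCodec_GetComponentStore, nullptr).
    ApexCodec_ComponentStore *ApexCodec_GetComponentStore() {
        void* method = ApexCodecsLazyLoader::Get().getMethodAt(k_ApexCodec_GetComponentStore);
        if (!method) return nullptr;  // library not loadable or symbol unbound
        return reinterpret_cast<decltype(&ApexCodec_GetComponentStore)>(method)();
    }
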
diff --git a/media/codec2/hal/client/GraphicBufferAllocator.cpp b/media/codec2/hal/client/GraphicBufferAllocator.cpp
index 6a6da0f..4055f9b 100644
--- a/media/codec2/hal/client/GraphicBufferAllocator.cpp
+++ b/media/codec2/hal/client/GraphicBufferAllocator.cpp
@@ -96,6 +96,10 @@
mGraphicsTracker->onAttached(generation);
}
+void GraphicBufferAllocator::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+ mGraphicsTracker->pollForRenderedFrames(delta);
+}
+
c2_status_t GraphicBufferAllocator::allocate(
uint32_t width, uint32_t height, ::android::PixelFormat format, uint64_t usage,
AHardwareBuffer **buf, ::android::sp<::android::Fence> *fence) {
@@ -119,6 +123,10 @@
return mGraphicsTracker->render(block, input, output);
}
+void GraphicBufferAllocator::onRequestStop() {
+ mGraphicsTracker->onRequestStop();
+}
+
GraphicBufferAllocator::~GraphicBufferAllocator() {}
std::shared_ptr<GraphicBufferAllocator> GraphicBufferAllocator::CreateGraphicBufferAllocator(
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index bdfc409..ff356fc 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -32,6 +32,9 @@
static constexpr int kMaxDequeueMin = 1;
static constexpr int kMaxDequeueMax = ::android::BufferQueueDefs::NUM_BUFFER_SLOTS - 2;
+// A short delay to give the HAL time to receive the stop()/release() request.
+static constexpr int kAllocateDirectDelayUs = 16666;
+
c2_status_t retrieveAHardwareBufferId(const C2ConstGraphicBlock &blk, uint64_t *bid) {
std::shared_ptr<const _C2BlockPoolData> bpData = _C2BlockFactory::GetGraphicBlockPoolData(blk);
if (!bpData || bpData->getType() != _C2BlockPoolData::TYPE_AHWBUFFER) {
@@ -177,7 +180,7 @@
mMaxDequeueCommitted{maxDequeueCount},
mDequeueable{maxDequeueCount},
mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
- mInConfig{false}, mStopped{false} {
+ mInConfig{false}, mStopped{false}, mStopRequested{false}, mAllocAfterStopRequested{0} {
if (maxDequeueCount < kMaxDequeueMin) {
mMaxDequeue = kMaxDequeueMin;
mMaxDequeueCommitted = kMaxDequeueMin;
@@ -490,6 +493,18 @@
}
}
+void GraphicsTracker::onRequestStop() {
+ std::unique_lock<std::mutex> l(mLock);
+ if (mStopped) {
+ return;
+ }
+ if (mStopRequested) {
+ return;
+ }
+ mStopRequested = true;
+ writeIncDequeueableLocked(kMaxDequeueMax - 1);
+}
+
void GraphicsTracker::writeIncDequeueableLocked(int inc) {
CHECK(inc > 0 && inc < kMaxDequeueMax);
thread_local char buf[kMaxDequeueMax];
@@ -544,8 +559,7 @@
return C2_OK;
}
-c2_status_t GraphicsTracker::requestAllocate(std::shared_ptr<BufferCache> *cache) {
- std::lock_guard<std::mutex> l(mLock);
+c2_status_t GraphicsTracker::requestAllocateLocked(std::shared_ptr<BufferCache> *cache) {
if (mDequeueable > 0) {
char buf[1];
int ret = ::read(mReadPipeFd.get(), buf, 1);
@@ -728,6 +742,34 @@
return C2_OK;
}
+c2_status_t GraphicsTracker::_allocateDirect(
+ uint32_t width, uint32_t height, PixelFormat format, uint64_t usage,
+ AHardwareBuffer **buf, sp<Fence> *rFence) {
+ AHardwareBuffer_Desc desc;
+ desc.width = width;
+ desc.height = height;
+ desc.layers = 1u;
+ desc.format = ::android::AHardwareBuffer_convertFromPixelFormat(format);
+ desc.usage = ::android::AHardwareBuffer_convertFromGrallocUsageBits(usage);
+ desc.rfu0 = 0;
+ desc.rfu1 = 0;
+
+ int res = AHardwareBuffer_allocate(&desc, buf);
+ if (res != ::android::OK) {
+ ALOGE("_allocateDirect() failed(%d)", res);
+ if (res == ::android::NO_MEMORY) {
+ return C2_NO_MEMORY;
+ } else {
+ return C2_CORRUPTED;
+ }
+ }
+
+ int alloced = mAllocAfterStopRequested++;
+ *rFence = Fence::NO_FENCE;
+ ALOGD("_allocateDirect() allocated %d buffer", alloced);
+ return C2_OK;
+}
+
c2_status_t GraphicsTracker::allocate(
uint32_t width, uint32_t height, PixelFormat format, uint64_t usage,
AHardwareBuffer **buf, sp<Fence> *rFence) {
@@ -735,10 +777,21 @@
ALOGE("cannot allocate due to being stopped");
return C2_BAD_STATE;
}
+ c2_status_t res = C2_OK;
std::shared_ptr<BufferCache> cache;
- c2_status_t res = requestAllocate(&cache);
- if (res != C2_OK) {
- return res;
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ if (mStopRequested) {
+ l.unlock();
+ res = _allocateDirect(width, height, format, usage, buf, rFence);
+ // Delay a little bit for HAL to receive stop()/release() request.
+ ::usleep(kAllocateDirectDelayUs);
+ return res;
+ }
+ c2_status_t res = requestAllocateLocked(&cache);
+ if (res != C2_OK) {
+ return res;
+ }
}
ALOGV("allocatable or dequeueable");
@@ -1003,6 +1056,19 @@
return C2_OK;
}
+void GraphicsTracker::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+ sp<IGraphicBufferProducer> igbp;
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ if (mBufferCache) {
+ igbp = mBufferCache->mIgbp;
+ }
+ }
+ if (igbp) {
+ igbp->getFrameTimestamps(delta);
+ }
+}
+
void GraphicsTracker::onReleased(uint32_t generation) {
bool updateDequeue = false;
{
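
Background for the onRequestStop() path added above (a minimal sketch, not the tracker's code): GraphicsTracker tracks dequeueable slots with a pipe, where writeIncDequeueableLocked() writes one byte per newly available slot and requestAllocateLocked() consumes one byte per claim; onRequestStop() writes kMaxDequeueMax - 1 bytes so that a caller waiting for a slot can proceed and hit the new direct-allocation path in allocate().

    // Minimal illustration of a pipe used as a counting semaphore (POSIX assumed).
    #include <unistd.h>
    #include <cstdio>

    int main() {
        int fds[2];
        if (pipe(fds) != 0) return 1;          // fds[0]: read end, fds[1]: write end
        const char slots[3] = {0, 0, 0};
        write(fds[1], slots, sizeof(slots));   // make three slots available
        for (int i = 0; i < 3; ++i) {
            char one;
            read(fds[0], &one, 1);             // claim one slot; blocks when none are left
            std::printf("claimed slot %d\n", i);
        }
        close(fds[0]);
        close(fds[1]);
        return 0;
    }
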
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 6348e42..17e5b62 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -20,6 +20,8 @@
#include <android-base/logging.h>
#include <utils/Trace.h>
+#include <android_media_codec.h>
+
#include <codec2/aidl/GraphicBufferAllocator.h>
#include <codec2/common/HalSelection.h>
#include <codec2/hidl/client.h>
@@ -55,7 +57,9 @@
#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
#include <android-base/properties.h>
+#include <android-base/scopeguard.h>
#include <android-base/stringprintf.h>
+#include <apex/ApexCodecs.h>
#include <bufferpool/ClientManager.h>
#include <bufferpool2/ClientManager.h>
#include <codec2/aidl/BufferTypes.h>
@@ -64,14 +68,14 @@
#include <codec2/hidl/1.1/types.h>
#include <codec2/hidl/1.2/types.h>
#include <codec2/hidl/output.h>
-
#include <cutils/native_handle.h>
#include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
#include <gui/bufferqueue/2.0/H2BGraphicBufferProducer.h>
#include <hardware/gralloc.h> // for GRALLOC_USAGE_*
#include <hidl/HidlSupport.h>
-#include <system/window.h> // for NATIVE_WINDOW_QUERY_*
#include <media/stagefright/foundation/ADebug.h> // for asString(status_t)
+#include <private/android/AHardwareBufferHelpers.h>
+#include <system/window.h> // for NATIVE_WINDOW_QUERY_*
#include <deque>
#include <iterator>
@@ -799,6 +803,386 @@
return status;
}
+// Codec2ConfigurableClient::ApexImpl
+
+struct Codec2ConfigurableClient::ApexImpl : public Codec2ConfigurableClient::ImplBase {
+ ApexImpl(ApexCodec_Configurable *base, const C2String &name);
+
+ const C2String& getName() const override {
+ return mName;
+ }
+
+ c2_status_t query(
+ const std::vector<C2Param*>& stackParams,
+ const std::vector<C2Param::Index> &heapParamIndices,
+ c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
+
+ c2_status_t config(
+ const std::vector<C2Param*> ¶ms,
+ c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2SettingResult>>* const failures) override;
+
+ c2_status_t querySupportedParams(
+ std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+ ) const override;
+
+ c2_status_t querySupportedValues(
+ std::vector<C2FieldSupportedValuesQuery>& fields,
+ c2_blocking_t mayBlock) const override;
+
+private:
+ ApexCodec_Configurable* mBase;
+ const C2String mName;
+};
+
+Codec2ConfigurableClient::ApexImpl::ApexImpl(ApexCodec_Configurable *base, const C2String &name)
+ : mBase{base},
+ mName{name} {
+}
+
+c2_status_t Codec2ConfigurableClient::ApexImpl::query(
+ const std::vector<C2Param*> &stackParams,
+ const std::vector<C2Param::Index> &heapParamIndices,
+ [[maybe_unused]] c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+ if (mBase == nullptr) {
+ return C2_OMITTED;
+ }
+
+ if (__builtin_available(android 36, *)) {
+ std::vector<uint32_t> indices(
+ stackParams.size() + heapParamIndices.size());
+ size_t numIndices = 0;
+ for (C2Param* const& stackParam : stackParams) {
+ if (!stackParam) {
+ LOG(WARNING) << "query -- null stack param encountered.";
+ continue;
+ }
+ indices[numIndices++] = uint32_t(stackParam->index());
+ }
+ size_t numStackIndices = numIndices;
+ for (const C2Param::Index& index : heapParamIndices) {
+ indices[numIndices++] = uint32_t(index);
+ }
+ indices.resize(numIndices);
+ if (heapParams) {
+ heapParams->reserve(heapParams->size() + numIndices);
+ }
+ if (numIndices == 0) {
+ return C2_OK;
+ }
+ thread_local std::vector<uint8_t> configBuffer(1024);
+ if (configBuffer.capacity() < numIndices * 16u) {
+ configBuffer.resize(numIndices * 16u);
+ }
+ ApexCodec_LinearBuffer config{configBuffer.data(), configBuffer.capacity()};
+ size_t writtenOrRequested = 0;
+ ApexCodec_Status status = ApexCodec_Configurable_query(
+ mBase, indices.data(), indices.size(), &config, &writtenOrRequested);
+ if (status == APEXCODEC_STATUS_NO_MEMORY) {
+ size_t requested = writtenOrRequested;
+ configBuffer.resize(align(requested, 1024));
+ config.data = configBuffer.data();
+ config.size = configBuffer.capacity();
+ status = ApexCodec_Configurable_query(
+ mBase, indices.data(), indices.size(), &config, &writtenOrRequested);
+ }
+ size_t written = writtenOrRequested;
+ if (status != APEXCODEC_STATUS_OK && status != APEXCODEC_STATUS_BAD_INDEX) {
+ written = 0;
+ }
+ configBuffer.resize(written);
+ std::vector<C2Param*> paramPointers;
+ if (!::android::parseParamsBlob(¶mPointers, configBuffer)) {
+ LOG(ERROR) << "query -- error while parsing params.";
+ return C2_CORRUPTED;
+ }
+ size_t i = 0;
+ size_t numQueried = 0;
+ for (auto it = paramPointers.begin(); it != paramPointers.end(); ) {
+ C2Param* paramPointer = *it;
+ if (numStackIndices > 0) {
+ --numStackIndices;
+ if (!paramPointer) {
+ LOG(DEBUG) << "query -- null stack param.";
+ ++it;
+ continue;
+ }
+ for (; i < stackParams.size() && !stackParams[i]; ) {
+ ++i;
+ }
+ if (i >= stackParams.size()) {
+ LOG(ERROR) << "query -- unexpected error.";
+ status = APEXCODEC_STATUS_CORRUPTED;
+ break;
+ }
+ if (stackParams[i]->index() != paramPointer->index()) {
+ LOG(DEBUG) << "query -- param skipped: "
+ "index = "
+ << stackParams[i]->index() << ".";
+ stackParams[i++]->invalidate();
+ // this means that the param could not be queried.
+ // signalling C2_BAD_INDEX to the client.
+ status = APEXCODEC_STATUS_BAD_INDEX;
+ continue;
+ }
+ if (stackParams[i++]->updateFrom(*paramPointer)) {
+ ++numQueried;
+ } else {
+ LOG(WARNING) << "query -- param update failed: "
+ "index = "
+ << paramPointer->index() << ".";
+ }
+ } else {
+ if (!paramPointer) {
+ LOG(DEBUG) << "query -- null heap param.";
+ ++it;
+ continue;
+ }
+ if (!heapParams) {
+ LOG(WARNING) << "query -- "
+ "unexpected extra stack param.";
+ } else {
+ heapParams->emplace_back(C2Param::Copy(*paramPointer));
+ ++numQueried;
+ }
+ }
+ ++it;
+ }
+ if (status == APEXCODEC_STATUS_OK && indices.size() != numQueried) {
+ status = APEXCODEC_STATUS_BAD_INDEX;
+ }
+ return (c2_status_t)status;
+ } else {
+ return C2_OMITTED;
+ }
+}
+
+namespace {
+struct ParamOrField : public C2ParamField {
+ explicit ParamOrField(const ApexCodec_ParamFieldValues& field)
+ : C2ParamField(field.index, field.offset, field.size) {}
+};
+
+static bool FromApex(
+ ApexCodec_SupportedValues *apexValues,
+ C2FieldSupportedValues* c2Values) {
+ if (__builtin_available(android 36, *)) {
+ if (apexValues == nullptr) {
+ c2Values->type = C2FieldSupportedValues::EMPTY;
+ return true;
+ }
+ ApexCodec_SupportedValuesType type = APEXCODEC_SUPPORTED_VALUES_EMPTY;
+ ApexCodec_SupportedValuesNumberType numberType = APEXCODEC_SUPPORTED_VALUES_TYPE_NONE;
+ ApexCodec_Value* values = nullptr;
+ uint32_t numValues = 0;
+ ApexCodec_SupportedValues_getTypeAndValues(
+ apexValues, &type, &numberType, &values, &numValues);
+ c2Values->type = (C2FieldSupportedValues::type_t)type;
+ std::function<C2Value::Primitive(const ApexCodec_Value &)> getPrimitive;
+ switch (numberType) {
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_NONE:
+ getPrimitive = [](const ApexCodec_Value &) -> C2Value::Primitive {
+ return C2Value::Primitive();
+ };
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_INT32:
+ getPrimitive = [](const ApexCodec_Value &value) -> C2Value::Primitive {
+ return C2Value::Primitive(value.i32);
+ };
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_UINT32:
+ getPrimitive = [](const ApexCodec_Value &value) -> C2Value::Primitive {
+ return C2Value::Primitive(value.u32);
+ };
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_INT64:
+ getPrimitive = [](const ApexCodec_Value &value) -> C2Value::Primitive {
+ return C2Value::Primitive(value.i64);
+ };
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_UINT64:
+ getPrimitive = [](const ApexCodec_Value &value) -> C2Value::Primitive {
+ return C2Value::Primitive(value.u64);
+ };
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_TYPE_FLOAT:
+ getPrimitive = [](const ApexCodec_Value &value) -> C2Value::Primitive {
+ return C2Value::Primitive(value.f);
+ };
+ break;
+ default:
+ LOG(ERROR) << "Unsupported number type: " << numberType;
+ return false;
+ }
+ switch (type) {
+ case APEXCODEC_SUPPORTED_VALUES_EMPTY:
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_RANGE:
+ c2Values->range.min = getPrimitive(values[0]);
+ c2Values->range.max = getPrimitive(values[1]);
+ c2Values->range.step = getPrimitive(values[2]);
+ c2Values->range.num = getPrimitive(values[3]);
+ c2Values->range.denom = getPrimitive(values[4]);
+ break;
+ case APEXCODEC_SUPPORTED_VALUES_VALUES:
+ case APEXCODEC_SUPPORTED_VALUES_FLAGS:
+ c2Values->values.clear();
+ for (uint32_t i = 0; i < numValues; ++i) {
+ c2Values->values.push_back(getPrimitive(values[i]));
+ }
+ break;
+ default:
+ LOG(ERROR) << "Unsupported supported values type: " << type;
+ return false;
+ }
+ return true;
+ } else {
+ return false;
+ }
+}
+
+} // anonymous namespace
+
+c2_status_t Codec2ConfigurableClient::ApexImpl::config(
+ const std::vector<C2Param*> ¶ms,
+ c2_blocking_t mayBlock,
+ std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+ (void)mayBlock;
+ if (mBase == nullptr) {
+ return C2_OMITTED;
+ }
+
+ if (__builtin_available(android 36, *)) {
+ std::vector<uint8_t> configBuffer;
+ if (!::android::_createParamsBlob(&configBuffer, params)) {
+ LOG(ERROR) << "config -- bad input.";
+ return C2_TRANSACTION_FAILED;
+ }
+ ApexCodec_SettingResults* result = nullptr;
+ ApexCodec_LinearBuffer config{configBuffer.data(), configBuffer.size()};
+ ApexCodec_Status status = ApexCodec_Configurable_config(
+ mBase, &config, &result);
+ base::ScopeGuard guard([result] {
+ if (result) {
+ ApexCodec_SettingResults_release(result);
+ }
+ });
+ size_t index = 0;
+ ApexCodec_SettingResultFailure failure;
+ ApexCodec_ParamFieldValues field;
+ ApexCodec_ParamFieldValues* conflicts = nullptr;
+ size_t numConflicts = 0;
+ ApexCodec_Status getResultStatus = ApexCodec_SettingResults_getResultAtIndex(
+ result, 0, &failure, &field, &conflicts, &numConflicts);
+ while (getResultStatus == APEXCODEC_STATUS_OK) {
+ std::unique_ptr<C2SettingResult> settingResult;
+ settingResult.reset(new C2SettingResult{
+ C2SettingResult::Failure(failure), C2ParamFieldValues(ParamOrField(field)), {}
+ });
+ // TODO: settingResult->field.values = ?
+ for (size_t i = 0; i < numConflicts; ++i) {
+ settingResult->conflicts.emplace_back(ParamOrField(conflicts[i]));
+ C2ParamFieldValues& conflict = settingResult->conflicts.back();
+ conflict.values = std::make_unique<C2FieldSupportedValues>();
+ FromApex(conflicts[i].values, conflict.values.get());
+ }
+ failures->push_back(std::move(settingResult));
+ getResultStatus = ApexCodec_SettingResults_getResultAtIndex(
+ result, ++index, &failure, &field, &conflicts, &numConflicts);
+ }
+ if (!::android::updateParamsFromBlob(params, configBuffer)) {
+ LOG(ERROR) << "config -- "
+ << "failed to parse returned params.";
+ status = APEXCODEC_STATUS_CORRUPTED;
+ }
+ return (c2_status_t)status;
+ } else {
+ return C2_OMITTED;
+ }
+}
+
+c2_status_t Codec2ConfigurableClient::ApexImpl::querySupportedParams(
+ std::vector<std::shared_ptr<C2ParamDescriptor>>* const params) const {
+ if (mBase == nullptr) {
+ return C2_OMITTED;
+ }
+
+ if (__builtin_available(android 36, *)) {
+ // TODO: Cache and query properly!
+ ApexCodec_ParamDescriptors* paramDescs = nullptr;
+ ApexCodec_Configurable_querySupportedParams(mBase, ¶mDescs);
+ base::ScopeGuard guard([paramDescs] {
+ if (paramDescs) {
+ ApexCodec_ParamDescriptors_release(paramDescs);
+ }
+ });
+ uint32_t *indices = nullptr;
+ size_t numIndices = 0;
+ ApexCodec_Status status = ApexCodec_ParamDescriptors_getIndices(
+ paramDescs, &indices, &numIndices);
+ if (status != APEXCODEC_STATUS_OK) {
+ return (c2_status_t)status;
+ }
+ if (numIndices > 0) {
+ for (size_t i = 0; i < numIndices; ++i) {
+ uint32_t index = indices[i];
+ ApexCodec_ParamAttribute attr = (ApexCodec_ParamAttribute)0;
+ const char* name = nullptr;
+ uint32_t* dependencies = nullptr;
+ size_t numDependencies = 0;
+ ApexCodec_Status status = ApexCodec_ParamDescriptors_getDescriptor(
+ paramDescs, index, &attr, &name, &dependencies, &numDependencies);
+ if (status != APEXCODEC_STATUS_OK) {
+ LOG(WARNING) << "querySupportedParams -- "
+ << "failed to get descriptor for index "
+ << std::hex << index << std::dec << " with status " << status;
+ continue;
+ }
+ params->push_back(std::make_shared<C2ParamDescriptor>(
+ C2Param::Index(index), C2ParamDescriptor::attrib_t(attr), name,
+ std::vector<C2Param::Index>(dependencies, dependencies + numDependencies)));
+ }
+ }
+ return (c2_status_t)status;
+ } else {
+ return C2_OMITTED;
+ }
+}
+
+c2_status_t Codec2ConfigurableClient::ApexImpl::querySupportedValues(
+ std::vector<C2FieldSupportedValuesQuery>& fields,
+ [[maybe_unused]] c2_blocking_t mayBlock) const {
+ if (mBase == nullptr) {
+ return C2_OMITTED;
+ }
+
+ if (__builtin_available(android 36, *)) {
+ std::vector<ApexCodec_SupportedValuesQuery> queries(fields.size());
+ for (size_t i = 0; i < fields.size(); ++i) {
+ queries[i].index = _C2ParamInspector::GetIndex(fields[i].field());
+ queries[i].offset = _C2ParamInspector::GetOffset(fields[i].field());
+ queries[i].type = (ApexCodec_SupportedValuesQueryType)fields[i].type();
+ queries[i].status = APEXCODEC_STATUS_OK;
+ queries[i].values = nullptr;
+ }
+ ApexCodec_Status status = ApexCodec_Configurable_querySupportedValues(
+ mBase, queries.data(), queries.size());
+ for (size_t i = 0; i < fields.size(); ++i) {
+ fields[i].status = (c2_status_t)queries[i].status;
+ FromApex(queries[i].values, &fields[i].values);
+ if (queries[i].values) {
+ ApexCodec_SupportedValues_release(queries[i].values);
+ queries[i].values = nullptr;
+ }
+ }
+ return (c2_status_t)status;
+ } else {
+ return C2_OMITTED;
+ }
+}
+
// Codec2ConfigurableClient
Codec2ConfigurableClient::Codec2ConfigurableClient(const sp<HidlBase> &hidlBase)
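
A note on the guard used throughout ApexImpl above (illustrative sketch, assuming the Codec2 core headers): __builtin_available(android 36, *) is Clang's runtime availability check, so each method only touches the ApexCodec_* APIs on platform releases that ship them and otherwise reports C2_OMITTED, for example:

    // Sketch of the availability-guard pattern used by the ApexImpl methods.
    #include <C2.h>  // for c2_status_t, C2_OK, C2_OMITTED

    c2_status_t callApexApiIfAvailable() {
        if (__builtin_available(android 36, *)) {
            // ApexCodec_* calls introduced alongside API level 36 are safe here.
            return C2_OK;
        }
        return C2_OMITTED;  // older platform: behave as if the feature is absent
    }
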
@@ -810,6 +1194,11 @@
: mImpl(new Codec2ConfigurableClient::AidlImpl(aidlBase)) {
}
+Codec2ConfigurableClient::Codec2ConfigurableClient(
+ ApexCodec_Configurable *apexBase, const C2String &name)
+ : mImpl(new Codec2ConfigurableClient::ApexImpl(apexBase, name)) {
+}
+
const C2String& Codec2ConfigurableClient::getName() const {
return mImpl->getName();
}
@@ -1035,6 +1424,393 @@
};
+// Codec2Client::Component::ApexHandler
+class Codec2Client::Component::ApexHandler {
+public:
+ ApexHandler(ApexCodec_Component *apexComponent,
+ const std::shared_ptr<Listener> &listener,
+ const std::shared_ptr<Component> &comp)
+ : mApexComponent(apexComponent),
+ mListener(listener),
+ mComponent(comp),
+ mStopped(false),
+ mOutputBufferType(APEXCODEC_BUFFER_TYPE_INVALID) {
+ }
+
+ void start() {
+ std::shared_ptr<Component> comp = mComponent.lock();
+ if (!comp) {
+ LOG(ERROR) << "ApexHandler::start -- component died.";
+ return;
+ }
+ C2ComponentDomainSetting domain;
+ C2ComponentKindSetting kind;
+ c2_status_t status = comp->query({&domain, &kind}, {}, C2_MAY_BLOCK, {});
+ if (status != C2_OK) {
+ LOG(ERROR) << "ApexHandler::start -- failed to query component domain and kind";
+ return;
+ }
+ if (kind.value != C2Component::KIND_DECODER
+ && kind.value != C2Component::KIND_ENCODER) {
+ LOG(ERROR) << "ApexHandler::start -- unrecognized component kind " << kind.value;
+ return;
+ }
+ ApexCodec_BufferType outputBufferType = APEXCODEC_BUFFER_TYPE_INVALID;
+ if (domain.value == C2Component::DOMAIN_AUDIO) {
+ // For both encoders and decoders the output buffer type is linear.
+ outputBufferType = APEXCODEC_BUFFER_TYPE_LINEAR;
+ } else if (domain.value == C2Component::DOMAIN_VIDEO
+ || domain.value == C2Component::DOMAIN_IMAGE) {
+ // For video / image domain the decoder outputs a graphic buffer, and the encoder
+ // outputs a linear buffer.
+ outputBufferType = (kind.value == C2Component::KIND_DECODER)
+ ? APEXCODEC_BUFFER_TYPE_GRAPHIC : APEXCODEC_BUFFER_TYPE_LINEAR;
+ } else {
+ LOG(ERROR) << "ApexHandler::start -- unrecognized component domain " << domain.value;
+ return;
+ }
+ {
+ std::unique_lock<std::mutex> l(mMutex);
+ mStopped = false;
+ mOutputBufferType = outputBufferType;
+ }
+ mThread = std::thread([this]() {
+ run();
+ });
+ }
+
+ void queue(std::list<std::unique_ptr<C2Work>>& workItems) {
+ std::unique_lock<std::mutex> l(mMutex);
+ mWorkQueue.splice(mWorkQueue.end(), workItems);
+ mCondition.notify_all();
+ }
+
+ void stop() {
+ std::unique_lock<std::mutex> l(mMutex);
+ mStopped = true;
+ mCondition.notify_all();
+ l.unlock();
+ mThread.join();
+ }
+
+private:
+ void run() {
+ while (true) {
+ std::unique_lock<std::mutex> l(mMutex);
+ mCondition.wait(l, [this]() {
+ return !mWorkQueue.empty() || mStopped;
+ });
+ if (mStopped) {
+ break;
+ }
+ if (mWorkQueue.empty()) {
+ continue;
+ }
+ std::list<std::unique_ptr<C2Work>> workItems;
+ mWorkQueue.swap(workItems);
+ for (std::unique_ptr<C2Work>& workItem : workItems) {
+ if (mStopped) {
+ break;
+ }
+ l.unlock();
+ handleWork(std::move(workItem));
+ l.lock();
+ }
+ }
+ mWorkQueue.clear();
+ mWorkMap.clear();
+ }
+
+ void handleWork(std::unique_ptr<C2Work> &&workItem) {
+ if (__builtin_available(android 36, *)) {
+ std::shared_ptr<Listener> listener = mListener.lock();
+ if (!listener) {
+ LOG(DEBUG) << "handleWork -- listener died.";
+ return;
+ }
+ ApexCodec_Buffer input;
+ input.flags = (ApexCodec_BufferFlags)workItem->input.flags;
+ input.frameIndex = workItem->input.ordinal.frameIndex.peekll();
+ input.timestampUs = workItem->input.ordinal.timestamp.peekll();
+
+ if (workItem->input.buffers.size() > 1) {
+ LOG(ERROR) << "handleWork -- input buffer size is "
+ << workItem->input.buffers.size();
+ return;
+ }
+ std::shared_ptr<C2Buffer> buffer;
+ std::optional<C2ReadView> linearView;
+ if (!workItem->input.buffers.empty()) {
+ buffer = workItem->input.buffers[0];
+ }
+ if (!FillMemory(buffer, &input, &linearView)) {
+ LOG(ERROR) << "handleWork -- failed to map input";
+ return;
+ }
+
+ std::vector<uint8_t> configUpdatesVector;
+ if (!_createParamsBlob(&configUpdatesVector, workItem->input.configUpdate)) {
+ listener->onError(mComponent, C2_CORRUPTED);
+ return;
+ }
+ input.configUpdates.data = configUpdatesVector.data();
+ input.configUpdates.size = configUpdatesVector.size();
+ mWorkMap.insert_or_assign(
+ workItem->input.ordinal.frameIndex.peekll(), std::move(workItem));
+
+ std::list<std::unique_ptr<C2Work>> workItems;
+ bool inputDrained = false;
+ while (!inputDrained) {
+ ApexCodec_Buffer output;
+ std::shared_ptr<C2LinearBlock> linearBlock;
+ std::optional<C2WriteView> linearView;
+ std::shared_ptr<C2GraphicBlock> graphicBlock;
+ allocOutputBuffer(&output, &linearBlock, &linearView, &graphicBlock);
+ size_t consumed = 0;
+ size_t produced = 0;
+ ApexCodec_Status status = ApexCodec_Component_process(
+ mApexComponent, &input, &output, &consumed, &produced);
+ if (status == APEXCODEC_STATUS_NO_MEMORY) {
+ continue;
+ }
+ if (produced > 0) {
+ auto it = mWorkMap.find(output.frameIndex);
+ std::unique_ptr<C2Work> outputWorkItem;
+ if (it != mWorkMap.end()) {
+ if (output.flags & APEXCODEC_FLAG_INCOMPLETE) {
+ outputWorkItem = std::make_unique<C2Work>();
+ outputWorkItem->input.ordinal = it->second->input.ordinal;
+ outputWorkItem->input.flags = it->second->input.flags;
+ } else {
+ outputWorkItem = std::move(it->second);
+ mWorkMap.erase(it);
+ }
+ } else {
+ LOG(WARNING) << "handleWork -- no work item found for output frame index "
+ << output.frameIndex;
+ outputWorkItem = std::make_unique<C2Work>();
+ outputWorkItem->input.ordinal.frameIndex = output.frameIndex;
+ outputWorkItem->input.ordinal.timestamp = output.timestampUs;
+ }
+ outputWorkItem->worklets.emplace_back(new C2Worklet);
+ const std::unique_ptr<C2Worklet> &worklet = outputWorkItem->worklets.front();
+ if (worklet == nullptr) {
+ LOG(ERROR) << "handleWork -- output work item has null worklet";
+ return;
+ }
+ worklet->output.ordinal.frameIndex = output.frameIndex;
+ worklet->output.ordinal.timestamp = output.timestampUs;
+ // non-owning hidl_vec<> to wrap around the output config updates
+ hidl_vec<uint8_t> outputConfigUpdates;
+ outputConfigUpdates.setToExternal(
+ output.configUpdates.data, output.configUpdates.size);
+ std::vector<C2Param*> outputConfigUpdatePtrs;
+ parseParamsBlob(&outputConfigUpdatePtrs, outputConfigUpdates);
+ worklet->output.configUpdate.clear();
+ std::ranges::transform(
+ outputConfigUpdatePtrs,
+ std::back_inserter(worklet->output.configUpdate),
+ [](C2Param* param) { return C2Param::Copy(*param); });
+ worklet->output.flags = (C2FrameData::flags_t)output.flags;
+
+ workItems.push_back(std::move(outputWorkItem));
+ }
+
+ // determine whether the input buffer is drained
+ if (input.type == APEXCODEC_BUFFER_TYPE_LINEAR) {
+ if (input.memory.linear.size < consumed) {
+ LOG(WARNING) << "handleWork -- component consumed more bytes "
+ << "than the input buffer size";
+ inputDrained = true;
+ } else {
+ input.memory.linear.data += consumed;
+ input.memory.linear.size -= consumed;
+ }
+ } else if (input.type == APEXCODEC_BUFFER_TYPE_GRAPHIC) {
+ inputDrained = (consumed > 0);
+ }
+ }
+
+ if (!workItems.empty()) {
+ listener->onWorkDone(mComponent, workItems);
+ }
+ }
+ }
+
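+ // Queries the component's configured output block pool ID and creates or
+ // reuses the corresponding local C2BlockPool.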
+ bool ensureBlockPool() {
+ std::shared_ptr<Component> comp = mComponent.lock();
+ if (!comp) {
+ return false;
+ }
+ std::vector<std::unique_ptr<C2Param>> heapParams;
+ comp->query({}, {C2PortBlockPoolsTuning::output::PARAM_TYPE}, C2_MAY_BLOCK, &heapParams);
+ if (heapParams.size() != 1) {
+ return false;
+ }
+ const C2Param* param = heapParams[0].get();
+ if (param->type() != C2PortBlockPoolsTuning::output::PARAM_TYPE) {
+ return false;
+ }
+ const C2PortBlockPoolsTuning::output *blockPools =
+ static_cast<const C2PortBlockPoolsTuning::output *>(param);
+ if (blockPools->flexCount() == 0) {
+ return false;
+ }
+ C2BlockPool::local_id_t blockPoolId = blockPools->m.values[0];
+ if (mBlockPool && mBlockPool->getLocalId() == blockPoolId) {
+ // no need to update
+ return true;
+ }
+ return C2_OK == GetCodec2BlockPool(blockPoolId, nullptr, &mBlockPool);
+ }
+
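+ // Allocates an output buffer matching the component's current output
+ // configuration: a linear block sized by the max buffer size (1 MiB by
+ // default), or a graphic block wrapped as an AHardwareBuffer.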
+ void allocOutputBuffer(
+ ApexCodec_Buffer* output,
+ std::shared_ptr<C2LinearBlock> *linearBlock,
+ std::optional<C2WriteView> *linearView,
+ std::shared_ptr<C2GraphicBlock> *graphicBlock) {
+ if (mOutputBufferType == APEXCODEC_BUFFER_TYPE_LINEAR) {
+ if (!ensureBlockPool()) {
+ return;
+ }
+ {
+ std::shared_ptr<Component> comp = mComponent.lock();
+ if (!comp) {
+ return;
+ }
+ C2StreamMaxBufferSizeInfo::output maxBufferSize(0u /* stream */);
+ comp->query({&maxBufferSize}, {}, C2_MAY_BLOCK, {});
+ mLinearBlockCapacity = maxBufferSize ? maxBufferSize.value : 1024 * 1024;
+ }
+ output->type = APEXCODEC_BUFFER_TYPE_LINEAR;
+ c2_status_t status = mBlockPool->fetchLinearBlock(
+ mLinearBlockCapacity,
+ C2MemoryUsage(C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE),
+ linearBlock);
+ if (status != C2_OK || !(*linearBlock)) {
+ return;
+ }
+ linearView->emplace((*linearBlock)->map().get());
+ if ((*linearView)->error() != C2_OK) {
+ return;
+ }
+ output->memory.linear.data = (*linearView)->data();
+ output->memory.linear.size = (*linearView)->capacity();
+ } else if (mOutputBufferType == APEXCODEC_BUFFER_TYPE_GRAPHIC) {
+ if (!ensureBlockPool()) {
+ return;
+ }
+ {
+ std::shared_ptr<Component> comp = mComponent.lock();
+ if (!comp) {
+ return;
+ }
+ C2StreamMaxPictureSizeTuning::output maxPictureSize(0u /* stream */);
+ C2StreamPictureSizeInfo::output pictureSize(0u /* stream */);
+ C2StreamPixelFormatInfo::output pixelFormat(0u /* stream */);
+ comp->query({&maxPictureSize, &pictureSize, &pixelFormat}, {}, C2_MAY_BLOCK, {});
+ mWidth = maxPictureSize ? maxPictureSize.width : pictureSize.width;
+ mHeight = maxPictureSize ? maxPictureSize.height : pictureSize.height;
+ mFormat = pixelFormat ? pixelFormat.value : HAL_PIXEL_FORMAT_YCBCR_420_888;
+ }
+ output->type = APEXCODEC_BUFFER_TYPE_GRAPHIC;
+ c2_status_t status = mBlockPool->fetchGraphicBlock(
+ mWidth, mHeight, mFormat,
+ C2MemoryUsage(C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE),
+ graphicBlock);
+ if (status != C2_OK || !(*graphicBlock)) {
+ return;
+ }
+ const C2Handle *handle = (*graphicBlock)->handle();
+ uint32_t width, height, format, stride, igbp_slot, generation;
+ uint64_t usage, igbp_id;
+ _UnwrapNativeCodec2GrallocMetadata(
+ handle, &width, &height, &format, &usage, &stride, &generation,
+ &igbp_id, &igbp_slot);
+ native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(handle);
+ sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
+ grallocHandle, GraphicBuffer::CLONE_HANDLE,
+ width, height, format, 1, usage, stride);
+ native_handle_delete(grallocHandle);
+ AHardwareBuffer *hardwareBuffer =
+ AHardwareBuffer_from_GraphicBuffer(graphicBuffer.get());
+ AHardwareBuffer_acquire(hardwareBuffer);
+ output->memory.graphic = hardwareBuffer;
+ } else {
+ LOG(ERROR) << "allocOutputBuffer -- unsupported output buffer type: "
+ << mOutputBufferType;
+ return;
+ }
+ }
+
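+ // Maps a C2Buffer into an ApexCodec_Buffer: linear blocks are mapped into
+ // a read view; graphic blocks are converted to an AHardwareBuffer.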
+ static bool FillMemory(
+ const std::shared_ptr<C2Buffer>& buffer,
+ ApexCodec_Buffer* apexBuffer,
+ std::optional<C2ReadView>* linearView) {
+ if (buffer->data().type() == C2BufferData::LINEAR) {
+ apexBuffer->type = APEXCODEC_BUFFER_TYPE_LINEAR;
+ if (buffer->data().linearBlocks().empty()) {
+ apexBuffer->memory.linear.data = nullptr;
+ apexBuffer->memory.linear.size = 0;
+ return true;
+ } else if (buffer->data().linearBlocks().size() > 1) {
+ return false;
+ }
+ linearView->emplace(buffer->data().linearBlocks().front().map().get());
+ if ((*linearView)->error() != C2_OK) {
+ return false;
+ }
+ apexBuffer->memory.linear.data = const_cast<uint8_t*>((*linearView)->data());
+ apexBuffer->memory.linear.size = (*linearView)->capacity();
+ return true;
+ } else if (buffer->data().type() == C2BufferData::GRAPHIC) {
+ apexBuffer->type = APEXCODEC_BUFFER_TYPE_GRAPHIC;
+ if (buffer->data().graphicBlocks().empty()) {
+ apexBuffer->memory.graphic = nullptr;
+ return true;
+ } else if (buffer->data().graphicBlocks().size() > 1) {
+ return false;
+ }
+ const C2Handle *handle = buffer->data().graphicBlocks().front().handle();
+ uint32_t width, height, format, stride, igbp_slot, generation;
+ uint64_t usage, igbp_id;
+ _UnwrapNativeCodec2GrallocMetadata(
+ handle, &width, &height, &format, &usage, &stride, &generation,
+ &igbp_id, &igbp_slot);
+ native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(handle);
+ sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
+ grallocHandle, GraphicBuffer::CLONE_HANDLE,
+ width, height, format, 1, usage, stride);
+ native_handle_delete(grallocHandle);
+ AHardwareBuffer *hardwareBuffer =
+ AHardwareBuffer_from_GraphicBuffer(graphicBuffer.get());
+ AHardwareBuffer_acquire(hardwareBuffer);
+ apexBuffer->memory.graphic = hardwareBuffer;
+ return true;
+ }
+ return false;
+ }
+
+ ApexCodec_Component *mApexComponent;
+ std::weak_ptr<Listener> mListener;
+ std::weak_ptr<Component> mComponent;
+
+ std::thread mThread;
+ std::mutex mMutex;
+ std::condition_variable mCondition;
+ bool mStopped;
+ ApexCodec_BufferType mOutputBufferType;
+
+ size_t mLinearBlockCapacity;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint32_t mFormat;
+
+ std::shared_ptr<C2BlockPool> mBlockPool;
+ std::list<std::unique_ptr<C2Work>> mWorkQueue;
+ std::map<uint64_t, std::unique_ptr<C2Work>> mWorkMap;
+};
+
// Codec2Client::Component::HidlBufferPoolSender
struct Codec2Client::Component::HidlBufferPoolSender :
hardware::media::c2::V1_1::utils::DefaultBufferPoolSender {
@@ -1168,6 +1944,13 @@
}
}
+Codec2Client::Codec2Client(ApexCodec_ComponentStore *base,
+ size_t serviceIndex)
+ : Configurable{nullptr, "android.componentStore.apexCodecs"},
+ mApexBase{base},
+ mServiceIndex{serviceIndex} {
+}
+
sp<Codec2Client::HidlBase> const& Codec2Client::getHidlBase() const {
return mHidlBase1_0;
}
@@ -1196,36 +1979,71 @@
const C2String& name,
const std::shared_ptr<Codec2Client::Listener>& listener,
std::shared_ptr<Codec2Client::Component>* const component) {
- if (mAidlBase) {
- std::shared_ptr<Component::AidlListener> aidlListener =
- Component::AidlListener::make<Component::AidlListener>();
- aidlListener->base = listener;
- std::shared_ptr<c2_aidl::IComponent> aidlComponent;
- ::ndk::ScopedAStatus transStatus = mAidlBase->createComponent(
- name,
- aidlListener,
- bufferpool2_aidl::implementation::ClientManager::getInstance(),
- &aidlComponent);
- c2_status_t status = GetC2Status(transStatus, "createComponent");
- if (status != C2_OK) {
- return status;
- } else if (!aidlComponent) {
- LOG(ERROR) << "createComponent(" << name.c_str()
- << ") -- null component.";
- return C2_CORRUPTED;
- }
- *component = std::make_shared<Codec2Client::Component>(aidlComponent);
- status = (*component)->setDeathListener((*component), listener);
- if (status != C2_OK) {
- LOG(ERROR) << "createComponent(" << name.c_str()
- << ") -- failed to set up death listener: "
- << status << ".";
- }
- (*component)->mAidlBufferPoolSender->setReceiver(mAidlHostPoolManager);
- aidlListener->component = *component;
- return status;
+ if (mApexBase) {
+ return createComponent_apex(name, listener, component);
+ } else if (mAidlBase) {
+ return createComponent_aidl(name, listener, component);
+ } else {
+ return createComponent_hidl(name, listener, component);
}
+}
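+
+// Creates a component backed by the APEX codec module. Only available on
+// Android 36 and later, where the ApexCodec C API exists.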
+c2_status_t Codec2Client::createComponent_apex(
+ const C2String& name,
+ const std::shared_ptr<Codec2Client::Listener>& listener,
+ std::shared_ptr<Codec2Client::Component>* const component) {
+ if (__builtin_available(android 36, *)) {
+ ApexCodec_Component *apexComponent = nullptr;
+ ApexCodec_Status status = ApexCodec_Component_create(
+ mApexBase, name.c_str(), &apexComponent);
+ if (status != APEXCODEC_STATUS_OK) {
+ return (c2_status_t)status;
+ }
+ *component = std::make_shared<Codec2Client::Component>(apexComponent, name);
+ (*component)->initApexHandler(listener, *component);
+ return C2_OK;
+ } else {
+ return C2_OMITTED;
+ }
+}
+
+c2_status_t Codec2Client::createComponent_aidl(
+ const C2String& name,
+ const std::shared_ptr<Codec2Client::Listener>& listener,
+ std::shared_ptr<Codec2Client::Component>* const component) {
+ std::shared_ptr<Component::AidlListener> aidlListener =
+ Component::AidlListener::make<Component::AidlListener>();
+ aidlListener->base = listener;
+ std::shared_ptr<c2_aidl::IComponent> aidlComponent;
+ ::ndk::ScopedAStatus transStatus = mAidlBase->createComponent(
+ name,
+ aidlListener,
+ bufferpool2_aidl::implementation::ClientManager::getInstance(),
+ &aidlComponent);
+ c2_status_t status = GetC2Status(transStatus, "createComponent");
+ if (status != C2_OK) {
+ return status;
+ } else if (!aidlComponent) {
+ LOG(ERROR) << "createComponent(" << name.c_str()
+ << ") -- null component.";
+ return C2_CORRUPTED;
+ }
+ *component = std::make_shared<Codec2Client::Component>(aidlComponent);
+ status = (*component)->setDeathListener((*component), listener);
+ if (status != C2_OK) {
+ LOG(ERROR) << "createComponent(" << name.c_str()
+ << ") -- failed to set up death listener: "
+ << status << ".";
+ }
+ (*component)->mAidlBufferPoolSender->setReceiver(mAidlHostPoolManager);
+ aidlListener->component = *component;
+ return status;
+}
+
+c2_status_t Codec2Client::createComponent_hidl(
+ const C2String& name,
+ const std::shared_ptr<Codec2Client::Listener>& listener,
+ std::shared_ptr<Codec2Client::Component>* const component) {
c2_status_t status;
sp<Component::HidlListener> hidlListener = new Component::HidlListener{};
hidlListener->base = listener;
@@ -1593,6 +2411,13 @@
return a < b;
});
+ if (__builtin_available(android 36, *)) {
+ if (android::media::codec::provider_->in_process_sw_audio_codec_support()
+ && nullptr != ApexCodec_GetComponentStore()) {
+ names.push_back("__ApexCodecs__");
+ }
+ }
+
// Summarize to logcat.
if (names.empty()) {
LOG(INFO) << "No Codec2 services declared in the manifest.";
@@ -1649,7 +2474,13 @@
std::string const& name = GetServiceNames()[index];
LOG(VERBOSE) << "Creating a Codec2 client to service \"" << name << "\"";
- if (c2_aidl::utils::IsSelected()) {
+ if (name == "__ApexCodecs__") {
+ if (__builtin_available(android 36, *)) {
+ return std::make_shared<Codec2Client>(ApexCodec_GetComponentStore(), index);
+ } else {
+ LOG(FATAL) << "ApexCodecs not supported on Android versions older than 36";
+ }
+ } else if (c2_aidl::utils::IsSelected()) {
if (__builtin_available(android __ANDROID_API_S__, *)) {
std::string instanceName =
::android::base::StringPrintf("%s/%s", AidlBase::descriptor, name.c_str());
@@ -2054,16 +2885,41 @@
mGraphicBufferAllocators{std::make_unique<GraphicBufferAllocators>()} {
}
+Codec2Client::Component::Component(ApexCodec_Component *base, const C2String &name)
+ : Configurable{[base]() -> ApexCodec_Configurable * {
+ if (__builtin_available(android 36, *)) {
+ return ApexCodec_Component_getConfigurable(base);
+ } else {
+ return nullptr;
+ }
+ }(), name},
+ mApexBase{base} {
+}
+
Codec2Client::Component::~Component() {
if (mAidlDeathSeq) {
GetAidlDeathManager()->unlinkToDeath(*mAidlDeathSeq, mAidlBase);
}
+ if (mApexBase) {
+ if (__builtin_available(android 36, *)) {
+ ApexCodec_Component_destroy(mApexBase);
+ }
+ mApexBase = nullptr;
+ }
}
c2_status_t Codec2Client::Component::createBlockPool(
C2Allocator::id_t id,
C2BlockPool::local_id_t* blockPoolId,
std::shared_ptr<Codec2Client::Configurable>* configurable) {
+ if (mApexBase) {
+ std::shared_ptr<C2BlockPool> blockPool;
+ c2_status_t status = CreateCodec2BlockPool(id, nullptr, &blockPool);
+ if (status != C2_OK || !blockPool) {
+ return status != C2_OK ? status : C2_CORRUPTED;
+ }
+ *blockPoolId = blockPool->getLocalId();
+ *configurable = nullptr;
+ mBlockPools[*blockPoolId] = blockPool;
+ return C2_OK;
+ }
if (mAidlBase) {
c2_aidl::IComponent::BlockPool aidlBlockPool;
c2_status_t status = C2_OK;
@@ -2134,6 +2990,10 @@
c2_status_t Codec2Client::Component::destroyBlockPool(
C2BlockPool::local_id_t localId) {
+ if (mApexBase) {
+ mBlockPools.erase(localId);
+ return C2_OK;
+ }
if (mAidlBase) {
mGraphicBufferAllocators->remove(localId);
::ndk::ScopedAStatus transStatus = mAidlBase->destroyBlockPool(localId);
@@ -2150,7 +3010,10 @@
void Codec2Client::Component::handleOnWorkDone(
const std::list<std::unique_ptr<C2Work>> &workItems) {
- if (mAidlBase) {
+ if (mApexBase) {
+ // no-op
+ return;
+ } else if (mAidlBase) {
holdIgbaBlocks(workItems);
} else {
// Output bufferqueue-based blocks' lifetime management
@@ -2160,6 +3023,10 @@
c2_status_t Codec2Client::Component::queue(
std::list<std::unique_ptr<C2Work>>* const items) {
+ if (mApexBase) {
+ mApexHandler->queue(*items);
+ return C2_OK;
+ }
if (mAidlBase) {
c2_aidl::WorkBundle workBundle;
if (!c2_aidl::utils::ToAidl(&workBundle, *items, mAidlBufferPoolSender.get())) {
@@ -2191,6 +3058,13 @@
C2Component::flush_mode_t mode,
std::list<std::unique_ptr<C2Work>>* const flushedWork) {
(void)mode; // Flush mode isn't supported in HIDL/AIDL yet.
+ if (mApexBase) {
+ if (__builtin_available(android 36, *)) {
+ return (c2_status_t)ApexCodec_Component_flush(mApexBase);
+ } else {
+ return C2_OMITTED;
+ }
+ }
c2_status_t status = C2_OK;
if (mAidlBase) {
c2_aidl::WorkBundle workBundle;
@@ -2250,6 +3124,9 @@
}
c2_status_t Codec2Client::Component::drain(C2Component::drain_mode_t mode) {
+ if (mApexBase) {
+ return C2_OMITTED;
+ }
if (mAidlBase) {
::ndk::ScopedAStatus transStatus = mAidlBase->drain(
mode == C2Component::DRAIN_COMPONENT_WITH_EOS);
@@ -2270,6 +3147,10 @@
}
c2_status_t Codec2Client::Component::start() {
+ if (mApexBase) {
+ // no-op
+ return C2_OK;
+ }
if (mAidlBase) {
::ndk::ScopedAStatus transStatus = mAidlBase->start();
return GetC2Status(transStatus, "start");
@@ -2289,6 +3170,11 @@
c2_status_t Codec2Client::Component::stop() {
if (mAidlBase) {
+ std::shared_ptr<AidlGraphicBufferAllocator> gba =
+ mGraphicBufferAllocators->current();
+ if (gba) {
+ gba->onRequestStop();
+ }
::ndk::ScopedAStatus transStatus = mAidlBase->stop();
return GetC2Status(transStatus, "stop");
}
@@ -2306,6 +3192,13 @@
}
c2_status_t Codec2Client::Component::reset() {
+ if (mApexBase) {
+ if (__builtin_available(android 36, *)) {
+ return (c2_status_t)ApexCodec_Component_reset(mApexBase);
+ } else {
+ return C2_OMITTED;
+ }
+ }
if (mAidlBase) {
::ndk::ScopedAStatus transStatus = mAidlBase->reset();
return GetC2Status(transStatus, "reset");
@@ -2324,7 +3217,19 @@
}
c2_status_t Codec2Client::Component::release() {
+ if (mApexBase) {
+ if (__builtin_available(android 36, *)) {
+ return (c2_status_t)ApexCodec_Component_reset(mApexBase);
+ } else {
+ return C2_OMITTED;
+ }
+ }
if (mAidlBase) {
+ std::shared_ptr<AidlGraphicBufferAllocator> gba =
+ mGraphicBufferAllocators->current();
+ if (gba) {
+ gba->onRequestStop();
+ }
::ndk::ScopedAStatus transStatus = mAidlBase->release();
return GetC2Status(transStatus, "release");
}
@@ -2345,6 +3250,10 @@
uint32_t avSyncHwId,
native_handle_t** sidebandHandle) {
*sidebandHandle = nullptr;
+ if (mApexBase) {
+ // tunneling is not supported in APEX
+ return C2_OMITTED;
+ }
if (mAidlBase) {
::aidl::android::hardware::common::NativeHandle handle;
::ndk::ScopedAStatus transStatus = mAidlBase->configureVideoTunnel(avSyncHwId, &handle);
@@ -2508,7 +3417,11 @@
void Codec2Client::Component::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
if (mAidlBase) {
- // TODO b/311348680
+ std::shared_ptr<AidlGraphicBufferAllocator> gba =
+ mGraphicBufferAllocators->current();
+ if (gba) {
+ gba->pollForRenderedFrames(delta);
+ }
return;
}
mOutputBufferQueue->pollForRenderedFrames(delta);
@@ -2616,6 +3529,10 @@
c2_status_t Codec2Client::Component::connectToInputSurface(
const std::shared_ptr<InputSurface>& inputSurface,
std::shared_ptr<InputSurfaceConnection>* connection) {
+ if (mApexBase) {
+ // FIXME
+ return C2_OMITTED;
+ }
if (mAidlBase) {
// FIXME
return C2_OMITTED;
@@ -2644,6 +3561,10 @@
const sp<HGraphicBufferProducer1>& producer,
const sp<HGraphicBufferSource>& source,
std::shared_ptr<InputSurfaceConnection>* connection) {
+ if (mApexBase) {
+ LOG(WARNING) << "Connecting to OMX input surface is not supported for AIDL C2 HAL";
+ return C2_OMITTED;
+ }
if (mAidlBase) {
LOG(WARNING) << "Connecting to OMX input surface is not supported for AIDL C2 HAL";
return C2_OMITTED;
@@ -2669,6 +3590,10 @@
}
c2_status_t Codec2Client::Component::disconnectFromInputSurface() {
+ if (mApexBase) {
+ // FIXME
+ return C2_OMITTED;
+ }
if (mAidlBase) {
// FIXME
return C2_OMITTED;
@@ -2693,6 +3618,16 @@
return sManager;
}
+c2_status_t Codec2Client::Component::initApexHandler(
+ const std::shared_ptr<Listener> &listener,
+ const std::shared_ptr<Component> &comp) {
+ if (!mApexBase) {
+ return C2_BAD_STATE;
+ }
+ mApexHandler = std::make_unique<ApexHandler>(mApexBase, listener, comp);
+ return C2_OK;
+}
+
c2_status_t Codec2Client::Component::setDeathListener(
const std::shared_ptr<Component>& component,
const std::shared_ptr<Listener>& listener) {
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
index a797cb7..a70ffef 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
@@ -85,6 +85,11 @@
void onBufferAttached(uint32_t generation);
/**
+ * Retrieve frame event history from the current surface if any.
+ */
+ void pollForRenderedFrames(::android::FrameEventHistoryDelta* delta);
+
+ /**
* Allocates a buffer.
*
* @param width width of the requested buffer.
@@ -125,6 +130,11 @@
const ::android::IGraphicBufferProducer::QueueBufferInput& input,
::android::IGraphicBufferProducer::QueueBufferOutput *output);
+ /**
+ * Notifies that a stop()/release() request is in progress.
+ */
+ void onRequestStop();
+
~GraphicBufferAllocator();
/**
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index 9a4fa12..536caaa 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -35,6 +35,7 @@
using ::android::IGraphicBufferProducer;
using ::android::GraphicBuffer;
+using ::android::FrameEventHistoryDelta;
using ::android::Fence;
using ::android::PixelFormat;
using ::android::sp;
@@ -133,6 +134,11 @@
IGraphicBufferProducer::QueueBufferOutput *output);
/**
+ * Retrieve frame event history from the current surface if any.
+ */
+ void pollForRenderedFrames(FrameEventHistoryDelta* delta);
+
+ /**
* Notifies when a Buffer is ready to allocate from Graphics.
* If generation does not match to the current, notifications via the interface
* will be ignored. (In the case, the notifications are from one of the old surfaces
@@ -175,6 +181,14 @@
*/
void stop();
+ /**
+ * Notifies that a stop()/release() request to the HAL is in progress from
+ * the client. The class will never become active again after the request.
+ * Still, allocation requests from the HAL should be served until stop()
+ * is actually called.
+ */
+ void onRequestStop();
+
private:
struct BufferCache;
@@ -290,6 +304,10 @@
std::atomic<bool> mStopped;
+ bool mStopRequested;
+ std::atomic<int> mAllocAfterStopRequested;
+
private:
explicit GraphicsTracker(int maxDequeueCount);
@@ -304,7 +322,7 @@
const std::shared_ptr<BufferCache> &cache,
int maxDequeueCommitted);
- c2_status_t requestAllocate(std::shared_ptr<BufferCache> *cache);
+ c2_status_t requestAllocateLocked(std::shared_ptr<BufferCache> *cache);
c2_status_t requestDeallocate(uint64_t bid, const sp<Fence> &fence,
bool *completed, bool *updateDequeue,
std::shared_ptr<BufferCache> *cache, int *slotId,
@@ -334,6 +352,10 @@
bool *cached, int *rSlotId, sp<Fence> *rFence,
std::shared_ptr<BufferItem> *buffer);
+ c2_status_t _allocateDirect(
+ uint32_t width, uint32_t height, PixelFormat format, uint64_t usage,
+ AHardwareBuffer **buf, sp<Fence> *fence);
+
void writeIncDequeueableLocked(int inc);
void drainDequeueableLocked(int dec);
};
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 7923f04..35c87e0 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -112,6 +112,10 @@
struct IGraphicBufferSource;
} // namespace android::hardware::media::omx::V1_0
+struct ApexCodec_ComponentStore;
+struct ApexCodec_Component;
+struct ApexCodec_Configurable;
+
namespace android {
// This class is supposed to be called Codec2Client::Configurable, but forward
@@ -148,6 +152,7 @@
explicit Codec2ConfigurableClient(const sp<HidlBase> &hidlBase);
explicit Codec2ConfigurableClient(const std::shared_ptr<AidlBase> &aidlBase);
+ Codec2ConfigurableClient(ApexCodec_Configurable *base, const C2String &name);
const C2String& getName() const;
@@ -172,6 +177,7 @@
private:
struct HidlImpl;
struct AidlImpl;
+ struct ApexImpl;
const std::unique_ptr<ImplBase> mImpl;
};
@@ -282,12 +288,16 @@
std::shared_ptr<AidlBase> const& base,
std::shared_ptr<Codec2ConfigurableClient::AidlBase> const& configurable,
size_t serviceIndex);
+ Codec2Client(
+ ApexCodec_ComponentStore* base,
+ size_t serviceIndex);
protected:
sp<HidlBase1_0> mHidlBase1_0;
sp<HidlBase1_1> mHidlBase1_1;
sp<HidlBase1_2> mHidlBase1_2;
std::shared_ptr<AidlBase> mAidlBase;
+ ApexCodec_ComponentStore* mApexBase{nullptr};
// Finds the first store where the predicate returns C2_OK and returns the
// last predicate result. The predicate will be tried on all stores. The
@@ -325,6 +335,20 @@
std::vector<C2Component::Traits> _listComponents(bool* success) const;
class Cache;
+
+private:
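+ // Transport-specific backends for createComponent(); the public method
+ // dispatches to the APEX, AIDL, or HIDL variant depending on which base
+ // is present.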
+ c2_status_t createComponent_aidl(
+ C2String const& name,
+ std::shared_ptr<Listener> const& listener,
+ std::shared_ptr<Component>* const component);
+ c2_status_t createComponent_hidl(
+ C2String const& name,
+ std::shared_ptr<Listener> const& listener,
+ std::shared_ptr<Component>* const component);
+ c2_status_t createComponent_apex(
+ C2String const& name,
+ std::shared_ptr<Listener> const& listener,
+ std::shared_ptr<Component>* const component);
};
struct Codec2Client::Interface : public Codec2Client::Configurable {
@@ -508,11 +532,16 @@
c2_status_t disconnectFromInputSurface();
+ c2_status_t initApexHandler(
+ const std::shared_ptr<Listener> &listener,
+ const std::shared_ptr<Component> &comp);
+
// base cannot be null.
Component(const sp<HidlBase>& base);
Component(const sp<HidlBase1_1>& base);
Component(const sp<HidlBase1_2>& base);
Component(const std::shared_ptr<AidlBase>& base);
+ Component(ApexCodec_Component* base, const C2String& name);
~Component();
@@ -521,12 +550,16 @@
sp<HidlBase1_1> mHidlBase1_1;
sp<HidlBase1_2> mHidlBase1_2;
std::shared_ptr<AidlBase> mAidlBase;
+ ApexCodec_Component *mApexBase{nullptr};
struct HidlBufferPoolSender;
struct AidlBufferPoolSender;
std::unique_ptr<HidlBufferPoolSender> mHidlBufferPoolSender;
std::unique_ptr<AidlBufferPoolSender> mAidlBufferPoolSender;
+ class ApexHandler;
+ std::unique_ptr<ApexHandler> mApexHandler;
+
struct OutputBufferQueue;
std::unique_ptr<OutputBufferQueue> mOutputBufferQueue;
@@ -547,6 +580,11 @@
const std::shared_ptr<Listener>& listener);
sp<::android::hardware::hidl_death_recipient> mDeathRecipient;
+ // This is a map of block pools created for APEX components in the client.
+ // Note that the APEX codec API requires output buffers to be passed from the client,
+ // so the client creates and keeps track of the block pools here.
+ std::map<C2BlockPool::local_id_t, std::shared_ptr<C2BlockPool>> mBlockPools;
+
friend struct Codec2Client;
struct HidlListener;
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index ecf7436..fd55bf3 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1300,7 +1300,7 @@
if (isAudioPlaybackRateEqual(playbackRate, mPlaybackRate)) {
return NO_ERROR;
}
- if (isOffloadedOrDirect_l()) {
+ if (isAfTrackOffloadedOrDirect_l()) {
const status_t status = statusTFromBinderStatus(mAudioTrack->setPlaybackRateParameters(
VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(playbackRate))));
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index e3b79b2..359f3c1 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -310,8 +310,16 @@
ts = NULL;
break;
}
+
int32_t old = android_atomic_and(~CBLK_FUTEX_WAKE, &cblk->mFutex);
- if (!(old & CBLK_FUTEX_WAKE)) {
+
+ // Check inactive to prevent waiting if the track has been disabled due to underrun
+ // (or invalidated). The subsequent call to obtainBuffer will return NOT_ENOUGH_DATA
+ // (or DEAD_OBJECT) and restart (or restore) the track.
+ const int32_t current_flags = android_atomic_acquire_load(&cblk->mFlags);
+ const bool inactive = current_flags & (CBLK_INVALID | CBLK_DISABLED);
+
+ if (!(old & CBLK_FUTEX_WAKE) && !inactive) {
if (measure && !beforeIsValid) {
clock_gettime(CLOCK_MONOTONIC, &before);
beforeIsValid = true;
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index 9fdde49..658fc18b 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -184,7 +184,7 @@
status_t EffectHalAidl::process() {
State state = State::INIT;
if (mConversion->isBypassing() || !mEffect->getState(&state).isOk() ||
- state != State::PROCESSING) {
+ (state != State::PROCESSING && state != State::DRAINING)) {
ALOGI("%s skipping process because it's %s", mEffectName.c_str(),
mConversion->isBypassing()
? "bypassing"
diff --git a/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
index e8731ea..c11f908 100644
--- a/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
+++ b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
@@ -83,6 +83,7 @@
{Parameter::Id::visualizerTag, 1},
{Parameter::Id::volumeTag, 1},
{Parameter::Id::spatializerTag, 2},
+ {Parameter::Id::eraserTag, 3},
};
// Tags defined Parameter::Specific union.
static const std::unordered_map<Parameter::Specific::Tag, int /* version */>
@@ -104,6 +105,7 @@
{Parameter::Specific::visualizer, 1},
{Parameter::Specific::volume, 1},
{Parameter::Specific::spatializer, 2},
+ {Parameter::Specific::eraser, 3},
};
class MockFactory : public IFactory {
@@ -223,6 +225,7 @@
case Parameter::Id::virtualizerTag:
case Parameter::Id::visualizerTag:
case Parameter::Id::volumeTag:
+ case Parameter::Id::eraserTag:
FALLTHROUGH_INTENDED;
case Parameter::Id::spatializerTag: {
if (kParamIdEffectVersionMap.find(idTag) != kParamIdEffectVersionMap.end() &&
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index d5e3cf7..5574ea1 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -90,6 +90,23 @@
}
}
+RetCode BundleContext::setCommon(const Parameter::Common& common) {
+ RetCode ret = EffectContext::setCommon(common);
+ RETURN_VALUE_IF(ret != RetCode::SUCCESS, ret, " setCommonFailed");
+ if (mInstance) {
+ LVM_ControlParams_t params;
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
+ RETURN_VALUE_IF(RetCode::SUCCESS != applyCommonParameter(params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " applyCommonParameterFailed");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+ } else {
+ RETURN_VALUE_IF(RetCode::SUCCESS != init(), RetCode::ERROR_EFFECT_LIB_ERROR, " initFailed");
+ }
+ return RetCode::SUCCESS;
+}
+
RetCode BundleContext::enable() {
if (mEnabled) return RetCode::ERROR_ILLEGAL_PARAMETER;
// Bass boost or Virtualizer can be temporarily disabled if playing over device speaker due to
@@ -599,7 +616,7 @@
return ret;
}
-RetCode BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
+RetCode BundleContext::applyCommonParameter(LVM_ControlParams_t& params) const {
int outputChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
mCommon.output.base.channelMask);
auto outputChannelMaskConv = aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
@@ -621,6 +638,13 @@
params.SourceFormat = LVM_MULTICHANNEL;
}
+ return RetCode::SUCCESS;
+}
+
+RetCode BundleContext::initControlParameter(LVM_ControlParams_t& params) const {
+ RETURN_VALUE_IF(RetCode::SUCCESS != applyCommonParameter(params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " applyCommonParameterFailed");
+
/* General parameters */
params.OperatingMode = LVM_MODE_ON;
params.SpeakerType = LVM_HEADPHONES;
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index e5ab40d..96f63cd 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -35,6 +35,8 @@
void deInit();
lvm::BundleEffectType getBundleType() const { return mType; }
+ RetCode setCommon(const Parameter::Common& common) override;
+
RetCode enable() override;
RetCode enableOperatingMode();
RetCode disable() override;
@@ -133,6 +135,7 @@
bool isBandLevelIndexInRange(const std::vector<Equalizer::BandLevel>& bandLevels) const;
static LVM_EQNB_BandDef_t* getDefaultEqualizerBandDefs();
static LVM_HeadroomBandDef_t* getDefaultEqualizerHeadroomBanDefs();
+ RetCode applyCommonParameter(LVM_ControlParams_t& params) const;
};
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index e434a3d..225cfdd 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -98,24 +98,6 @@
// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
-static audio_format_t constexpr audioFormatFromEncoding(int32_t pcmEncoding) {
- switch (pcmEncoding) {
- case kAudioEncodingPcmFloat:
- return AUDIO_FORMAT_PCM_FLOAT;
- case kAudioEncodingPcm32bit:
- return AUDIO_FORMAT_PCM_32_BIT;
- case kAudioEncodingPcm24bitPacked:
- return AUDIO_FORMAT_PCM_24_BIT_PACKED;
- case kAudioEncodingPcm16bit:
- return AUDIO_FORMAT_PCM_16_BIT;
- case kAudioEncodingPcm8bit:
- return AUDIO_FORMAT_PCM_8_BIT; // TODO: do we want to support this?
- default:
- ALOGE("%s: Invalid encoding: %d", __func__, pcmEncoding);
- return AUDIO_FORMAT_INVALID;
- }
-}
-
NuPlayer::Renderer::Renderer(
const sp<MediaPlayerBase::AudioSink> &sink,
const sp<MediaClock> &mediaClock,
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 50eeb62..46606de 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -2422,6 +2422,24 @@
return;
}
+audio_format_t audioFormatFromEncoding(int32_t pcmEncoding) {
+ switch (pcmEncoding) {
+ case kAudioEncodingPcmFloat:
+ return AUDIO_FORMAT_PCM_FLOAT;
+ case kAudioEncodingPcm32bit:
+ return AUDIO_FORMAT_PCM_32_BIT;
+ case kAudioEncodingPcm24bitPacked:
+ return AUDIO_FORMAT_PCM_24_BIT_PACKED;
+ case kAudioEncodingPcm16bit:
+ return AUDIO_FORMAT_PCM_16_BIT;
+ case kAudioEncodingPcm8bit:
+ return AUDIO_FORMAT_PCM_8_BIT; // TODO: do we want to support this?
+ default:
+ ALOGE("%s: Invalid encoding: %d", __func__, pcmEncoding);
+ return AUDIO_FORMAT_INVALID;
+ }
+}
+
status_t getAudioOffloadInfo(const sp<MetaData>& meta, bool hasVideo,
bool isStreaming, audio_stream_type_t streamType, audio_offload_info_t *info)
{
@@ -2441,6 +2459,12 @@
ALOGV("Mime type \"%s\" mapped to audio_format %d", mime, info->format);
}
+ int32_t pcmEncoding;
+ if (meta->findInt32(kKeyPcmEncoding, &pcmEncoding)) {
+ info->format = audioFormatFromEncoding(pcmEncoding);
+ ALOGV("audio_format use kKeyPcmEncoding value %d first", info->format);
+ }
+
if (AUDIO_FORMAT_INVALID == info->format) {
// can't offload if we don't know what the source format is
ALOGE("mime type \"%s\" not a known audio format", mime);
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index 72a2551..2fb2d59 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -101,6 +101,7 @@
<Limit name="bitrate" range="1-240000000"/>
<Limit name="block-size" value="16x16" />
<Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec"/>
</MediaCodec>
</Decoders>
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 20c97dc..c79ac5c 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -262,6 +262,7 @@
<Limit name="block-size" value="16x16" />
<Limit name="block-count" range="1-32768" /> <!-- max 4096x2048 equivalent -->
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec"/>
</MediaCodec>
</Decoders>
diff --git a/media/libstagefright/include/media/stagefright/Utils.h b/media/libstagefright/include/media/stagefright/Utils.h
index 1673120..e190374 100644
--- a/media/libstagefright/include/media/stagefright/Utils.h
+++ b/media/libstagefright/include/media/stagefright/Utils.h
@@ -44,6 +44,9 @@
// Convert a MIME type to a AudioSystem::audio_format
status_t mapMimeToAudioFormat(audio_format_t& format, const char* mime);
+// Convert a PCM encoding to an AudioSystem::audio_format
+audio_format_t audioFormatFromEncoding(int32_t pcmEncoding);
+
// Convert a aac profile to a AudioSystem::audio_format
void mapAACProfileToAudioFormat(audio_format_t& format, uint64_t eAacProfile);
diff --git a/media/module/libapexcodecs/Android.bp b/media/module/libapexcodecs/Android.bp
index 790b749..dbda81b 100644
--- a/media/module/libapexcodecs/Android.bp
+++ b/media/module/libapexcodecs/Android.bp
@@ -40,6 +40,15 @@
}
+cc_library_headers {
+ name: "libapexcodecs-header",
+ visibility: [
+ "//frameworks/av/apex:__subpackages__",
+ "//frameworks/av/media/codec2/hal/client",
+ ],
+ export_include_dirs: ["include"],
+}
+
cc_library {
name: "libapexcodecs-testing",
defaults: ["libapexcodecs-defaults"],
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index b250a03..e7fc106 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -93,6 +93,8 @@
srcs: [
"NdkJavaVMHelper.cpp",
"NdkMediaCodec.cpp",
+ "NdkMediaCodecInfo.cpp",
+ "NdkMediaCodecStore.cpp",
"NdkMediaCrypto.cpp",
"NdkMediaDataSource.cpp",
"NdkMediaExtractor.cpp",
@@ -131,6 +133,8 @@
"libbase",
"libdatasource",
"libmedia",
+ "libmedia_codeclist",
+ "libmedia_codeclist_capabilities",
"libmediadrm",
"libmedia_omx",
"libmedia_jni_utils",
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 7b19ac0..7797841 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -655,6 +655,28 @@
}
}
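+
+// Applies new AHARDWAREBUFFER_USAGE_* flags by updating the consumer usage
+// bits on the underlying buffer consumer.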
+media_status_t
+AImageReader::setUsage(uint64_t usage) {
+ Mutex::Autolock _l(mLock);
+ if (!mIsOpen || mBufferItemConsumer == nullptr) {
+ ALOGE("not ready to perform setUsage()");
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ if (mUsage == usage) {
+ return AMEDIA_OK;
+ }
+
+ uint64_t halUsage = AHardwareBuffer_convertToGrallocUsageBits(usage);
+ status_t ret = mBufferItemConsumer->setConsumerUsageBits(halUsage);
+ if (ret != OK) {
+ ALOGE("setConsumerUsageBits() failed %d", ret);
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+ mUsage = usage;
+ mHalUsage = halUsage;
+ return AMEDIA_OK;
+}
+
static
media_status_t validateParameters(int32_t width, int32_t height, int32_t format,
uint64_t usage, int32_t maxImages,
@@ -912,3 +934,14 @@
reader->setBufferRemovedListener(listener);
return AMEDIA_OK;
}
+
+EXPORT
+media_status_t AImageReader_setUsage(
+ AImageReader *reader, uint64_t usage) {
+ ALOGV("%s", __FUNCTION__);
+ if (reader == nullptr) {
+ ALOGE("%s: invalid argument! reader %p", __FUNCTION__, reader);
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ return reader->setUsage(usage);
+}
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 0199616..1c50d83 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -20,6 +20,7 @@
#include <inttypes.h>
#include <media/NdkImageReader.h>
+#include <media-vndk/VndkImageReader.h>
#include <utils/List.h>
#include <utils/Mutex.h>
@@ -67,6 +68,7 @@
media_status_t setImageListener(AImageReader_ImageListener* listener);
media_status_t setBufferRemovedListener(AImageReader_BufferRemovedListener* listener);
+ media_status_t setUsage(uint64_t usage);
media_status_t acquireNextImage(/*out*/AImage** image, /*out*/int* fenceFd);
media_status_t acquireLatestImage(/*out*/AImage** image, /*out*/int* fenceFd);
@@ -120,7 +122,7 @@
const int32_t mWidth;
const int32_t mHeight;
int32_t mFormat;
- const uint64_t mUsage; // AHARDWAREBUFFER_USAGE_* flags.
+ uint64_t mUsage; // AHARDWAREBUFFER_USAGE_* flags.
const int32_t mMaxImages;
// TODO(jwcai) Seems completely unused in AImageReader class.
diff --git a/media/ndk/NdkMediaCodecInfo.cpp b/media/ndk/NdkMediaCodecInfo.cpp
new file mode 100644
index 0000000..82ceb61
--- /dev/null
+++ b/media/ndk/NdkMediaCodecInfo.cpp
@@ -0,0 +1,520 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaCodecInfo"
+
+#include "NdkMediaCodecInfoPriv.h"
+
+#include <media/NdkMediaFormatPriv.h>
+
+using namespace android;
+
+extern "C" {
+
+// Utils
+
+EXPORT
+void AIntRange_delete(AIntRange *range) {
+ free(range);
+}
+
+EXPORT
+void ADoubleRange_delete(ADoubleRange *range) {
+ free(range);
+}
+
+// AMediaCodecInfo
+
+EXPORT
+const char* AMediaCodecInfo_getCanonicalName(const AMediaCodecInfo *info) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return nullptr;
+ }
+
+ return info->mInfo->getCodecName();
+}
+
+EXPORT
+bool AMediaCodecInfo_isEncoder(const AMediaCodecInfo *info) {
+ return info->mInfo->isEncoder();
+}
+
+EXPORT
+bool AMediaCodecInfo_isVendor(const AMediaCodecInfo *info) {
+ int32_t attributes = info->mInfo->getAttributes();
+ return (attributes & android::MediaCodecInfo::kFlagIsVendor);
+}
+
+EXPORT
+AMediaCodecType AMediaCodecInfo_getMediaCodecInfoType(const AMediaCodecInfo *info) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return (AMediaCodecType)0;
+ }
+
+ int32_t attributes = info->mInfo->getAttributes();
+
+ if (attributes & android::MediaCodecInfo::kFlagIsSoftwareOnly) {
+ return SOFTWARE_ONLY;
+ }
+ if (attributes & android::MediaCodecInfo::kFlagIsHardwareAccelerated) {
+ return HARDWARE_ACCELERATED;
+ }
+ return SOFTWARE_WITH_DEVICE_ACCESS;
+}
+
+EXPORT
+const char* AMediaCodecInfo_getMediaType(const AMediaCodecInfo *info) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return nullptr;
+ }
+
+ return info->mMediaType.c_str();
+}
+
+EXPORT
+int32_t AMediaCodecInfo_getMaxSupportedInstances(const AMediaCodecInfo *info) {
+ if (info == nullptr) {
+ return -1;
+ }
+
+ return info->mCodecCaps->getMaxSupportedInstances();
+}
+
+EXPORT
+int32_t AMediaCodecInfo_isFeatureSupported(const AMediaCodecInfo *info, const char *featureName) {
+ if (featureName == nullptr) {
+ return -1;
+ }
+ return info->mCodecCaps->isFeatureSupported(std::string(featureName));
+}
+
+EXPORT
+int32_t AMediaCodecInfo_isFeatureRequired(const AMediaCodecInfo *info, const char *featureName) {
+ if (featureName == nullptr) {
+ return -1;
+ }
+ return info->mCodecCaps->isFeatureRequired(std::string(featureName));
+}
+
+EXPORT
+int32_t AMediaCodecInfo_isFormatSupported(const AMediaCodecInfo *info, const AMediaFormat *format) {
+ if (format == nullptr) {
+ return -1;
+ }
+
+ sp<AMessage> nativeFormat;
+ AMediaFormat_getFormat(format, &nativeFormat);
+
+ return info->mCodecCaps->isFormatSupported(nativeFormat);
+}
+
+EXPORT
+media_status_t AMediaCodecInfo_getAudioCapabilities(const AMediaCodecInfo *info,
+ const ACodecAudioCapabilities **outAudioCaps) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outAudioCaps = info->mAAudioCaps.get();
+
+ if ((*outAudioCaps) == nullptr) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecInfo_getVideoCapabilities(const AMediaCodecInfo *info,
+ const ACodecVideoCapabilities **outVideoCaps) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outVideoCaps = info->mAVideoCaps.get();
+
+ if ((*outVideoCaps) == nullptr) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecInfo_getEncoderCapabilities(const AMediaCodecInfo *info,
+ const ACodecEncoderCapabilities **outEncoderCaps) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outEncoderCaps = info->mAEncoderCaps.get();
+
+ if ((*outEncoderCaps) == nullptr) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ return AMEDIA_OK;
+}
+
+// ACodecAudioCapabilities
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getBitrateRange(const ACodecAudioCapabilities *audioCaps,
+ AIntRange *outRange) {
+ if (audioCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& bitrateRange = audioCaps->mAudioCaps->getBitrateRange();
+ outRange->mLower = bitrateRange.lower();
+ outRange->mUpper = bitrateRange.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getSupportedSampleRates(
+ const ACodecAudioCapabilities *audioCaps, const int **outArrayPtr, size_t *outCount) {
+ if (audioCaps == nullptr || outArrayPtr == nullptr || outCount == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (audioCaps->mSampleRates.empty()) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ *outArrayPtr = audioCaps->mSampleRates.data();
+ *outCount = audioCaps->mSampleRates.size();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getSupportedSampleRateRanges(
+ const ACodecAudioCapabilities *audioCaps, const AIntRange **outArrayPtr, size_t *outCount) {
+ if (audioCaps == nullptr || outArrayPtr == nullptr || outCount == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outArrayPtr = audioCaps->mSampleRateRanges.data();
+ *outCount = audioCaps->mSampleRateRanges.size();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecAudioCapabilities_getMaxInputChannelCount(const ACodecAudioCapabilities *audioCaps) {
+ if (audioCaps == nullptr) {
+ return -1;
+ }
+ return audioCaps->mAudioCaps->getMaxInputChannelCount();
+}
+
+EXPORT
+int32_t ACodecAudioCapabilities_getMinInputChannelCount(const ACodecAudioCapabilities *audioCaps) {
+ if (audioCaps == nullptr) {
+ return -1;
+ }
+ return audioCaps->mAudioCaps->getMinInputChannelCount();
+}
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getInputChannelCountRanges(
+ const ACodecAudioCapabilities *audioCaps, const AIntRange **outArrayPtr, size_t *outCount) {
+ if (audioCaps == nullptr || outArrayPtr == nullptr || outCount == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outArrayPtr = audioCaps->mInputChannelCountRanges.data();
+ *outCount = audioCaps->mInputChannelCountRanges.size();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecAudioCapabilities_isSampleRateSupported(const ACodecAudioCapabilities *audioCaps,
+ int32_t sampleRate) {
+ if (audioCaps == nullptr) {
+ return -1;
+ }
+ return audioCaps->mAudioCaps->isSampleRateSupported(sampleRate);
+}
+
+// ACodecPerformancePoint
+
+EXPORT
+ACodecPerformancePoint* ACodecPerformancePoint_create(int32_t width, int32_t height,
+ int32_t frameRate) {
+ return new ACodecPerformancePoint(
+ std::make_shared<VideoCapabilities::PerformancePoint>(width, height, frameRate));
+}
+
+EXPORT
+media_status_t ACodecPerformancePoint_delete(ACodecPerformancePoint *performancePoint) {
+ if (performancePoint == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ delete performancePoint;
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+bool ACodecPerformancePoint_coversFormat(const ACodecPerformancePoint *performancePoint,
+ const AMediaFormat *format) {
+ sp<AMessage> nativeFormat;
+ AMediaFormat_getFormat(format, &nativeFormat);
+
+ return performancePoint->mPerformancePoint->covers(nativeFormat);
+}
+
+EXPORT
+bool ACodecPerformancePoint_covers(const ACodecPerformancePoint *one,
+ const ACodecPerformancePoint *another) {
+ return one->mPerformancePoint->covers(*(another->mPerformancePoint));
+}
+
+EXPORT
+bool ACodecPerformancePoint_equals(const ACodecPerformancePoint *one,
+ const ACodecPerformancePoint *another) {
+ return one->mPerformancePoint->equals(*(another->mPerformancePoint));
+}
+
+// ACodecVideoCapabilities
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getBitrateRange(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& bitrateRange = videoCaps->mVideoCaps->getBitrateRange();
+ outRange->mLower = bitrateRange.lower();
+ outRange->mUpper = bitrateRange.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedWidths(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& supportedWidths = videoCaps->mVideoCaps->getSupportedWidths();
+ outRange->mLower = supportedWidths.lower();
+ outRange->mUpper = supportedWidths.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedHeights(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& supportedHeights = videoCaps->mVideoCaps->getSupportedHeights();
+ outRange->mLower = supportedHeights.lower();
+ outRange->mUpper = supportedHeights.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_getWidthAlignment(const ACodecVideoCapabilities *videoCaps) {
+ if (videoCaps == nullptr) {
+ return -1;
+ }
+ return videoCaps->mVideoCaps->getWidthAlignment();
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_getHeightAlignment(const ACodecVideoCapabilities *videoCaps) {
+ if (videoCaps == nullptr) {
+ return -1;
+ }
+ return videoCaps->mVideoCaps->getHeightAlignment();
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedFrameRates(
+ const ACodecVideoCapabilities *videoCaps, AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& frameRateRange = videoCaps->mVideoCaps->getSupportedFrameRates();
+ outRange->mLower = frameRateRange.lower();
+ outRange->mUpper = frameRateRange.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedWidthsFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t height, AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ std::optional<Range<int32_t>> widthRange = videoCaps->mVideoCaps->getSupportedWidthsFor(height);
+ if (!widthRange) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ outRange->mLower = widthRange.value().lower();
+ outRange->mUpper = widthRange.value().upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedHeightsFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width, AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ std::optional<Range<int32_t>> heightRange
+ = videoCaps->mVideoCaps->getSupportedHeightsFor(width);
+ if (!heightRange) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ outRange->mLower = heightRange.value().lower();
+ outRange->mUpper = heightRange.value().upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedFrameRatesFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+ ADoubleRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ std::optional<Range<double>> frameRates
+ = videoCaps->mVideoCaps->getSupportedFrameRatesFor(width, height);
+ if (!frameRates) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ outRange->mLower = frameRates.value().lower();
+ outRange->mUpper = frameRates.value().upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getAchievableFrameRatesFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+ ADoubleRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ std::optional<Range<double>> frameRates
+ = videoCaps->mVideoCaps->getAchievableFrameRatesFor(width, height);
+ if (!frameRates) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ outRange->mLower = frameRates.value().lower();
+ outRange->mUpper = frameRates.value().upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedPerformancePoints(
+ const ACodecVideoCapabilities *videoCaps,
+ const ACodecPerformancePoint **outPerformancePointArray, size_t *outCount) {
+ if (videoCaps == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outPerformancePointArray = videoCaps->mPerformancePoints.data();
+ *outCount = videoCaps->mPerformancePoints.size();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_areSizeAndRateSupported(const ACodecVideoCapabilities *videoCaps,
+ int32_t width, int32_t height, double frameRate) {
+ if (videoCaps == nullptr) {
+ return -1;
+ }
+ return videoCaps->mVideoCaps->areSizeAndRateSupported(width, height, frameRate);
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_isSizeSupported(const ACodecVideoCapabilities *videoCaps,
+ int32_t width, int32_t height) {
+ if (videoCaps == nullptr) {
+ return -1;
+ }
+ return videoCaps->mVideoCaps->isSizeSupported(width, height);
+}
+
+// ACodecEncoderCapabilities
+
+EXPORT
+media_status_t ACodecEncoderCapabilities_getQualityRange(
+ const ACodecEncoderCapabilities *encoderCaps, AIntRange *outRange) {
+ if (encoderCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& qualityRange = encoderCaps->mEncoderCaps->getQualityRange();
+ outRange->mLower = qualityRange.lower();
+ outRange->mUpper = qualityRange.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecEncoderCapabilities_getComplexityRange(
+ const ACodecEncoderCapabilities *encoderCaps, AIntRange *outRange) {
+ if (encoderCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& complexityRange = encoderCaps->mEncoderCaps->getComplexityRange();
+ outRange->mLower = complexityRange.lower();
+ outRange->mUpper = complexityRange.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecEncoderCapabilities_isBitrateModeSupported(
+ const ACodecEncoderCapabilities *encoderCaps, ABiterateMode mode) {
+ if (encoderCaps == nullptr) {
+ return -1;
+ }
+ return encoderCaps->mEncoderCaps->isBitrateModeSupported(mode);
+}
+
+
+}
\ No newline at end of file
diff --git a/media/ndk/NdkMediaCodecInfoPriv.h b/media/ndk/NdkMediaCodecInfoPriv.h
new file mode 100644
index 0000000..6d9188b
--- /dev/null
+++ b/media/ndk/NdkMediaCodecInfoPriv.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _NDK_MEDIA_CODEC_INFO_PRIV_H
+#define _NDK_MEDIA_CODEC_INFO_PRIV_H
+
+#include <media/MediaCodecInfo.h>
+#include <media/NdkMediaCodecInfo.h>
+
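+// These structs back the opaque NDK capability handles. Each wraps the
+// corresponding framework capabilities object; some also cache ranges and
+// lists in NDK types so the C getters can return stable array pointers.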
+struct ACodecAudioCapabilities {
+ std::shared_ptr<android::AudioCapabilities> mAudioCaps;
+
+ std::vector<int> mSampleRates;
+ std::vector<AIntRange> mSampleRateRanges;
+ std::vector<AIntRange> mInputChannelCountRanges;
+
+ void initSampleRates() {
+ mSampleRates = mAudioCaps->getSupportedSampleRates();
+ }
+
+ void initSampleRateRanges() {
+ const std::vector<android::Range<int>>& sampleRateRanges
+ = mAudioCaps->getSupportedSampleRateRanges();
+ for (auto it = sampleRateRanges.begin(); it != sampleRateRanges.end(); it++) {
+ mSampleRateRanges.emplace_back(it->lower(), it->upper());
+ }
+ }
+
+ void initInputChannelCountRanges() {
+ const std::vector<android::Range<int>>& inputChannels
+ = mAudioCaps->getInputChannelCountRanges();
+ for (auto it = inputChannels.begin(); it != inputChannels.end(); it++) {
+ mInputChannelCountRanges.emplace_back(it->lower(), it->upper());
+ }
+ }
+
+ ACodecAudioCapabilities(std::shared_ptr<android::AudioCapabilities> audioCaps)
+ : mAudioCaps(audioCaps) {
+ initSampleRates();
+ initSampleRateRanges();
+ initInputChannelCountRanges();
+ }
+};
+
+struct ACodecPerformancePoint {
+ std::shared_ptr<const android::VideoCapabilities::PerformancePoint> mPerformancePoint;
+
+ ACodecPerformancePoint(std::shared_ptr<const android::VideoCapabilities::PerformancePoint>
+ performancePoint) : mPerformancePoint(performancePoint) {}
+};
+
+struct ACodecVideoCapabilities {
+ std::shared_ptr<android::VideoCapabilities> mVideoCaps;
+
+ std::vector<ACodecPerformancePoint> mPerformancePoints;
+
+ void initPerformancePoints() {
+ const std::vector<android::VideoCapabilities::PerformancePoint>& performancePoints
+ = mVideoCaps->getSupportedPerformancePoints();
+        for (auto it = performancePoints.begin(); it != performancePoints.end(); it++) {
+            // Alias each element to mVideoCaps: the points are owned by the
+            // VideoCapabilities object, so the shared_ptr must not delete them itself.
+            mPerformancePoints.emplace_back(
+                    std::shared_ptr<const android::VideoCapabilities::PerformancePoint>(
+                            mVideoCaps, &(*it)));
+        }
+ }
+
+ ACodecVideoCapabilities(std::shared_ptr<android::VideoCapabilities> videoCaps)
+ : mVideoCaps(videoCaps) {
+ initPerformancePoints();
+ }
+};
+
+struct ACodecEncoderCapabilities {
+ std::shared_ptr<android::EncoderCapabilities> mEncoderCaps;
+
+ ACodecEncoderCapabilities(std::shared_ptr<android::EncoderCapabilities> encoderCaps)
+ : mEncoderCaps(encoderCaps) {}
+};
+
+struct AMediaCodecInfo {
+ std::string mName;
+ android::sp<android::MediaCodecInfo> mInfo;
+ std::string mMediaType;
+ std::shared_ptr<android::CodecCapabilities> mCodecCaps;
+
+ std::shared_ptr<const ACodecAudioCapabilities> mAAudioCaps;
+ std::shared_ptr<const ACodecVideoCapabilities> mAVideoCaps;
+ std::shared_ptr<const ACodecEncoderCapabilities> mAEncoderCaps;
+
+ AMediaCodecInfo(std::string name, android::sp<android::MediaCodecInfo> info,
+ std::shared_ptr<android::CodecCapabilities> codecCaps, std::string mediaType)
+ : mName(name), mInfo(info), mMediaType(mediaType), mCodecCaps(codecCaps) {
+ if (!mName.empty() && mInfo != nullptr && !mMediaType.empty() && mCodecCaps != nullptr) {
+ if (mCodecCaps->getAudioCapabilities() != nullptr) {
+ mAAudioCaps = std::make_shared<const ACodecAudioCapabilities>(
+ mCodecCaps->getAudioCapabilities());
+ }
+ if (mCodecCaps->getVideoCapabilities() != nullptr) {
+ mAVideoCaps = std::make_shared<const ACodecVideoCapabilities>(
+ mCodecCaps->getVideoCapabilities());
+ }
+ if (mCodecCaps->getEncoderCapabilities() != nullptr) {
+ mAEncoderCaps = std::make_shared<const ACodecEncoderCapabilities>(
+ mCodecCaps->getEncoderCapabilities());
+ }
+ }
+ }
+};
+
+#endif //_NDK_MEDIA_CODEC_INFO_PRIV_H
\ No newline at end of file
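
// Editorial sketch (not part of the change above): the std::shared_ptr aliasing
// constructor used in initPerformancePoints() shares ownership with the object that owns
// the element, so destroying the wrapper never deletes the element itself.
#include <memory>
#include <vector>

struct Owner {
    std::vector<int> elems{10, 20, 30};
};

int main() {
    auto owner = std::make_shared<Owner>();
    // Points at elems[1] while keeping the whole Owner alive; no separate delete runs.
    std::shared_ptr<const int> second(owner, &owner->elems[1]);
    owner.reset();  // The Owner stays alive because 'second' still shares ownership.
    return *second == 20 ? 0 : 1;
}
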
diff --git a/media/ndk/NdkMediaCodecStore.cpp b/media/ndk/NdkMediaCodecStore.cpp
new file mode 100644
index 0000000..d911593
--- /dev/null
+++ b/media/ndk/NdkMediaCodecStore.cpp
@@ -0,0 +1,254 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaCodecStore"
+
+#include "NdkMediaCodecInfoPriv.h"
+
+#include <media/NdkMediaCodecStore.h>
+#include <media/NdkMediaFormatPriv.h>
+
+#include <media/IMediaCodecList.h>
+
+#include <media/MediaCodecInfo.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaCodecList.h>
+
+using namespace android;
+
+static sp<IMediaCodecList> sCodecList;
+static std::vector<AMediaCodecSupportedMediaType> sMediaTypes;
+static std::vector<AMediaCodecInfo> sCodecInfos;
+
+static std::map<std::string, AMediaCodecInfo> sNameToInfoMap;
+static std::map<std::string, std::vector<AMediaCodecInfo>> sTypeToInfoList;
+
+static void initMediaTypes() {
+ if (sCodecList == nullptr) {
+ sCodecList = MediaCodecList::getInstance();
+ }
+
+ std::map<std::string, AMediaCodecSupportedMediaType> typesInfoMap;
+ std::vector<std::string> mediaTypes; // Keep the order of media types appearing in sCodecList.
+ for (size_t idx = 0; idx < sCodecList->countCodecs(); idx++) {
+ sp<MediaCodecInfo> codecInfo = sCodecList->getCodecInfo(idx);
+ if (codecInfo == nullptr) {
+ ALOGW("NULL MediaCodecInfo in MediaCodecList");
+ continue;
+ }
+ Vector<AString> codecMediaTypes;
+ codecInfo->getSupportedMediaTypes(&codecMediaTypes);
+ for (AString codecMediaType : codecMediaTypes) {
+ std::string mediaType = std::string(codecMediaType.c_str());
+
+ // Excludes special codecs from NDK
+ const std::shared_ptr<CodecCapabilities> codecCaps
+ = codecInfo->getCodecCapsFor(mediaType.c_str());
+ if (codecCaps->isFeatureSupported(FEATURE_SpecialCodec)) {
+ continue;
+ }
+
+ auto it = typesInfoMap.find(mediaType);
+ if (it == typesInfoMap.end()) {
+ AMediaCodecSupportedMediaType supportedType = { mediaType.c_str(), 0 };
+ it = typesInfoMap.emplace(mediaType, supportedType).first;
+ mediaTypes.push_back(mediaType);
+ }
+ uint32_t &mode = it->second.mMode;
+ mode |= (codecInfo->isEncoder() ? AMediaCodecSupportedMediaType::FLAG_ENCODER
+ : AMediaCodecSupportedMediaType::FLAG_DECODER);
+ }
+ }
+
+ // sMediaTypes keeps the order of media types appearing in sCodecList.
+ for (std::string &type : mediaTypes) {
+ sMediaTypes.push_back(typesInfoMap.find(type)->second);
+ }
+}
+
+static void initCodecInfoMap() {
+ if (sCodecList == nullptr) {
+ sCodecList = MediaCodecList::getInstance();
+ }
+
+ for (size_t idx = 0; idx < sCodecList->countCodecs(); idx++) {
+ sp<MediaCodecInfo> codecInfo = sCodecList->getCodecInfo(idx);
+ if (codecInfo == nullptr) {
+ ALOGW("NULL MediaCodecInfo in MediaCodecList");
+ continue;
+ }
+
+ Vector<AString> codecMediaTypes;
+ codecInfo->getSupportedMediaTypes(&codecMediaTypes);
+ bool useTypeSuffix = codecMediaTypes.size() > 1;
+ for (AString codecMediaType : codecMediaTypes) {
+ std::string mediaType = std::string(codecMediaType.c_str());
+
+ // Excludes special codecs from NDK
+ const std::shared_ptr<CodecCapabilities> codecCaps
+ = codecInfo->getCodecCapsFor(mediaType.c_str());
+ if (codecCaps->isFeatureSupported(FEATURE_SpecialCodec)) {
+ continue;
+ }
+
+ // get the type name after the slash. e.g. video/x.on2.vp8
+ size_t slashIx = mediaType.find_last_of('/');
+ if (slashIx == std::string::npos) {
+ slashIx = 0;
+ } else {
+ slashIx++;
+ }
+ std::string ndkBaseName = std::string(codecInfo->getCodecName());
+ if (useTypeSuffix) {
+ // If there are multiple supported media types,
+ // add the type to the end of the name to disambiguate names.
+ ndkBaseName += "." + mediaType.substr(slashIx);
+ }
+
+ int32_t copyIx = 0;
+ std::string ndkName;
+ // if a name is already registered,
+ // add ".1", ".2", ... at the end to disambiguate names.
+ while (true) {
+ ndkName = ndkBaseName;
+ if (copyIx > 0) {
+ ndkName += "." + std::to_string(copyIx);
+ }
+ if (!sNameToInfoMap.contains(ndkName)) {
+ break;
+ }
+ copyIx++;
+ }
+
+ AMediaCodecInfo info = AMediaCodecInfo(ndkName, codecInfo, codecCaps, mediaType);
+ sCodecInfos.push_back(info);
+ sNameToInfoMap.emplace(ndkName, info);
+
+ auto it = sTypeToInfoList.find(mediaType);
+ if (it == sTypeToInfoList.end()) {
+ std::vector<AMediaCodecInfo> infoList;
+ infoList.push_back(info);
+ sTypeToInfoList.emplace(mediaType, infoList);
+ } else {
+ it->second.push_back(info);
+ }
+ }
+ }
+}
+
+static bool codecHandlesFormat(const AMediaCodecInfo &codecInfo,
+                               const sp<AMessage> &format, bool isEncoder) {
+ return codecInfo.mCodecCaps->isEncoder() == isEncoder
+ && codecInfo.mCodecCaps->isFormatSupported(format);
+}
+
+static media_status_t findNextCodecForFormat(
+ const AMediaFormat *format, bool isEncoder, const AMediaCodecInfo **outCodecInfo) {
+ if (outCodecInfo == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (sCodecInfos.empty()) {
+ initCodecInfoMap();
+ }
+
+    // Borrow a pointer to one of the static lists; never take ownership of them.
+    const std::vector<AMediaCodecInfo> *infos = nullptr;
+ sp<AMessage> nativeFormat;
+ if (format == nullptr) {
+        infos = &sCodecInfos;
+ } else {
+ AMediaFormat_getFormat(format, &nativeFormat);
+ AString mime;
+ if (!nativeFormat->findString(KEY_MIME, &mime)) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ std::string mediaType = std::string(mime.c_str());
+ auto it = sTypeToInfoList.find(mediaType);
+ if (it == sTypeToInfoList.end()) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+        infos = &(it->second);
+ }
+
+ bool found = *outCodecInfo == nullptr;
+ for (const AMediaCodecInfo &info : *infos) {
+ if (found && (format == nullptr
+ || codecHandlesFormat(info, nativeFormat, isEncoder))) {
+ *outCodecInfo = &info;
+ return AMEDIA_OK;
+ }
+ if (*outCodecInfo == &info) {
+ found = true;
+ }
+    }
+ *outCodecInfo = nullptr;
+ return AMEDIA_ERROR_UNSUPPORTED;
+}
+
+extern "C" {
+
+EXPORT
+media_status_t AMediaCodecStore_getSupportedMediaTypes(
+ const AMediaCodecSupportedMediaType **outMediaTypes, size_t *outCount) {
+ if (outMediaTypes == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (sMediaTypes.empty()) {
+ initMediaTypes();
+ }
+
+ *outCount = sMediaTypes.size();
+ *outMediaTypes = sMediaTypes.data();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecStore_findNextDecoderForFormat(
+        const AMediaFormat *format, const AMediaCodecInfo **outCodecInfo) {
+ return findNextCodecForFormat(format, false, outCodecInfo);
+}
+
+EXPORT
+media_status_t AMediaCodecStore_findNextEncoderForFormat(
+        const AMediaFormat *format, const AMediaCodecInfo **outCodecInfo) {
+ return findNextCodecForFormat(format, true, outCodecInfo);
+}
+
+EXPORT
+media_status_t AMediaCodecStore_getCodecInfo(
+ const char *name, const AMediaCodecInfo **outCodecInfo) {
+ if (outCodecInfo == nullptr || name == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ auto it = sNameToInfoMap.find(std::string(name));
+ if (it == sNameToInfoMap.end()) {
+ *outCodecInfo = nullptr;
+ return AMEDIA_ERROR_UNSUPPORTED;
+ } else {
+ *outCodecInfo = &(it->second);
+ return AMEDIA_OK;
+ }
+}
+
+}
\ No newline at end of file
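
// Editorial sketch (not part of the change above): the iteration protocol implemented by
// findNextCodecForFormat(), seen from the client side. Start with *outCodecInfo == NULL and
// keep passing the last result back in; the store returns AMEDIA_ERROR_UNSUPPORTED and a
// NULL info when the list is exhausted. "video/hevc" is only an example mime type.
#include <media/NdkMediaCodecStore.h>
#include <media/NdkMediaFormat.h>

static void listHevcDecoders() {
    AMediaFormat *format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/hevc");

    const AMediaCodecInfo *info = nullptr;  // NULL starts the iteration.
    while (AMediaCodecStore_findNextDecoderForFormat(format, &info) == AMEDIA_OK
            && info != nullptr) {
        const char *name = AMediaCodecInfo_getCanonicalName(info);
        (void)name;  // e.g. log or collect the candidate decoder here.
    }
    AMediaFormat_delete(format);
}
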
diff --git a/media/ndk/include/media-vndk/VndkImageReader.h b/media/ndk/include/media-vndk/VndkImageReader.h
new file mode 100644
index 0000000..c67a38c
--- /dev/null
+++ b/media/ndk/include/media-vndk/VndkImageReader.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _VNDK_IMAGE_READER_H
+#define _VNDK_IMAGE_READER_H
+
+// vndk is a superset of the NDK
+#include <media/NdkImageReader.h>
+
+__BEGIN_DECLS
+
+/**
+ * Set the usage of this image reader.
+ *
+ * <p>Note that calling this method will replace the previously set usage.</p>
+ *
+ * <p>Note: This will trigger re-allocation, which could cause producer failures mid-stream
+ * if the new usage combination isn't supported, and thus should be avoided as much as
+ * possible.</p>
+ *
+ * Available since API level 36.
+ *
+ * @param reader The image reader of interest.
+ * @param usage specifies how the consumer will access the AImage.
+ * See {@link AImageReader_newWithUsage} parameter description for more details.
+ * @return <ul>
+ * <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ * <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL.</li>
+ * <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ *
+ * @see AImage_getHardwareBuffer
+ */
+media_status_t AImageReader_setUsage(
+ AImageReader* _Nonnull reader, uint64_t usage) __INTRODUCED_IN(36);
+
+__END_DECLS
+
+#endif //_VNDK_IMAGE_READER_H
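
// Editorial sketch (not part of the change above): creating a reader and later switching
// its usage with the new vendor-visible entry point. The size, format and usage flags are
// only illustrative.
#include <android/hardware_buffer.h>
#include <media-vndk/VndkImageReader.h>

static media_status_t makeAndRetargetReader(AImageReader **outReader) {
    media_status_t status = AImageReader_newWithUsage(
            1920, 1080, AIMAGE_FORMAT_YUV_420_888,
            AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, /*maxImages=*/4, outReader);
    if (status != AMEDIA_OK) {
        return status;
    }
    // Switching usage re-allocates buffers, so do it before streaming starts if possible.
    return AImageReader_setUsage(*outReader, AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE);
}
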
diff --git a/media/ndk/include/media/NdkMediaCodecInfo.h b/media/ndk/include/media/NdkMediaCodecInfo.h
new file mode 100644
index 0000000..558e82c
--- /dev/null
+++ b/media/ndk/include/media/NdkMediaCodecInfo.h
@@ -0,0 +1,625 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @addtogroup Media
+ * @{
+ */
+
+/**
+ * @file NdkMediaCodecInfo.h
+ */
+
+/*
+ * This file defines an NDK API.
+ * Do not remove methods.
+ * Do not change method signatures.
+ * Do not change the value of constants.
+ * Do not change the size of any of the classes defined in here.
+ * Do not reference types that are not part of the NDK.
+ * Do not #include files that aren't part of the NDK.
+ */
+
+#ifndef _NDK_MEDIA_CODEC_INFO_H
+#define _NDK_MEDIA_CODEC_INFO_H
+
+#include "NdkMediaError.h"
+#include "NdkMediaFormat.h"
+
+__BEGIN_DECLS
+
+struct ACodecAudioCapabilities;
+typedef struct ACodecAudioCapabilities ACodecAudioCapabilities;
+struct ACodecPerformancePoint;
+typedef struct ACodecPerformancePoint ACodecPerformancePoint;
+struct ACodecVideoCapabilities;
+typedef struct ACodecVideoCapabilities ACodecVideoCapabilities;
+struct ACodecEncoderCapabilities;
+typedef struct ACodecEncoderCapabilities ACodecEncoderCapabilities;
+struct AMediaCodecInfo;
+typedef struct AMediaCodecInfo AMediaCodecInfo;
+
+/**
+ * A utility structure describing the range of two integer values.
+ */
+typedef struct AIntRange {
+ int32_t mLower;
+ int32_t mUpper;
+} AIntRange;
+
+/**
+ * A utility structure describing the range of two double values.
+ */
+typedef struct ADoubleRange {
+ double mLower;
+ double mUpper;
+} ADoubleRange;
+
+// AMediaCodecInfo
+
+/**
+ * Get the canonical name of a codec.
+ *
+ * \return The char pointer to the canonical name.
+ * It is owned by the framework. No lifetime management needed for users.
+ *
+ * Return NULL if @param info is invalid.
+ */
+const char* AMediaCodecInfo_getCanonicalName(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Query if the codec is an encoder.
+ */
+bool AMediaCodecInfo_isEncoder(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Query if the codec is provided by the Android platform (false) or the device manufacturer (true).
+ */
+bool AMediaCodecInfo_isVendor(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * The type of codecs.
+ */
+typedef enum AMediaCodecType : int32_t {
+ /**
+     * Not a codec type. Used to indicate that an invalid operation occurred.
+ */
+ INVALID_CODEC_INFO = 0,
+
+ /**
+ * Software codec.
+ *
+ * Software-only codecs are more secure as they run in a tighter security sandbox.
+ * On the other hand, software-only codecs do not provide any performance guarantees.
+ */
+ SOFTWARE_ONLY = 1,
+
+ /**
+ * Hardware accelerated codec.
+ *
+ * Hardware codecs generally have higher performance or lower power consumption than
+ * software codecs, but since they are specific to each device,
+ * the actual performance details can vary.
+ */
+ HARDWARE_ACCELERATED = 2,
+
+ /**
+     * Software codec that has device access.
+     * Mainly refers to software codecs provided by vendors.
+ */
+ SOFTWARE_WITH_DEVICE_ACCESS = 3,
+} AMediaCodecType;
+
+/**
+ * Query if the codec is SOFTWARE_ONLY, HARDWARE_ACCELERATED or SOFTWARE_WITH_DEVICE_ACCESS.
+ *
+ * Return INVALID_CODEC_INFO if @param info is invalid.
+ */
+AMediaCodecType AMediaCodecInfo_getMediaCodecInfoType(
+ const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Get the supported media type of the codec.
+ *
+ * \return The char pointer to the media type.
+ * It is owned by the framework with infinite lifetime.
+ *
+ * Return NULL if @param info is invalid.
+ */
+const char* AMediaCodecInfo_getMediaType(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Returns the max number of the supported concurrent codec instances.
+ *
+ * This is a hint for an upper bound. Applications should not expect to successfully
+ * operate more instances than the returned value, but the actual number of
+ * concurrently operable instances may be less as it depends on the available
+ * resources at time of use.
+ *
+ * Return -1 if @param info is invalid.
+ */
+int32_t AMediaCodecInfo_getMaxSupportedInstances(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Query codec feature capabilities.
+ *
+ * These are the features supported by the codec. They include
+ * optional features that can be turned on, as well as features
+ * that are always on.
+ *
+ * Return 1 if the feature is supported;
+ * Return 0 if the feature is unsupported;
+ * Return -1 if @param featureName is invalid.
+ */
+int32_t AMediaCodecInfo_isFeatureSupported(const AMediaCodecInfo *info,
+ const char *featureName) __INTRODUCED_IN(36);
+
+/**
+ * Query codec feature requirements.
+ *
+ * These features are required to be used by the codec, and as such,
+ * they are always turned on.
+ *
+ * Return 1 if the feature is required;
+ * Return 0 if the feature is not required;
+ * Return -1 if @param featureName is invalid.
+ */
+int32_t AMediaCodecInfo_isFeatureRequired(const AMediaCodecInfo *info,
+ const char *featureName) __INTRODUCED_IN(36);
+
+/**
+ * Query whether codec supports a given @param format.
+ *
+ * Return 1 if the format is supported;
+ * Return 0 if the format is unsupported;
+ * Return -1 if @param format is invalid.
+ */
+int32_t AMediaCodecInfo_isFormatSupported(const AMediaCodecInfo *info,
+ const AMediaFormat *format) __INTRODUCED_IN(36);
+
+/**
+ * Get the ACodecAudioCapabilities from the given AMediaCodecInfo.
+ *
+ * @param outAudioCaps The pointer to the output ACodecAudioCapabilities.
+ * It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if successfully got the ACodecAudioCapabilities.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not an audio codec.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ */
+media_status_t AMediaCodecInfo_getAudioCapabilities(const AMediaCodecInfo *info,
+ const ACodecAudioCapabilities **outAudioCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get the ACodecVideoCapabilities from the given AMediaCodecInfo.
+ *
+ * @param outVideoCaps The pointer to the output ACodecVideoCapabilities.
+ * It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if successfully got the ACodecVideoCapabilities.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not a video codec.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ */
+media_status_t AMediaCodecInfo_getVideoCapabilities(const AMediaCodecInfo *info,
+ const ACodecVideoCapabilities **outVideoCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get the ACodecEncoderCapabilities from the given AMediaCodecInfo.
+ *
+ * @param outEncoderCaps The pointer to the output ACodecEncoderCapabilities.
+ * It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if successfully got the ACodecEncoderCapabilities.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not an encoder.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ */
+media_status_t AMediaCodecInfo_getEncoderCapabilities(const AMediaCodecInfo *info,
+ const ACodecEncoderCapabilities **outEncoderCaps) __INTRODUCED_IN(36);
+
+// ACodecAudioCapabilities
+
+/**
+ * Get the range of supported bitrates in bits/second.
+ *
+ * @param outRange The pointer to the range of supported bitrates.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got bitrates successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps and @param outRange is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getBitrateRange(const ACodecAudioCapabilities *audioCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the array of supported sample rates
+ *
+ * The array is sorted in ascending order.
+ *
+ * @param outArrayPtr The pointer to the output sample rates array.
+ * The array is owned by the framework and has an infinite lifetime.
+ * @param outCount The size of the output array.
+ *
+ * Return AMEDIA_OK if the codec supports only discrete sample rate values.
+ * Return AMEDIA_ERROR_UNSUPPORTED otherwise.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps, @param outArrayPtr
+ * and @param outCount is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getSupportedSampleRates(
+ const ACodecAudioCapabilities *audioCaps, const int **outArrayPtr,
+ size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Get the array of supported sample rate ranges.
+ *
+ * The array is sorted in ascending order, and the ranges are distinct (non-intersecting).
+ *
+ * @param outArrayPtr The pointer to the out sample rate ranges array.
+ * The array is owned by the framework and has an infinite lifetime.
+ * @param outCount The size of the out array.
+ *
+ * Return AMEDIA_OK if got the sample rate ranges successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps, @param outArrayPtr
+ * and @param outCount is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getSupportedSampleRateRanges(
+ const ACodecAudioCapabilities *audioCaps,
+ const AIntRange **outArrayPtr, size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Return the maximum number of input channels supported.
+ *
+ * Return -1 if @param audioCaps is invalid.
+ */
+int32_t ACodecAudioCapabilities_getMaxInputChannelCount(
+ const ACodecAudioCapabilities *audioCaps) __INTRODUCED_IN(36);
+
+/**
+ * Returns the minimum number of input channels supported.
+ * This is often 1, but does vary for certain mime types.
+ *
+ * Return -1 if @param audioCaps is invalid.
+ */
+int32_t ACodecAudioCapabilities_getMinInputChannelCount(
+ const ACodecAudioCapabilities *audioCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get an array of ranges representing the number of input channels supported.
+ * The codec supports any number of input channels within this range.
+ * For many codecs, this will be a single range [1..N], for some N.
+ *
+ * The array is sorted in ascending order, and the ranges are distinct (non-intersecting).
+ *
+ * @param outArrayPtr The pointer to the output array of input-channels ranges.
+ * The array is owned by the framework and has an infinite lifetime.
+ * @param outCount The size of the output array.
+ *
+ * Return AMEDIA_OK if got the input channel array successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param audioCaps is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getInputChannelCountRanges(
+ const ACodecAudioCapabilities *audioCaps,
+ const AIntRange **outArrayPtr, size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Query whether the sample rate is supported by the codec.
+ *
+ * Return 1 if the sample rate is supported.
+ * Return 0 if the sample rate is unsupported.
+ * Return -1 if @param audioCaps is invalid.
+ */
+int32_t ACodecAudioCapabilities_isSampleRateSupported(const ACodecAudioCapabilities *audioCaps,
+ int32_t sampleRate) __INTRODUCED_IN(36);
+
+// ACodecPerformancePoint
+
+/**
+ * Create a performance point for a given frame size and frame rate.
+ *
+ * Performance points are defined by number of pixels, pixel rate and frame rate.
+ *
+ * Users are responsible for calling
+ * ACodecPerformancePoint_delete(ACodecPerformancePoint *performancePoint) after use.
+ *
+ * @param width width of the frame in pixels
+ * @param height height of the frame in pixels
+ * @param frameRate frame rate in frames per second
+ */
+ACodecPerformancePoint* ACodecPerformancePoint_create(int32_t width, int32_t height,
+ int32_t frameRate) __INTRODUCED_IN(36);
+
+/**
+ * Delete a created performance point.
+ *
+ * Return AMEDIA_OK if it is successfully deleted.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param performancePoint is invalid.
+ */
+media_status_t ACodecPerformancePoint_delete(
+ ACodecPerformancePoint *performancePoint) __INTRODUCED_IN(36);
+
+/**
+ * Checks whether the performance point covers a media format.
+ *
+ * @param format Stream format considered.
+ * Return true if the performance point covers the format.
+ */
+bool ACodecPerformancePoint_coversFormat(const ACodecPerformancePoint *performancePoint,
+ const AMediaFormat *format) __INTRODUCED_IN(36);
+
+/**
+ * Checks whether a performance point covers another performance point.
+ *
+ * Use this method to determine if a performance point advertised by a codec covers the
+ * performance point required. This method can also be used for loose ordering as this
+ * method is transitive.
+ *
+ * A performance point represents an upper bound. This means that
+ * it covers all performance points with a lower pixel count, pixel rate and frame rate.
+ *
+ * Return true if @param one covers @param another.
+ */
+bool ACodecPerformancePoint_covers(const ACodecPerformancePoint *one,
+ const ACodecPerformancePoint *another) __INTRODUCED_IN(36);
+
+/**
+ * Checks whether two performance points are equal.
+ */
+bool ACodecPerformancePoint_equals(const ACodecPerformancePoint *one,
+ const ACodecPerformancePoint *another) __INTRODUCED_IN(36);
+
+// ACodecVideoCapabilities
+
+/**
+ * Get the range of supported bitrates in bits/second.
+ *
+ * @param outRange The pointer to the range of supported bitrates.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported bitrates successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getBitrateRange(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video widths.
+ *
+ * @param outRange The pointer to the range of supported widths.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video widths successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedWidths(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video heights.
+ *
+ * @param outRange The pointer to the range of supported heights.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video heights successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedHeights(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Return the alignment requirement for video width (in pixels).
+ *
+ * This is a power-of-2 value that video width must be a multiple of.
+ *
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_getWidthAlignment(
+ const ACodecVideoCapabilities *videoCaps) __INTRODUCED_IN(36);
+
+/**
+ * Return the alignment requirement for video height (in pixels).
+ *
+ * This is a power-of-2 value that video height must be a multiple of.
+ *
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_getHeightAlignment(
+ const ACodecVideoCapabilities *videoCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported frame rates.
+ *
+ * This is not a performance indicator. Rather, it expresses the limits specified in the coding
+ * standard, based on the complexities of encoding material for later playback at a certain
+ * frame rate, or the decoding of such material in non-realtime.
+ *
+ * @param outRange The pointer to the range of supported frame rates.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * \return AMEDIA_OK if got the frame rate range successfully.
+ * \return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedFrameRates(
+ const ACodecVideoCapabilities *videoCaps, AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video widths for a video height.
+ *
+ * @param outRange The pointer to the range of supported widths.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video width range successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the height query is not supported.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedWidthsFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t height,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video heights for a video width.
+ *
+ * @param outRange The pointer to the range of supported heights.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video height range successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the width query is not supported.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedHeightsFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video frame rates for a video size.
+ *
+ * This is not a performance indicator. Rather, it expresses the limits specified in the coding
+ * standard, based on the complexities of encoding material of a given size for later playback at
+ * a certain frame rate, or the decoding of such material in non-realtime.
+ *
+ * @param outRange The pointer to the range of frame rates.
+ * Users are responsible for allocating a valid ADoubleRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video frame rates successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the size query is not supported.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedFrameRatesFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+ ADoubleRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of achievable video frame rates for a video size.
+ *
+ * This is based on manufacturer's performance measurements for this device and codec.
+ * The measurements may not be available for all codecs or devices.
+ *
+ * @param outRange The pointer to the range of frame rates.
+ * Users are responsible for allocating a valid ADoubleRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the achievable video frame rates successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec did not publish any measurement data.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getAchievableFrameRatesFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+ ADoubleRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the supported performance points.
+ *
+ * @param outPerformancePointArray The pointer to the output performance points array.
+ * The array is owned by the framework and has an infinite
+ * lifetime.
+ * @param outCount The size of the output array.
+ *
+ * Return AMEDIA_OK if successfully got the performance points.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param videoCaps is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedPerformancePoints(
+ const ACodecVideoCapabilities *videoCaps,
+ const ACodecPerformancePoint **outPerformancePointArray,
+ size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Return whether a given video size and frameRate combination is supported.
+ *
+ * Return 1 if the size and rate are supported.
+ * Return 0 if they are not supported.
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_areSizeAndRateSupported(const ACodecVideoCapabilities *videoCaps,
+ int32_t width, int32_t height, double frameRate) __INTRODUCED_IN(36);
+
+/**
+ * Return whether a given video size is supported.
+ *
+ * Return 1 if the size is supported.
+ * Return 0 if the size is not supported.
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_isSizeSupported(const ACodecVideoCapabilities *videoCaps,
+ int32_t width, int32_t height) __INTRODUCED_IN(36);
+
+// ACodecEncoderCapabilities
+
+/**
+ * Get the supported range of quality values.
+ *
+ * Quality is implementation-specific. As a general rule, a higher quality
+ * setting results in a better image quality and a lower compression ratio.
+ *
+ * @param outRange The pointer to the range of quality values.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if successfully got the quality range.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param encoderCaps and @param outRange is invalid.
+ */
+media_status_t ACodecEncoderCapabilities_getQualityRange(
+ const ACodecEncoderCapabilities *encoderCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the supported range of encoder complexity values.
+ *
+ * Some codecs may support multiple complexity levels, where higher complexity values use more
+ * encoder tools (e.g. perform more intensive calculations) to improve the quality or the
+ * compression ratio. Use a lower value to save power and/or time.
+ *
+ * @param outRange The pointer to the range of encoder complexity values.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if successfully got the complexity range.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param encoderCaps and @param outRange is invalid.
+ */
+media_status_t ACodecEncoderCapabilities_getComplexityRange(
+ const ACodecEncoderCapabilities *encoderCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Encoder bitrate modes.
+ */
+typedef enum ABiterateMode : int32_t {
+ BITRATE_MODE_CQ = 0,
+ BITRATE_MODE_VBR = 1,
+ BITRATE_MODE_CBR = 2,
+ BITRATE_MODE_CBR_FD = 3
+} ABiterateMode;
+
+/**
+ * Query whether a bitrate mode is supported.
+ *
+ * Return 1 if the bitrate mode is supported.
+ * Return 0 if the bitrate mode is unsupported.
+ * Return -1 if @param encoderCaps is invalid.
+ */
+int32_t ACodecEncoderCapabilities_isBitrateModeSupported(
+ const ACodecEncoderCapabilities *encoderCaps, ABiterateMode mode) __INTRODUCED_IN(36);
+
+__END_DECLS
+
+#endif //_NDK_MEDIA_CODEC_INFO_H
+
+/** @} */
\ No newline at end of file
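
// Editorial sketch (not part of the change above): checking whether a codec advertises a
// performance point covering 1080p60, using the API declared in this header. The
// AMediaCodecInfo pointer is assumed to come from one of the AMediaCodecStore queries.
#include <cstddef>

#include <media/NdkMediaCodecInfo.h>

static bool supports1080p60(const AMediaCodecInfo *info) {
    const ACodecVideoCapabilities *videoCaps = nullptr;
    if (AMediaCodecInfo_getVideoCapabilities(info, &videoCaps) != AMEDIA_OK) {
        return false;
    }
    const ACodecPerformancePoint *points = nullptr;
    size_t count = 0;
    if (ACodecVideoCapabilities_getSupportedPerformancePoints(videoCaps, &points, &count)
            != AMEDIA_OK) {
        return false;
    }
    ACodecPerformancePoint *needed = ACodecPerformancePoint_create(1920, 1080, 60);
    bool covered = false;
    for (size_t i = 0; i < count; ++i) {
        // A published point covers the requested one if it is at least as capable.
        if (ACodecPerformancePoint_covers(&points[i], needed)) {
            covered = true;
            break;
        }
    }
    ACodecPerformancePoint_delete(needed);
    return covered;
}
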
diff --git a/media/ndk/include/media/NdkMediaCodecStore.h b/media/ndk/include/media/NdkMediaCodecStore.h
new file mode 100644
index 0000000..aab8689
--- /dev/null
+++ b/media/ndk/include/media/NdkMediaCodecStore.h
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @addtogroup Media
+ * @{
+ */
+
+/**
+ * @file NdkMediaCodecStore.h
+ */
+
+/*
+ * This file defines an NDK API.
+ * Do not remove methods.
+ * Do not change method signatures.
+ * Do not change the value of constants.
+ * Do not change the size of any of the classes defined in here.
+ * Do not reference types that are not part of the NDK.
+ * Do not #include files that aren't part of the NDK.
+ */
+
+#ifndef _NDK_MEDIA_CODEC_STORE_H
+#define _NDK_MEDIA_CODEC_STORE_H
+
+#include <stdint.h>
+
+#include "NdkMediaCodecInfo.h"
+#include "NdkMediaError.h"
+#include "NdkMediaFormat.h"
+
+__BEGIN_DECLS
+
+/**
+ * The media type definition with bitfields indicating whether it is
+ * supported by decoders, encoders, or both.
+ */
+typedef struct AMediaCodecSupportedMediaType {
+ enum Mode : uint32_t {
+ FLAG_DECODER = 1 << 0,
+ FLAG_ENCODER = 1 << 1,
+ };
+
+ // The media type.
+ const char *mMediaType;
+ // bitfields for modes.
+ uint32_t mMode;
+} AMediaCodecSupportedMediaType;
+
+/**
+ * Get an array of all the supported media types of a device.
+ *
+ * @param outMediaTypes The pointer to the output AMediaCodecSupportedMediaType array.
+ *                     It is owned by the framework and has an infinite lifetime.
+ *
+ * @param outCount size of the out array.
+ *
+ * Return AMEDIA_OK if successfully made the copy.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if the @param outMediaTypes is invalid.
+ */
+media_status_t AMediaCodecStore_getSupportedMediaTypes(
+ const AMediaCodecSupportedMediaType **outMediaTypes, size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Get the next decoder info that supports the format.
+ *
+ * @param outCodecInfo should be set to NULL to start the iteration.
+ *                     Pass back the last codecInfo you got from a previous call to get the next one.
+ *                     *outCodecInfo will be set to NULL when the end is reached.
+ *                     It is owned by the framework and has an infinite lifetime.
+ *
+ * @param format If set as NULL, this API will iterate through all available decoders.
+ *               If NOT NULL, it MUST contain the key "mime" specifying the media type.
+ *
+ * Return AMEDIA_OK if successfully got the info.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo or @param format is invalid.
+ * Return AMEDIA_ERROR_UNSUPPORTED if there is no more decoder supporting the format.
+ *
+ * It is undefined behavior to call this API with a NON NULL @param outCodecInfo
+ * and a different @param format during an iteration.
+ */
+media_status_t AMediaCodecStore_findNextDecoderForFormat(
+ const AMediaFormat *format, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+
+/**
+ * Get the next encoder info that supports the format.
+ *
+ * @param outCodecInfo should be set to NULL to start the iteration.
+ *                     Pass back the last codecInfo you got from a previous call to get the next one.
+ *                     *outCodecInfo will be set to NULL when the end is reached.
+ *                     It is owned by the framework and has an infinite lifetime.
+ *
+ * @param format If set as NULL, this API will iterate through all available encoders.
+ *               If NOT NULL, it MUST contain the key "mime" specifying the media type.
+ *
+ * Return AMEDIA_OK if successfully got the info.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo or @param format is invalid.
+ * Return AMEDIA_ERROR_UNSUPPORTED if there is no more encoder supporting the format.
+ *
+ * It is undefined behavior to call this API with a NON NULL @param outCodecInfo
+ * and a different @param format during an iteration.
+ *
+ * Secure encoders will not appear in the output.
+ */
+media_status_t AMediaCodecStore_findNextEncoderForFormat(
+ const AMediaFormat* format, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+
+/**
+ * Get the codecInfo corresponding to a given codec name.
+ *
+ * @param name Media codec name.
+ * Users can get valid codec names from the AMediaCodecInfo structures
+ * returned from findNextDecoder|EncoderForFormat methods.
+ *             Note that this name may not correspond to the name the same codec uses
+ *             in the SDK API, but it always does for codec names starting with "c2.".
+ *
+ * @param outCodecInfo Output parameter for the corresponding AMediaCodecInfo structure.
+ * It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if got the codecInfo successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if no corresponding codec found.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo or @param name is invalid.
+ */
+media_status_t AMediaCodecStore_getCodecInfo(
+ const char *name, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+
+__END_DECLS
+
+#endif //_NDK_MEDIA_CODEC_STORE_H
+
+/** @} */
\ No newline at end of file
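
// Editorial sketch (not part of the change above): enumerating the device's supported media
// types and their decoder/encoder flag bits with the store API declared above.
#include <cstdio>

#include <media/NdkMediaCodecStore.h>

static void printSupportedMediaTypes() {
    const AMediaCodecSupportedMediaType *types = nullptr;
    size_t count = 0;
    if (AMediaCodecStore_getSupportedMediaTypes(&types, &count) != AMEDIA_OK) {
        return;
    }
    for (size_t i = 0; i < count; ++i) {
        const bool dec = types[i].mMode & AMediaCodecSupportedMediaType::FLAG_DECODER;
        const bool enc = types[i].mMode & AMediaCodecSupportedMediaType::FLAG_ENCODER;
        std::printf("%s decoder=%d encoder=%d\n", types[i].mMediaType, dec ? 1 : 0, enc ? 1 : 0);
    }
}
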
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 262c169..939f151 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -1,5 +1,33 @@
LIBMEDIANDK {
global:
+ ACodecAudioCapabilities_getBitrateRange; # introduced=36
+ ACodecAudioCapabilities_getInputChannelCountRanges; # introduced=36
+ ACodecAudioCapabilities_getMaxInputChannelCount; # introduced=36
+ ACodecAudioCapabilities_getMinInputChannelCount; # introduced=36
+ ACodecAudioCapabilities_getSupportedSampleRates; # introduced=36
+ ACodecAudioCapabilities_getSupportedSampleRateRanges; # introduced=36
+ ACodecAudioCapabilities_isSampleRateSupported; # introduced=36
+ ACodecEncoderCapabilities_getComplexityRange; # introduced=36
+ ACodecEncoderCapabilities_getQualityRange; # introduced=36
+ ACodecEncoderCapabilities_isBitrateModeSupported; # introduced=36
+ ACodecPerformancePoint_create; # introduced=36
+ ACodecPerformancePoint_covers; # introduced=36
+ ACodecPerformancePoint_coversFormat; # introduced=36
+ ACodecPerformancePoint_delete; # introduced=36
+ ACodecPerformancePoint_equals; # introduced=36
+ ACodecVideoCapabilities_areSizeAndRateSupported; # introduced=36
+ ACodecVideoCapabilities_getAchievableFrameRatesFor; # introduced=36
+ ACodecVideoCapabilities_getBitrateRange; # introduced=36
+ ACodecVideoCapabilities_getHeightAlignment; # introduced=36
+ ACodecVideoCapabilities_getSupportedFrameRates; # introduced=36
+ ACodecVideoCapabilities_getSupportedFrameRatesFor; # introduced=36
+ ACodecVideoCapabilities_getSupportedHeights; # introduced=36
+ ACodecVideoCapabilities_getSupportedHeightsFor; # introduced=36
+ ACodecVideoCapabilities_getSupportedPerformancePoints; # introduced=36
+ ACodecVideoCapabilities_getSupportedWidths; # introduced=36
+ ACodecVideoCapabilities_getSupportedWidthsFor; # introduced=36
+ ACodecVideoCapabilities_getWidthAlignment; # introduced=36
+ ACodecVideoCapabilities_isSizeSupported; # introduced=36
AImageReader_acquireLatestImage; # introduced=24
AImageReader_acquireLatestImageAsync; # introduced=26
AImageReader_acquireNextImage; # introduced=24
@@ -16,6 +44,7 @@
AImageReader_newWithDataSpace; # introduced=UpsideDownCake
AImageReader_setBufferRemovedListener; # introduced=26
AImageReader_setImageListener; # introduced=24
+ AImageReader_setUsage; # introduced=36 llndk
AImage_delete; # introduced=24
AImage_deleteAsync; # introduced=26
AImage_getCropRect; # introduced=24
@@ -216,6 +245,22 @@
AMediaCodec_createPersistentInputSurface; # introduced=26
AMediaCodec_start;
AMediaCodec_stop;
+ AMediaCodecInfo_getAudioCapabilities; # introduced=36
+ AMediaCodecInfo_getEncoderCapabilities; # introduced=36
+ AMediaCodecInfo_getVideoCapabilities; # introduced=36
+ AMediaCodecInfo_getCanonicalName; # introduced=36
+ AMediaCodecInfo_getMaxSupportedInstances; # introduced=36
+ AMediaCodecInfo_getMediaCodecInfoType; # introduced=36
+ AMediaCodecInfo_getMediaType; # introduced=36
+ AMediaCodecInfo_isEncoder; # introduced=36
+ AMediaCodecInfo_isFeatureRequired; # introduced=36
+ AMediaCodecInfo_isFeatureSupported; # introduced=36
+ AMediaCodecInfo_isFormatSupported; # introduced=36
+ AMediaCodecInfo_isVendor; # introduced=36
+ AMediaCodecStore_getCodecInfo; # introduced=36
+ AMediaCodecStore_getSupportedMediaTypes; # introduced=36
+ AMediaCodecStore_findNextDecoderForFormat; # introduced=36
+ AMediaCodecStore_findNextEncoderForFormat; # introduced=36
AMediaCrypto_delete;
AMediaCrypto_isCryptoSchemeSupported;
AMediaCrypto_new;
diff --git a/media/utils/EventLogTags.logtags b/media/utils/EventLogTags.logtags
index c397f34..5b98b0f 100644
--- a/media/utils/EventLogTags.logtags
+++ b/media/utils/EventLogTags.logtags
@@ -31,7 +31,7 @@
# 6: Percent
# Default value for data of type int/long is 2 (bytes).
#
-# See system/core/logcat/event.logtags for the original definition of the tags.
+# See system/logging/logcat/event.logtags for the original definition of the tags.
# 61000 - 61199 reserved for audioserver
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 8215247..282f3fa 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -98,10 +98,6 @@
static constexpr char kAudioServiceName[] = "audio";
-// In order to avoid invalidating offloaded tracks each time a Visualizer is turned on and off
-// we define a minimum time during which a global effect is considered enabled.
-static const nsecs_t kMinGlobalEffectEnabletimeNs = seconds(7200);
-
// Keep a strong reference to media.log service around forever.
// The service is within our parent process so it can never die in a way that we could observe.
// These two variables are const after initialization.
@@ -4842,11 +4838,6 @@
bool AudioFlinger::isNonOffloadableGlobalEffectEnabled_l() const
{
- if (mGlobalEffectEnableTime != 0 &&
- ((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) {
- return true;
- }
-
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
const auto thread = mPlaybackThreads.valueAt(i);
audio_utils::lock_guard l(thread->mutex());
@@ -4862,8 +4853,6 @@
{
audio_utils::lock_guard _l(mutex());
- mGlobalEffectEnableTime = systemTime();
-
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
const sp<IAfPlaybackThread> t = mPlaybackThreads.valueAt(i);
if (t->type() == IAfThreadBase::OFFLOAD) {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 6777075..c229e83 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -760,9 +760,6 @@
std::atomic<size_t> mClientSharedHeapSize = kMinimumClientSharedHeapSizeBytes;
static constexpr size_t kMinimumClientSharedHeapSizeBytes = 1024 * 1024; // 1MB
- // when a global effect was last enabled
- nsecs_t mGlobalEffectEnableTime GUARDED_BY(mutex()) = 0;
-
/* const */ sp<IAfPatchPanel> mPatchPanel;
const sp<EffectsFactoryHalInterface> mEffectsFactoryHal =
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 776775b..1cb9ea4 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -7760,6 +7760,9 @@
audio_utils::lock_guard l(mutex());
localTracks = std::move(mOutputTracks);
mOutputTracks.clear();
+ for (size_t i = 0; i < localTracks.size(); ++i) {
+ localTracks[i]->destroy();
+ }
}
localTracks.clear();
outputTracks.clear();
@@ -8418,7 +8421,6 @@
}
if (invalidate) {
activeTrack->invalidate();
- ALOG_ASSERT(fastTrackToRemove == 0);
fastTrackToRemove = activeTrack;
removeTrack_l(activeTrack);
mActiveTracks.remove(activeTrack);
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index f7b9b33..9107e2a 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -75,7 +75,8 @@
bool isEffectEnabled(int id) const;
uint32_t getMaxEffectsCpuLoad() const;
uint32_t getMaxEffectsMemory() const;
- bool isNonOffloadableEffectEnabled() const;
+ bool isNonOffloadableEffectEnabled(
+ const std::optional<const effect_uuid_t>& uuid = std::nullopt) const;
void moveEffects(audio_session_t session,
audio_io_handle_t srcOutput,
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index 090da6c..6d66781 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -21,6 +21,7 @@
#include "AudioInputDescriptor.h"
#include "EffectDescriptor.h"
+#include <system/audio_effects/audio_effects_utils.h>
#include <utils/String8.h>
#include <AudioPolicyInterface.h>
@@ -157,14 +158,18 @@
return NO_ERROR;
}
-bool EffectDescriptorCollection::isNonOffloadableEffectEnabled() const
+bool EffectDescriptorCollection::isNonOffloadableEffectEnabled(
+ const std::optional<const effect_uuid_t>& uuid) const
{
+ using namespace android::effect::utils;
for (size_t i = 0; i < size(); i++) {
sp<EffectDescriptor> effectDesc = valueAt(i);
- if (effectDesc->mEnabled && (effectDesc->isMusicEffect()) &&
- ((effectDesc->mDesc.flags & EFFECT_FLAG_OFFLOAD_SUPPORTED) == 0)) {
- ALOGV("isNonOffloadableEffectEnabled() non offloadable effect %s enabled on session %d",
- effectDesc->mDesc.name, effectDesc->mSession);
+ if ((effectDesc->mEnabled && (effectDesc->isMusicEffect()) &&
+ ((effectDesc->mDesc.flags & EFFECT_FLAG_OFFLOAD_SUPPORTED) == 0)) &&
+ (uuid == std::nullopt || uuid.value() == effectDesc->mDesc.uuid)) {
+ ALOGE("%s: non offloadable effect %s, uuid %s, enabled on session %d", __func__,
+ effectDesc->mDesc.name, ToString(effectDesc->mDesc.uuid).c_str(),
+ effectDesc->mSession);
return true;
}
}
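
// Editorial sketch (not part of the change above): how a policy-manager caller might use the
// widened isNonOffloadableEffectEnabled() check, either unfiltered or narrowed to one effect
// type. The UUID value below is a placeholder, not a real effect identifier, and the include
// path assumes the audiopolicy-internal header.
#include <optional>

#include <system/audio_effect.h>
#include "EffectDescriptor.h"

static bool hasBlockingEffect(const android::EffectDescriptorCollection& effects) {
    // Any enabled, non-offloadable music effect at all.
    if (effects.isNonOffloadableEffectEnabled()) {
        return true;
    }
    // Restricted to a single effect type (placeholder UUID).
    const effect_uuid_t kSomeEffectUuid = {
            0x12345678, 0x1234, 0x5678, 0x9abc, {0x00, 0x11, 0x22, 0x33, 0x44, 0x55}};
    return effects.isNonOffloadableEffectEnabled(kSomeEffectUuid);
}
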
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 354c59c..74e77e8 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1492,7 +1492,8 @@
for (auto &secondaryMix : secondaryMixes) {
sp<SwAudioOutputDescriptor> outputDesc = secondaryMix->getOutput();
if (outputDesc != nullptr &&
- outputDesc->mIoHandle != AUDIO_IO_HANDLE_NONE) {
+ outputDesc->mIoHandle != AUDIO_IO_HANDLE_NONE &&
+ outputDesc->mIoHandle != *output) {
secondaryOutputs->push_back(outputDesc->mIoHandle);
weakSecondaryOutputDescs.push_back(outputDesc);
}
@@ -7432,7 +7433,8 @@
for (auto &secondaryMix : secondaryMixes) {
sp<SwAudioOutputDescriptor> outputDesc = secondaryMix->getOutput();
if (outputDesc != nullptr &&
- outputDesc->mIoHandle != AUDIO_IO_HANDLE_NONE) {
+ outputDesc->mIoHandle != AUDIO_IO_HANDLE_NONE &&
+ outputDesc != outputDescriptor) {
secondaryDescs.push_back(outputDesc);
}
}
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index c600fb6..154b063 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -53,7 +53,10 @@
"libaudiopolicymanager_interface_headers",
],
- srcs: ["audiopolicymanager_tests.cpp"],
+ srcs: [
+ "audiopolicymanager_tests.cpp",
+ "test_execution_tracer.cpp",
+ ],
data: [":audiopolicytest_configuration_files"],
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 7913123..6974a0b 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -44,6 +44,7 @@
#include "AudioPolicyManagerTestClient.h"
#include "AudioPolicyTestClient.h"
#include "AudioPolicyTestManager.h"
+#include "test_execution_tracer.h"
using namespace android;
using testing::UnorderedElementsAre;
@@ -4214,3 +4215,9 @@
testing::Values(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
AUDIO_USAGE_ALARM)
);
+
+int main(int argc, char** argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+ return RUN_ALL_TESTS();
+}
diff --git a/services/audiopolicy/tests/test_execution_tracer.cpp b/services/audiopolicy/tests/test_execution_tracer.cpp
new file mode 100644
index 0000000..09de4a1
--- /dev/null
+++ b/services/audiopolicy/tests/test_execution_tracer.cpp
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TestExecutionTracer"
+
+#include "test_execution_tracer.h"
+
+#include <android-base/logging.h>
+
+void TestExecutionTracer::OnTestStart(const ::testing::TestInfo& test_info) {
+ TraceTestState("Started", test_info);
+}
+
+void TestExecutionTracer::OnTestEnd(const ::testing::TestInfo& test_info) {
+ TraceTestState("Finished", test_info);
+}
+
+void TestExecutionTracer::OnTestPartResult(const ::testing::TestPartResult& result) {
+ if (result.failed()) {
+ LOG(ERROR) << result;
+ } else {
+ LOG(INFO) << result;
+ }
+}
+
+// static
+void TestExecutionTracer::TraceTestState(const std::string& state,
+ const ::testing::TestInfo& test_info) {
+ LOG(INFO) << state << " " << test_info.test_suite_name() << "::" << test_info.name();
+}
diff --git a/services/audiopolicy/tests/test_execution_tracer.h b/services/audiopolicy/tests/test_execution_tracer.h
new file mode 100644
index 0000000..9031aaf
--- /dev/null
+++ b/services/audiopolicy/tests/test_execution_tracer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gtest/gtest.h>
+
+class TestExecutionTracer : public ::testing::EmptyTestEventListener {
+ public:
+ void OnTestStart(const ::testing::TestInfo& test_info) override;
+ void OnTestEnd(const ::testing::TestInfo& test_info) override;
+ void OnTestPartResult(const ::testing::TestPartResult& result) override;
+
+ private:
+ static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info);
+};
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index e1815c7..d074826 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -191,7 +191,11 @@
}
halStream.overrideDataSpace = stream.dataSpace;
- halStream.producerUsage = BufferUsage::GPU_RENDER_TARGET;
+ halStream.producerUsage = static_cast<BufferUsage>(
+ static_cast<int64_t>(stream.usage) |
+ static_cast<int64_t>(BufferUsage::CAMERA_OUTPUT) |
+ static_cast<int64_t>(BufferUsage::GPU_RENDER_TARGET));
+
halStream.supportOffline = false;
return halStream;
}