Merge "aaudio: factor the format converstion function" into sc-dev
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index 96ea5f2..a4ae71b 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -528,6 +528,8 @@
mBuffer = allocate_camera_metadata(newEntryCount,
newDataCount);
if (mBuffer == NULL) {
+ // Maintain old buffer to avoid potential memory leak.
+ mBuffer = oldBuffer;
ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__);
return NO_MEMORY;
}
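
The fix above keeps the previous buffer when the larger allocation fails. The same grow-or-keep idiom in isolation, as a sketch using the camera_metadata C API rather than the full resizeIfNeeded() logic:

    // Sketch: try to grow the metadata buffer, but never drop the old one on failure.
    camera_metadata_t *oldBuffer = mBuffer;
    mBuffer = allocate_camera_metadata(newEntryCount, newDataCount);
    if (mBuffer == NULL) {
        mBuffer = oldBuffer;  // keep the existing metadata instead of leaking it
        return NO_MEMORY;
    }
    append_camera_metadata(mBuffer, oldBuffer);  // copy the old entries across
    free_camera_metadata(oldBuffer);
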
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index d1b4ede..1609c7b 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -4079,6 +4079,34 @@
ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION =
// int64[4*n]
ACAMERA_SCALER_START + 22,
+ /**
+ * <p>Whether the camera device supports multi-resolution input or output streams</p>
+ *
+ * <p>Type: byte (acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>A logical multi-camera or an ultra high resolution camera may support multi-resolution
+ * input or output streams. With multi-resolution output streams, the camera device is able
+ * to output different resolution images depending on the current active physical camera or
+ * pixel mode. With multi-resolution input streams, the camera device can reprocess images
+ * of different resolutions from different physical cameras or sensor pixel modes.</p>
+ * <p>When set to TRUE:
+ * * For a logical multi-camera, the camera framework derives
+ * android.scaler.multiResolutionStreamConfigurationMap by combining the
+ * ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS from its physical
+ * cameras.
+ * * For an ultra-high resolution sensor camera, the camera framework directly copies
+ * the value of ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS to
+ * android.scaler.multiResolutionStreamConfigurationMap.</p>
+ *
+ * @see ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS
+ */
+ ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED = // byte (acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t)
+ ACAMERA_SCALER_START + 24,
ACAMERA_SCALER_END,
/**
@@ -9116,6 +9144,14 @@
} acamera_metadata_enum_android_scaler_available_stream_configurations_maximum_resolution_t;
+// ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED
+typedef enum acamera_metadata_enum_acamera_scaler_multi_resolution_stream_supported {
+ ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_FALSE = 0,
+
+ ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE = 1,
+
+} acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t;
+
// ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
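
On the client side the new characteristic is read like any other static tag. A minimal NDK sketch, assuming a valid ACameraManager and camera ID (error handling reduced to the essentials):

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Returns true if the camera advertises multi-resolution stream support.
    static bool supportsMultiResolutionStreams(ACameraManager *manager, const char *cameraId) {
        ACameraMetadata *chars = nullptr;
        if (ACameraManager_getCameraCharacteristics(manager, cameraId, &chars) != ACAMERA_OK) {
            return false;
        }
        ACameraMetadata_const_entry entry = {};
        bool supported = false;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED, &entry) == ACAMERA_OK &&
                entry.count == 1) {
            supported = (entry.data.u8[0] ==
                    ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE);
        }
        ACameraMetadata_free(chars);
        return supported;
    }
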
diff --git a/drm/libmediadrm/CryptoHal.cpp b/drm/libmediadrm/CryptoHal.cpp
index 3257f71..e0db1c4 100644
--- a/drm/libmediadrm/CryptoHal.cpp
+++ b/drm/libmediadrm/CryptoHal.cpp
@@ -382,7 +382,8 @@
return;
}
- mPlugin->notifyResolution(width, height);
+ auto hResult = mPlugin->notifyResolution(width, height);
+ ALOGE_IF(!hResult.isOk(), "notifyResolution txn failed %s", hResult.description().c_str());
}
status_t CryptoHal::setMediaDrmSession(const Vector<uint8_t> &sessionId) {
@@ -392,7 +393,8 @@
return mInitCheck;
}
- return toStatusT(mPlugin->setMediaDrmSession(toHidlVec(sessionId)));
+ auto err = mPlugin->setMediaDrmSession(toHidlVec(sessionId));
+ return err.isOk() ? toStatusT(err) : DEAD_OBJECT;
}
status_t CryptoHal::getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const {
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index 253a1fa..40d1e0c 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -888,7 +888,7 @@
Return<void> hResult;
if (mPluginV1_2 != NULL) {
- Return<void> hResult = mPluginV1_2->getProvisionRequest_1_2(
+ hResult = mPluginV1_2->getProvisionRequest_1_2(
toHidlString(certType), toHidlString(certAuthority),
[&](Status_V1_2 status, const hidl_vec<uint8_t>& hRequest,
const hidl_string& hDefaultUrl) {
@@ -900,7 +900,7 @@
}
);
} else {
- Return<void> hResult = mPlugin->getProvisionRequest(
+ hResult = mPlugin->getProvisionRequest(
toHidlString(certType), toHidlString(certAuthority),
[&](Status status, const hidl_vec<uint8_t>& hRequest,
const hidl_string& hDefaultUrl) {
@@ -1522,22 +1522,38 @@
return metricsString;
}
-bool DrmHal::requiresSecureDecoder(const char *mime) const {
+status_t DrmHal::requiresSecureDecoder(const char *mime, bool *required) const {
Mutex::Autolock autoLock(mLock);
if (mPluginV1_4 == NULL) {
return false;
}
- return mPluginV1_4->requiresSecureDecoderDefault(hidl_string(mime));
+ auto hResult = mPluginV1_4->requiresSecureDecoderDefault(hidl_string(mime));
+ if (!hResult.isOk()) {
+ DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %s", hResult.description().c_str());
+ return DEAD_OBJECT;
+ }
+ if (required) {
+ *required = hResult;
+ }
+ return OK;
}
-bool DrmHal::requiresSecureDecoder(const char *mime,
- DrmPlugin::SecurityLevel securityLevel) const {
+status_t DrmHal::requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
+ bool *required) const {
Mutex::Autolock autoLock(mLock);
if (mPluginV1_4 == NULL) {
return false;
}
auto hLevel = toHidlSecurityLevel(securityLevel);
- return mPluginV1_4->requiresSecureDecoder(hidl_string(mime), hLevel);
+ auto hResult = mPluginV1_4->requiresSecureDecoder(hidl_string(mime), hLevel);
+ if (!hResult.isOk()) {
+ DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %s", hResult.description().c_str());
+ return DEAD_OBJECT;
+ }
+ if (required) {
+ *required = hResult;
+ }
+ return OK;
}
status_t DrmHal::setPlaybackId(Vector<uint8_t> const &sessionId, const char *playbackId) {
@@ -1545,10 +1561,8 @@
if (mPluginV1_4 == NULL) {
return ERROR_UNSUPPORTED;
}
- drm::V1_0::Status err = mPluginV1_4->setPlaybackId(
- toHidlVec(sessionId),
- hidl_string(playbackId));
- return toStatusT(err);
+ auto err = mPluginV1_4->setPlaybackId(toHidlVec(sessionId), hidl_string(playbackId));
+ return err.isOk() ? toStatusT(err) : DEAD_OBJECT;
}
status_t DrmHal::getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const {
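
With the signature change above, transport failures are reported to the caller instead of being folded into a plain false. A minimal caller sketch, assuming drm is an initialized sp<IDrm>:

    bool required = false;
    status_t err = drm->requiresSecureDecoder(
            mime, DrmPlugin::kSecurityLevelHwSecureAll, &required);
    if (err == DEAD_OBJECT) {
        // The HIDL transaction failed (e.g. the DRM HAL died); retry or tear down.
    } else if (err == OK && required) {
        // A secure decoder must be used for this mime type at this security level.
    }
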
diff --git a/drm/libmediadrm/DrmUtils.cpp b/drm/libmediadrm/DrmUtils.cpp
index ed3848d..0b117a3 100644
--- a/drm/libmediadrm/DrmUtils.cpp
+++ b/drm/libmediadrm/DrmUtils.cpp
@@ -82,7 +82,17 @@
auto factory = Hal::getService(instance);
if (factory != nullptr) {
instances[instance.c_str()] = Hal::descriptor;
- if (!uuid || factory->isCryptoSchemeSupported(uuid)) {
+ if (!uuid) {
+ factories.push_back(factory);
+ continue;
+ }
+ auto supported = factory->isCryptoSchemeSupported(uuid);
+ if (!supported.isOk()) {
+ LOG2BE(uuid, "isCryptoSchemeSupported txn failed: %s",
+ supported.description().c_str());
+ continue;
+ }
+ if (supported) {
factories.push_back(factory);
}
}
@@ -114,30 +124,42 @@
sp<::V1_0::IDrmPlugin> MakeDrmPlugin(const sp<::V1_0::IDrmFactory> &factory,
const uint8_t uuid[16], const char *appPackageName) {
sp<::V1_0::IDrmPlugin> plugin;
- factory->createPlugin(toHidlArray16(uuid), hidl_string(appPackageName),
- [&](::V1_0::Status status, const sp<::V1_0::IDrmPlugin> &hPlugin) {
- if (status != ::V1_0::Status::OK) {
- LOG2BE(uuid, "MakeDrmPlugin failed: %d", status);
- return;
- }
- plugin = hPlugin;
- });
- return plugin;
+ auto err = factory->createPlugin(
+ toHidlArray16(uuid), hidl_string(appPackageName),
+ [&](::V1_0::Status status, const sp<::V1_0::IDrmPlugin> &hPlugin) {
+ if (status != ::V1_0::Status::OK) {
+ LOG2BE(uuid, "MakeDrmPlugin failed: %d", status);
+ return;
+ }
+ plugin = hPlugin;
+ });
+ if (err.isOk()) {
+ return plugin;
+ } else {
+ LOG2BE(uuid, "MakeDrmPlugin txn failed: %s", err.description().c_str());
+ return nullptr;
+ }
}
sp<::V1_0::ICryptoPlugin> MakeCryptoPlugin(const sp<::V1_0::ICryptoFactory> &factory,
const uint8_t uuid[16], const void *initData,
size_t initDataSize) {
sp<::V1_0::ICryptoPlugin> plugin;
- factory->createPlugin(toHidlArray16(uuid), toHidlVec(initData, initDataSize),
- [&](::V1_0::Status status, const sp<::V1_0::ICryptoPlugin> &hPlugin) {
- if (status != ::V1_0::Status::OK) {
- LOG2BE(uuid, "MakeCryptoPlugin failed: %d", status);
- return;
- }
- plugin = hPlugin;
- });
- return plugin;
+ auto err = factory->createPlugin(
+ toHidlArray16(uuid), toHidlVec(initData, initDataSize),
+ [&](::V1_0::Status status, const sp<::V1_0::ICryptoPlugin> &hPlugin) {
+ if (status != ::V1_0::Status::OK) {
+ LOG2BE(uuid, "MakeCryptoPlugin failed: %d", status);
+ return;
+ }
+ plugin = hPlugin;
+ });
+ if (err.isOk()) {
+ return plugin;
+ } else {
+ LOG2BE(uuid, "MakeCryptoPlugin txn failed: %s", err.description().c_str());
+ return nullptr;
+ }
}
} // namespace
@@ -304,6 +326,9 @@
std::string GetExceptionMessage(status_t err, const char *msg,
const Vector<::V1_4::LogMessage> &logs) {
+ std::string ruler("==============================");
+ std::string header("Beginning of DRM Plugin Log");
+ std::string footer("End of DRM Plugin Log");
String8 msg8;
if (msg) {
msg8 += msg;
@@ -311,6 +336,7 @@
}
auto errStr = StrCryptoError(err);
msg8 += errStr.c_str();
+ msg8 += String8::format("\n%s %s %s", ruler.c_str(), header.c_str(), ruler.c_str());
for (auto log : logs) {
time_t seconds = log.timeMs / 1000;
@@ -322,9 +348,10 @@
}
char p = logPriorityToChar(log.priority);
- msg8 += String8::format("\n%s.%03d %c %s", timeStr.c_str(), ms, p, log.message.c_str());
+ msg8 += String8::format("\n %s.%03d %c %s", timeStr.c_str(), ms, p, log.message.c_str());
}
+ msg8 += String8::format("\n%s %s %s", ruler.c_str(), footer.c_str(), ruler.c_str());
return msg8.c_str();
}
diff --git a/drm/libmediadrm/include/mediadrm/DrmHal.h b/drm/libmediadrm/include/mediadrm/DrmHal.h
index c5206fa..7eb1dec 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHal.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHal.h
@@ -179,11 +179,10 @@
virtual status_t setListener(const sp<IDrmClient>& listener);
- virtual bool requiresSecureDecoder(const char *mime) const;
+ virtual status_t requiresSecureDecoder(const char *mime, bool *required) const;
- virtual bool requiresSecureDecoder(
- const char *mime,
- DrmPlugin::SecurityLevel securityLevel) const;
+ virtual status_t requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
+ bool *required) const;
virtual status_t setPlaybackId(
Vector<uint8_t> const &sessionId,
diff --git a/drm/libmediadrm/include/mediadrm/IDrm.h b/drm/libmediadrm/include/mediadrm/IDrm.h
index 80c5c9b..a88784d 100644
--- a/drm/libmediadrm/include/mediadrm/IDrm.h
+++ b/drm/libmediadrm/include/mediadrm/IDrm.h
@@ -154,12 +154,10 @@
virtual status_t setListener(const sp<IDrmClient>& listener) = 0;
- virtual bool requiresSecureDecoder(
- const char *mime) const = 0;
+ virtual status_t requiresSecureDecoder(const char *mime, bool *required) const = 0;
- virtual bool requiresSecureDecoder(
- const char *mime,
- DrmPlugin::SecurityLevel securityLevel) const = 0;
+ virtual status_t requiresSecureDecoder(const char *mime, DrmPlugin::SecurityLevel securityLevel,
+ bool *required) const = 0;
virtual status_t setPlaybackId(
Vector<uint8_t> const &sessionId,
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index bab651f..0b121ad 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -28,6 +28,7 @@
#include <media/stagefright/foundation/AUtils.h>
#include <C2Debug.h>
+#include <Codec2Mapper.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <SimpleC2Interface.h>
@@ -213,6 +214,42 @@
.withFields({C2F(mSyncFramePeriod, value).any()})
.withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
.build());
+
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(ColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mCodedColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(CodedColorAspectsSetter, mColorAspects)
+ .build());
}
static C2R InputDelaySetter(
@@ -359,6 +396,33 @@
return C2R::Ok();
}
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+ const C2P<C2StreamColorAspectsInfo::input> &coded) {
+ (void)mayBlock;
+ me.set().range = coded.v.range;
+ me.set().primaries = coded.v.primaries;
+ me.set().transfer = coded.v.transfer;
+ me.set().matrix = coded.v.matrix;
+ return C2R::Ok();
+ }
+
IV_PROFILE_T getProfile_l() const {
switch (mProfileLevel->profile) {
case PROFILE_AVC_CONSTRAINED_BASELINE: [[fallthrough]];
@@ -418,6 +482,9 @@
std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const
{ return mPictureQuantization; }
+ std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
+ return mCodedColorAspects;
+ }
private:
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -430,6 +497,8 @@
std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
std::shared_ptr<C2StreamGopTuning::output> mGop;
std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
};
#define ive_api_function ih264e_api_function
@@ -980,6 +1049,55 @@
return;
}
+c2_status_t C2SoftAvcEnc::setVuiParams()
+{
+ ColorAspects sfAspects;
+ if (!C2Mapper::map(mColorAspects->primaries, &sfAspects.mPrimaries)) {
+ sfAspects.mPrimaries = android::ColorAspects::PrimariesUnspecified;
+ }
+ if (!C2Mapper::map(mColorAspects->range, &sfAspects.mRange)) {
+ sfAspects.mRange = android::ColorAspects::RangeUnspecified;
+ }
+ if (!C2Mapper::map(mColorAspects->matrix, &sfAspects.mMatrixCoeffs)) {
+ sfAspects.mMatrixCoeffs = android::ColorAspects::MatrixUnspecified;
+ }
+ if (!C2Mapper::map(mColorAspects->transfer, &sfAspects.mTransfer)) {
+ sfAspects.mTransfer = android::ColorAspects::TransferUnspecified;
+ }
+ int32_t primaries, transfer, matrixCoeffs;
+ bool range;
+ ColorUtils::convertCodecColorAspectsToIsoAspects(sfAspects,
+ &primaries,
+ &transfer,
+ &matrixCoeffs,
+ &range);
+ ih264e_vui_ip_t s_vui_params_ip {};
+ ih264e_vui_op_t s_vui_params_op {};
+
+ s_vui_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_vui_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_VUI_PARAMS;
+
+ s_vui_params_ip.u1_video_signal_type_present_flag = 1;
+ s_vui_params_ip.u1_colour_description_present_flag = 1;
+ s_vui_params_ip.u1_colour_primaries = primaries;
+ s_vui_params_ip.u1_transfer_characteristics = transfer;
+ s_vui_params_ip.u1_matrix_coefficients = matrixCoeffs;
+ s_vui_params_ip.u1_video_full_range_flag = range;
+
+ s_vui_params_ip.u4_size = sizeof(ih264e_vui_ip_t);
+ s_vui_params_op.u4_size = sizeof(ih264e_vui_op_t);
+
+ IV_STATUS_T status = ih264e_api_function(mCodecCtx, &s_vui_params_ip,
+ &s_vui_params_op);
+ if(status != IV_SUCCESS)
+ {
+ ALOGE("Unable to set vui params = 0x%x\n",
+ s_vui_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
c2_status_t C2SoftAvcEnc::initEncoder() {
IV_STATUS_T status;
WORD32 level;
@@ -999,6 +1117,7 @@
mIInterval = mIntf->getSyncFramePeriod_l();
mIDRInterval = mIntf->getSyncFramePeriod_l();
gop = mIntf->getGop_l();
+ mColorAspects = mIntf->getCodedColorAspects_l();
}
if (gop && gop->flexCount() > 0) {
uint32_t syncInterval = 1;
@@ -1223,6 +1342,9 @@
/* Video control Set Profile params */
setProfileParams();
+ /* Video control Set VUI params */
+ setVuiParams();
+
/* Video control Set in Encode header mode */
setEncMode(IVE_ENC_MODE_HEADER);
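
The two parameters added above (C2_PARAMKEY_COLOR_ASPECTS and C2_PARAMKEY_VUI_COLOR_ASPECTS) are what a client configures to drive the new setVuiParams() path. A minimal configuration sketch, assuming intf is the std::shared_ptr<C2ComponentInterface> of this encoder:

    C2StreamColorAspectsInfo::input aspects(
            0u /* stream */, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_BT709,
            C2Color::TRANSFER_170M, C2Color::MATRIX_BT709);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    // ColorAspectsSetter clamps out-of-range values; CodedColorAspectsSetter then
    // propagates the result to the coded (VUI) color aspects on the output.
    c2_status_t err = intf->config_vb({ &aspects }, C2_MAY_BLOCK, &failures);
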
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index 673a282..baf33e2 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -196,6 +196,7 @@
std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
uint32_t mOutBufferSize;
UWORD32 mHeaderGenerated;
@@ -229,6 +230,7 @@
c2_status_t setProfileParams();
c2_status_t setDeblockParams();
c2_status_t setVbvParams();
+ c2_status_t setVuiParams();
void logVersion();
c2_status_t setEncodeArgs(
ive_video_encode_ip_t *ps_encode_ip,
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 436a2c4..4bc1777 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -25,6 +25,7 @@
#include <media/stagefright/foundation/AUtils.h>
#include <C2Debug.h>
+#include <Codec2Mapper.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <SimpleC2Interface.h>
@@ -208,6 +209,42 @@
.withSetter(
Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
.build());
+
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(ColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mCodedColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(CodedColorAspectsSetter, mColorAspects)
+ .build());
}
static C2R InputDelaySetter(
@@ -402,6 +439,34 @@
std::shared_ptr<C2StreamGopTuning::output> getGop_l() const {
return mGop;
}
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+ const C2P<C2StreamColorAspectsInfo::input> &coded) {
+ (void)mayBlock;
+ me.set().range = coded.v.range;
+ me.set().primaries = coded.v.primaries;
+ me.set().transfer = coded.v.transfer;
+ me.set().matrix = coded.v.matrix;
+ return C2R::Ok();
+ }
+ std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() {
+ return mCodedColorAspects;
+ }
private:
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -415,6 +480,8 @@
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
std::shared_ptr<C2StreamGopTuning::output> mGop;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
};
static size_t GetCPUCoreCount() {
@@ -533,6 +600,32 @@
mBframes = maxBframes;
}
}
+ ColorAspects sfAspects;
+ if (!C2Mapper::map(mColorAspects->primaries, &sfAspects.mPrimaries)) {
+ sfAspects.mPrimaries = android::ColorAspects::PrimariesUnspecified;
+ }
+ if (!C2Mapper::map(mColorAspects->range, &sfAspects.mRange)) {
+ sfAspects.mRange = android::ColorAspects::RangeUnspecified;
+ }
+ if (!C2Mapper::map(mColorAspects->matrix, &sfAspects.mMatrixCoeffs)) {
+ sfAspects.mMatrixCoeffs = android::ColorAspects::MatrixUnspecified;
+ }
+ if (!C2Mapper::map(mColorAspects->transfer, &sfAspects.mTransfer)) {
+ sfAspects.mTransfer = android::ColorAspects::TransferUnspecified;
+ }
+ int32_t primaries, transfer, matrixCoeffs;
+ bool range;
+ ColorUtils::convertCodecColorAspectsToIsoAspects(sfAspects,
+ &primaries,
+ &transfer,
+ &matrixCoeffs,
+ &range);
+ mEncParams.s_out_strm_prms.i4_vui_enable = 1;
+ mEncParams.s_vui_sei_prms.u1_colour_description_present_flag = 1;
+ mEncParams.s_vui_sei_prms.u1_colour_primaries = primaries;
+ mEncParams.s_vui_sei_prms.u1_transfer_characteristics = transfer;
+ mEncParams.s_vui_sei_prms.u1_matrix_coefficients = matrixCoeffs;
+ mEncParams.s_vui_sei_prms.u1_video_full_range_flag = range;
// update configuration
mEncParams.s_src_prms.i4_width = mSize->width;
mEncParams.s_src_prms.i4_height = mSize->height;
@@ -629,6 +722,7 @@
mQuality = mIntf->getQuality_l();
mGop = mIntf->getGop_l();
mRequestSync = mIntf->getRequestSync_l();
+ mColorAspects = mIntf->getCodedColorAspects_l();
}
c2_status_t status = initEncParams();
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
index 5ea4602..9dbf682 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.h
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -89,6 +89,7 @@
std::shared_ptr<C2StreamQualityTuning::output> mQuality;
std::shared_ptr<C2StreamGopTuning::output> mGop;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
#ifdef FILE_DUMP_ENABLE
char mInFile[200];
char mOutFile[200];
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index 5e34b8a..c98b802 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -345,6 +345,42 @@
.withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
.withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
.build());
+
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(ColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mCodedColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(CodedColorAspectsSetter, mColorAspects)
+ .build());
}
static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
@@ -415,6 +451,31 @@
double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+ const C2P<C2StreamColorAspectsInfo::input> &coded) {
+ (void)mayBlock;
+ me.set().range = coded.v.range;
+ me.set().primaries = coded.v.primaries;
+ me.set().transfer = coded.v.transfer;
+ me.set().matrix = coded.v.matrix;
+ return C2R::Ok();
+ }
private:
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -427,6 +488,8 @@
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
};
} // namespace android
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 6176646..9d9ed70 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -392,6 +392,7 @@
namespace {
+// Codec bases are ordered by their date of introduction to the code base.
enum : uint32_t {
_C2_PL_MP2V_BASE = 0x1000,
_C2_PL_AAC_BASE = 0x2000,
@@ -403,12 +404,15 @@
_C2_PL_DV_BASE = 0x8000,
_C2_PL_AV1_BASE = 0x9000,
_C2_PL_VP8_BASE = 0xA000,
+ _C2_PL_MPEGH_BASE = 0xB000, // MPEG-H 3D Audio
C2_PROFILE_LEVEL_VENDOR_START = 0x70000000,
};
}
+// Profiles and levels for each codec are ordered based on how they are ordered in the
+// corresponding standard documents at introduction, and chronologically afterwards.
enum C2Config::profile_t : uint32_t {
PROFILE_UNUSED = 0, ///< profile is not used by this media type
@@ -562,6 +566,13 @@
PROFILE_VP8_1, ///< VP8 Profile 1
PROFILE_VP8_2, ///< VP8 Profile 2
PROFILE_VP8_3, ///< VP8 Profile 3
+
+ // MPEG-H 3D Audio profiles
+ PROFILE_MPEGH_MAIN = _C2_PL_MPEGH_BASE, ///< MPEG-H Main
+ PROFILE_MPEGH_HIGH, ///< MPEG-H High
+ PROFILE_MPEGH_LC, ///< MPEG-H Low-complexity
+ PROFILE_MPEGH_BASELINE, ///< MPEG-H Baseline
+
};
enum C2Config::level_t : uint32_t {
@@ -704,6 +715,13 @@
LEVEL_AV1_7_1, ///< AV1 Level 7.1
LEVEL_AV1_7_2, ///< AV1 Level 7.2
LEVEL_AV1_7_3, ///< AV1 Level 7.3
+
+ // MPEG-H 3D Audio levels
+ LEVEL_MPEGH_1 = _C2_PL_MPEGH_BASE, ///< MPEG-H L1
+ LEVEL_MPEGH_2, ///< MPEG-H L2
+ LEVEL_MPEGH_3, ///< MPEG-H L3
+ LEVEL_MPEGH_4, ///< MPEG-H L4
+ LEVEL_MPEGH_5, ///< MPEG-H L5
};
struct C2ProfileLevelStruct {
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 5461544..58a568e 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -23,15 +23,12 @@
#include <stdio.h>
#include <algorithm>
-#include <C2AllocatorIon.h>
#include <C2Buffer.h>
#include <C2BufferPriv.h>
#include <C2Config.h>
#include <C2Debug.h>
#include <codec2/hidl/client.h>
-using android::C2AllocatorIon;
-
#include "media_c2_hidl_test_common.h"
using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
static std::vector<DecodeTestParameters> gDecodeTestParameters;
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index 2905cbf..92b53a0 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -24,15 +24,12 @@
#include <algorithm>
#include <fstream>
-#include <C2AllocatorIon.h>
#include <C2Buffer.h>
#include <C2BufferPriv.h>
#include <C2Config.h>
#include <C2Debug.h>
#include <codec2/hidl/client.h>
-using android::C2AllocatorIon;
-
#include "media_c2_hidl_test_common.h"
using EncodeTestParameters = std::tuple<std::string, std::string, bool, int32_t>;
@@ -45,6 +42,8 @@
: C2Buffer({block->share(block->offset(), block->size(), ::C2Fence())}) {}
};
+constexpr uint32_t kMaxSamplesPerFrame = 256;
+
namespace {
class Codec2AudioEncHidlTestBase : public ::testing::Test {
@@ -98,7 +97,7 @@
// Get the test parameters from GetParam call.
virtual void getParams() {}
- void GetURLForComponent(char* mURL);
+ void GetURLForComponent(char* mURL, int32_t channelCount, int32_t sampleRate);
// callback function to process onWorkDone received by Listener
void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
@@ -223,53 +222,105 @@
return false;
}
+c2_status_t getChannelCount(const std::shared_ptr<android::Codec2Client::Component>& component,
+ int32_t* nChannels) {
+ std::unique_ptr<C2StreamChannelCountInfo::input> channelCount =
+ std::make_unique<C2StreamChannelCountInfo::input>();
+ std::vector<C2FieldSupportedValuesQuery> validValueInfos = {
+ C2FieldSupportedValuesQuery::Current(
+ C2ParamField(channelCount.get(), &C2StreamChannelCountInfo::value))};
+ c2_status_t c2err = component->querySupportedValues(validValueInfos, C2_DONT_BLOCK);
+ if (c2err != C2_OK || validValueInfos.size() != 1u) {
+ ALOGE("querySupportedValues_vb failed for channelCount");
+ return c2err;
+ }
+
+ // setting default value of channelCount
+ *nChannels = 1;
+ const auto& c2FSV = validValueInfos[0].values;
+ switch (c2FSV.type) {
+ case C2FieldSupportedValues::type_t::RANGE: {
+ const auto& range = c2FSV.range;
+ uint32_t rmax = (uint32_t)(range.max).ref<uint32_t>();
+ if (rmax >= 2) {
+ *nChannels = 2;
+ } else {
+ *nChannels = 1;
+ }
+ break;
+ }
+ case C2FieldSupportedValues::type_t::VALUES: {
+ for (const C2Value::Primitive& prim : c2FSV.values) {
+ if ((uint32_t)prim.ref<uint32_t>() == 2) {
+ *nChannels = 2;
+ } else if ((uint32_t)prim.ref<uint32_t>() == 1) {
+ *nChannels = 1;
+ }
+ }
+ break;
+ }
+ default:
+ break;
+ }
+ return C2_OK;
+}
+
+c2_status_t getSampleRate(const std::shared_ptr<android::Codec2Client::Component>& component,
+ int32_t* nSampleRate) {
+ // Use the default sample rate for components
+ std::vector<std::unique_ptr<C2Param>> queried;
+ c2_status_t c2err = component->query({}, {C2StreamSampleRateInfo::input::PARAM_TYPE},
+ C2_DONT_BLOCK, &queried);
+ if (c2err != C2_OK || queried.size() == 0) return c2err;
+
+ size_t offset = sizeof(C2Param);
+ C2Param* param = queried[0].get();
+ *nSampleRate = *(int32_t*)((uint8_t*)param + offset);
+
+ return C2_OK;
+}
+
+c2_status_t getSamplesPerFrame(const std::shared_ptr<android::Codec2Client::Component>& component,
+ int32_t nChannels, int32_t* samplesPerFrame) {
+ std::vector<std::unique_ptr<C2Param>> queried;
+ c2_status_t c2err = component->query({}, {C2StreamMaxBufferSizeInfo::input::PARAM_TYPE},
+ C2_DONT_BLOCK, &queried);
+ if (c2err != C2_OK || queried.size() == 0) return c2err;
+
+ size_t offset = sizeof(C2Param);
+ C2Param* param = queried[0].get();
+ uint32_t maxInputSize = *(uint32_t*)((uint8_t*)param + offset);
+ *samplesPerFrame = std::min((maxInputSize / (nChannels * 2)), kMaxSamplesPerFrame);
+
+ return C2_OK;
+}
+
// Get config params for a component
-bool getConfigParams(std::string mime, int32_t* nChannels, int32_t* nSampleRate,
- int32_t* samplesPerFrame) {
- if (mime.find("mp4a-latm") != std::string::npos) {
- *nChannels = 2;
- *nSampleRate = 48000;
- *samplesPerFrame = 1024;
- } else if (mime.find("flac") != std::string::npos) {
- *nChannels = 2;
- *nSampleRate = 48000;
- *samplesPerFrame = 1152;
- } else if (mime.find("opus") != std::string::npos) {
- *nChannels = 2;
- *nSampleRate = 48000;
- *samplesPerFrame = 960;
- } else if (mime.find("3gpp") != std::string::npos) {
- *nChannels = 1;
- *nSampleRate = 8000;
- *samplesPerFrame = 160;
- } else if (mime.find("amr-wb") != std::string::npos) {
- *nChannels = 1;
- *nSampleRate = 16000;
- *samplesPerFrame = 160;
- } else
- return false;
+bool getConfigParams(const std::shared_ptr<android::Codec2Client::Component>& component,
+ int32_t* nChannels, int32_t* nSampleRate, int32_t* samplesPerFrame) {
+ c2_status_t status = getChannelCount(component, nChannels);
+ if (status != C2_OK) return false;
+
+ status = getSampleRate(component, nSampleRate);
+ if (status != C2_OK) return false;
+
+ status = getSamplesPerFrame(component, *nChannels, samplesPerFrame);
+ if (status != C2_OK) return false;
return true;
}
// LookUpTable of clips and metadata for component testing
-void Codec2AudioEncHidlTestBase::GetURLForComponent(char* mURL) {
- struct CompToURL {
- std::string mime;
- const char* mURL;
- };
- static const CompToURL kCompToURL[] = {
- {"mp4a-latm", "bbb_raw_2ch_48khz_s16le.raw"}, {"3gpp", "bbb_raw_1ch_8khz_s16le.raw"},
- {"amr-wb", "bbb_raw_1ch_16khz_s16le.raw"}, {"flac", "bbb_raw_2ch_48khz_s16le.raw"},
- {"opus", "bbb_raw_2ch_48khz_s16le.raw"},
- };
-
- for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
- if (mMime.find(kCompToURL[i].mime) != std::string::npos) {
- strcat(mURL, kCompToURL[i].mURL);
- return;
- }
+void Codec2AudioEncHidlTestBase::GetURLForComponent(char* mURL, int32_t channelCount,
+ int32_t sampleRate) {
+ std::string rawInput = "bbb_raw_1ch_8khz_s16le.raw";
+ if (channelCount == 1 && sampleRate == 16000) {
+ rawInput = "bbb_raw_1ch_16khz_s16le.raw";
+ } else if (channelCount == 2) {
+ rawInput = "bbb_raw_2ch_48khz_s16le.raw";
}
+
+ strcat(mURL, rawInput.c_str());
}
void encodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
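
The getSampleRate()/getSamplesPerFrame() helpers above read the queried parameter by offsetting past the C2Param header. An equivalent sketch using the typed From() downcast, assuming the query returned a C2StreamSampleRateInfo::input:

    if (!queried.empty()) {
        if (auto *rate = C2StreamSampleRateInfo::input::From(queried[0].get())) {
            *nSampleRate = rate->value;  // same field the offset arithmetic extracts
        }
    }
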
@@ -283,9 +334,17 @@
uint32_t frameID = 0;
uint32_t maxRetry = 0;
- int bytesCount = samplesPerFrame * nChannels * 2;
+ uint32_t bytesCount = samplesPerFrame * nChannels * 2;
int32_t timestampIncr = (int)(((float)samplesPerFrame / nSampleRate) * 1000000);
uint64_t timestamp = 0;
+
+ // get length of file:
+ int32_t currPos = eleStream.tellg();
+ eleStream.seekg(0, eleStream.end);
+ uint32_t remainingBytes = (uint32_t)eleStream.tellg() - currPos;
+ eleStream.seekg(currPos, eleStream.beg);
+
+ nFrames = std::min(nFrames, remainingBytes / bytesCount);
while (1) {
if (nFrames == 0) break;
uint32_t flags = 0;
@@ -319,7 +378,12 @@
char* data = (char*)malloc(bytesCount);
ASSERT_NE(data, nullptr);
eleStream.read(data, bytesCount);
- ASSERT_EQ(eleStream.gcount(), bytesCount);
+            // if we have reached the end of the input stream, signal EOS
+ if (eleStream.gcount() < bytesCount) {
+ bytesCount = eleStream.gcount();
+ if (signalEOS) flags |= C2FrameData::FLAG_END_OF_STREAM;
+ }
+
std::shared_ptr<C2LinearBlock> block;
ASSERT_EQ(C2_OK,
linearPool->fetchLinearBlock(
@@ -373,9 +437,6 @@
TEST_P(Codec2AudioEncEncodeTest, EncodeTest) {
ALOGV("EncodeTest");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL);
bool signalEOS = std::get<2>(GetParam());
// Ratio w.r.t to mInputMaxBufSize
int32_t inputMaxBufRatio = std::get<3>(GetParam());
@@ -384,7 +445,7 @@
int32_t nSampleRate;
int32_t samplesPerFrame;
- if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
std::cout << "Failed to get the config params for " << mComponentName << "\n";
std::cout << "[ WARN ] Test Skipped \n";
return;
@@ -398,6 +459,10 @@
std::cout << "[ WARN ] Test Skipped \n";
return;
}
+ char mURL[512];
+ strcpy(mURL, sResourceDir.c_str());
+ GetURLForComponent(mURL, nChannels, nSampleRate);
+
ASSERT_EQ(mComponent->start(), C2_OK);
std::ifstream eleStream;
uint32_t numFrames = 16;
@@ -479,16 +544,12 @@
description("Test Request for flush");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL);
-
mFlushedIndices.clear();
int32_t nChannels;
int32_t nSampleRate;
int32_t samplesPerFrame;
- if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
std::cout << "Failed to get the config params for " << mComponentName << "\n";
std::cout << "[ WARN ] Test Skipped \n";
return;
@@ -498,6 +559,10 @@
std::cout << "[ WARN ] Test Skipped \n";
return;
}
+ char mURL[512];
+ strcpy(mURL, sResourceDir.c_str());
+ GetURLForComponent(mURL, nChannels, nSampleRate);
+
ASSERT_EQ(mComponent->start(), C2_OK);
std::ifstream eleStream;
@@ -544,26 +609,25 @@
description("Encodes input file for different channel count");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL);
-
- std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
- ALOGV("mURL : %s", mURL);
-
int32_t nSampleRate;
int32_t samplesPerFrame;
int32_t nChannels;
int32_t numFrames = 16;
int32_t maxChannelCount = 8;
- if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
std::cout << "Failed to get the config params for " << mComponentName << "\n";
std::cout << "[ WARN ] Test Skipped \n";
return;
}
+ char mURL[512];
+ strcpy(mURL, sResourceDir.c_str());
+ GetURLForComponent(mURL, nChannels, nSampleRate);
+
+ std::ifstream eleStream;
+ eleStream.open(mURL, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
+ ALOGV("mURL : %s", mURL);
uint64_t prevOutputSize = 0u;
uint32_t prevChannelCount = 0u;
@@ -591,8 +655,11 @@
}
// To check if the input stream is sufficient to encode for the higher channel count
+ struct stat buf;
+ stat(mURL, &buf);
+ size_t fileSize = buf.st_size;
int32_t bytesCount = (samplesPerFrame * nChannels * 2) * numFrames;
- if (eleStream.gcount() < bytesCount) {
+ if (fileSize < bytesCount) {
std::cout << "[ WARN ] Test Skipped for ChannelCount " << nChannels
<< " because of insufficient input data\n";
continue;
@@ -616,9 +683,6 @@
// blocking call to ensures application to Wait till all the inputs are consumed
waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
- // Validate output size based on chosen ChannelCount
- EXPECT_GE(mOutputSize, prevOutputSize);
-
prevChannelCount = nChannels;
prevOutputSize = mOutputSize;
@@ -633,7 +697,8 @@
ASSERT_TRUE(mCsd) << "CSD buffer missing";
}
ASSERT_TRUE(mEos);
- ASSERT_EQ(mComponent->stop(), C2_OK);
+ // TODO(b/147348711) Use reset instead of stop when using the same instance of codec.
+ ASSERT_EQ(mComponent->reset(), C2_OK);
mFramesReceived = 0;
mOutputSize = 0;
mEos = false;
@@ -646,25 +711,24 @@
description("Encodes input file for different SampleRate");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL);
-
- std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
- ALOGV("mURL : %s", mURL);
-
int32_t nSampleRate;
int32_t samplesPerFrame;
int32_t nChannels;
int32_t numFrames = 16;
- if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
std::cout << "Failed to get the config params for " << mComponentName << "\n";
std::cout << "[ WARN ] Test Skipped \n";
return;
}
+ char mURL[512];
+ strcpy(mURL, sResourceDir.c_str());
+ GetURLForComponent(mURL, nChannels, nSampleRate);
+
+ std::ifstream eleStream;
+ eleStream.open(mURL, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
+ ALOGV("mURL : %s", mURL);
int32_t sampleRateValues[] = {1000, 8000, 16000, 24000, 48000, 96000, 192000};
@@ -694,8 +758,11 @@
}
// To check if the input stream is sufficient to encode for the higher SampleRate
+ struct stat buf;
+ stat(mURL, &buf);
+ size_t fileSize = buf.st_size;
int32_t bytesCount = (samplesPerFrame * nChannels * 2) * numFrames;
- if (eleStream.gcount() < bytesCount) {
+ if (fileSize < bytesCount) {
std::cout << "[ WARN ] Test Skipped for SampleRate " << nSampleRate
<< " because of insufficient input data\n";
continue;
@@ -719,12 +786,6 @@
// blocking call to ensures application to Wait till all the inputs are consumed
waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue);
- // Validate output size based on chosen samplerate
- if (prevSampleRate >= nSampleRate) {
- EXPECT_LE(mOutputSize, prevOutputSize);
- } else {
- EXPECT_GT(mOutputSize, prevOutputSize);
- }
prevSampleRate = nSampleRate;
prevOutputSize = mOutputSize;
@@ -739,7 +800,8 @@
ASSERT_TRUE(mCsd) << "CSD buffer missing";
}
ASSERT_TRUE(mEos);
- ASSERT_EQ(mComponent->stop(), C2_OK);
+ // TODO(b/147348711) Use reset instead of stop when using the same instance of codec.
+ ASSERT_EQ(mComponent->reset(), C2_OK);
mFramesReceived = 0;
mOutputSize = 0;
mEos = false;
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index 10298c3..8d917b3 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -24,7 +24,6 @@
#include <openssl/md5.h>
-#include <C2AllocatorIon.h>
#include <C2Buffer.h>
#include <C2BufferPriv.h>
#include <C2Config.h>
@@ -35,8 +34,6 @@
#include <gui/IProducerListener.h>
#include <system/window.h>
-using android::C2AllocatorIon;
-
#include "media_c2_hidl_test_common.h"
#include "media_c2_video_hidl_test_common.h"
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index 8b40353..dfd649d 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -23,15 +23,12 @@
#include <stdio.h>
#include <fstream>
-#include <C2AllocatorIon.h>
#include <C2Buffer.h>
#include <C2BufferPriv.h>
#include <C2Config.h>
#include <C2Debug.h>
#include <codec2/hidl/client.h>
-using android::C2AllocatorIon;
-
#include "media_c2_hidl_test_common.h"
#include "media_c2_video_hidl_test_common.h"
diff --git a/media/codec2/hidl/client/output.cpp b/media/codec2/hidl/client/output.cpp
index 283ed8d..8cd4934 100644
--- a/media/codec2/hidl/client/output.cpp
+++ b/media/codec2/hidl/client/output.cpp
@@ -222,7 +222,7 @@
if (generation == mGeneration) {
// case of old BlockPool destruction
C2SyncVariables *var = mSyncMem ? mSyncMem->mem() : nullptr;
- if (var) {
+ if (syncObj && var) {
*syncObj = std::make_shared<V1_2::SurfaceSyncObj>();
(*syncObj)->bqId = bqId;
(*syncObj)->syncMemory = mSyncMem->handle();
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index e33a5ba..c6ff825 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1780,7 +1780,7 @@
{
Mutexed<Output>::Locked output(mOutput);
numOutputSlots = output->numSlots;
- reorderDepth = output->buffers->getReorderDepth();
+ reorderDepth = output->buffers ? output->buffers->getReorderDepth() : 0;
}
{
Mutexed<OutputSurface>::Locked output(mOutputSurface);
diff --git a/media/codec2/vndk/C2Config.cpp b/media/codec2/vndk/C2Config.cpp
index 34680a7..e9223fb 100644
--- a/media/codec2/vndk/C2Config.cpp
+++ b/media/codec2/vndk/C2Config.cpp
@@ -142,6 +142,14 @@
{ "av1-0", C2Config::PROFILE_AV1_0 },
{ "av1-1", C2Config::PROFILE_AV1_1 },
{ "av1-2", C2Config::PROFILE_AV1_2 },
+ { "vp8-0", C2Config::PROFILE_VP8_0 },
+ { "vp8-1", C2Config::PROFILE_VP8_1 },
+ { "vp8-2", C2Config::PROFILE_VP8_2 },
+ { "vp8-3", C2Config::PROFILE_VP8_3 },
+ { "mpegh-main", C2Config::PROFILE_MPEGH_MAIN },
+ { "mpegh-high", C2Config::PROFILE_MPEGH_HIGH },
+ { "mpegh-lc", C2Config::PROFILE_MPEGH_LC },
+ { "mpegh-baseline", C2Config::PROFILE_MPEGH_BASELINE },
}))
DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(C2Config::level_t, ({
@@ -248,6 +256,11 @@
{ "av1-7.1", C2Config::LEVEL_AV1_7_1 },
{ "av1-7.2", C2Config::LEVEL_AV1_7_2 },
{ "av1-7.3", C2Config::LEVEL_AV1_7_3 },
+ { "mpegh-1", C2Config::LEVEL_MPEGH_1 },
+ { "mpegh-2", C2Config::LEVEL_MPEGH_2 },
+ { "mpegh-3", C2Config::LEVEL_MPEGH_3 },
+ { "mpegh-4", C2Config::LEVEL_MPEGH_4 },
+ { "mpegh-5", C2Config::LEVEL_MPEGH_5 },
}))
DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(C2BufferData::type_t, ({
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index 685b779..eef238f 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -59,7 +59,8 @@
}
void SharedMemoryParcelable::setup(const unique_fd& fd, int32_t sizeInBytes) {
- mFd.reset(::dup(fd.get())); // store a duplicate fd
+    constexpr int minFd = 3; // skip over stdin, stdout and stderr
+ mFd.reset(fcntl(fd.get(), F_DUPFD_CLOEXEC, minFd)); // store a duplicate FD
ALOGV("setup(fd = %d -> %d, size = %d) this = %p\n", fd.get(), mFd.get(), sizeInBytes, this);
mSizeInBytes = sizeInBytes;
}
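
The change above replaces dup() with fcntl(F_DUPFD_CLOEXEC) so the duplicate never lands on a freed stdio descriptor and does not leak across exec(). The same idiom as a standalone sketch:

    #include <fcntl.h>

    // Duplicate fd onto the lowest free descriptor >= 3, with FD_CLOEXEC set atomically.
    int dupAboveStdio(int fd) {
        return fcntl(fd, F_DUPFD_CLOEXEC, 3 /* minFd */);  // -1 and errno set on failure
    }
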
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index bf07abb..d8b27c3 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -159,7 +159,9 @@
android::mediametrics::LogItem(mMetricsId)
.set(AMEDIAMETRICS_PROP_PERFORMANCEMODE,
- AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
+ AudioGlobal_convertPerformanceModeToText(builder.getPerformanceMode()))
+ .set(AMEDIAMETRICS_PROP_SHARINGMODE,
+ AudioGlobal_convertSharingModeToText(builder.getSharingMode()))
.set(AMEDIAMETRICS_PROP_ENCODINGCLIENT,
android::toString(requestedFormat).c_str()).record();
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 91b8a4d..1ed240a 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -117,7 +117,7 @@
item.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_OPEN)
.set(AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL,
AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
- .set(AMEDIAMETRICS_PROP_SHARINGMODE,
+ .set(AMEDIAMETRICS_PROP_SHARINGMODEACTUAL,
AudioGlobal_convertSharingModeToText(getSharingMode()))
.set(AMEDIAMETRICS_PROP_BUFFERCAPACITYFRAMES, getBufferCapacity())
.set(AMEDIAMETRICS_PROP_BURSTFRAMES, getFramesPerBurst())
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index de8fe6a..eca5392 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -221,7 +221,9 @@
+ std::to_string(mAudioRecord->getPortId());
android::mediametrics::LogItem(mMetricsId)
.set(AMEDIAMETRICS_PROP_PERFORMANCEMODE,
- AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
+ AudioGlobal_convertPerformanceModeToText(builder.getPerformanceMode()))
+ .set(AMEDIAMETRICS_PROP_SHARINGMODE,
+ AudioGlobal_convertSharingModeToText(builder.getSharingMode()))
.set(AMEDIAMETRICS_PROP_ENCODINGCLIENT, toString(requestedFormat).c_str()).record();
// Get the actual values from the AudioRecord.
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 2ee5cdc..04a9dec 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -191,7 +191,9 @@
+ std::to_string(mAudioTrack->getPortId());
android::mediametrics::LogItem(mMetricsId)
.set(AMEDIAMETRICS_PROP_PERFORMANCEMODE,
- AudioGlobal_convertPerformanceModeToText(getPerformanceMode()))
+ AudioGlobal_convertPerformanceModeToText(builder.getPerformanceMode()))
+ .set(AMEDIAMETRICS_PROP_SHARINGMODE,
+ AudioGlobal_convertSharingModeToText(builder.getSharingMode()))
.set(AMEDIAMETRICS_PROP_ENCODINGCLIENT, toString(getFormat()).c_str()).record();
doSetVolume();
diff --git a/media/libaudiofoundation/include/media/AudioProfile.h b/media/libaudiofoundation/include/media/AudioProfile.h
index 20c7ab9..6a36e78 100644
--- a/media/libaudiofoundation/include/media/AudioProfile.h
+++ b/media/libaudiofoundation/include/media/AudioProfile.h
@@ -98,7 +98,7 @@
bool mIsDynamicChannels = false;
bool mIsDynamicRate = false;
- audio_encapsulation_type_t mEncapsulationType;
+ audio_encapsulation_type_t mEncapsulationType = AUDIO_ENCAPSULATION_TYPE_NONE;
AudioProfile() = default;
AudioProfile& operator=(const AudioProfile& other);
diff --git a/media/libaudiohal/impl/StreamHalLocal.cpp b/media/libaudiohal/impl/StreamHalLocal.cpp
index d0c375e..34bd5df 100644
--- a/media/libaudiohal/impl/StreamHalLocal.cpp
+++ b/media/libaudiohal/impl/StreamHalLocal.cpp
@@ -17,6 +17,7 @@
#define LOG_TAG "StreamHalLocal"
//#define LOG_NDEBUG 0
+#include <audio_utils/Metadata.h>
#include <hardware/audio.h>
#include <media/AudioParameter.h>
#include <utils/Log.h>
@@ -353,7 +354,11 @@
if (callback.get() == nullptr) return 0;
switch (event) {
case STREAM_EVENT_CBK_TYPE_CODEC_FORMAT_CHANGED:
- callback->onCodecFormatChanged(std::basic_string<uint8_t>((uint8_t*)param));
+ // void* param is the byte string buffer from byte_string_from_audio_metadata().
+ // As the byte string buffer may have embedded zeroes, we cannot use strlen()
+ callback->onCodecFormatChanged(std::basic_string<uint8_t>(
+ (const uint8_t*)param,
+ audio_utils::metadata::dataByteStringLen((const uint8_t*)param)));
break;
default:
ALOGW("%s unknown event %d", __func__, event);
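
The explicit length matters because the metadata byte string can contain embedded zero bytes, so any strlen()-style measurement truncates it. A small illustration (buffer contents are made up):

    #include <cstdint>
    #include <string>

    const uint8_t buf[] = { 'k', 'e', 'y', 0x00, 0x01, 0x02 };

    // Length passed explicitly: all six bytes are kept, including the embedded zero.
    std::basic_string<uint8_t> complete(buf, sizeof(buf));

    // A null-terminated construction would stop at buf[3] and keep only "key",
    // which is exactly the truncation the dataByteStringLen() call above avoids.
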
diff --git a/media/libmedia/IMediaExtractor.cpp b/media/libmedia/IMediaExtractor.cpp
index 7ed76d8..eb436d1 100644
--- a/media/libmedia/IMediaExtractor.cpp
+++ b/media/libmedia/IMediaExtractor.cpp
@@ -39,7 +39,8 @@
SETMEDIACAS,
NAME,
GETMETRICS,
- SETENTRYPOINT
+ SETENTRYPOINT,
+ SETLOGSESSIONID
};
class BpMediaExtractor : public BpInterface<IMediaExtractor> {
@@ -150,6 +151,13 @@
data.writeInt32(static_cast<int32_t>(entryPoint));
return remote()->transact(SETENTRYPOINT, data, &reply);
}
+
+ virtual status_t setLogSessionId(const String8& logSessionId) {
+ Parcel data, reply;
+ data.writeInterfaceToken(BpMediaExtractor::getInterfaceDescriptor());
+ data.writeString8(logSessionId);
+ return remote()->transact(SETLOGSESSIONID, data, &reply);
+ }
};
IMPLEMENT_META_INTERFACE(MediaExtractor, "android.media.IMediaExtractor");
@@ -250,6 +258,16 @@
}
return err;
}
+ case SETLOGSESSIONID: {
+ ALOGV("setLogSessionId");
+ CHECK_INTERFACE(IMediaExtractor, data, reply);
+ String8 logSessionId;
+ status_t status = data.readString8(&logSessionId);
+ if (status == OK) {
+ setLogSessionId(logSessionId);
+ }
+ return status;
+ }
default:
return BBinder::onTransact(code, data, reply, flags);
}
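
A minimal caller sketch for the new transaction, assuming extractor is an sp<IMediaExtractor> bound to a remote extractor (the session ID value here is a placeholder):

    status_t err = extractor->setLogSessionId(String8("0123456789abcdef"));
    if (err != OK) {
        ALOGW("setLogSessionId failed: %d", err);
    }
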
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 1705c97..0c87aee 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -230,7 +230,7 @@
return tag;
}
-/*static*/ MediaProfiles::VideoCodec*
+/*static*/ void
MediaProfiles::createVideoCodec(const char **atts, MediaProfiles *profiles)
{
CHECK(!strcmp("codec", atts[0]) &&
@@ -242,22 +242,21 @@
const size_t nMappings = sizeof(sVideoEncoderNameMap)/sizeof(sVideoEncoderNameMap[0]);
const int codec = findTagForName(sVideoEncoderNameMap, nMappings, atts[1]);
if (codec == -1) {
- ALOGE("MediaProfiles::createVideoCodec failed to locate codec %s", atts[1]);
- return nullptr;
+ ALOGE("MediaProfiles::createVideoCodec failed to locate codec %s", atts[1]);
+ return;
}
- MediaProfiles::VideoCodec *videoCodec =
- new MediaProfiles::VideoCodec(static_cast<video_encoder>(codec),
- atoi(atts[3]), atoi(atts[5]), atoi(atts[7]), atoi(atts[9]));
- logVideoCodec(*videoCodec);
+ VideoCodec videoCodec {
+ static_cast<video_encoder>(codec),
+ atoi(atts[3]), atoi(atts[5]), atoi(atts[7]), atoi(atts[9]) };
+ logVideoCodec(videoCodec);
size_t nCamcorderProfiles;
CHECK((nCamcorderProfiles = profiles->mCamcorderProfiles.size()) >= 1);
- profiles->mCamcorderProfiles[nCamcorderProfiles - 1]->mVideoCodec = videoCodec;
- return videoCodec;
+ profiles->mCamcorderProfiles[nCamcorderProfiles - 1]->mVideoCodecs.emplace_back(videoCodec);
}
-/*static*/ MediaProfiles::AudioCodec*
+/*static*/ void
MediaProfiles::createAudioCodec(const char **atts, MediaProfiles *profiles)
{
CHECK(!strcmp("codec", atts[0]) &&
@@ -267,20 +266,20 @@
const size_t nMappings = sizeof(sAudioEncoderNameMap)/sizeof(sAudioEncoderNameMap[0]);
const int codec = findTagForName(sAudioEncoderNameMap, nMappings, atts[1]);
if (codec == -1) {
- ALOGE("MediaProfiles::createAudioCodec failed to locate codec %s", atts[1]);
- return nullptr;
+ ALOGE("MediaProfiles::createAudioCodec failed to locate codec %s", atts[1]);
+ return;
}
- MediaProfiles::AudioCodec *audioCodec =
- new MediaProfiles::AudioCodec(static_cast<audio_encoder>(codec),
- atoi(atts[3]), atoi(atts[5]), atoi(atts[7]));
- logAudioCodec(*audioCodec);
+ AudioCodec audioCodec {
+ static_cast<audio_encoder>(codec),
+ atoi(atts[3]), atoi(atts[5]), atoi(atts[7]) };
+ logAudioCodec(audioCodec);
size_t nCamcorderProfiles;
CHECK((nCamcorderProfiles = profiles->mCamcorderProfiles.size()) >= 1);
- profiles->mCamcorderProfiles[nCamcorderProfiles - 1]->mAudioCodec = audioCodec;
- return audioCodec;
+ profiles->mCamcorderProfiles[nCamcorderProfiles - 1]->mAudioCodecs.emplace_back(audioCodec);
}
+
/*static*/ MediaProfiles::AudioDecoderCap*
MediaProfiles::createAudioDecoderCap(const char **atts)
{
@@ -575,8 +574,20 @@
initRequiredProfileRefs(mCameraIds);
for (size_t i = 0, n = mCamcorderProfiles.size(); i < n; ++i) {
- int product = mCamcorderProfiles[i]->mVideoCodec->mFrameWidth *
- mCamcorderProfiles[i]->mVideoCodec->mFrameHeight;
+        // ensure each camcorder profile has at least one video and one audio codec entry
+ if (mCamcorderProfiles[i]->mVideoCodecs.size() == 0) {
+ mCamcorderProfiles[i]->mVideoCodecs.emplace_back(
+ VIDEO_ENCODER_H263, 192000 /* bitrate */,
+ 176 /* width */, 144 /* height */, 20 /* frameRate */);
+ }
+ if (mCamcorderProfiles[i]->mAudioCodecs.size() == 0) {
+ mCamcorderProfiles[i]->mAudioCodecs.emplace_back(
+ AUDIO_ENCODER_AMR_NB, 12200 /* bitrate */,
+ 8000 /* sampleRate */, 1 /* channels */);
+ }
+
+ int product = mCamcorderProfiles[i]->mVideoCodecs[0].mFrameWidth *
+ mCamcorderProfiles[i]->mVideoCodecs[0].mFrameHeight;
camcorder_quality quality = mCamcorderProfiles[i]->mQuality;
int cameraId = mCamcorderProfiles[i]->mCameraId;
@@ -745,34 +756,35 @@
/*static*/ MediaProfiles::CamcorderProfile*
MediaProfiles::createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality)
{
- MediaProfiles::VideoCodec *videoCodec =
- new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 1000000, 176, 144, 20);
-
- AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
profile->mQuality = quality;
profile->mDuration = 60;
- profile->mVideoCodec = videoCodec;
- profile->mAudioCodec = audioCodec;
+ profile->mVideoCodecs.emplace_back(
+ VIDEO_ENCODER_H263, 1000000 /* bitrate */,
+ 176 /* width */, 144 /* height */, 20 /* frameRate */);
+ profile->mAudioCodecs.emplace_back(
+ AUDIO_ENCODER_AMR_NB, 12200 /* bitrate */,
+ 8000 /* sampleRate */, 1 /* channels */);
+
return profile;
}
/*static*/ MediaProfiles::CamcorderProfile*
MediaProfiles::createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality)
{
- MediaProfiles::VideoCodec *videoCodec =
- new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 20000000, 720, 480, 20);
-
- AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
profile->mQuality = quality;
profile->mDuration = 60;
- profile->mVideoCodec = videoCodec;
- profile->mAudioCodec = audioCodec;
+ profile->mVideoCodecs.emplace_back(
+ VIDEO_ENCODER_H263, 20000000 /* bitrate */,
+ 720 /* width */, 480 /* height */, 20 /* frameRate */);
+ profile->mAudioCodecs.emplace_back(
+ AUDIO_ENCODER_AMR_NB, 12200 /* bitrate */,
+ 8000 /* sampleRate */, 1 /* channels */);
return profile;
}
@@ -799,36 +811,34 @@
/*static*/ MediaProfiles::CamcorderProfile*
MediaProfiles::createDefaultCamcorderQcifProfile(camcorder_quality quality)
{
- MediaProfiles::VideoCodec *videoCodec =
- new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 192000, 176, 144, 20);
-
- MediaProfiles::AudioCodec *audioCodec =
- new MediaProfiles::AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
-
- MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+ CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
profile->mQuality = quality;
profile->mDuration = 30;
- profile->mVideoCodec = videoCodec;
- profile->mAudioCodec = audioCodec;
+ profile->mVideoCodecs.emplace_back(
+ VIDEO_ENCODER_H263, 192000 /* bitrate */,
+ 176 /* width */, 144 /* height */, 20 /* frameRate */);
+ profile->mAudioCodecs.emplace_back(
+ AUDIO_ENCODER_AMR_NB, 12200 /* bitrate */,
+ 8000 /* sampleRate */, 1 /* channels */);
return profile;
}
/*static*/ MediaProfiles::CamcorderProfile*
MediaProfiles::createDefaultCamcorderCifProfile(camcorder_quality quality)
{
- MediaProfiles::VideoCodec *videoCodec =
- new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
-
- AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
profile->mQuality = quality;
profile->mDuration = 60;
- profile->mVideoCodec = videoCodec;
- profile->mAudioCodec = audioCodec;
+ profile->mVideoCodecs.emplace_back(
+ VIDEO_ENCODER_H263, 360000 /* bitrate */,
+ 352 /* width */, 288 /* height */, 20 /* frameRate */);
+ profile->mAudioCodecs.emplace_back(
+ AUDIO_ENCODER_AMR_NB, 12200 /* bitrate */,
+ 8000 /* sampleRate */, 1 /* channels */);
return profile;
}
@@ -1112,6 +1122,36 @@
return index;
}
+const MediaProfiles::CamcorderProfile *MediaProfiles::getCamcorderProfile(
+ int cameraId, camcorder_quality quality) const {
+ int index = getCamcorderProfileIndex(cameraId, quality);
+ if (index == -1) {
+ ALOGE("The given camcorder profile camera %d quality %d is not found",
+ cameraId, quality);
+ return nullptr;
+ }
+
+ return mCamcorderProfiles[index];
+}
+
+std::vector<const MediaProfiles::AudioCodec *>
+MediaProfiles::CamcorderProfile::getAudioCodecs() const {
+ std::vector<const MediaProfiles::AudioCodec *> res;
+ for (const MediaProfiles::AudioCodec &ac : mAudioCodecs) {
+ res.push_back(&ac);
+ }
+ return res;
+}
+
+std::vector<const MediaProfiles::VideoCodec *>
+MediaProfiles::CamcorderProfile::getVideoCodecs() const {
+ std::vector<const MediaProfiles::VideoCodec *> res;
+ for (const MediaProfiles::VideoCodec &vc : mVideoCodecs) {
+ res.push_back(&vc);
+ }
+ return res;
+}
+
int MediaProfiles::getCamcorderProfileParamByName(const char *name,
int cameraId,
camcorder_quality quality) const
@@ -1128,15 +1168,15 @@
if (!strcmp("duration", name)) return mCamcorderProfiles[index]->mDuration;
if (!strcmp("file.format", name)) return mCamcorderProfiles[index]->mFileFormat;
- if (!strcmp("vid.codec", name)) return mCamcorderProfiles[index]->mVideoCodec->mCodec;
- if (!strcmp("vid.width", name)) return mCamcorderProfiles[index]->mVideoCodec->mFrameWidth;
- if (!strcmp("vid.height", name)) return mCamcorderProfiles[index]->mVideoCodec->mFrameHeight;
- if (!strcmp("vid.bps", name)) return mCamcorderProfiles[index]->mVideoCodec->mBitRate;
- if (!strcmp("vid.fps", name)) return mCamcorderProfiles[index]->mVideoCodec->mFrameRate;
- if (!strcmp("aud.codec", name)) return mCamcorderProfiles[index]->mAudioCodec->mCodec;
- if (!strcmp("aud.bps", name)) return mCamcorderProfiles[index]->mAudioCodec->mBitRate;
- if (!strcmp("aud.ch", name)) return mCamcorderProfiles[index]->mAudioCodec->mChannels;
- if (!strcmp("aud.hz", name)) return mCamcorderProfiles[index]->mAudioCodec->mSampleRate;
+ if (!strcmp("vid.codec", name)) return mCamcorderProfiles[index]->mVideoCodecs[0].mCodec;
+ if (!strcmp("vid.width", name)) return mCamcorderProfiles[index]->mVideoCodecs[0].mFrameWidth;
+ if (!strcmp("vid.height", name)) return mCamcorderProfiles[index]->mVideoCodecs[0].mFrameHeight;
+ if (!strcmp("vid.bps", name)) return mCamcorderProfiles[index]->mVideoCodecs[0].mBitRate;
+ if (!strcmp("vid.fps", name)) return mCamcorderProfiles[index]->mVideoCodecs[0].mFrameRate;
+ if (!strcmp("aud.codec", name)) return mCamcorderProfiles[index]->mAudioCodecs[0].mCodec;
+ if (!strcmp("aud.bps", name)) return mCamcorderProfiles[index]->mAudioCodecs[0].mBitRate;
+ if (!strcmp("aud.ch", name)) return mCamcorderProfiles[index]->mAudioCodecs[0].mChannels;
+ if (!strcmp("aud.hz", name)) return mCamcorderProfiles[index]->mAudioCodecs[0].mSampleRate;
ALOGE("The given camcorder profile param id %d name %s is not found", cameraId, name);
return -1;
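For orientation only, a minimal caller-side sketch of the legacy name-based query path that the hunk above rewires onto the first entry of the new codec vectors; the camera id and quality value are illustrative placeholders, not taken from the patch.

    // Sketch only: legacy queries keep working and now read mVideoCodecs[0] /
    // mAudioCodecs[0] under the hood (see getCamcorderProfileParamByName above).
    MediaProfiles *profiles = MediaProfiles::getInstance();
    int width = profiles->getCamcorderProfileParamByName(
            "vid.width", 0 /* cameraId */, CAMCORDER_QUALITY_HIGH);
    int sampleRate = profiles->getCamcorderProfileParamByName(
            "aud.hz", 0 /* cameraId */, CAMCORDER_QUALITY_HIGH);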
diff --git a/media/libmedia/include/android/IMediaExtractor.h b/media/libmedia/include/android/IMediaExtractor.h
index f9cafde..bb1eb99 100644
--- a/media/libmedia/include/android/IMediaExtractor.h
+++ b/media/libmedia/include/android/IMediaExtractor.h
@@ -72,6 +72,8 @@
};
virtual status_t setEntryPoint(EntryPoint entryPoint) = 0;
+
+ virtual status_t setLogSessionId(const String8& logSessionId) = 0;
};
diff --git a/media/libmedia/include/media/MediaProfiles.h b/media/libmedia/include/media/MediaProfiles.h
index 7f0b99d..75e16fb 100644
--- a/media/libmedia/include/media/MediaProfiles.h
+++ b/media/libmedia/include/media/MediaProfiles.h
@@ -21,6 +21,8 @@
#include <utils/threads.h>
#include <media/mediarecorder.h>
+#include <vector>
+
namespace android {
enum camcorder_quality {
@@ -100,6 +102,176 @@
static MediaProfiles* getInstance();
/**
+ * Configuration for a video encoder.
+ */
+ struct VideoCodec {
+ public:
+ /**
+ * Constructs a video encoder configuration.
+ *
+ * @param codec codec type
+ * @param bitrate bitrate in bps
+ * @param frameWidth frame width in pixels
+ * @param frameHeight frame height in pixels
+ * @param frameRate frame rate in fps
+ */
+ VideoCodec(video_encoder codec, int bitrate, int frameWidth, int frameHeight, int frameRate)
+ : mCodec(codec),
+ mBitRate(bitrate),
+ mFrameWidth(frameWidth),
+ mFrameHeight(frameHeight),
+ mFrameRate(frameRate) {
+ }
+
+ VideoCodec(const VideoCodec&) = default;
+
+ ~VideoCodec() {}
+
+ /** Returns the codec type. */
+ video_encoder getCodec() const {
+ return mCodec;
+ }
+
+ /** Returns the bitrate in bps. */
+ int getBitrate() const {
+ return mBitRate;
+ }
+
+ /** Returns the frame width in pixels. */
+ int getFrameWidth() const {
+ return mFrameWidth;
+ }
+
+ /** Returns the frame height in pixels. */
+ int getFrameHeight() const {
+ return mFrameHeight;
+ }
+
+ /** Returns the frame rate in fps. */
+ int getFrameRate() const {
+ return mFrameRate;
+ }
+
+ private:
+ video_encoder mCodec;
+ int mBitRate;
+ int mFrameWidth;
+ int mFrameHeight;
+ int mFrameRate;
+ friend class MediaProfiles;
+ };
+
+ /**
+ * Configuration for an audio encoder.
+ */
+ struct AudioCodec {
+ public:
+ /**
+ * Constructs an audio encoder configuration.
+ *
+ * @param codec codec type
+ * @param bitrate bitrate in bps
+ * @param sampleRate sample rate in Hz
+ * @param channels number of channels
+ */
+ AudioCodec(audio_encoder codec, int bitrate, int sampleRate, int channels)
+ : mCodec(codec),
+ mBitRate(bitrate),
+ mSampleRate(sampleRate),
+ mChannels(channels) {
+ }
+
+ AudioCodec(const AudioCodec&) = default;
+
+ ~AudioCodec() {}
+
+ /** Returns the codec type. */
+ audio_encoder getCodec() const {
+ return mCodec;
+ }
+
+ /** Returns the bitrate in bps. */
+ int getBitrate() const {
+ return mBitRate;
+ }
+
+ /** Returns the sample rate in Hz. */
+ int getSampleRate() const {
+ return mSampleRate;
+ }
+
+ /** Returns the number of channels. */
+ int getChannels() const {
+ return mChannels;
+ }
+
+ private:
+ audio_encoder mCodec;
+ int mBitRate;
+ int mSampleRate;
+ int mChannels;
+ friend class MediaProfiles;
+ };
+
+ /**
+ * Configuration for a camcorder profile/encoder profiles object.
+ */
+ struct CamcorderProfile {
+ /**
+ * Returns an ordered list of the video codec configurations, in
+ * decreasing order of preference. The returned pointers are only valid
+ * during the lifetime of this object.
+ */
+ std::vector<const VideoCodec *> getVideoCodecs() const;
+
+ /**
+ * Returns an ordered list of the audio codec configurations, in
+ * decreasing order of preference. The returned pointers are only valid
+ * during the lifetime of this object.
+ */
+ std::vector<const AudioCodec *> getAudioCodecs() const;
+
+ /** Returns the default duration in seconds. */
+ int getDuration() const {
+ return mDuration;
+ }
+
+ /** Returns the preferred file format. */
+ int getFileFormat() const {
+ return mFileFormat;
+ }
+
+ CamcorderProfile(const CamcorderProfile& copy) = default;
+
+ ~CamcorderProfile() = default;
+
+ private:
+ /**
+ * Constructs an empty object with no audio/video profiles.
+ */
+ CamcorderProfile()
+ : mCameraId(0),
+ mFileFormat(OUTPUT_FORMAT_THREE_GPP),
+ mQuality(CAMCORDER_QUALITY_HIGH),
+ mDuration(0) {}
+
+ int mCameraId;
+ output_format mFileFormat;
+ camcorder_quality mQuality;
+ int mDuration;
+ std::vector<VideoCodec> mVideoCodecs;
+ std::vector<AudioCodec> mAudioCodecs;
+ friend class MediaProfiles;
+ };
+
+ /**
+ * Returns the CamcorderProfile object for the given camera at
+ * the given quality level, or null if it does not exist.
+ */
+ const CamcorderProfile *getCamcorderProfile(
+ int cameraId, camcorder_quality quality) const;
+
+ /**
* Returns the value for the given param name for the given camera at
* the given quality level, or -1 if error.
*
@@ -202,84 +374,6 @@
MediaProfiles() {} // Dummy default constructor
~MediaProfiles(); // Don't delete me
- struct VideoCodec {
- VideoCodec(video_encoder codec, int bitRate, int frameWidth, int frameHeight, int frameRate)
- : mCodec(codec),
- mBitRate(bitRate),
- mFrameWidth(frameWidth),
- mFrameHeight(frameHeight),
- mFrameRate(frameRate) {}
-
- VideoCodec(const VideoCodec& copy) {
- mCodec = copy.mCodec;
- mBitRate = copy.mBitRate;
- mFrameWidth = copy.mFrameWidth;
- mFrameHeight = copy.mFrameHeight;
- mFrameRate = copy.mFrameRate;
- }
-
- ~VideoCodec() {}
-
- video_encoder mCodec;
- int mBitRate;
- int mFrameWidth;
- int mFrameHeight;
- int mFrameRate;
- };
-
- struct AudioCodec {
- AudioCodec(audio_encoder codec, int bitRate, int sampleRate, int channels)
- : mCodec(codec),
- mBitRate(bitRate),
- mSampleRate(sampleRate),
- mChannels(channels) {}
-
- AudioCodec(const AudioCodec& copy) {
- mCodec = copy.mCodec;
- mBitRate = copy.mBitRate;
- mSampleRate = copy.mSampleRate;
- mChannels = copy.mChannels;
- }
-
- ~AudioCodec() {}
-
- audio_encoder mCodec;
- int mBitRate;
- int mSampleRate;
- int mChannels;
- };
-
- struct CamcorderProfile {
- CamcorderProfile()
- : mCameraId(0),
- mFileFormat(OUTPUT_FORMAT_THREE_GPP),
- mQuality(CAMCORDER_QUALITY_HIGH),
- mDuration(0),
- mVideoCodec(0),
- mAudioCodec(0) {}
-
- CamcorderProfile(const CamcorderProfile& copy) {
- mCameraId = copy.mCameraId;
- mFileFormat = copy.mFileFormat;
- mQuality = copy.mQuality;
- mDuration = copy.mDuration;
- mVideoCodec = new VideoCodec(*copy.mVideoCodec);
- mAudioCodec = new AudioCodec(*copy.mAudioCodec);
- }
-
- ~CamcorderProfile() {
- delete mVideoCodec;
- delete mAudioCodec;
- }
-
- int mCameraId;
- output_format mFileFormat;
- camcorder_quality mQuality;
- int mDuration;
- VideoCodec *mVideoCodec;
- AudioCodec *mAudioCodec;
- };
-
struct VideoEncoderCap {
// Ugly constructor
VideoEncoderCap(video_encoder codec,
@@ -365,8 +459,8 @@
// from the xml
static MediaProfiles* createInstanceFromXmlFile(const char *xml);
static output_format createEncoderOutputFileFormat(const char **atts);
- static VideoCodec* createVideoCodec(const char **atts, MediaProfiles *profiles);
- static AudioCodec* createAudioCodec(const char **atts, MediaProfiles *profiles);
+ static void createVideoCodec(const char **atts, MediaProfiles *profiles);
+ static void createAudioCodec(const char **atts, MediaProfiles *profiles);
static AudioDecoderCap* createAudioDecoderCap(const char **atts);
static VideoDecoderCap* createVideoDecoderCap(const char **atts);
static VideoEncoderCap* createVideoEncoderCap(const char **atts);
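As a usage illustration (not part of the patch), the new value-type codec structs and accessors declared above could be consumed roughly as follows; error handling is minimal and the camera id is a placeholder.

    // Sketch of the new accessor: returns nullptr when no matching profile
    // exists, and the returned codec pointers are only valid while the
    // MediaProfiles instance lives.
    const MediaProfiles::CamcorderProfile *profile =
            MediaProfiles::getInstance()->getCamcorderProfile(
                    0 /* cameraId */, CAMCORDER_QUALITY_HIGH);
    if (profile != nullptr) {
        for (const MediaProfiles::VideoCodec *vc : profile->getVideoCodecs()) {
            ALOGV("video codec %d: %dx%d @%dfps %dbps",
                    vc->getCodec(), vc->getFrameWidth(), vc->getFrameHeight(),
                    vc->getFrameRate(), vc->getBitrate());
        }
        for (const MediaProfiles::AudioCodec *ac : profile->getAudioCodecs()) {
            ALOGV("audio codec %d: %dHz %dch %dbps",
                    ac->getCodec(), ac->getSampleRate(), ac->getChannels(),
                    ac->getBitrate());
        }
    }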
diff --git a/media/libmediaformatshaper/CodecProperties.cpp b/media/libmediaformatshaper/CodecProperties.cpp
index b118af6..c57b693 100644
--- a/media/libmediaformatshaper/CodecProperties.cpp
+++ b/media/libmediaformatshaper/CodecProperties.cpp
@@ -53,13 +53,6 @@
mMinimumQuality = vmaf;
}
-int CodecProperties::targetQpMax() {
- return mTargetQpMax;
-}
-void CodecProperties::setTargetQpMax(int qpMax) {
- mTargetQpMax = qpMax;
-}
-
void CodecProperties::setMissingQpBoost(double boost) {
mMissingQpBoost = boost;
}
@@ -118,6 +111,11 @@
setTargetQpMax(iValue);
legal = true;
}
+ } else if (!strncmp(key.c_str(), "vq-target-qpmax-", strlen("vq-target-qpmax-"))) {
+ std::string resolution = key.substr(strlen("vq-target-qpmax-"));
+ if (qpMaxPoint(resolution, value)) {
+ legal = true;
+ }
} else if (!strcmp(key.c_str(), "vq-target-bpp")) {
const char *p = value.c_str();
char *q;
@@ -292,6 +290,131 @@
return mBpp;
}
+bool CodecProperties::qpMaxPoint(std::string resolution, std::string value) {
+
+ int32_t width = 0;
+ int32_t height = 0;
+ int qpMax = INT32_MAX;
+
+ // resolution is "WxH", "W*H" or a standard name like "720p"
+ if (resolution == "1080p") {
+ width = 1080; height = 1920;
+ } else if (resolution == "720p") {
+ width = 720; height = 1280;
+ } else if (resolution == "540p") {
+ width = 540; height = 960;
+ } else if (resolution == "480p") {
+ width = 480; height = 854;
+ } else {
+ size_t sep = resolution.find('x');
+ if (sep == std::string::npos) {
+ sep = resolution.find('*');
+ }
+ if (sep == std::string::npos) {
+ ALOGW("unable to parse resolution: '%s'", resolution.c_str());
+ return false;
+ }
+ std::string w = resolution.substr(0, sep);
+ std::string h = resolution.substr(sep+1);
+
+ char *q;
+ const char *p = w.c_str();
+ width = strtol(p, &q, 0);
+ if (q == p) {
+ width = -1;
+ }
+ p = h.c_str();
+ height = strtol(p, &q, 0);
+ if (q == p) {
+ height = -1;
+ }
+ if (width <= 0 || height <= 0 || width > DIMENSION_LIMIT || height > DIMENSION_LIMIT) {
+ ALOGW("unparseable: width, height '%s'", resolution.c_str());
+ return false;
+ }
+ }
+
+ const char *p = value.c_str();
+ char *q;
+ qpMax = strtol(p, &q, 0);
+ if (q == p) {
+ ALOGW("unparseable qpmax '%s'", value.c_str());
+ return false;
+ }
+
+ // convert to our internal 'unspecified' notation
+ if (qpMax == -1)
+ qpMax = INT32_MAX;
+
+ struct qpmax_point *point = (struct qpmax_point*) malloc(sizeof(*point));
+ if (point == nullptr) {
+ ALOGW("unable to allocate memory for qpmax point");
+ return false;
+ }
+
+ point->pixels = width * height;
+ point->width = width;
+ point->height = height;
+ point->qpMax = qpMax;
+
+ if (mQpMaxPoints == nullptr) {
+ point->next = nullptr;
+ mQpMaxPoints = point;
+ } else if (point->pixels < mQpMaxPoints->pixels) {
+ // at the front
+ point->next = mQpMaxPoints;
+ mQpMaxPoints = point;
+ } else {
+ struct qpmax_point *after = mQpMaxPoints;
+ while (after->next) {
+ if (point->pixels > after->next->pixels) {
+ after = after->next;
+ continue;
+ }
+
+ // insert before after->next
+ point->next = after->next;
+ after->next = point;
+ break;
+ }
+ if (after->next == nullptr) {
+ // hasn't gone in yet
+ point->next = nullptr;
+ after->next = point;
+ }
+ }
+
+ return true;
+}
+
+int CodecProperties::targetQpMax(int32_t width, int32_t height) {
+ // look in the per-resolution list
+
+ int32_t pixels = width * height;
+
+ if (mQpMaxPoints) {
+ struct qpmax_point *point = mQpMaxPoints;
+ while (point && point->pixels < pixels) {
+ point = point->next;
+ }
+ if (point) {
+ ALOGV("targetQpMax(w=%d,h=%d) returns %d from qpmax_point w=%d h=%d",
+ width, height, point->qpMax, point->width, point->height);
+ return point->qpMax;
+ }
+ }
+
+ ALOGV("defaulting to %d qpmax", mTargetQpMax);
+ return mTargetQpMax;
+}
+
+void CodecProperties::setTargetQpMax(int qpMax) {
+ // convert to our internal 'unspecified' notation
+ if (qpMax == -1)
+ qpMax = INT32_MAX;
+ mTargetQpMax = qpMax;
+}
+
std::string CodecProperties::getMapping(std::string key, std::string kind) {
ALOGV("getMapping(key %s, kind %s )", key.c_str(), kind.c_str());
//play with mMappings
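To make the lookup rule explicit, here is a simplified, self-contained restatement (illustration only, not the patch code) of how targetQpMax() above picks a per-resolution bound: points are kept ordered by pixel count and the first point at or above the requested resolution wins, with the codec-wide default as the fallback.

    // Simplified model of the qpmax_point list above, using a map keyed by
    // pixel count instead of a hand-rolled sorted singly linked list.
    #include <cstdint>
    #include <map>

    int32_t pickQpMax(const std::map<int32_t, int32_t> &qpMaxByPixels,
                      int32_t width, int32_t height, int32_t defaultQpMax) {
        // "next largest" point: the smallest configured resolution that is
        // at least as large (in pixels) as the requested one.
        auto it = qpMaxByPixels.lower_bound(width * height);
        return it != qpMaxByPixels.end() ? it->second : defaultQpMax;
    }

With the seeded AVC values later in this patch, a 1280x720 request (921600 pixels) lands exactly on the 720p point and returns 43, while a resolution above 1080p falls past all points and returns the default, which the seeding sets to -1 (stored internally as INT32_MAX, i.e. unspecified).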
diff --git a/media/libmediaformatshaper/CodecProperties.h b/media/libmediaformatshaper/CodecProperties.h
index 98ab99e..196a40f 100644
--- a/media/libmediaformatshaper/CodecProperties.h
+++ b/media/libmediaformatshaper/CodecProperties.h
@@ -69,7 +69,7 @@
// qp max bound used to compensate when SupportedMinimumQuality == 0
// 0 == let a system default handle it
void setTargetQpMax(int qpmax);
- int targetQpMax();
+ int targetQpMax(int32_t width, int32_t height);
// target bits-per-pixel (per second) for encoding operations.
// This is used to calculate a minimum bitrate for any particular resolution.
@@ -123,6 +123,19 @@
struct bpp_point *mBppPoints = nullptr;
bool bppPoint(std::string resolution, std::string value);
+ // same thing for qpmax -- allow different qpmax values based on
+ // resolution, similar to codec 'performance points';
+ // uses the 'next largest' (by pixel count) point as the qpmax bound
+ // for a given encode resolution
+ struct qpmax_point {
+ struct qpmax_point *next;
+ int32_t pixels;
+ int32_t width, height;
+ int qpMax;
+ };
+ struct qpmax_point *mQpMaxPoints = nullptr;
+ bool qpMaxPoint(std::string resolution, std::string value);
+
std::mutex mMappingLock;
// XXX figure out why I'm having problems getting compiler to like GUARDED_BY
std::map<std::string, std::string> mMappings /*GUARDED_BY(mMappingLock)*/ ;
diff --git a/media/libmediaformatshaper/CodecSeeding.cpp b/media/libmediaformatshaper/CodecSeeding.cpp
index 292b6df..2f2e29d 100644
--- a/media/libmediaformatshaper/CodecSeeding.cpp
+++ b/media/libmediaformatshaper/CodecSeeding.cpp
@@ -49,28 +49,39 @@
*/
static preloadTuning_t featuresAvc[] = {
+ {true, "vq-target-bpp", "0"},
{true, "vq-target-bpp-1080p", "1.90"},
{true, "vq-target-bpp-720p", "2.25"},
{true, "vq-target-bpp-540p", "2.65"},
{true, "vq-target-bpp-480p", "3.00"},
- {true, "vq-target-qpmax", "40"},
+ {true, "vq-target-qpmax", "-1"},
+ {true, "vq-target-qpmax-1080p", "45"},
+ {true, "vq-target-qpmax-720p", "43"},
+ {true, "vq-target-qpmax-540p", "42"},
+ {true, "vq-target-qpmax-480p", "38"},
{true, "vq-bitrate-phaseout", "1.75"},
{true, "vq-boost-missing-qp", "0.20"},
{true, nullptr, 0}
};
static preloadTuning_t featuresHevc[] = {
+ {true, "vq-target-bpp", "0"},
{true, "vq-target-bpp-1080p", "1.50"},
{true, "vq-target-bpp-720p", "1.80"},
{true, "vq-target-bpp-540p", "2.10"},
- {true, "vq-target-qpmax", "40"},
+ {true, "vq-target-qpmax", "-1"},
+ {true, "vq-target-qpmax-1080p", "45"},
+ {true, "vq-target-qpmax-720p", "43"},
+ {true, "vq-target-qpmax-540p", "42"},
+ {true, "vq-target-qpmax-480p", "39"},
{true, "vq-bitrate-phaseout", "1.75"},
{true, "vq-boost-missing-qp", "0.20"},
{true, nullptr, 0}
};
static preloadTuning_t featuresGenericVideo[] = {
- {true, "vq-target-bpp", "2.00"},
+ // 0 == off
+ {true, "vq-target-bpp", "0"},
{true, nullptr, 0}
};
diff --git a/media/libmediaformatshaper/VQApply.cpp b/media/libmediaformatshaper/VQApply.cpp
index 28ce43f..585ec6c 100644
--- a/media/libmediaformatshaper/VQApply.cpp
+++ b/media/libmediaformatshaper/VQApply.cpp
@@ -115,7 +115,7 @@
bool qpPresent = hasQpMax(inFormat);
// calculate a target QP value
- int32_t qpmax = codec->targetQpMax();
+ int32_t qpmax = codec->targetQpMax(width, height);
if (!qpPresent) {
// user didn't, so shaper wins
if (qpmax != INT32_MAX) {
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 66108df..a09a673 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -166,6 +166,8 @@
// "powerSaving"
#define AMEDIAMETRICS_PROP_PERFORMANCEMODEACTUAL "performanceModeActual" // string
#define AMEDIAMETRICS_PROP_FRAMESTRANSFERRED "framesTransferred" // int64_t, transferred frames
+// string value, either "exclusive" or "shared": the sharing mode actually selected by the server
+#define AMEDIAMETRICS_PROP_SHARINGMODEACTUAL "sharingModeActual"
// Timing values: millisecond values are suffixed with MS and the type is double
// nanosecond values are suffixed with NS and the type is int64.
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index b485b1e..ce642f3 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -79,6 +79,7 @@
// NB: these are matched with public Java API constants defined
// in frameworks/base/media/java/android/media/MediaRecorder.java
// These must be kept synchronized with the constants there.
+static const char *kRecorderLogSessionId = "android.media.mediarecorder.log-session-id";
static const char *kRecorderAudioBitrate = "android.media.mediarecorder.audio-bitrate";
static const char *kRecorderAudioChannels = "android.media.mediarecorder.audio-channels";
static const char *kRecorderAudioSampleRate = "android.media.mediarecorder.audio-samplerate";
@@ -160,6 +161,8 @@
// know is the app which requested the recording.
mMetricsItem->setUid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mClient.uid)));
+ mMetricsItem->setCString(kRecorderLogSessionId, mLogSessionId.c_str());
+
// populate the values from the raw fields.
// TBD mOutputFormat = OUTPUT_FORMAT_THREE_GPP;
@@ -914,6 +917,14 @@
return ret;
}
+status_t StagefrightRecorder::setLogSessionId(const String8 &log_session_id) {
+ ALOGV("setLogSessionId: %s", log_session_id.string());
+
+ // TODO: validate that log_session_id is a 32-character hexadecimal string.
+ mLogSessionId.setTo(log_session_id.string());
+ return OK;
+}
+
status_t StagefrightRecorder::setParameter(
const String8 &key, const String8 &value) {
ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@@ -1082,6 +1093,8 @@
if (safe_strtoi64(value.string(), &networkHandle)) {
return setSocketNetwork(networkHandle);
}
+ } else if (key == "log-session-id") {
+ return setLogSessionId(value);
} else {
ALOGE("setParameter: failed to find key %s", key.string());
}
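For illustration, a hedged sketch of how the new key could be fed through the generic parameter path shown above; the 32-character id is a made-up placeholder and `recorder` is assumed to be an already-constructed StagefrightRecorder (or a MediaRecorder client whose parameter string is forwarded to it).

    // Sketch only: "log-session-id" is routed by setParameter() above into
    // setLogSessionId(), which stores it for the metrics item.
    const String8 kSessionId("0123456789abcdef0123456789abcdef");  // placeholder value
    status_t err = recorder->setParameter(String8("log-session-id"), kSessionId);
    if (err != OK) {
        ALOGW("failed to set log-session-id: %d", err);
    }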
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 278f348..59a080e 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -48,6 +48,7 @@
explicit StagefrightRecorder(const Identity& clientIdentity);
virtual ~StagefrightRecorder();
virtual status_t init();
+ virtual status_t setLogSessionId(const String8 &id);
virtual status_t setAudioSource(audio_source_t as);
status_t setPrivacySensitive(bool privacySensitive) override;
status_t isPrivacySensitive(bool *privacySensitive) const override;
@@ -110,6 +111,7 @@
void flushAndResetMetrics(bool reinitialize);
void updateMetrics();
+ AString mLogSessionId;
audio_source_t mAudioSource;
privacy_sensitive_t mPrivacySensitive;
video_source mVideoSource;
diff --git a/media/libmediatranscoding/TranscoderWrapper.cpp b/media/libmediatranscoding/TranscoderWrapper.cpp
index 5e4c671..60b780f 100644
--- a/media/libmediatranscoding/TranscoderWrapper.cpp
+++ b/media/libmediatranscoding/TranscoderWrapper.cpp
@@ -478,7 +478,7 @@
media_status_t err =
setupTranscoder(clientId, sessionId, request, callingUid, clientCb, &reason);
if (err != AMEDIA_OK) {
- ALOGI("%s: failed to setup transcoder", __FUNCTION__);
+ ALOGE("%s: failed to setup transcoder", __FUNCTION__);
logSessionEnded(reason, err);
return err;
}
@@ -613,7 +613,7 @@
Event event = *mQueue.begin();
mQueue.pop_front();
- ALOGD("%s: %s", __FUNCTION__, toString(event).c_str());
+ ALOGV("%s: %s", __FUNCTION__, toString(event).c_str());
if (event.type == Event::Abandon) {
break;
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index f0383b5..a5c3ba6 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -885,4 +885,15 @@
return ERROR_UNSUPPORTED;
}
+status_t NuMediaExtractor::setLogSessionId(const String8& logSessionId) {
+ if (mImpl == nullptr) {
+ return ERROR_UNSUPPORTED;
+ }
+ status_t status = mImpl->setLogSessionId(logSessionId);
+ if (status != OK) {
+ ALOGW("Failed to set log session id: %d.", status);
+ }
+ return status;
+}
+
} // namespace android
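A brief client-side sketch (illustration only; construction and data-source setup are assumed to have happened already) of the renamed call; the id string is a placeholder.

    // Sketch only: setLogSessionId() forwards to the underlying extractor
    // (mImpl) once a data source has been set.
    status_t err = extractor->setLogSessionId(
            String8("0123456789abcdef0123456789abcdef"));  // placeholder id
    if (err != OK) {
        // e.g. ERROR_UNSUPPORTED when no data source has been set yet
        ALOGW("setLogSessionId failed: %d", err);
    }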
diff --git a/media/libstagefright/RemoteMediaExtractor.cpp b/media/libstagefright/RemoteMediaExtractor.cpp
index 381eb1a..baa2ca1 100644
--- a/media/libstagefright/RemoteMediaExtractor.cpp
+++ b/media/libstagefright/RemoteMediaExtractor.cpp
@@ -39,7 +39,11 @@
static const char *kExtractorFormat = "android.media.mediaextractor.fmt";
static const char *kExtractorMime = "android.media.mediaextractor.mime";
static const char *kExtractorTracks = "android.media.mediaextractor.ntrk";
+
+// The following are not available in frameworks/base/media/java/android/media/MediaExtractor.java
+// because they are not applicable or useful to that API.
static const char *kExtractorEntryPoint = "android.media.mediaextractor.entry";
+static const char *kExtractorLogSessionId = "android.media.mediaextractor.logSessionId";
static const char *kEntryPointSdk = "sdk";
static const char *kEntryPointWithJvm = "ndk-with-jvm";
@@ -174,6 +178,11 @@
return OK;
}
+status_t RemoteMediaExtractor::setLogSessionId(const String8& logSessionId) {
+ mMetricsItem->setCString(kExtractorLogSessionId, logSessionId.c_str());
+ return OK;
+}
+
////////////////////////////////////////////////////////////////////////////////
// static
diff --git a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
index 6aa7c0f..d17a480 100644
--- a/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/NuMediaExtractor.h
@@ -100,7 +100,7 @@
status_t getAudioPresentations(size_t trackIdx, AudioPresentationCollection *presentations);
- status_t setPlaybackId(const String8& playbackId);
+ status_t setLogSessionId(const String8& logSessionId);
const char* getName() const;
diff --git a/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
index 25125f2..9ad90d7 100644
--- a/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
+++ b/media/libstagefright/include/media/stagefright/RemoteMediaExtractor.h
@@ -43,6 +43,7 @@
virtual status_t setMediaCas(const HInterfaceToken &casToken);
virtual String8 name();
virtual status_t setEntryPoint(EntryPoint entryPoint);
+ virtual status_t setLogSessionId(const String8& logSessionId);
private:
MediaExtractor *mExtractor;
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index c19fe38..e3931b1 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -73,6 +73,8 @@
return AppOpsManager::OP_RECORD_AUDIO_HOTWORD;
case AUDIO_SOURCE_REMOTE_SUBMIX:
return AppOpsManager::OP_RECORD_AUDIO_OUTPUT;
+ case AUDIO_SOURCE_VOICE_DOWNLINK:
+ return AppOpsManager::OP_RECORD_INCOMING_PHONE_AUDIO;
case AUDIO_SOURCE_DEFAULT:
default:
return AppOpsManager::OP_RECORD_AUDIO;
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index fe86264..11ec993 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -157,6 +157,7 @@
"log_session_id",
"sample_rate",
"content_type",
+ "sharing_requested",
};
/**
@@ -973,10 +974,11 @@
const auto perfModeActual =
types::lookup<types::AAUDIO_PERFORMANCE_MODE, int32_t>(perfModeActualStr);
- std::string sharingModeStr;
+ std::string sharingModeActualStr;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
- key, AMEDIAMETRICS_PROP_SHARINGMODE, &sharingModeStr);
- const auto sharingMode = types::lookup<types::AAUDIO_SHARING_MODE, int32_t>(sharingModeStr);
+ key, AMEDIAMETRICS_PROP_SHARINGMODEACTUAL, &sharingModeActualStr);
+ const auto sharingModeActual =
+ types::lookup<types::AAUDIO_SHARING_MODE, int32_t>(sharingModeActualStr);
int32_t xrunCount = -1;
mAudioAnalytics.mAnalyticsState->timeMachine().get(
@@ -1008,6 +1010,12 @@
key, AMEDIAMETRICS_PROP_CONTENTTYPE, &contentTypeStr);
const auto contentType = types::lookup<types::CONTENT_TYPE, int32_t>(contentTypeStr);
+ std::string sharingModeRequestedStr;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_SHARINGMODE, &sharingModeRequestedStr);
+ const auto sharingModeRequested =
+ types::lookup<types::AAUDIO_SHARING_MODE, int32_t>(sharingModeRequestedStr);
+
LOG(LOG_LEVEL) << "key:" << key
<< " path:" << path
<< " direction:" << direction << "(" << directionStr << ")"
@@ -1018,14 +1026,16 @@
<< " total_frames_transferred:" << totalFramesTransferred
<< " perf_mode_requested:" << perfModeRequested << "(" << perfModeRequestedStr << ")"
<< " perf_mode_actual:" << perfModeActual << "(" << perfModeActualStr << ")"
- << " sharing:" << sharingMode << "(" << sharingModeStr << ")"
+ << " sharing:" << sharingModeActual << "(" << sharingModeActualStr << ")"
<< " xrun_count:" << xrunCount
<< " device_type:" << serializedDeviceTypes
<< " format_app:" << formatApp << "(" << formatAppStr << ")"
<< " format_device: " << formatDevice << "(" << formatDeviceStr << ")"
<< " log_session_id: " << logSessionId
<< " sample_rate: " << sampleRate
- << " content_type: " << contentType << "(" << contentTypeStr << ")";
+ << " content_type: " << contentType << "(" << contentTypeStr << ")"
+ << " sharing_requested:" << sharingModeRequested
+ << "(" << sharingModeRequestedStr << ")";
if (mAudioAnalytics.mDeliverStatistics) {
android::util::BytesField bf_serialized(
@@ -1041,7 +1051,7 @@
, totalFramesTransferred
, perfModeRequested
, perfModeActual
- , sharingMode
+ , sharingModeActual
, xrunCount
, bf_serialized
, formatApp
@@ -1049,6 +1059,7 @@
, logSessionId.c_str()
, sampleRate
, contentType
+ , sharingModeRequested
);
std::stringstream ss;
ss << "result:" << result;
@@ -1063,7 +1074,7 @@
, totalFramesTransferred
, perfModeRequested
, perfModeActual
- , sharingMode
+ , sharingModeActual
, xrunCount
, serializedDeviceTypes.c_str()
, formatApp
@@ -1071,6 +1082,7 @@
, logSessionId.c_str()
, sampleRate
, contentType
+ , sharingModeRequested
);
ss << " " << fieldsStr;
std::string str = ss.str();
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index e228f07..281a4ce 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -86,6 +86,12 @@
metrics_proto.set_entry_point(entry_point);
}
+ // android.media.mediaextractor.logSessionId string
+ std::string log_session_id;
+ if (item->getString("android.media.mediaextractor.logSessionId", &log_session_id)) {
+ metrics_proto.set_log_session_id(log_session_id);
+ }
+
std::string serialized;
if (!metrics_proto.SerializeToString(&serialized)) {
ALOGE("Failed to serialize extractor metrics");
@@ -110,10 +116,7 @@
<< " mime:" << mime
<< " tracks:" << tracks
<< " entry_point:" << entry_point_string << "(" << entry_point << ")"
-
- // TODO: Add MediaExtractor log_session_id
- // << " log_session_id:" << log_session_id
-
+ << " log_session_id:" << log_session_id
<< " }";
statsdLog->log(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
return true;
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 23b884f..6edad7c 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -32,7 +32,7 @@
#include <statslog.h>
#include "MediaMetricsService.h"
-#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
+#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
#include "iface_statsd.h"
namespace android {
@@ -50,11 +50,16 @@
// the rest into our own proto
//
- ::android::stats::mediametrics::RecorderData metrics_proto;
+ ::android::stats::mediametrics_message::RecorderData metrics_proto;
// flesh out the protobuf we'll hand off with our data
//
+ // string kRecorderLogSessionId = "android.media.mediarecorder.log-session-id";
+ std::string log_session_id;
+ if (item->getString("android.media.mediarecorder.log_session_id", &log_session_id)) {
+ metrics_proto.set_log_session_id(std::move(log_session_id));
+ }
// string kRecorderAudioMime = "android.media.mediarecorder.audio.mime";
std::string audio_mime;
if (item->getString("android.media.mediarecorder.audio.mime", &audio_mime)) {
diff --git a/tools/mainline_hook_partial.sh b/tools/mainline_hook_partial.sh
index 74c8ebd..bd82315 100755
--- a/tools/mainline_hook_partial.sh
+++ b/tools/mainline_hook_partial.sh
Binary files differ
diff --git a/tools/mainline_hook_project.sh b/tools/mainline_hook_project.sh
index e432000..cb5fc44 100755
--- a/tools/mainline_hook_project.sh
+++ b/tools/mainline_hook_project.sh
@@ -17,7 +17,7 @@
# tunables
DEV_BRANCH=master
-MAINLINE_BRANCH=mainline-prod
+MAINLINE_BRANCH=sc-mainline-prod
###
RED=$(tput setaf 1)