Merge "CCodec: calculate max dequeued buffer count"
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index f7863a5..9aafcd3 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -289,7 +289,7 @@
}
camera_status_t
-CameraDevice::allocateCaptureRequest(
+CameraDevice::allocateCaptureRequestLocked(
const ACaptureRequest* request, /*out*/sp<CaptureRequest> &outReq) {
sp<CaptureRequest> req(new CaptureRequest());
req->mCaptureRequest.physicalCameraSettings.resize(1);
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index c63b97f..d571585 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -169,7 +169,12 @@
camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
- camera_status_t allocateCaptureRequest(
+ // Since this writes to ICameraDeviceUser's fmq, clients must take care that:
+ // a) This function is called serially.
+ // b) This function is called in the same order as the corresponding
+ //    ICameraDeviceUser.submitRequestList call; otherwise a capture request
+ //    might end up with the wrong settings metadata associated with it.
+ camera_status_t allocateCaptureRequestLocked(
const ACaptureRequest* request, sp<CaptureRequest>& outReq);
static ACaptureRequest* allocateACaptureRequest(sp<CaptureRequest>& req);
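
For context, the rename to allocateCaptureRequestLocked follows the usual *Locked naming convention: the caller must already hold the device lock, which is what keeps the fmq write made here ordered with the matching submitRequestList call. A minimal sketch of that convention, using hypothetical names rather than the real NDK classes:

    #include <mutex>
    #include <vector>

    // Hypothetical stand-in for CaptureRequest; illustration only.
    struct Request { int settingsId; };

    class Device {
      public:
        // Public entry point: takes the device lock once and keeps it across both
        // steps, so two batches can never interleave their settings writes.
        int submitRequests(const std::vector<int>& settingsIds) {
            std::lock_guard<std::mutex> lock(mMutex);
            std::vector<Request> batch;
            for (int id : settingsIds) {
                batch.push_back(allocateRequestLocked(id));  // "writes settings to the queue"
            }
            return submitLocked(batch);                      // consumed in the same order
        }

      private:
        // The "Locked" suffix documents that mMutex must already be held.
        Request allocateRequestLocked(int settingsId) { return Request{settingsId}; }
        int submitLocked(const std::vector<Request>& batch) {
            return static_cast<int>(batch.size());
        }

        std::mutex mMutex;
    };

    int main() {
        Device d;
        return d.submitRequests({1, 2, 3}) == 3 ? 0 : 1;
    }

The point is only the structure: the lock is taken once around both steps, never between them.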
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
index 7d2304e..8bd5a52 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
+++ b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
@@ -73,7 +73,7 @@
requestsV.setCapacity(numRequests);
for (int i = 0; i < numRequests; i++) {
sp<CaptureRequest> req;
- ret = allocateCaptureRequest(requests[i], req);
+ ret = allocateCaptureRequestLocked(requests[i], req);
// We need to call this method since after submitRequestList is called,
// the request metadata queue might have removed the capture request
// metadata. Therefore we simply add the metadata to its wrapper class,
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 04dda8f..4d00d35 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -75,7 +75,7 @@
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::output(0u, 44100))
.withFields({C2F(mSampleRate, value).oneOf({
7350, 8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000
@@ -84,15 +84,15 @@
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::output(0u, 1))
.withFields({C2F(mChannelCount, value).inRange(1, 8)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::input(0u, 64000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::input(0u, 64000))
.withFields({C2F(mBitrate, value).inRange(8000, 960000)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -103,10 +103,10 @@
.build());
addParameter(
- DefineParam(mAacFormat, C2_NAME_STREAM_AAC_FORMAT_SETTING)
- .withDefault(new C2StreamAacFormatInfo::input(0u, C2AacStreamFormatRaw))
+ DefineParam(mAacFormat, C2_PARAMKEY_AAC_PACKAGING)
+ .withDefault(new C2StreamAacFormatInfo::input(0u, C2Config::AAC_PACKAGING_RAW))
.withFields({C2F(mAacFormat, value).oneOf({
- C2AacStreamFormatRaw, C2AacStreamFormatAdts
+ C2Config::AAC_PACKAGING_RAW, C2Config::AAC_PACKAGING_ADTS
})})
.withSetter(Setter<decltype(*mAacFormat)>::StrictValueWithNoDeps)
.build());
@@ -191,7 +191,7 @@
.build());
}
- bool isAdts() const { return mAacFormat->value == C2AacStreamFormatAdts; }
+ bool isAdts() const { return mAacFormat->value == C2Config::AAC_PACKAGING_ADTS; }
static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me) {
(void)mayBlock;
(void)me; // TODO: validate
@@ -205,13 +205,13 @@
int32_t getDrcEffectType() const { return mDrcEffectType->value; }
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
- std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
std::shared_ptr<C2StreamAacFormatInfo::input> mAacFormat;
std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index d1bdf0d..137e775 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -37,29 +37,29 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatAudio))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_AAC))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::input(0u, 44100))
.withFields({C2F(mSampleRate, value).oneOf({
8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000
@@ -68,15 +68,15 @@
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::input(0u, 1))
.withFields({C2F(mChannelCount, value).inRange(1, 6)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::output(0u, 64000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
.withFields({C2F(mBitrate, value).inRange(8000, 960000)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -125,13 +125,13 @@
}
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
- std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
};
@@ -323,8 +323,8 @@
return;
}
- std::unique_ptr<C2StreamCsdInfo::output> csd =
- C2StreamCsdInfo::output::AllocUnique(encInfo.confSize, 0u);
+ std::unique_ptr<C2StreamInitDataInfo::output> csd =
+ C2StreamInitDataInfo::output::AllocUnique(encInfo.confSize, 0u);
if (!csd) {
ALOGE("CSD allocation failed");
mSignalledError = true;
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
index c591e21..edad75a 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
@@ -47,18 +47,18 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
#ifdef AMRNB
MEDIA_MIMETYPE_AUDIO_AMR_NB
#else
@@ -67,13 +67,13 @@
)).build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
#ifdef AMRNB
.withDefault(new C2StreamSampleRateInfo::output(0u, 8000))
.withFields({C2F(mSampleRate, value).equalTo(8000)})
@@ -85,19 +85,19 @@
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::output(0u, 1))
.withFields({C2F(mChannelCount, value).equalTo(1)})
.withSetter((Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps))
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
#ifdef AMRNB
- .withDefault(new C2BitrateTuning::input(0u, 4750))
+ .withDefault(new C2StreamBitrateInfo::input(0u, 4750))
.withFields({C2F(mBitrate, value).inRange(4750, 12200)})
#else
- .withDefault(new C2BitrateTuning::input(0u, 6600))
+ .withDefault(new C2StreamBitrateInfo::input(0u, 6600))
.withFields({C2F(mBitrate, value).inRange(6600, 23850)})
#endif
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
@@ -110,13 +110,13 @@
}
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
- std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
};
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
index 8c03257..3d3aa7d 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
@@ -36,38 +36,38 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
.withConstValue(
- new C2StreamFormatConfig::input(0u, C2FormatAudio))
+ new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
.withConstValue(
- new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_AMR_NB))
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::input(0u, 1))
.withFields({C2F(mChannelCount, value).equalTo(1)})
.withSetter((Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::input(0u, 8000))
.withFields({C2F(mSampleRate, value).equalTo(8000)})
.withSetter(
@@ -75,8 +75,8 @@
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::output(0u, 4750))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 4750))
.withFields({C2F(mBitrate, value).inRange(4750, 12200)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -92,13 +92,13 @@
uint32_t getBitrate() const { return mBitrate->value; }
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
- std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
};
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
index 074493c..379cb32 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
@@ -38,38 +38,38 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
.withConstValue(
- new C2StreamFormatConfig::input(0u, C2FormatAudio))
+ new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
.withConstValue(
- new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_AMR_WB))
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::input(0u, 1))
.withFields({C2F(mChannelCount, value).equalTo(1)})
.withSetter((Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::input(0u, 16000))
.withFields({C2F(mSampleRate, value).equalTo(16000)})
.withSetter(
@@ -77,8 +77,8 @@
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::output(0u, 6600))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 6600))
.withFields({C2F(mBitrate, value).inRange(6600, 23850)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -94,13 +94,13 @@
uint32_t getBitrate() const { return mBitrate->value; }
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
- std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
};
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index 6be1807..4bcc2c6 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -141,7 +141,7 @@
static C2R SizeSetter(bool mayBlock,
const C2P<C2StreamPictureSizeInfo::output>& oldMe,
- C2P<C2VideoSizeStreamInfo::output>& me) {
+ C2P<C2StreamPictureSizeInfo::output>& me) {
(void)mayBlock;
C2R res = C2R::Ok();
if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
@@ -586,7 +586,7 @@
mWidth = img->d_w;
mHeight = img->d_h;
- C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
std::vector<std::unique_ptr<C2SettingResult>> failures;
c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
if (err == C2_OK) {
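
The SizeSetter fix above only corrects the parameter type (the old and the proposed value are both C2StreamPictureSizeInfo::output); the validation pattern is unchanged: check each field of the proposed size against its supported range and fall back to the previous value for any field that is rejected. A simplified sketch of that pattern, with plain structs standing in for the Codec2 C2P/C2R helpers (the bounds are made up for illustration):

    #include <cstdint>
    #include <iostream>

    struct PictureSize { uint32_t width; uint32_t height; };

    struct SettingResult {
        bool widthRejected = false;
        bool heightRejected = false;
        bool ok() const { return !widthRejected && !heightRejected; }
    };

    // Mirrors the shape of SizeSetter: keep the old value for any field that is
    // not supported at all.
    SettingResult sizeSetter(const PictureSize& oldMe, PictureSize& me,
                             uint32_t maxW = 4096, uint32_t maxH = 4096) {
        SettingResult res;
        if (me.width == 0 || me.width > maxW) {
            res.widthRejected = true;
            me.width = oldMe.width;
        }
        if (me.height == 0 || me.height > maxH) {
            res.heightRejected = true;
            me.height = oldMe.height;
        }
        return res;
    }

    int main() {
        PictureSize old{320, 240};
        PictureSize proposed{8192, 1080};  // width out of range, height fine
        SettingResult r = sizeSetter(old, proposed);
        std::cout << proposed.width << "x" << proposed.height          // 320x1080
                  << (r.ok() ? "" : " (width rejected)") << std::endl;
    }
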
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 86cd3d8..9290d74 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -198,7 +198,7 @@
}
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
- C2P<C2VideoSizeStreamInfo::output> &me) {
+ C2P<C2StreamPictureSizeInfo::output> &me) {
(void)mayBlock;
C2R res = C2R::Ok();
if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
@@ -845,7 +845,7 @@
mHeight = s_decode_op.u4_pic_ht;
CHECK_EQ(0u, s_decode_op.u4_output_present);
- C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
std::vector<std::unique_ptr<C2SettingResult>> failures;
c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
if (err == OK) {
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 6ddb9ff..b851908 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -45,36 +45,36 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatVideo))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::GRAPHIC))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_VIDEO_RAW))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_VIDEO_AVC))
.build());
addParameter(
- DefineParam(mUsage, C2_NAME_INPUT_STREAM_USAGE_SETTING)
+ DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
.withConstValue(new C2StreamUsageTuning::input(
0u, (uint64_t)C2MemoryUsage::CPU_READ))
.build());
addParameter(
- DefineParam(mSize, C2_NAME_STREAM_VIDEO_SIZE_SETTING)
- .withDefault(new C2VideoSizeStreamTuning::input(0u, 320, 240))
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
.withFields({
C2F(mSize, width).inRange(2, 2560, 2),
C2F(mSize, height).inRange(2, 2560, 2),
@@ -83,7 +83,7 @@
.build());
addParameter(
- DefineParam(mFrameRate, C2_NAME_STREAM_FRAME_RATE_SETTING)
+ DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
.withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
// TODO: More restriction?
.withFields({C2F(mFrameRate, value).greaterThan(0.)})
@@ -91,8 +91,8 @@
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::output(0u, 64000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
.withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
.withSetter(BitrateSetter)
.build());
@@ -182,9 +182,9 @@
static C2R ProfileLevelSetter(
bool mayBlock,
C2P<C2StreamProfileLevelInfo::output> &me,
- const C2P<C2VideoSizeStreamTuning::input> &size,
+ const C2P<C2StreamPictureSizeInfo::input> &size,
const C2P<C2StreamFrameRateInfo::output> &frameRate,
- const C2P<C2BitrateTuning::output> &bitrate) {
+ const C2P<C2StreamBitrateInfo::output> &bitrate) {
(void)mayBlock;
if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
me.set().profile = PROFILE_AVC_CONSTRAINED_BASELINE;
@@ -325,16 +325,16 @@
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
- std::shared_ptr<C2VideoSizeStreamTuning::input> mSize;
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
- std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
};
@@ -1332,8 +1332,8 @@
mSpsPpsHeaderReceived = true;
- std::unique_ptr<C2StreamCsdInfo::output> csd =
- C2StreamCsdInfo::output::AllocUnique(s_encode_op.s_out_buf.u4_bytes, 0u);
+ std::unique_ptr<C2StreamInitDataInfo::output> csd =
+ C2StreamInitDataInfo::output::AllocUnique(s_encode_op.s_out_buf.u4_bytes, 0u);
if (!csd) {
ALOGE("CSD allocation failed");
mSignalledError = true;
@@ -1492,7 +1492,7 @@
if (IV_IDR_FRAME == s_encode_op.u4_encoded_frame_type) {
ALOGV("IDR frame produced");
buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
- 0u /* stream id */, C2PictureTypeKeyFrame));
+ 0u /* stream id */, C2Config::SYNC_FRAME));
}
work->worklets.front()->output.buffers.push_back(buffer);
}
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index b158f8f..44f1fe0 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -473,7 +473,7 @@
if (!mOutputBlockPool) {
c2_status_t err = [this] {
// TODO: don't use query_vb
- C2StreamFormatConfig::output outputFormat(0u);
+ C2StreamBufferTypeSetting::output outputFormat(0u);
std::vector<std::unique_ptr<C2Param>> params;
c2_status_t err = intf()->query_vb(
{ &outputFormat },
@@ -485,7 +485,7 @@
return err;
}
C2BlockPool::local_id_t poolId =
- outputFormat.value == C2FormatVideo
+ outputFormat.value == C2BufferData::GRAPHIC
? C2BlockPool::BASIC_GRAPHIC
: C2BlockPool::BASIC_LINEAR;
if (params.size()) {
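
With the renamed types, the block-pool choice in SimpleC2Component keeps the same shape: query the output stream buffer type and pick the graphic pool only when it reports GRAPHIC, otherwise the linear pool. A minimal sketch of that decision, with hypothetical enums standing in for C2BufferData and C2BlockPool:

    #include <iostream>

    enum class BufferType { LINEAR, GRAPHIC };
    enum class PoolId { BASIC_LINEAR, BASIC_GRAPHIC };

    // Mirrors the ternary above: GRAPHIC output -> graphic pool, anything else
    // -> linear pool.
    PoolId choosePool(BufferType outputType) {
        return outputType == BufferType::GRAPHIC ? PoolId::BASIC_GRAPHIC
                                                 : PoolId::BASIC_LINEAR;
    }

    int main() {
        std::cout << (choosePool(BufferType::GRAPHIC) == PoolId::BASIC_GRAPHIC)  // 1
                  << (choosePool(BufferType::LINEAR) == PoolId::BASIC_LINEAR)    // 1
                  << std::endl;
    }
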
diff --git a/media/codec2/components/flac/C2SoftFlacDec.cpp b/media/codec2/components/flac/C2SoftFlacDec.cpp
index 86b16e8..10b14ce 100644
--- a/media/codec2/components/flac/C2SoftFlacDec.cpp
+++ b/media/codec2/components/flac/C2SoftFlacDec.cpp
@@ -37,44 +37,44 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_FLAC))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::output(0u, 44100))
.withFields({C2F(mSampleRate, value).inRange(1, 655350)})
.withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::output(0u, 1))
.withFields({C2F(mChannelCount, value).inRange(1, 8)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::input(0u, 768000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::input(0u, 768000))
.withFields({C2F(mBitrate, value).inRange(1, 21000000)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -99,13 +99,13 @@
int32_t getPcmEncodingInfo() const { return mPcmEncodingInfo->value; }
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
- std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
std::shared_ptr<C2StreamPcmEncodingInfo::output> mPcmEncodingInfo;
};
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 4ea35c2..0ce2543 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -34,38 +34,38 @@
: C2InterfaceHelper(helper) {
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatAudio))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_FLAC))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::input(0u, 44100))
.withFields({C2F(mSampleRate, value).inRange(1, 655350)})
.withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::input(0u, 1))
.withFields({C2F(mChannelCount, value).inRange(1, 2)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::output(0u, 768000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 768000))
.withFields({C2F(mBitrate, value).inRange(1, 21000000)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -92,13 +92,13 @@
int32_t getPcmEncodingInfo() const { return mPcmEncodingInfo->value; }
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
- std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
std::shared_ptr<C2StreamPcmEncodingInfo::input> mPcmEncodingInfo;
};
@@ -223,8 +223,8 @@
}
if (!mWroteHeader) {
- std::unique_ptr<C2StreamCsdInfo::output> csd =
- C2StreamCsdInfo::output::AllocUnique(mHeaderOffset, 0u);
+ std::unique_ptr<C2StreamInitDataInfo::output> csd =
+ C2StreamInitDataInfo::output::AllocUnique(mHeaderOffset, 0u);
if (!csd) {
ALOGE("CSD allocation failed");
mSignalledError = true;
diff --git a/media/codec2/components/g711/C2SoftG711Dec.cpp b/media/codec2/components/g711/C2SoftG711Dec.cpp
index 1c71d45..504ca78 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.cpp
+++ b/media/codec2/components/g711/C2SoftG711Dec.cpp
@@ -41,18 +41,18 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
#ifdef ALAW
MEDIA_MIMETYPE_AUDIO_G711_ALAW
#else
@@ -61,28 +61,28 @@
)).build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::output(0u, 8000))
.withFields({C2F(mSampleRate, value).inRange(8000, 48000)})
.withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::output(0u, 1))
.withFields({C2F(mChannelCount, value).equalTo(1)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::input(0u, 64000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::input(0u, 64000))
.withFields({C2F(mBitrate, value).equalTo(64000)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -94,13 +94,13 @@
}
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
- std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
};
diff --git a/media/codec2/components/gsm/C2SoftGsmDec.cpp b/media/codec2/components/gsm/C2SoftGsmDec.cpp
index 7101c79..69d4885 100644
--- a/media/codec2/components/gsm/C2SoftGsmDec.cpp
+++ b/media/codec2/components/gsm/C2SoftGsmDec.cpp
@@ -36,44 +36,44 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_MSGSM))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::output(0u, 8000))
.withFields({C2F(mSampleRate, value).equalTo(8000)})
.withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::output(0u, 1))
.withFields({C2F(mChannelCount, value).equalTo(1)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::input(0u, 13200))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::input(0u, 13200))
.withFields({C2F(mBitrate, value).equalTo(13200)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -85,13 +85,13 @@
}
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
- std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
};
diff --git a/media/codec2/components/hevc/Android.bp b/media/codec2/components/hevc/Android.bp
index 2a045e1..369bd78 100644
--- a/media/codec2/components/hevc/Android.bp
+++ b/media/codec2/components/hevc/Android.bp
@@ -9,8 +9,17 @@
static_libs: ["libhevcdec"],
- include_dirs: [
- "external/libhevc/decoder",
- "external/libhevc/common",
+}
+
+cc_library_shared {
+ name: "libcodec2_soft_hevcenc",
+ defaults: [
+ "libcodec2_soft-defaults",
+ "libcodec2_soft_sanitize_signed-defaults",
],
+
+ srcs: ["C2SoftHevcEnc.cpp"],
+
+ static_libs: ["libhevcenc"],
+
}
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index f0d7d88..bb8dda0 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -192,7 +192,7 @@
}
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
- C2P<C2VideoSizeStreamInfo::output> &me) {
+ C2P<C2StreamPictureSizeInfo::output> &me) {
(void)mayBlock;
C2R res = C2R::Ok();
if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
@@ -839,7 +839,7 @@
mHeight = s_decode_op.u4_pic_ht;
CHECK_EQ(0u, s_decode_op.u4_output_present);
- C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
std::vector<std::unique_ptr<C2SettingResult>> failures;
c2_status_t err =
mIntf->config({&size}, C2_MAY_BLOCK, &failures);
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
new file mode 100644
index 0000000..2c0a7a0
--- /dev/null
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -0,0 +1,802 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftHevcEnc"
+#include <log/log.h>
+
+#include <media/hardware/VideoAPI.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <SimpleC2Interface.h>
+#include <util/C2InterfaceHelper.h>
+
+#include "ihevc_typedefs.h"
+#include "itt_video_api.h"
+#include "ihevce_api.h"
+#include "ihevce_plugin.h"
+#include "C2SoftHevcEnc.h"
+
+namespace android {
+
+class C2SoftHevcEnc::IntfImpl : public C2InterfaceHelper {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : C2InterfaceHelper(helper) {
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(
+ new C2StreamBufferTypeSetting::input(0u, C2BufferData::GRAPHIC))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(
+ new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
+ MEDIA_MIMETYPE_VIDEO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
+ MEDIA_MIMETYPE_VIDEO_HEVC))
+ .build());
+
+ addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+ .withConstValue(new C2StreamUsageTuning::input(
+ 0u, (uint64_t)C2MemoryUsage::CPU_READ))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(320, 1920, 2),
+ C2F(mSize, height).inRange(128, 1088, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
+ .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+ .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+ .withSetter(
+ Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
+ .withSetter(BitrateSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(
+ 0u, PROFILE_HEVC_MAIN, LEVEL_HEVC_MAIN_1))
+ .withFields({
+ C2F(mProfileLevel, profile)
+ .oneOf({C2Config::PROFILE_HEVC_MAIN,
+ C2Config::PROFILE_HEVC_MAIN_STILL}),
+ C2F(mProfileLevel, level)
+ .oneOf({LEVEL_HEVC_MAIN_1, LEVEL_HEVC_MAIN_2,
+ LEVEL_HEVC_MAIN_2_1, LEVEL_HEVC_MAIN_3,
+ LEVEL_HEVC_MAIN_3_1, LEVEL_HEVC_MAIN_4,
+ LEVEL_HEVC_MAIN_4_1, LEVEL_HEVC_MAIN_5,
+ LEVEL_HEVC_MAIN_5_1, LEVEL_HEVC_MAIN_5_2}),
+ })
+ .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
+ .build());
+
+ addParameter(
+ DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
+ .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
+ .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
+ .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+ .withDefault(
+ new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+ .withFields({C2F(mSyncFramePeriod, value).any()})
+ .withSetter(
+ Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+ .build());
+ }
+
+ static C2R BitrateSetter(bool mayBlock,
+ C2P<C2StreamBitrateInfo::output>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (me.v.value <= 4096) {
+ me.set().value = 4096;
+ }
+ return res;
+ }
+
+ static C2R SizeSetter(bool mayBlock,
+ const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+ C2P<C2StreamPictureSizeInfo::input>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R ProfileLevelSetter(
+ bool mayBlock,
+ C2P<C2StreamProfileLevelInfo::output> &me,
+ const C2P<C2StreamPictureSizeInfo::input> &size,
+ const C2P<C2StreamFrameRateInfo::output> &frameRate,
+ const C2P<C2StreamBitrateInfo::output> &bitrate) {
+ (void)mayBlock;
+ if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+ me.set().profile = PROFILE_HEVC_MAIN;
+ }
+
+ struct LevelLimits {
+ C2Config::level_t level;
+ uint64_t samplesPerSec;
+ uint64_t samples;
+ uint32_t bitrate;
+ };
+
+ constexpr LevelLimits kLimits[] = {
+ { LEVEL_HEVC_MAIN_1, 552960, 36864, 128000 },
+ { LEVEL_HEVC_MAIN_2, 3686400, 122880, 1500000 },
+ { LEVEL_HEVC_MAIN_2_1, 7372800, 245760, 3000000 },
+ { LEVEL_HEVC_MAIN_3, 16588800, 552960, 6000000 },
+ { LEVEL_HEVC_MAIN_3_1, 33177600, 983040, 10000000 },
+ { LEVEL_HEVC_MAIN_4, 66846720, 2228224, 12000000 },
+ { LEVEL_HEVC_MAIN_4_1, 133693440, 2228224, 20000000 },
+ { LEVEL_HEVC_MAIN_5, 267386880, 8912896, 25000000 },
+ { LEVEL_HEVC_MAIN_5_1, 534773760, 8912896, 40000000 },
+ { LEVEL_HEVC_MAIN_5_2, 1069547520, 8912896, 60000000 },
+ { LEVEL_HEVC_MAIN_6, 1069547520, 35651584, 60000000 },
+ { LEVEL_HEVC_MAIN_6_1, 2139095040, 35651584, 120000000 },
+ { LEVEL_HEVC_MAIN_6_2, 4278190080, 35651584, 240000000 },
+ };
+
+ uint64_t samples = size.v.width * size.v.height;
+ uint64_t samplesPerSec = samples * frameRate.v.value;
+
+ // Check if the supplied level meets the sample count / sample rate / bitrate
+ // requirements. If not, update the level to the lowest level that meets them.
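+ // Worked example (illustrative numbers, not taken from the patch): for
+ // 1920x1080 at 30 fps and a 10 Mbps target, samples = 2,073,600 and
+ // samplesPerSec = 62,208,000, so the first entry of kLimits satisfying all
+ // three bounds is LEVEL_HEVC_MAIN_4. A configured level of MAIN_3 is raised
+ // to MAIN_4, while a configured MAIN_5 is kept, because the loop breaks at
+ // MAIN_4 with needsUpdate still false.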
+
+ bool found = false;
+ // By default needsUpdate = false in case the supplied level does meet
+ // the requirements.
+ bool needsUpdate = false;
+ for (const LevelLimits &limit : kLimits) {
+ if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
+ bitrate.v.value <= limit.bitrate) {
+ // This is the lowest level that meets the requirements, and if
+ // we haven't seen the supplied level yet, that means we don't
+ // need the update.
+ if (needsUpdate) {
+ ALOGD("Given level %x does not cover current configuration: "
+ "adjusting to %x", me.v.level, limit.level);
+ me.set().level = limit.level;
+ }
+ found = true;
+ break;
+ }
+ if (me.v.level == limit.level) {
+ // We break out of the loop when the lowest feasible level is
+ // found. The fact that we're here means that our level doesn't
+ // meet the requirement and needs to be updated.
+ needsUpdate = true;
+ }
+ }
+ if (!found) {
+ // We set to the highest supported level.
+ me.set().level = LEVEL_HEVC_MAIN_5_2;
+ }
+ return C2R::Ok();
+ }
+
+ UWORD32 getProfile_l() const {
+ switch (mProfileLevel->profile) {
+ case PROFILE_HEVC_MAIN: [[fallthrough]];
+ case PROFILE_HEVC_MAIN_STILL: return 1;
+ default:
+ ALOGD("Unrecognized profile: %x", mProfileLevel->profile);
+ return 1;
+ }
+ }
+
+ UWORD32 getLevel_l() const {
+ struct Level {
+ C2Config::level_t c2Level;
+ UWORD32 hevcLevel;
+ };
+ constexpr Level levels[] = {
+ { LEVEL_HEVC_MAIN_1, 30 },
+ { LEVEL_HEVC_MAIN_2, 60 },
+ { LEVEL_HEVC_MAIN_2_1, 63 },
+ { LEVEL_HEVC_MAIN_3, 90 },
+ { LEVEL_HEVC_MAIN_3_1, 93 },
+ { LEVEL_HEVC_MAIN_4, 120 },
+ { LEVEL_HEVC_MAIN_4_1, 123 },
+ { LEVEL_HEVC_MAIN_5, 150 },
+ { LEVEL_HEVC_MAIN_5_1, 153 },
+ { LEVEL_HEVC_MAIN_5_2, 156 },
+ { LEVEL_HEVC_MAIN_6, 180 },
+ { LEVEL_HEVC_MAIN_6_1, 183 },
+ { LEVEL_HEVC_MAIN_6_2, 186 },
+ };
+ for (const Level &level : levels) {
+ if (mProfileLevel->level == level.c2Level) {
+ return level.hevcLevel;
+ }
+ }
+ ALOGD("Unrecognized level: %x", mProfileLevel->level);
+ return 156;
+ }
+ uint32_t getSyncFramePeriod_l() const {
+ if (mSyncFramePeriod->value < 0 ||
+ mSyncFramePeriod->value == INT64_MAX) {
+ return 0;
+ }
+ double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
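+ // For example, the default interval of 1,000,000 us at 30 fps gives a
+ // period of 30 frames, i.e. one sync frame per second (illustrative only).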
+ return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+ }
+
+ std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const {
+ return mSize;
+ }
+ std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const {
+ return mFrameRate;
+ }
+ std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const {
+ return mBitrate;
+ }
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const {
+ return mRequestSync;
+ }
+
+ private:
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
+ std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+ std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
+};
+constexpr char COMPONENT_NAME[] = "c2.android.hevc.encoder";
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+C2SoftHevcEnc::C2SoftHevcEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(
+ std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mIvVideoColorFormat(IV_YUV_420P),
+ mHevcEncProfile(1),
+ mHevcEncLevel(30),
+ mStarted(false),
+ mSpsPpsHeaderReceived(false),
+ mSignalledEos(false),
+ mSignalledError(false),
+ mCodecCtx(nullptr) {
+ // If dump is enabled, then create an empty file
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+ CREATE_DUMP_FILE(mOutFile);
+
+ gettimeofday(&mTimeStart, nullptr);
+ gettimeofday(&mTimeEnd, nullptr);
+}
+
+C2SoftHevcEnc::~C2SoftHevcEnc() {
+ releaseEncoder();
+}
+
+c2_status_t C2SoftHevcEnc::onInit() {
+ return initEncoder();
+}
+
+c2_status_t C2SoftHevcEnc::onStop() {
+ if (!mStarted) {
+ return C2_OK;
+ }
+ return releaseEncoder();
+}
+
+void C2SoftHevcEnc::onReset() {
+ onStop();
+ initEncoder();
+}
+
+void C2SoftHevcEnc::onRelease() {
+ onStop();
+}
+
+c2_status_t C2SoftHevcEnc::onFlush_sm() {
+ return C2_OK;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("Signalling EOS");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+c2_status_t C2SoftHevcEnc::initEncParams() {
+ mCodecCtx = nullptr;
+ mNumCores = MIN(GetCPUCoreCount(), CODEC_MAX_CORES);
+ memset(&mEncParams, 0, sizeof(ihevce_static_cfg_params_t));
+
+ // default configuration
+ IHEVCE_PLUGIN_STATUS_T err = ihevce_set_def_params(&mEncParams);
+ if (IHEVCE_EOK != err) {
+ ALOGE("HEVC default init failed : 0x%x", err);
+ return C2_CORRUPTED;
+ }
+
+ // update configuration
+ mEncParams.s_src_prms.i4_width = mSize->width;
+ mEncParams.s_src_prms.i4_height = mSize->height;
+ mEncParams.s_src_prms.i4_frm_rate_denom = 1000;
+ mEncParams.s_src_prms.i4_frm_rate_num =
+     mFrameRate->value * mEncParams.s_src_prms.i4_frm_rate_denom;
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].i4_quality_preset = IHEVCE_QUALITY_P5;
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_tgt_bitrate[0] =
+ mBitrate->value;
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_peak_bitrate[0] =
+ mBitrate->value << 1;
+ mEncParams.s_tgt_lyr_prms.as_tgt_params[0].i4_codec_level = mHevcEncLevel;
+ mEncParams.s_coding_tools_prms.i4_max_i_open_gop_period = mIDRInterval;
+ mEncParams.s_coding_tools_prms.i4_max_cra_open_gop_period = mIDRInterval;
+ mIvVideoColorFormat = IV_YUV_420P;
+ mEncParams.s_multi_thrd_prms.i4_max_num_cores = mNumCores;
+ mEncParams.s_out_strm_prms.i4_codec_profile = mHevcEncProfile;
+ mEncParams.s_config_prms.i4_rate_control_mode = 2;
+ mEncParams.s_lap_prms.i4_rc_look_ahead_pics = 0;
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftHevcEnc::releaseEncoder() {
+ mSpsPpsHeaderReceived = false;
+ mSignalledEos = false;
+ mSignalledError = false;
+ mStarted = false;
+
+ if (mCodecCtx) {
+ IHEVCE_PLUGIN_STATUS_T err = ihevce_close(mCodecCtx);
+ if (IHEVCE_EOK != err) return C2_CORRUPTED;
+ mCodecCtx = nullptr;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftHevcEnc::drain(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ (void)drainMode;
+ (void)pool;
+ return C2_OK;
+}
+
+c2_status_t C2SoftHevcEnc::initEncoder() {
+ CHECK(!mCodecCtx);
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ mSize = mIntf->getSize_l();
+ mBitrate = mIntf->getBitrate_l();
+ mFrameRate = mIntf->getFrameRate_l();
+ mHevcEncProfile = mIntf->getProfile_l();
+ mHevcEncLevel = mIntf->getLevel_l();
+ mIDRInterval = mIntf->getSyncFramePeriod_l();
+ }
+
+ c2_status_t status = initEncParams();
+
+ if (C2_OK != status) {
+ ALOGE("Failed to initialize encoder params : 0x%x", status);
+ mSignalledError = true;
+ return status;
+ }
+
+ IHEVCE_PLUGIN_STATUS_T err = IHEVCE_EOK;
+ err = ihevce_init(&mEncParams, &mCodecCtx);
+ if (IHEVCE_EOK != err) {
+ ALOGE("HEVC encoder init failed : 0x%x", err);
+ return C2_CORRUPTED;
+ }
+
+ mStarted = true;
+ return C2_OK;
+}
+
+c2_status_t C2SoftHevcEnc::setEncodeArgs(ihevce_inp_buf_t* ps_encode_ip,
+ const C2GraphicView* const input,
+ uint64_t timestamp) {
+ ihevce_static_cfg_params_t* params = &mEncParams;
+ memset(ps_encode_ip, 0, sizeof(ihevce_inp_buf_t));
+
+ if (!input) {
+ return C2_OK;
+ }
+
+ if (input->width() < mSize->width ||
+ input->height() < mSize->height) {
+ /* Expect width height to be configured */
+ ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", input->width(),
+ mSize->width, input->height(), mSize->height);
+ return C2_BAD_VALUE;
+ }
+
+ const C2PlanarLayout& layout = input->layout();
+ uint8_t* yPlane =
+ const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t* uPlane =
+ const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
+ uint8_t* vPlane =
+ const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_V]);
+ int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+ int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
+
+    // width and height are always even (as block size is 16x16)
+ CHECK_EQ((width & 1u), 0u);
+ CHECK_EQ((height & 1u), 0u);
+
+ size_t yPlaneSize = width * height;
+
+ switch (layout.type) {
+ case C2PlanarLayout::TYPE_RGB:
+ [[fallthrough]];
+ case C2PlanarLayout::TYPE_RGBA: {
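+            // RGB(A) input: convert into an I420 buffer (Y plane, then quarter-size U and V)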
+ MemoryBlock conversionBuffer =
+ mConversionBuffers.fetch(yPlaneSize * 3 / 2);
+ mConversionBuffersInUse.emplace(conversionBuffer.data(),
+ conversionBuffer);
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ yStride = width;
+ uStride = vStride = yStride / 2;
+ ConvertRGBToPlanarYUV(yPlane, yStride, height,
+ conversionBuffer.size(), *input);
+ break;
+ }
+ case C2PlanarLayout::TYPE_YUV: {
+ if (!IsYUV420(*input)) {
+ ALOGE("input is not YUV420");
+ return C2_BAD_VALUE;
+ }
+
+ if (layout.planes[layout.PLANE_Y].colInc == 1 &&
+ layout.planes[layout.PLANE_U].colInc == 1 &&
+ layout.planes[layout.PLANE_V].colInc == 1 &&
+ uStride == vStride && yStride == 2 * vStride) {
+ // I420 compatible - already set up above
+ break;
+ }
+
+ // copy to I420
+ yStride = width;
+ uStride = vStride = yStride / 2;
+ MemoryBlock conversionBuffer =
+ mConversionBuffers.fetch(yPlaneSize * 3 / 2);
+ mConversionBuffersInUse.emplace(conversionBuffer.data(),
+ conversionBuffer);
+ MediaImage2 img =
+ CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+ status_t err = ImageCopy(conversionBuffer.data(), &img, *input);
+ if (err != OK) {
+ ALOGE("Buffer conversion failed: %d", err);
+ return C2_BAD_VALUE;
+ }
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ break;
+ }
+
+ case C2PlanarLayout::TYPE_YUVA:
+ ALOGE("YUVA plane type is not supported");
+ return C2_BAD_VALUE;
+
+ default:
+ ALOGE("Unrecognized plane type: %d", layout.type);
+ return C2_BAD_VALUE;
+ }
+
+ switch (mIvVideoColorFormat) {
+ case IV_YUV_420P: {
+ // input buffer is supposed to be const but Ittiam API wants bare
+ // pointer.
+ ps_encode_ip->apv_inp_planes[0] = yPlane;
+ ps_encode_ip->apv_inp_planes[1] = uPlane;
+ ps_encode_ip->apv_inp_planes[2] = vPlane;
+
+ ps_encode_ip->ai4_inp_strd[0] = yStride;
+ ps_encode_ip->ai4_inp_strd[1] = uStride;
+ ps_encode_ip->ai4_inp_strd[2] = vStride;
+
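+            // chroma planes have half the luma height, so their sizes are halved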
+ ps_encode_ip->ai4_inp_size[0] = yStride * height;
+ ps_encode_ip->ai4_inp_size[1] = uStride * height >> 1;
+ ps_encode_ip->ai4_inp_size[2] = vStride * height >> 1;
+ break;
+ }
+
+ case IV_YUV_422ILE: {
+ // TODO
+ break;
+ }
+
+ case IV_YUV_420SP_UV:
+ case IV_YUV_420SP_VU:
+ default: {
+ ps_encode_ip->apv_inp_planes[0] = yPlane;
+ ps_encode_ip->apv_inp_planes[1] = uPlane;
+ ps_encode_ip->apv_inp_planes[2] = nullptr;
+
+ ps_encode_ip->ai4_inp_strd[0] = yStride;
+ ps_encode_ip->ai4_inp_strd[1] = uStride;
+ ps_encode_ip->ai4_inp_strd[2] = 0;
+
+ ps_encode_ip->ai4_inp_size[0] = yStride * height;
+ ps_encode_ip->ai4_inp_size[1] = uStride * height >> 1;
+ ps_encode_ip->ai4_inp_size[2] = 0;
+ break;
+ }
+ }
+
+ ps_encode_ip->i4_curr_bitrate =
+ params->s_tgt_lyr_prms.as_tgt_params[0].ai4_tgt_bitrate[0];
+ ps_encode_ip->i4_curr_peak_bitrate =
+ params->s_tgt_lyr_prms.as_tgt_params[0].ai4_peak_bitrate[0];
+ ps_encode_ip->i4_curr_rate_factor = params->s_config_prms.i4_rate_factor;
+ ps_encode_ip->u8_pts = timestamp;
+ return C2_OK;
+}
+
+void C2SoftHevcEnc::process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ if (mSignalledError || mSignalledEos) {
+ work->result = C2_BAD_VALUE;
+ ALOGD("Signalled Error / Signalled Eos");
+ return;
+ }
+ c2_status_t status = C2_OK;
+
+ // Initialize encoder if not already initialized
+ if (!mStarted) {
+ status = initEncoder();
+ if (C2_OK != status) {
+ ALOGE("Failed to initialize encoder : 0x%x", status);
+ mSignalledError = true;
+ work->result = status;
+ return;
+ }
+ }
+
+ std::shared_ptr<const C2GraphicView> view;
+ std::shared_ptr<C2Buffer> inputBuffer = nullptr;
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ if (!work->input.buffers.empty()) {
+ inputBuffer = work->input.buffers[0];
+ view = std::make_shared<const C2GraphicView>(
+ inputBuffer->data().graphicBlocks().front().map().get());
+ if (view->error() != C2_OK) {
+ ALOGE("graphic view map err = %d", view->error());
+            mSignalledError = true;
+            work->result = C2_CORRUPTED;
+            return;
+ }
+ }
+
+ IHEVCE_PLUGIN_STATUS_T err = IHEVCE_EOK;
+
+ fillEmptyWork(work);
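+    // Before the first frame, fetch the encoder's header (VPS/SPS/PPS) and attach it
+    // to the work item as codec-specific initialization data.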
+ if (!mSpsPpsHeaderReceived) {
+ ihevce_out_buf_t s_header_op{};
+ err = ihevce_encode_header(mCodecCtx, &s_header_op);
+ if (err == IHEVCE_EOK && s_header_op.i4_bytes_generated) {
+ std::unique_ptr<C2StreamInitDataInfo::output> csd =
+ C2StreamInitDataInfo::output::AllocUnique(
+ s_header_op.i4_bytes_generated, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ memcpy(csd->m.value, s_header_op.pu1_output_buf,
+ s_header_op.i4_bytes_generated);
+ DUMP_TO_FILE(mOutFile, csd->m.value, csd->flexCount());
+ work->worklets.front()->output.configUpdate.push_back(
+ std::move(csd));
+ mSpsPpsHeaderReceived = true;
+ }
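+        // header-only request: no input frame to encode yet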
+ if (!inputBuffer) {
+ return;
+ }
+ }
+ ihevce_inp_buf_t s_encode_ip{};
+ ihevce_out_buf_t s_encode_op{};
+ uint64_t timestamp = work->input.ordinal.timestamp.peekull();
+
+ status = setEncodeArgs(&s_encode_ip, view.get(), timestamp);
+ if (C2_OK != status) {
+ mSignalledError = true;
+ ALOGE("setEncodeArgs failed : 0x%x", status);
+ work->result = status;
+ return;
+ }
+
+ uint64_t timeDelay = 0;
+ uint64_t timeTaken = 0;
+ GETTIME(&mTimeStart, nullptr);
+ TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+
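+    // pass a null input when there is no buffer (EOS) so the encoder can drain pending frames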
+ ihevce_inp_buf_t* ps_encode_ip = (inputBuffer) ? &s_encode_ip : nullptr;
+
+ err = ihevce_encode(mCodecCtx, ps_encode_ip, &s_encode_op);
+ if (IHEVCE_EOK != err) {
+ ALOGE("Encode Frame failed : 0x%x", err);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ GETTIME(&mTimeEnd, nullptr);
+    /* Compute time taken for encode() */
+ TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+ ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", (int)timeTaken,
+ (int)timeDelay, s_encode_op.i4_bytes_generated);
+
+ if (s_encode_op.i4_bytes_generated) {
+ std::shared_ptr<C2LinearBlock> block;
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ status = pool->fetchLinearBlock(s_encode_op.i4_bytes_generated, usage, &block);
+ if (C2_OK != status) {
+ ALOGE("fetchLinearBlock for Output failed with status 0x%x", status);
+ work->result = C2_NO_MEMORY;
+ mSignalledError = true;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (C2_OK != wView.error()) {
+ ALOGE("write view map failed with status 0x%x", wView.error());
+ work->result = wView.error();
+ mSignalledError = true;
+ return;
+ }
+ memcpy(wView.data(), s_encode_op.pu1_output_buf,
+ s_encode_op.i4_bytes_generated);
+
+ std::shared_ptr<C2Buffer> buffer =
+ createLinearBuffer(block, 0, s_encode_op.i4_bytes_generated);
+
+ DUMP_TO_FILE(mOutFile, s_encode_op.pu1_output_buf,
+ s_encode_op.i4_bytes_generated);
+
+ work->worklets.front()->output.ordinal.timestamp = s_encode_op.u8_pts;
+ if (s_encode_op.i4_is_key_frame) {
+ ALOGV("IDR frame produced");
+ buffer->setInfo(
+ std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+ 0u /* stream id */, C2Config::SYNC_FRAME));
+ }
+ work->worklets.front()->output.buffers.push_back(buffer);
+ }
+ if (eos) {
+ mSignalledEos = true;
+ }
+}
+
+class C2SoftHevcEncFactory : public C2ComponentFactory {
+ public:
+ C2SoftHevcEncFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftHevcEnc(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftHevcEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftHevcEnc::IntfImpl>(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftHevcEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftHevcEncFactory() override = default;
+
+ private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftHevcEncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
new file mode 100644
index 0000000..c22fea2
--- /dev/null
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -0,0 +1,156 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_HEVC_ENC_H_
+#define ANDROID_C2_SOFT_HEVC_ENC_H_
+
+#include <map>
+#include <utils/Vector.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <SimpleC2Component.h>
+
+#include "ihevc_typedefs.h"
+
+namespace android {
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute difference between start and end */
+#define TIME_DIFF(start, end, diff) \
+ diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+ ((end).tv_usec - (start).tv_usec);
+
+#define CODEC_MAX_CORES 4
+
+struct C2SoftHevcEnc : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftHevcEnc(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) override;
+ c2_status_t drain(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool) override;
+
+ protected:
+ virtual ~C2SoftHevcEnc();
+
+ private:
+ std::shared_ptr<IntfImpl> mIntf;
+ ihevce_static_cfg_params_t mEncParams;
+ size_t mNumCores;
+ UWORD32 mIDRInterval;
+ IV_COLOR_FORMAT_T mIvVideoColorFormat;
+ UWORD32 mHevcEncProfile;
+ UWORD32 mHevcEncLevel;
+ bool mStarted;
+ bool mSpsPpsHeaderReceived;
+ bool mSignalledEos;
+ bool mSignalledError;
+ void* mCodecCtx;
+ MemoryBlockPool mConversionBuffers;
+ std::map<void*, MemoryBlock> mConversionBuffersInUse;
+ // configurations used by component in process
+ // (TODO: keep this in intf but make them internal only)
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+ char mOutFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+    // profiling (encode timing)
+ struct timeval mTimeStart;
+ struct timeval mTimeEnd;
+
+ c2_status_t initEncParams();
+ c2_status_t initEncoder();
+ c2_status_t releaseEncoder();
+ c2_status_t setEncodeArgs(ihevce_inp_buf_t* ps_encode_ip,
+ const C2GraphicView* const input,
+ uint64_t timestamp);
+ C2_DO_NOT_COPY(C2SoftHevcEnc);
+};
+
+#ifdef FILE_DUMP_ENABLE
+
+#define INPUT_DUMP_PATH "/data/local/tmp/hevc"
+#define INPUT_DUMP_EXT "yuv"
+#define OUTPUT_DUMP_PATH "/data/local/tmp/hevc"
+#define OUTPUT_DUMP_EXT "h265"
+#define GENERATE_FILE_NAMES() \
+{ \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ ALOGD("GENERATE_FILE_NAMES"); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, mTimeStart.tv_sec, \
+ mTimeStart.tv_usec, INPUT_DUMP_EXT); \
+ strcpy(mOutFile, ""); \
+ sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, OUTPUT_DUMP_EXT); \
+}
+
+#define CREATE_DUMP_FILE(m_filename) \
+{ \
+ FILE* fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ ALOGD("Opened file %s", m_filename); \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+}
+#define DUMP_TO_FILE(m_filename, m_buf, m_size) \
+{ \
+ FILE* fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL) { \
+ int i; \
+ ALOGD("Dump to file!"); \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ if (i != (int)m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not write to file %s", m_filename); \
+ if (fp != NULL) fclose(fp); \
+ } \
+}
+#else /* FILE_DUMP_ENABLE */
+#define INPUT_DUMP_PATH
+#define INPUT_DUMP_EXT
+#define OUTPUT_DUMP_PATH
+#define OUTPUT_DUMP_EXT
+#define GENERATE_FILE_NAMES()
+#define CREATE_DUMP_FILE(m_filename)
+#define DUMP_TO_FILE(m_filename, m_buf, m_size)
+#endif /* FILE_DUMP_ENABLE */
+
+} // namespace android
+
+#endif  // ANDROID_C2_SOFT_HEVC_ENC_H_
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
index c8b8397..9db6d8f 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.cpp
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -40,29 +40,29 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_MPEG))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::output(0u, 44100))
.withFields({C2F(mSampleRate, value).oneOf({8000, 11025, 12000, 16000,
22050, 24000, 32000, 44100, 48000})})
@@ -70,15 +70,15 @@
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::output(0u, 2))
.withFields({C2F(mChannelCount, value).inRange(1, 2)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::input(0u, 64000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::input(0u, 64000))
.withFields({C2F(mBitrate, value).inRange(8000, 320000)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -90,13 +90,13 @@
}
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
- std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
};
@@ -555,4 +555,3 @@
ALOGV("in %s", __func__);
delete factory;
}
-
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index da32ec0..290677e 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -180,7 +180,7 @@
}
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
- C2P<C2VideoSizeStreamInfo::output> &me) {
+ C2P<C2StreamPictureSizeInfo::output> &me) {
(void)mayBlock;
C2R res = C2R::Ok();
if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
@@ -892,7 +892,7 @@
ALOGI("Configuring decoder: mWidth %d , mHeight %d ",
mWidth, mHeight);
- C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
std::vector<std::unique_ptr<C2SettingResult>> failures;
c2_status_t err =
mIntf->config({&size}, C2_MAY_BLOCK, &failures);
@@ -931,7 +931,7 @@
ALOGI("Configuring decoder out: mWidth %d , mHeight %d ",
mWidth, mHeight);
- C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
std::vector<std::unique_ptr<C2SettingResult>> failures;
c2_status_t err =
mIntf->config({&size}, C2_MAY_BLOCK, &failures);
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 0b89cff..3d4a733 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -186,7 +186,7 @@
}
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
- C2P<C2VideoSizeStreamInfo::output> &me) {
+ C2P<C2StreamPictureSizeInfo::output> &me) {
(void)mayBlock;
C2R res = C2R::Ok();
if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
@@ -574,7 +574,7 @@
PVSetPostProcType(mDecHandle, 0);
if (handleResChange(work)) {
ALOGI("Setting width and height");
- C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
std::vector<std::unique_ptr<C2SettingResult>> failures;
c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
if (err == OK) {
@@ -646,7 +646,7 @@
return;
} else if (resChange) {
ALOGI("Setting width and height");
- C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
std::vector<std::unique_ptr<C2SettingResult>> failures;
c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
if (err == OK) {
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index c8796f3..89fa59d 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -52,26 +52,26 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
.withConstValue(
- new C2StreamFormatConfig::input(0u, C2FormatVideo))
+ new C2StreamBufferTypeSetting::input(0u, C2BufferData::GRAPHIC))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
.withConstValue(
- new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_VIDEO_RAW))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
#ifdef MPEG4
MEDIA_MIMETYPE_VIDEO_MPEG4
#else
@@ -80,14 +80,14 @@
))
.build());
- addParameter(DefineParam(mUsage, C2_NAME_INPUT_STREAM_USAGE_SETTING)
+ addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
.withConstValue(new C2StreamUsageTuning::input(
0u, (uint64_t)C2MemoryUsage::CPU_READ))
.build());
addParameter(
- DefineParam(mSize, C2_NAME_STREAM_VIDEO_SIZE_SETTING)
- .withDefault(new C2VideoSizeStreamTuning::input(0u, 176, 144))
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::input(0u, 176, 144))
.withFields({
#ifdef MPEG4
C2F(mSize, width).inRange(16, 176, 16),
@@ -101,7 +101,7 @@
.build());
addParameter(
- DefineParam(mFrameRate, C2_NAME_STREAM_FRAME_RATE_SETTING)
+ DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
.withDefault(new C2StreamFrameRateInfo::output(0u, 17.))
// TODO: More restriction?
.withFields({C2F(mFrameRate, value).greaterThan(0.)})
@@ -110,8 +110,8 @@
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::output(0u, 64000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
.withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
.withSetter(BitrateSetter)
.build());
@@ -217,14 +217,14 @@
}
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
- std::shared_ptr<C2VideoSizeStreamTuning::input> mSize;
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
- std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
};
@@ -446,8 +446,8 @@
}
++mNumInputFrames;
- std::unique_ptr<C2StreamCsdInfo::output> csd =
- C2StreamCsdInfo::output::AllocUnique(outputSize, 0u);
+ std::unique_ptr<C2StreamInitDataInfo::output> csd =
+ C2StreamInitDataInfo::output::AllocUnique(outputSize, 0u);
if (!csd) {
ALOGE("CSD allocation failed");
mSignalledError = true;
@@ -595,7 +595,7 @@
work->worklets.front()->output.ordinal.timestamp = inputTimeStamp;
if (hintTrack.CodeType == 0) {
buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
- 0u /* stream id */, C2PictureTypeKeyFrame));
+ 0u /* stream id */, C2Config::SYNC_FRAME));
}
work->worklets.front()->output.buffers.push_back(buffer);
}
diff --git a/media/codec2/components/opus/C2SoftOpusDec.cpp b/media/codec2/components/opus/C2SoftOpusDec.cpp
index 3ce1fd6..680712e 100644
--- a/media/codec2/components/opus/C2SoftOpusDec.cpp
+++ b/media/codec2/components/opus/C2SoftOpusDec.cpp
@@ -40,44 +40,44 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_OPUS))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::output(0u, 48000))
.withFields({C2F(mSampleRate, value).equalTo(48000)})
.withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::output(0u, 1))
.withFields({C2F(mChannelCount, value).inRange(1, 8)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::input(0u, 6000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::input(0u, 6000))
.withFields({C2F(mBitrate, value).inRange(6000, 510000)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -89,13 +89,13 @@
}
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
- std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
};
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index 68fcea1..a0b2443 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -42,29 +42,29 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatAudio))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_OPUS))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::input(0u, 48000))
.withFields({C2F(mSampleRate, value).oneOf({
8000, 12000, 16000, 24000, 48000})})
@@ -72,15 +72,15 @@
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::input(0u, 1))
.withFields({C2F(mChannelCount, value).inRange(1, 8)})
.withSetter((Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps))
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::output(0u, 128000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 128000))
.withFields({C2F(mBitrate, value).inRange(500, 512000)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -104,13 +104,13 @@
uint32_t getComplexity() const { return mComplexity->value; }
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
- std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
};
@@ -423,8 +423,8 @@
int headerLen = WriteOpusHeaders(opusHeader, mSampleRate, header,
sizeof(header), mCodecDelay, mSeekPreRoll);
- std::unique_ptr<C2StreamCsdInfo::output> csd =
- C2StreamCsdInfo::output::AllocUnique(headerLen, 0u);
+ std::unique_ptr<C2StreamInitDataInfo::output> csd =
+ C2StreamInitDataInfo::output::AllocUnique(headerLen, 0u);
if (!csd) {
ALOGE("CSD allocation failed");
mSignalledError = true;
diff --git a/media/codec2/components/raw/C2SoftRawDec.cpp b/media/codec2/components/raw/C2SoftRawDec.cpp
index 5c83481..802caa4 100644
--- a/media/codec2/components/raw/C2SoftRawDec.cpp
+++ b/media/codec2/components/raw/C2SoftRawDec.cpp
@@ -37,44 +37,44 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::output(0u, 44100))
.withFields({C2F(mSampleRate, value).inRange(8000, 192000)})
.withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::output(0u, 2))
.withFields({C2F(mChannelCount, value).inRange(1, 8)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::input(0u, 64000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::input(0u, 64000))
.withFields({C2F(mBitrate, value).inRange(1, 10000000)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -98,13 +98,13 @@
}
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
- std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
std::shared_ptr<C2StreamPcmEncodingInfo::output> mPcmEncodingInfo;
};
diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
index 48825e4..e7393ee 100644
--- a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
+++ b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp
@@ -45,44 +45,44 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_VORBIS))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::output(0u, 48000))
.withFields({C2F(mSampleRate, value).inRange(8000, 96000)})
.withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::output(0u, 1))
.withFields({C2F(mChannelCount, value).inRange(1, 8)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::input(0u, 64000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::input(0u, 64000))
.withFields({C2F(mBitrate, value).inRange(32000, 500000)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -94,13 +94,13 @@
}
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
- std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
};
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 9ba2362..3120f7a 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -215,7 +215,7 @@
}
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
- C2P<C2VideoSizeStreamInfo::output> &me) {
+ C2P<C2StreamPictureSizeInfo::output> &me) {
(void)mayBlock;
C2R res = C2R::Ok();
if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
@@ -700,7 +700,7 @@
mWidth = img->d_w;
mHeight = img->d_h;
- C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
std::vector<std::unique_ptr<C2SettingResult>> failures;
c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
if (err == C2_OK) {
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 155a84f..6509a88 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -633,7 +633,7 @@
std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block);
if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) {
buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
- 0u /* stream id */, C2PictureTypeKeyFrame));
+ 0u /* stream id */, C2Config::SYNC_FRAME));
}
work->worklets.front()->output.buffers.push_back(buffer);
work->worklets.front()->output.ordinal = work->input.ordinal;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index 87ed1a9..5591a49 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -229,26 +229,26 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
.withConstValue(
- new C2StreamFormatConfig::input(0u, C2FormatVideo))
+ new C2StreamBufferTypeSetting::input(0u, C2BufferData::GRAPHIC))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
.withConstValue(
- new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_VIDEO_RAW))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
#ifdef VP9
MEDIA_MIMETYPE_VIDEO_VP9
#else
@@ -257,14 +257,14 @@
))
.build());
- addParameter(DefineParam(mUsage, C2_NAME_INPUT_STREAM_USAGE_SETTING)
+ addParameter(DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
.withConstValue(new C2StreamUsageTuning::input(
0u, (uint64_t)C2MemoryUsage::CPU_READ))
.build());
addParameter(
- DefineParam(mSize, C2_NAME_STREAM_VIDEO_SIZE_SETTING)
- .withDefault(new C2VideoSizeStreamTuning::input(0u, 320, 240))
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
.withFields({
C2F(mSize, width).inRange(2, 2048, 2),
C2F(mSize, height).inRange(2, 2048, 2),
@@ -285,7 +285,7 @@
.build());
addParameter(
- DefineParam(mFrameRate, C2_NAME_STREAM_FRAME_RATE_SETTING)
+ DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
.withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
// TODO: More restriction?
.withFields({C2F(mFrameRate, value).greaterThan(0.)})
@@ -312,8 +312,8 @@
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::output(0u, 64000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
.withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
.withSetter(BitrateSetter)
.build());
@@ -416,18 +416,18 @@
}
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
- std::shared_ptr<C2VideoSizeStreamTuning::input> mSize;
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
std::shared_ptr<C2StreamTemporalLayeringTuning::output> mLayering;
std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
- std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
};
diff --git a/media/codec2/components/xaac/C2SoftXaacDec.cpp b/media/codec2/components/xaac/C2SoftXaacDec.cpp
index 1c0e70b..86739c2 100644
--- a/media/codec2/components/xaac/C2SoftXaacDec.cpp
+++ b/media/codec2/components/xaac/C2SoftXaacDec.cpp
@@ -66,29 +66,29 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
+ DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::input(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
+ DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE)
+ .withConstValue(new C2StreamBufferTypeSetting::output(0u, C2BufferData::LINEAR))
.build());
addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::input>(
MEDIA_MIMETYPE_AUDIO_AAC))
.build());
addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE)
+ .withConstValue(AllocSharedString<C2PortMediaTypeSetting::output>(
MEDIA_MIMETYPE_AUDIO_RAW))
.build());
addParameter(
- DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING)
+ DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
.withDefault(new C2StreamSampleRateInfo::output(0u, 44100))
.withFields({C2F(mSampleRate, value).oneOf({
7350, 8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000
@@ -97,15 +97,15 @@
.build());
addParameter(
- DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING)
+ DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
.withDefault(new C2StreamChannelCountInfo::output(0u, 1))
.withFields({C2F(mChannelCount, value).inRange(1, 8)})
.withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
.build());
addParameter(
- DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
- .withDefault(new C2BitrateTuning::input(0u, 64000))
+ DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::input(0u, 64000))
.withFields({C2F(mBitrate, value).inRange(8000, 960000)})
.withSetter(Setter<decltype(*mBitrate)>::NonStrictValueWithNoDeps)
.build());
@@ -116,10 +116,10 @@
.build());
addParameter(
- DefineParam(mAacFormat, C2_NAME_STREAM_AAC_FORMAT_SETTING)
- .withDefault(new C2StreamAacFormatInfo::input(0u, C2AacStreamFormatRaw))
+ DefineParam(mAacFormat, C2_PARAMKEY_AAC_PACKAGING)
+ .withDefault(new C2StreamAacFormatInfo::input(0u, C2Config::AAC_PACKAGING_RAW))
.withFields({C2F(mAacFormat, value).oneOf({
- C2AacStreamFormatRaw, C2AacStreamFormatAdts
+ C2Config::AAC_PACKAGING_RAW, C2Config::AAC_PACKAGING_ADTS
})})
.withSetter(Setter<decltype(*mAacFormat)>::StrictValueWithNoDeps)
.build());
@@ -203,7 +203,7 @@
.build());
}
- bool isAdts() const { return mAacFormat->value == C2AacStreamFormatAdts; }
+ bool isAdts() const { return mAacFormat->value == C2Config::AAC_PACKAGING_ADTS; }
uint32_t getBitrate() const { return mBitrate->value; }
static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me) {
(void)mayBlock;
@@ -218,13 +218,13 @@
int32_t getDrcEffectType() const { return mDrcEffectType->value; }
private:
- std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
- std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
- std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
- std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamBufferTypeSetting::input> mInputFormat;
+ std::shared_ptr<C2StreamBufferTypeSetting::output> mOutputFormat;
+ std::shared_ptr<C2PortMediaTypeSetting::input> mInputMediaType;
+ std::shared_ptr<C2PortMediaTypeSetting::output> mOutputMediaType;
std::shared_ptr<C2StreamSampleRateInfo::output> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
- std::shared_ptr<C2BitrateTuning::input> mBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::input> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
std::shared_ptr<C2StreamAacFormatInfo::input> mAacFormat;
std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index c428122..3d3587c 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -1994,7 +1994,6 @@
GRAPHIC, ///< the buffer contains a single graphic block
GRAPHIC_CHUNKS, ///< the buffer contains one of more graphic blocks
};
- typedef type_t Type; // deprecated
/**
* Gets the type of this buffer (data).
@@ -2042,23 +2041,6 @@
*/
const C2BufferData data() const;
- /**
- * These will still work if used in onDeathNotify.
- */
-#if 0
- inline std::shared_ptr<C2LinearBuffer> asLinearBuffer() const {
- return mType == LINEAR ? std::shared_ptr::reinterpret_cast<C2LinearBuffer>(this) : nullptr;
- }
-
- inline std::shared_ptr<C2GraphicBuffer> asGraphicBuffer() const {
- return mType == GRAPHIC ? std::shared_ptr::reinterpret_cast<C2GraphicBuffer>(this) : nullptr;
- }
-
- inline std::shared_ptr<C2CircularBuffer> asCircularBuffer() const {
- return mType == CIRCULAR ? std::shared_ptr::reinterpret_cast<C2CircularBuffer>(this) : nullptr;
- }
-#endif
-
///@name Pre-destroy notification handling
///@{
@@ -2163,8 +2145,6 @@
*/
static std::shared_ptr<C2Buffer> CreateGraphicBuffer(const C2ConstGraphicBlock &block);
-
-
protected:
// no public constructor
explicit C2Buffer(const std::vector<C2ConstLinearBlock> &blocks);
@@ -2173,7 +2153,6 @@
private:
class Impl;
std::shared_ptr<Impl> mImpl;
-// Type _mType;
};
/**
@@ -2200,109 +2179,6 @@
/// @}
-/// \cond INTERNAL
-
-/// \todo These are no longer used
-
-/// \addtogroup linear
-/// @{
-
-/** \deprecated */
-class C2LinearBuffer
- : public C2Buffer, public _C2LinearRangeAspect,
- public std::enable_shared_from_this<C2LinearBuffer> {
-public:
- /** \todo what is this? */
- const C2Handle *handle() const;
-
-protected:
- inline C2LinearBuffer(const C2ConstLinearBlock &block);
-
-private:
- class Impl;
- Impl *mImpl;
-};
-
-class C2ReadCursor;
-
-class C2WriteCursor {
-public:
- uint32_t remaining() const; // remaining data to be read
- void commit(); // commits the current position. discard data before current position
- void reset() const; // resets position to the last committed position
- // slices off at most |size| bytes, and moves cursor ahead by the number of bytes
- // sliced off.
- C2ReadCursor slice(uint32_t size) const;
- // slices off at most |size| bytes, and moves cursor ahead by the number of bytes
- // sliced off.
- C2WriteCursor reserve(uint32_t size);
- // bool read(T&);
- // bool write(T&);
- C2Fence waitForSpace(uint32_t size);
-};
-
-/// @}
-
-/// \addtogroup graphic
-/// @{
-
-struct C2ColorSpace {
-//public:
- enum Standard {
- BT601,
- BT709,
- BT2020,
- // TODO
- };
-
- enum Range {
- LIMITED,
- FULL,
- // TODO
- };
-
- enum TransferFunction {
- BT709Transfer,
- BT2020Transfer,
- HybridLogGamma2,
- HybridLogGamma4,
- // TODO
- };
-};
-
-/** \deprecated */
-class C2GraphicBuffer : public C2Buffer {
-public:
- // constant attributes
- inline uint32_t width() const { return mWidth; }
- inline uint32_t height() const { return mHeight; }
- inline uint32_t format() const { return mFormat; }
- inline const C2MemoryUsage usage() const { return mUsage; }
-
- // modifiable attributes
-
-
- virtual const C2ColorSpace colorSpace() const = 0;
- // best effort
- virtual void setColorSpace_be(const C2ColorSpace &colorSpace) = 0;
- virtual bool setColorSpace(const C2ColorSpace &colorSpace) = 0;
-
- const C2Handle *handle() const;
-
-protected:
- uint32_t mWidth;
- uint32_t mHeight;
- uint32_t mFormat;
- C2MemoryUsage mUsage;
-
- class Impl;
- Impl *mImpl;
-};
-
-/// @}
-
-/// \endcond
-
/// @}
#endif // C2BUFFER_H_
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index fb6edb6..9545c45 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -240,19 +240,6 @@
kParamIndexTimestampGapAdjustment, // input-surface, struct
kParamIndexSurfaceAllocator, // u32
-
- // deprecated indices due to renaming
- kParamIndexAacStreamFormat = kParamIndexAacPackaging,
- kParamIndexCsd = kParamIndexInitData,
- kParamIndexMaxVideoSizeHint = kParamIndexMaxPictureSize,
- kParamIndexMime = kParamIndexMediaType,
- kParamIndexRequestedInfos = kParamIndexSubscribedParamIndices,
-
-
- // deprecated indices due to removal
- kParamIndexSupportedParams = 0xDEAD0000,
- kParamIndexReadOnlyParams,
- kParamIndexTemporal,
};
}
@@ -337,14 +324,8 @@
// read-only
typedef C2GlobalParam<C2Setting, C2SimpleValueStruct<C2Component::domain_t>, kParamIndexDomain>
C2ComponentDomainSetting;
-typedef C2ComponentDomainSetting C2ComponentDomainInfo; // deprecated
-typedef C2Component::domain_t C2DomainKind; // deprecated
constexpr char C2_PARAMKEY_COMPONENT_DOMAIN[] = "component.domain";
-constexpr C2Component::domain_t C2DomainAudio = C2Component::DOMAIN_AUDIO; // deprecated
-constexpr C2Component::domain_t C2DomainOther = C2Component::DOMAIN_OTHER; // deprecate
-constexpr C2Component::domain_t C2DomainVideo = C2Component::DOMAIN_VIDEO; // deprecate
-
/**
* Component attributes.
*
@@ -359,9 +340,6 @@
C2ComponentAttributesSetting;
constexpr char C2_PARAMKEY_COMPONENT_ATTRIBUTES[] = "component.attributes";
-// deprecated
-typedef C2ComponentAttributesSetting C2ComponentTemporalInfo;
-
/**
* Time stretching.
*
@@ -707,7 +685,6 @@
typedef C2StreamParam<C2Info, C2ProfileLevelStruct, kParamIndexProfileLevel>
C2StreamProfileLevelInfo;
constexpr char C2_PARAMKEY_PROFILE_LEVEL[] = "coded.pl";
-#define C2_PARAMKEY_STREAM_PROFILE_LEVEL C2_PARAMKEY_PROFILE_LEVEL
/**
* Codec-specific initialization data.
@@ -719,9 +696,7 @@
* TODO: define for other codecs.
*/
typedef C2StreamParam<C2Info, C2BlobValue, kParamIndexInitData> C2StreamInitDataInfo;
-typedef C2StreamInitDataInfo C2StreamCsdInfo; // deprecated
constexpr char C2_PARAMKEY_INIT_DATA[] = "coded.init-data";
-#define C2_PARAMKEY_STREAM_INIT_DATA C2_PARAMKEY_INIT_DATA
/**
* Supplemental Data.
@@ -781,11 +756,8 @@
* port media type.
*/
typedef C2PortParam<C2Setting, C2StringValue, kParamIndexMediaType> C2PortMediaTypeSetting;
-typedef C2PortMediaTypeSetting C2PortMimeConfig; // deprecated
constexpr char C2_PARAMKEY_INPUT_MEDIA_TYPE[] = "input.media-type";
constexpr char C2_PARAMKEY_OUTPUT_MEDIA_TYPE[] = "output.media-type";
-#define C2_NAME_INPUT_PORT_MIME_SETTING C2_PARAMKEY_INPUT_MEDIA_TYPE
-#define C2_NAME_OUTPUT_PORT_MIME_SETTING C2_PARAMKEY_OUTPUT_MEDIA_TYPE
typedef C2StreamParam<C2Setting, C2StringValue, kParamIndexMediaType> C2StreamMediaTypeSetting;
@@ -808,24 +780,20 @@
*/
typedef C2PortParam<C2Tuning, C2Uint32Value, kParamIndexDelayRequest> C2PortRequestedDelayTuning;
-typedef C2PortRequestedDelayTuning C2PortRequestedLatencyTuning; // deprecated
constexpr char C2_PARAMKEY_INPUT_DELAY_REQUEST[] = "input.delay.requested";
constexpr char C2_PARAMKEY_OUTPUT_DELAY_REQUEST[] = "output.delay.requested";
typedef C2GlobalParam<C2Tuning, C2Uint32Value, kParamIndexDelayRequest>
C2RequestedPipelineDelayTuning;
-typedef C2RequestedPipelineDelayTuning C2ComponentRequestedLatencyTuning; // deprecated
constexpr char C2_PARAMKEY_PIPELINE_DELAY_REQUEST[] = "pipeline-delay.requested";
// read-only
typedef C2PortParam<C2Tuning, C2Uint32Value, kParamIndexDelay> C2PortActualDelayTuning;
-typedef C2PortActualDelayTuning C2PortLatencyInfo; // deprecated
constexpr char C2_PARAMKEY_INPUT_DELAY[] = "input.delay.actual";
constexpr char C2_PARAMKEY_OUTPUT_DELAY[] = "output.delay.actual";
// read-only
typedef C2GlobalParam<C2Tuning, C2Uint32Value, kParamIndexDelay> C2ActualPipelineDelayTuning;
-typedef C2ActualPipelineDelayTuning C2ComponentLatencyInfo; // deprecated
constexpr char C2_PARAMKEY_PIPELINE_DELAY[] = "algo.delay.actual";
/**
@@ -875,7 +843,6 @@
*/
// private
typedef C2PortParam<C2Tuning, C2Uint32Value, kParamIndexStreamCount> C2PortStreamCountTuning;
-typedef C2PortStreamCountTuning C2PortStreamCountConfig; // deprecated
constexpr char C2_PARAMKEY_INPUT_STREAM_COUNT[] = "input.stream-count";
constexpr char C2_PARAMKEY_OUTPUT_STREAM_COUNT[] = "output.stream-count";
@@ -985,20 +952,9 @@
typedef C2StreamParam<C2Setting, C2SimpleValueStruct<C2EasyEnum<C2BufferData::type_t>>,
kParamIndexBufferType>
C2StreamBufferTypeSetting;
-
-constexpr C2BufferData::type_t C2FormatAudio = C2BufferData::LINEAR; // deprecated
-constexpr C2BufferData::type_t C2FormatCompressed = C2BufferData::LINEAR; // deprecated
-constexpr C2BufferData::type_t C2FormatVideo = C2BufferData::GRAPHIC; // deprecated
-typedef C2BufferData::type_t C2FormatKind; // deprecated
-
-typedef C2StreamBufferTypeSetting C2StreamFormatConfig; // deprecated
constexpr char C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE[] = "input.buffers.type";
constexpr char C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE[] = "output.buffers.type";
-// deprecated
-#define C2_NAME_INPUT_STREAM_FORMAT_SETTING C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE
-#define C2_NAME_OUTPUT_STREAM_FORMAT_SETTING C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE
-
/**
* Memory usage.
*
@@ -1007,8 +963,6 @@
typedef C2StreamParam<C2Tuning, C2Uint64Value, kParamIndexUsage> C2StreamUsageTuning;
constexpr char C2_PARAMKEY_INPUT_STREAM_USAGE[] = "input.buffers.usage";
constexpr char C2_PARAMKEY_OUTPUT_STREAM_USAGE[] = "output.buffers.usage";
-// deprecated
-#define C2_NAME_INPUT_STREAM_USAGE_SETTING C2_PARAMKEY_INPUT_STREAM_USAGE
/**
* Picture (video or image frame) size.
@@ -1068,8 +1022,6 @@
constexpr char C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE[] = "input.buffers.max-size";
constexpr char C2_PARAMKEY_OUTPUT_MAX_BUFFER_SIZE[] = "output.buffers.max-size";
-#define C2_NAME_STREAM_MAX_BUFFER_SIZE_SETTING C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE
-
/* ---------------------------------------- misc. state ---------------------------------------- */
/**
@@ -1170,9 +1122,7 @@
* Bitrate
*/
typedef C2StreamParam<C2Info, C2Uint32Value, kParamIndexBitrate> C2StreamBitrateInfo;
-typedef C2StreamBitrateInfo C2BitrateTuning; // deprecated
constexpr char C2_PARAMKEY_BITRATE[] = "coded.bitrate";
-#define C2_NAME_STREAM_BITRATE_SETTING C2_PARAMKEY_BITRATE
/**
* Bitrate mode.
@@ -1261,15 +1211,8 @@
*
* This is used for the output of the video decoder, and the input of the video encoder.
*/
-typedef C2PictureSizeStruct C2VideoSizeStruct; // deprecated
-
typedef C2StreamParam<C2Info, C2PictureSizeStruct, kParamIndexPictureSize> C2StreamPictureSizeInfo;
constexpr char C2_PARAMKEY_PICTURE_SIZE[] = "raw.size";
-#define C2_PARAMKEY_STREAM_PICTURE_SIZE C2_PARAMKEY_PICTURE_SIZE
-#define C2_NAME_STREAM_VIDEO_SIZE_INFO C2_PARAMKEY_PICTURE_SIZE
-typedef C2StreamPictureSizeInfo C2VideoSizeStreamInfo; // deprecated
-typedef C2StreamPictureSizeInfo C2VideoSizeStreamTuning; // deprecated
-#define C2_NAME_STREAM_VIDEO_SIZE_SETTING C2_PARAMKEY_PICTURE_SIZE
/**
* Crop rectangle.
@@ -1344,12 +1287,10 @@
kParamIndexScalingMethod>
C2StreamScalingMethodTuning;
constexpr char C2_PARAMKEY_SCALING_MODE[] = "raw.scaling-method";
-#define C2_PARAMKEY_STREAM_SCALING_MODE C2_PARAMKEY_SCALING_MODE
typedef C2StreamParam<C2Tuning, C2PictureSizeStruct, kParamIndexScaledPictureSize>
C2StreamScaledPictureSizeTuning;
constexpr char C2_PARAMKEY_SCALED_PICTURE_SIZE[] = "raw.scaled-size";
-#define C2_PARAMKEY_STREAM_SCALED_PICTURE_SIZE C2_PARAMKEY_SCALED_PICTURE_SIZE
typedef C2StreamParam<C2Tuning, C2RectStruct, kParamIndexScaledCropRect>
C2StreamScaledCropRectTuning;
@@ -1504,15 +1445,8 @@
MATRIX_BT2020_CONSTANT, ///< Rec.ITU-R BT.2020 constant luminance
MATRIX_VENDOR_START = 0x80, ///< vendor-specific matrix coefficient values start here
MATRIX_OTHER = 0xff, ///< max value, reserved for undefined values
-
- MATRIX_SMPTE240M = MATRIX_240M, // deprecated
- MATRIX_BT2020CONSTANT = MATRIX_BT2020_CONSTANT, // deprecated
)
-constexpr C2Color::matrix_t MATRIX_BT470_6M = MATRIX_FCC47_73_682; // deprecated
-constexpr C2Color::matrix_t MATRIX_BT709_5 = MATRIX_BT709; // deprecated
-constexpr C2Color::matrix_t MATRIX_BT601_6 = MATRIX_BT601; // deprecated
-
struct C2ColorAspectsStruct {
C2Color::range_t range;
C2Color::primaries_t primaries;
@@ -1635,7 +1569,6 @@
*/
typedef C2StreamParam<C2Info, C2FloatValue, kParamIndexFrameRate> C2StreamFrameRateInfo;
constexpr char C2_PARAMKEY_FRAME_RATE[] = "coded.frame-rate";
-#define C2_NAME_STREAM_FRAME_RATE_SETTING C2_PARAMKEY_FRAME_RATE
typedef C2PortParam<C2Info, C2FloatValue, kParamIndexFrameRate> C2PortFrameRateInfo;
constexpr char C2_PARAMKEY_INPUT_FRAME_RATE[] = "input.frame-rate";
@@ -1668,9 +1601,6 @@
B_FRAME = (1 << 3), ///< backward predicted (out-of-order) frame
)
-typedef C2Config::picture_type_t C2PictureTypeMask; // deprecated
-constexpr C2Config::picture_type_t C2PictureTypeKeyFrame = C2Config::SYNC_FRAME; // deprecated
-
/**
* Allowed picture types.
*/
@@ -1750,8 +1680,6 @@
typedef C2StreamParam<C2Tuning, C2Int64Value, kParamIndexSyncFrameInterval>
C2StreamSyncFrameIntervalTuning;
constexpr char C2_PARAMKEY_SYNC_FRAME_INTERVAL[] = "coding.sync-frame-interval";
-// deprecated
-#define C2_PARAMKEY_SYNC_FRAME_PERIOD C2_PARAMKEY_SYNC_FRAME_INTERVAL
/**
* Temporal layering
@@ -1885,8 +1813,6 @@
typedef C2StreamParam<C2Info, C2Uint32Value, kParamIndexSampleRate> C2StreamSampleRateInfo;
constexpr char C2_PARAMKEY_SAMPLE_RATE[] = "raw.sample-rate";
constexpr char C2_PARAMKEY_CODED_SAMPLE_RATE[] = "coded.sample-rate";
-// deprecated
-#define C2_NAME_STREAM_SAMPLE_RATE_SETTING C2_PARAMKEY_SAMPLE_RATE
/**
* Channel count.
@@ -1894,8 +1820,6 @@
typedef C2StreamParam<C2Info, C2Uint32Value, kParamIndexChannelCount> C2StreamChannelCountInfo;
constexpr char C2_PARAMKEY_CHANNEL_COUNT[] = "raw.channel-count";
constexpr char C2_PARAMKEY_CODED_CHANNEL_COUNT[] = "coded.channel-count";
-// deprecated
-#define C2_NAME_STREAM_CHANNEL_COUNT_SETTING C2_PARAMKEY_CHANNEL_COUNT
/**
* Max channel count. Used to limit the number of coded or decoded channels.
@@ -2005,16 +1929,10 @@
AAC_PACKAGING_ADTS
)
-typedef C2Config::aac_packaging_t C2AacStreamFormatKind; // deprecated
-// deprecated
-constexpr C2Config::aac_packaging_t C2AacStreamFormatRaw = C2Config::AAC_PACKAGING_RAW;
-constexpr C2Config::aac_packaging_t C2AacStreamFormatAdts = C2Config::AAC_PACKAGING_ADTS;
-
typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2EasyEnum<C2Config::aac_packaging_t>>,
kParamIndexAacPackaging> C2StreamAacPackagingInfo;
typedef C2StreamAacPackagingInfo C2StreamAacFormatInfo;
constexpr char C2_PARAMKEY_AAC_PACKAGING[] = "coded.aac-packaging";
-#define C2_NAME_STREAM_AAC_FORMAT_SETTING C2_PARAMKEY_AAC_PACKAGING
/* ================================ PLATFORM-DEFINED PARAMETERS ================================ */
@@ -2134,7 +2052,6 @@
typedef C2GlobalParam<C2Tuning, C2EasyBoolValue, kParamIndexInputSurfaceEos>
C2InputSurfaceEosTuning;
constexpr char C2_PARAMKEY_INPUT_SURFACE_EOS[] = "input-surface.eos";
-#define C2_NAME_INPUT_SURFACE_EOS_TUNING C2_PARAMKEY_INPUT_SURFACE_EOS
/**
* Start/suspend/resume/stop controls and timestamps for input surface.
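[Editor's sketch, not part of the patch] With the deprecated aliases removed above, call sites switch to the canonical type and key names. A minimal sketch of querying the renamed init-data (formerly "CSD") parameter, mirroring the query_vb()/From() pattern used in the C2Store.cpp hunk further below; `intf` is an assumed std::shared_ptr<C2ComponentInterface>:

std::vector<std::unique_ptr<C2Param>> heapParams;
c2_status_t res = intf->query_vb(
        {}, { C2StreamInitDataInfo::output::PARAM_TYPE }, C2_MAY_BLOCK, &heapParams);
if (res == C2_OK && heapParams.size() == 1u) {
    // C2StreamCsdInfo no longer exists; use the canonical C2StreamInitDataInfo type.
    const C2StreamInitDataInfo::output *csd =
            C2StreamInitDataInfo::output::From(heapParams[0].get());
    if (csd != nullptr) {
        // csd->flexCount() bytes of codec-specific initialization data in csd->m.value.
    }
}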
diff --git a/media/codec2/core/include/C2Param.h b/media/codec2/core/include/C2Param.h
index efc5c89..d264bf3 100644
--- a/media/codec2/core/include/C2Param.h
+++ b/media/codec2/core/include/C2Param.h
@@ -1012,15 +1012,6 @@
_mNamedValues(_NamedValuesGetter<B>::getNamedValues()),
_mFieldId(offset) {}
-/*
- template<typename T, typename B=typename std::remove_extent<T>::type>
- inline C2FieldDescriptor<T, B, false>(T* offset, const char *name)
- : _mType(this->GetType((B*)nullptr)),
- _mExtent(std::is_array<T>::value ? std::extent<T>::value : 1),
- _mName(name),
- _mFieldId(offset) {}
-*/
-
/// \deprecated
template<typename T, typename S, class B=typename std::remove_extent<T>::type>
inline C2FieldDescriptor(S*, T S::* field, const char *name)
diff --git a/media/codec2/hidl/1.0/utils/InputSurface.cpp b/media/codec2/hidl/1.0/utils/InputSurface.cpp
index 2cbe64b..85c44c3 100644
--- a/media/codec2/hidl/1.0/utils/InputSurface.cpp
+++ b/media/codec2/hidl/1.0/utils/InputSurface.cpp
@@ -45,7 +45,7 @@
setDerivedInstance(this);
addParameter(
- DefineParam(mEos, C2_NAME_INPUT_SURFACE_EOS_TUNING)
+ DefineParam(mEos, C2_PARAMKEY_INPUT_SURFACE_EOS)
.withDefault(new C2InputSurfaceEosTuning(false))
.withFields({C2F(mEos, value).oneOf({true, false})})
.withSetter(EosSetter)
diff --git a/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp b/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
index 1024f50..c9932ef 100644
--- a/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
+++ b/media/codec2/hidl/1.0/utils/InputSurfaceConnection.cpp
@@ -124,7 +124,7 @@
}
// TODO: read settings properly from the interface
- C2VideoSizeStreamTuning::input inputSize;
+ C2StreamPictureSizeInfo::input inputSize;
C2StreamUsageTuning::input usage;
c2_status_t c2Status = queryFromSink({ &inputSize, &usage },
{},
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index 31da111..1f36270 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -93,14 +93,14 @@
std::vector<std::unique_ptr<C2SettingResult>> failures;
for (size_t i = 0; i < updates.size(); ++i) {
C2Param* param = updates[i].get();
- if (param->index() == C2StreamCsdInfo::output::PARAM_TYPE) {
+ if (param->index() == C2StreamInitDataInfo::output::PARAM_TYPE) {
csd = true;
} else if ((param->index() ==
C2StreamSampleRateInfo::output::PARAM_TYPE) ||
(param->index() ==
C2StreamChannelCountInfo::output::PARAM_TYPE) ||
(param->index() ==
- C2VideoSizeStreamInfo::output::PARAM_TYPE)) {
+ C2StreamPictureSizeInfo::output::PARAM_TYPE)) {
configParam.push_back(param);
}
}
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp
index 95d1b72..7db41c0 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHidlC2V1_0TargetVideoEncTest.cpp
@@ -229,7 +229,7 @@
// Set Default config param.
bool Codec2VideoEncHidlTest::setupConfigParam(int32_t nWidth, int32_t nHeight) {
std::vector<std::unique_ptr<C2SettingResult>> failures;
- C2VideoSizeStreamTuning::input inputSize(0u, nWidth, nHeight);
+ C2StreamPictureSizeInfo::input inputSize(0u, nWidth, nHeight);
std::vector<C2Param*> configParam{&inputSize};
c2_status_t status =
mComponent->config(configParam, C2_DONT_BLOCK, &failures);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 7e8d280..7a444a3 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -186,7 +186,7 @@
* MediaCodec behavior.
*/
virtual status_t registerCsd(
- const C2StreamCsdInfo::output * /* csd */,
+ const C2StreamInitDataInfo::output * /* csd */,
size_t * /* index */,
sp<MediaCodecBuffer> * /* clientBuffer */) = 0;
@@ -1187,7 +1187,7 @@
}
status_t registerCsd(
- const C2StreamCsdInfo::output *csd,
+ const C2StreamInitDataInfo::output *csd,
size_t *index,
sp<MediaCodecBuffer> *clientBuffer) final {
sp<Codec2Buffer> c2Buffer;
@@ -1286,7 +1286,7 @@
}
status_t registerCsd(
- const C2StreamCsdInfo::output *csd,
+ const C2StreamInitDataInfo::output *csd,
size_t *index,
sp<MediaCodecBuffer> *clientBuffer) final {
sp<Codec2Buffer> newBuffer = new LocalLinearBuffer(
@@ -2154,7 +2154,7 @@
1 << C2PlatformAllocatorStore::BUFFERQUEUE);
if (inputFormat != nullptr) {
- bool graphic = (iStreamFormat.value == C2FormatVideo);
+ bool graphic = (iStreamFormat.value == C2BufferData::GRAPHIC);
std::shared_ptr<C2BlockPool> pool;
{
Mutexed<BlockPools>::Locked pools(mBlockPools);
@@ -2279,7 +2279,7 @@
outputGeneration = output->generation;
}
- bool graphic = (oStreamFormat.value == C2FormatVideo);
+ bool graphic = (oStreamFormat.value == C2BufferData::GRAPHIC);
C2BlockPool::local_id_t outputPoolId_;
{
@@ -2452,7 +2452,7 @@
return OK;
}
- C2StreamFormatConfig::output oStreamFormat(0u);
+ C2StreamBufferTypeSetting::output oStreamFormat(0u);
c2_status_t err = mComponent->query({ &oStreamFormat }, {}, C2_DONT_BLOCK, nullptr);
if (err != C2_OK) {
return UNKNOWN_ERROR;
@@ -2744,7 +2744,7 @@
// TODO: properly translate these to metadata
switch (info->coreIndex().coreIndex()) {
case C2StreamPictureTypeMaskInfo::CORE_INDEX:
- if (((C2StreamPictureTypeMaskInfo *)info.get())->value & C2PictureTypeKeyFrame) {
+ if (((C2StreamPictureTypeMaskInfo *)info.get())->value & C2Config::SYNC_FRAME) {
flags |= MediaCodec::BUFFER_FLAG_SYNCFRAME;
}
break;
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 13b63c9..0fd5731 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -224,6 +224,7 @@
mInitCheck = BAD_VALUE;
return;
}
+ memset(mediaImage, 0, sizeof(*mediaImage));
mAllocatedDepth = layout.planes[0].allocatedDepth;
uint32_t bitDepth = layout.planes[0].bitDepth;
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 0a6a717..6da131f 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -101,7 +101,7 @@
{ C2Color::MATRIX_BT709, ColorAspects::MatrixBT709_5 },
{ C2Color::MATRIX_FCC47_73_682, ColorAspects::MatrixBT470_6M },
{ C2Color::MATRIX_BT601, ColorAspects::MatrixBT601_6 },
- { C2Color::MATRIX_SMPTE240M, ColorAspects::MatrixSMPTE240M },
+ { C2Color::MATRIX_240M, ColorAspects::MatrixSMPTE240M },
{ C2Color::MATRIX_BT2020, ColorAspects::MatrixBT2020 },
{ C2Color::MATRIX_BT2020_CONSTANT, ColorAspects::MatrixBT2020Constant },
{ C2Color::MATRIX_OTHER, ColorAspects::MatrixOther },
@@ -855,19 +855,19 @@
switch (primaries) {
case C2Color::PRIMARIES_BT601_525:
- *dataSpace |= (matrix == C2Color::MATRIX_SMPTE240M
+ *dataSpace |= (matrix == C2Color::MATRIX_240M
|| matrix == C2Color::MATRIX_BT709)
? HAL_DATASPACE_STANDARD_BT601_525_UNADJUSTED
: HAL_DATASPACE_STANDARD_BT601_525;
break;
case C2Color::PRIMARIES_BT601_625:
- *dataSpace |= (matrix == C2Color::MATRIX_SMPTE240M
+ *dataSpace |= (matrix == C2Color::MATRIX_240M
|| matrix == C2Color::MATRIX_BT709)
? HAL_DATASPACE_STANDARD_BT601_625_UNADJUSTED
: HAL_DATASPACE_STANDARD_BT601_625;
break;
case C2Color::PRIMARIES_BT2020:
- *dataSpace |= (matrix == C2Color::MATRIX_BT2020CONSTANT
+ *dataSpace |= (matrix == C2Color::MATRIX_BT2020_CONSTANT
? HAL_DATASPACE_STANDARD_BT2020_CONSTANT_LUMINANCE
: HAL_DATASPACE_STANDARD_BT2020);
break;
diff --git a/media/codec2/tests/C2ComponentInterface_test.cpp b/media/codec2/tests/C2ComponentInterface_test.cpp
index e907964..67f733d 100644
--- a/media/codec2/tests/C2ComponentInterface_test.cpp
+++ b/media/codec2/tests/C2ComponentInterface_test.cpp
@@ -182,9 +182,9 @@
return std::make_unique<T>();
}
-template <> std::unique_ptr<C2PortMimeConfig::input> makeParam() {
+template <> std::unique_ptr<C2PortMediaTypeSetting::input> makeParam() {
// TODO(hiroh): Set more precise length.
- return C2PortMimeConfig::input::AllocUnique(100);
+ return C2PortMediaTypeSetting::input::AllocUnique(100);
}
#define TRACED_FAILURE(func) \
@@ -323,17 +323,17 @@
EXPECT_EQ(C2SettingResult::BAD_VALUE, failures[0]->failure);
}
-// There is only used enum type for the field type, that is C2DomainKind.
+// The only enum type used as a field type is C2Component::domain_t.
// If another field type is added, it is necessary to add function for that.
template <>
void C2CompIntfTest::getTestValues(
const C2FieldSupportedValues &validValueInfos,
- std::vector<C2DomainKind> *const validValues,
- std::vector<C2DomainKind> *const invalidValues) {
+ std::vector<C2Component::domain_t> *const validValues,
+ std::vector<C2Component::domain_t> *const invalidValues) {
UNUSED(validValueInfos);
- validValues->emplace_back(C2DomainVideo);
- validValues->emplace_back(C2DomainAudio);
- validValues->emplace_back(C2DomainOther);
+ validValues->emplace_back(C2Component::DOMAIN_VIDEO);
+ validValues->emplace_back(C2Component::DOMAIN_AUDIO);
+ validValues->emplace_back(C2Component::DOMAIN_OTHER);
// There is no invalid value.
UNUSED(invalidValues);
@@ -634,20 +634,20 @@
std::vector<std::shared_ptr<C2ParamDescriptor>> supportedParams;
ASSERT_EQ(C2_OK, mIntf->querySupportedParams_nb(&supportedParams));
- EACH_TEST_SELF(C2ComponentLatencyInfo, TEST_U32_WRITABLE_FIELD);
- EACH_TEST_SELF(C2ComponentTemporalInfo, TEST_U32_WRITABLE_FIELD);
- EACH_TEST_INPUT(C2PortLatencyInfo, TEST_U32_WRITABLE_FIELD);
- EACH_TEST_OUTPUT(C2PortLatencyInfo, TEST_U32_WRITABLE_FIELD);
- EACH_TEST_INPUT(C2StreamFormatConfig, TEST_U32_WRITABLE_FIELD);
- EACH_TEST_OUTPUT(C2StreamFormatConfig, TEST_U32_WRITABLE_FIELD);
- EACH_TEST_INPUT(C2PortStreamCountConfig, TEST_U32_WRITABLE_FIELD);
- EACH_TEST_OUTPUT(C2PortStreamCountConfig, TEST_U32_WRITABLE_FIELD);
+ EACH_TEST_SELF(C2ActualPipelineDelayTuning, TEST_U32_WRITABLE_FIELD);
+ EACH_TEST_SELF(C2ComponentAttributesSetting, TEST_U32_WRITABLE_FIELD);
+ EACH_TEST_INPUT(C2PortActualDelayTuning, TEST_U32_WRITABLE_FIELD);
+ EACH_TEST_OUTPUT(C2PortActualDelayTuning, TEST_U32_WRITABLE_FIELD);
+ EACH_TEST_INPUT(C2StreamBufferTypeSetting, TEST_U32_WRITABLE_FIELD);
+ EACH_TEST_OUTPUT(C2StreamBufferTypeSetting, TEST_U32_WRITABLE_FIELD);
+ EACH_TEST_INPUT(C2PortStreamCountTuning, TEST_U32_WRITABLE_FIELD);
+ EACH_TEST_OUTPUT(C2PortStreamCountTuning, TEST_U32_WRITABLE_FIELD);
- EACH_TEST_SELF(C2ComponentDomainInfo, TEST_ENUM_WRITABLE_FIELD);
+ EACH_TEST_SELF(C2ComponentDomainSetting, TEST_ENUM_WRITABLE_FIELD);
// TODO(hiroh): Support parameters based on uint32_t[] and char[].
- // EACH_TEST_INPUT(C2PortMimeConfig, TEST_STRING_WRITABLE_FIELD);
- // EACH_TEST_OUTPUT(C2PortMimeConfig, TEST_STRING_WRITABLE_FIELD);
+ // EACH_TEST_INPUT(C2PortMediaTypeSetting, TEST_STRING_WRITABLE_FIELD);
+ // EACH_TEST_OUTPUT(C2PortMediaTypeSetting, TEST_STRING_WRITABLE_FIELD);
// EACH_TEST_INPUT(C2StreamMimeConfig, TEST_STRING_WRITABLE_FIELD);
// EACH_TEST_OUTPUT(C2StreamMimeConfig, TEST_STRING_WRITABLE_FIELD);
@@ -656,10 +656,10 @@
// EACH_TEST_SELF(C2ReadOnlyParamsInfo, TEST_U32ARRAY_WRITABLE_FIELD);
// EACH_TEST_SELF(C2RequestedInfosInfo, TEST_U32ARRAY_WRITABLE_FIELD);
- EACH_TEST_INPUT(C2VideoSizeStreamInfo, TEST_VSSTRUCT_WRITABLE_FIELD);
- EACH_TEST_OUTPUT(C2VideoSizeStreamInfo, TEST_VSSTRUCT_WRITABLE_FIELD);
- EACH_TEST_INPUT(C2VideoSizeStreamTuning, TEST_VSSTRUCT_WRITABLE_FIELD);
- EACH_TEST_OUTPUT(C2VideoSizeStreamTuning, TEST_VSSTRUCT_WRITABLE_FIELD);
+ EACH_TEST_INPUT(C2StreamPictureSizeInfo, TEST_VSSTRUCT_WRITABLE_FIELD);
+ EACH_TEST_OUTPUT(C2StreamPictureSizeInfo, TEST_VSSTRUCT_WRITABLE_FIELD);
+ EACH_TEST_INPUT(C2StreamPictureSizeInfo, TEST_VSSTRUCT_WRITABLE_FIELD);
+ EACH_TEST_OUTPUT(C2StreamPictureSizeInfo, TEST_VSSTRUCT_WRITABLE_FIELD);
EACH_TEST_INPUT(C2MaxVideoSizeHintPortSetting, TEST_VSSTRUCT_WRITABLE_FIELD);
EACH_TEST_OUTPUT(C2MaxVideoSizeHintPortSetting, TEST_VSSTRUCT_WRITABLE_FIELD);
diff --git a/media/codec2/tests/C2SampleComponent_test.cpp b/media/codec2/tests/C2SampleComponent_test.cpp
index cd354ad..9956834 100644
--- a/media/codec2/tests/C2SampleComponent_test.cpp
+++ b/media/codec2/tests/C2SampleComponent_test.cpp
@@ -152,7 +152,7 @@
std::unordered_map<uint32_t, C2Param &> mMyParams;
- C2ComponentDomainInfo mDomainInfo;
+ C2ComponentDomainSetting mDomainInfo;
MyComponentInstance() {
mMyParams.insert({mDomainInfo.index(), mDomainInfo});
@@ -187,12 +187,12 @@
c2_blocking_t mayBlock) const override {
(void)mayBlock;
for (C2FieldSupportedValuesQuery &query : fields) {
- if (query.field() == C2ParamField(&mDomainInfo, &C2ComponentDomainInfo::value)) {
+ if (query.field() == C2ParamField(&mDomainInfo, &C2ComponentDomainSetting::value)) {
query.values = C2FieldSupportedValues(
false /* flag */,
&mDomainInfo.value
//,
- //{(int32_t)C2DomainVideo}
+ //{(int32_t)C2Component::DOMAIN_VIDEO}
);
query.status = C2_OK;
} else {
@@ -391,20 +391,20 @@
}
TEST_F(C2SampleComponentTest, ReflectorTest) {
- C2ComponentDomainInfo domainInfo;
+ C2ComponentDomainSetting domainInfo;
std::shared_ptr<MyComponentInstance> myComp(new MyComponentInstance);
std::shared_ptr<C2ComponentInterface> comp = myComp;
std::unique_ptr<C2StructDescriptor> desc{
- myComp->getParamReflector()->describe(C2ComponentDomainInfo::CORE_INDEX)};
+ myComp->getParamReflector()->describe(C2ComponentDomainSetting::CORE_INDEX)};
dumpStruct(*desc);
std::vector<C2FieldSupportedValuesQuery> query = {
- { C2ParamField(&domainInfo, &C2ComponentDomainInfo::value),
+ { C2ParamField(&domainInfo, &C2ComponentDomainSetting::value),
C2FieldSupportedValuesQuery::CURRENT },
- C2FieldSupportedValuesQuery(C2ParamField(&domainInfo, &C2ComponentDomainInfo::value),
+ C2FieldSupportedValuesQuery(C2ParamField(&domainInfo, &C2ComponentDomainSetting::value),
C2FieldSupportedValuesQuery::CURRENT),
- C2FieldSupportedValuesQuery::Current(C2ParamField(&domainInfo, &C2ComponentDomainInfo::value)),
+ C2FieldSupportedValuesQuery::Current(C2ParamField(&domainInfo, &C2ComponentDomainSetting::value)),
};
EXPECT_EQ(C2_OK, comp->querySupportedValues_vb(query, C2_DONT_BLOCK));
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index f07d9b0..e075849 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -712,8 +712,8 @@
}
uint32_t mediaTypeIndex =
- traits->kind == C2Component::KIND_ENCODER ? C2PortMimeConfig::output::PARAM_TYPE
- : C2PortMimeConfig::input::PARAM_TYPE;
+ traits->kind == C2Component::KIND_ENCODER ? C2PortMediaTypeSetting::output::PARAM_TYPE
+ : C2PortMediaTypeSetting::input::PARAM_TYPE;
std::vector<std::unique_ptr<C2Param>> params;
res = intf->query_vb({}, { mediaTypeIndex }, C2_MAY_BLOCK, &params);
if (res != C2_OK) {
@@ -724,7 +724,7 @@
ALOGD("failed to query interface: unexpected vector size: %zu", params.size());
return mInit;
}
- C2PortMimeConfig *mediaTypeConfig = C2PortMimeConfig::From(params[0].get());
+ C2PortMediaTypeSetting *mediaTypeConfig = C2PortMediaTypeSetting::From(params[0].get());
if (mediaTypeConfig == nullptr) {
ALOGD("failed to query media type");
return mInit;
@@ -856,6 +856,7 @@
emplace("libcodec2_soft_h263dec.so");
emplace("libcodec2_soft_h263enc.so");
emplace("libcodec2_soft_hevcdec.so");
+ emplace("libcodec2_soft_hevcenc.so");
emplace("libcodec2_soft_mp3dec.so");
emplace("libcodec2_soft_mpeg2dec.so");
emplace("libcodec2_soft_mpeg4dec.so");
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index d9f6e36..1bce16f 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -1167,8 +1167,6 @@
case SET_STREAM_VOLUME:
case REGISTER_POLICY_MIXES:
case SET_MASTER_MONO:
- case START_AUDIO_SOURCE:
- case STOP_AUDIO_SOURCE:
case GET_SURROUND_FORMATS:
case SET_SURROUND_FORMAT_ENABLED:
case SET_ASSISTANT_UID:
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index a1e869f..b25f82e 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -123,15 +123,13 @@
status_t DeviceHalHidl::setMasterVolume(float volume) {
if (mDevice == 0) return NO_INIT;
- if (mPrimaryDevice == 0) return INVALID_OPERATION;
- return processReturn("setMasterVolume", mPrimaryDevice->setMasterVolume(volume));
+ return processReturn("setMasterVolume", mDevice->setMasterVolume(volume));
}
status_t DeviceHalHidl::getMasterVolume(float *volume) {
if (mDevice == 0) return NO_INIT;
- if (mPrimaryDevice == 0) return INVALID_OPERATION;
Result retval;
- Return<void> ret = mPrimaryDevice->getMasterVolume(
+ Return<void> ret = mDevice->getMasterVolume(
[&](Result r, float v) {
retval = r;
if (retval == Result::OK) {
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 998f096..9d3338b 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -171,11 +171,7 @@
}
struct CodecObserver : public BnOMXObserver {
- CodecObserver() {}
-
- void setNotificationMessage(const sp<AMessage> &msg) {
- mNotify = msg;
- }
+ explicit CodecObserver(const sp<AMessage> &msg) : mNotify(msg) {}
// from IOMXObserver
virtual void onMessages(const std::list<omx_message> &messages) {
@@ -251,7 +247,7 @@
virtual ~CodecObserver() {}
private:
- sp<AMessage> mNotify;
+ const sp<AMessage> mNotify;
DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};
@@ -1248,6 +1244,7 @@
info.mRenderInfo = NULL;
info.mGraphicBuffer = graphicBuffer;
info.mNewGraphicBuffer = false;
+ info.mDequeuedAt = mDequeueCounter;
// TODO: We shouln't need to create MediaCodecBuffer. In metadata mode
// OMX doesn't use the shared memory buffer, but some code still
@@ -6629,7 +6626,8 @@
CHECK(mCodec->mOMXNode == NULL);
- sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);
+ sp<AMessage> notify = new AMessage(kWhatOMXMessageList, mCodec);
+ notify->setInt32("generation", mCodec->mNodeGeneration + 1);
sp<RefBase> obj;
CHECK(msg->findObject("codecInfo", &obj));
@@ -6644,7 +6642,7 @@
AString componentName;
CHECK(msg->findString("componentName", &componentName));
- sp<CodecObserver> observer = new CodecObserver;
+ sp<CodecObserver> observer = new CodecObserver(notify);
sp<IOMX> omx;
sp<IOMXNode> omxNode;
@@ -6675,9 +6673,7 @@
mDeathNotifier.clear();
}
- notify = new AMessage(kWhatOMXMessageList, mCodec);
- notify->setInt32("generation", ++mCodec->mNodeGeneration);
- observer->setNotificationMessage(notify);
+ ++mCodec->mNodeGeneration;
mCodec->mComponentName = componentName;
mCodec->mRenderTracker.setComponentName(componentName);
@@ -8167,6 +8163,10 @@
OMX_CommandPortEnable, kPortIndexOutput);
}
+ // Clear the RenderQueue in which queued GraphicBuffers hold the
+ // actual buffer references in order to free them early.
+ mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
+
if (err == OK) {
err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
@@ -8572,7 +8572,7 @@
}
sp<IOMX> omx = client.interface();
- sp<CodecObserver> observer = new CodecObserver;
+ sp<CodecObserver> observer = new CodecObserver(new AMessage);
sp<IOMXNode> omxNode;
err = omx->allocateNode(name, observer, &omxNode);
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 42b98b1..18a6bd8 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -431,7 +431,7 @@
|| !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
if (frameTimeUs < 0) {
- int64_t thumbNailTime;
+ int64_t thumbNailTime = -1ll;
if (!trackMeta()->findInt64(kKeyThumbnailTime, &thumbNailTime)
|| thumbNailTime < 0) {
thumbNailTime = 0;
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index f34d54c..fa3d372 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -165,6 +165,9 @@
for (i = 0; i < n; ++i) {
sp<MetaData> meta = mExtractor->getTrackMetaData(i);
+ if (!meta) {
+ continue;
+ }
ALOGV("getting track %zu of %zu, meta=%s", i, n, meta->toString().c_str());
const char *mime;
@@ -186,6 +189,9 @@
}
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);
+ if (!trackMeta) {
+ return NULL;
+ }
if (metaOnly) {
return FrameDecoder::getMetadataOnly(trackMeta, colorFormat, thumbnail);
@@ -280,6 +286,9 @@
size_t i;
for (i = 0; i < n; ++i) {
sp<MetaData> meta = mExtractor->getTrackMetaData(i);
+ if (!meta) {
+ continue;
+ }
const char *mime;
CHECK(meta->findCString(kKeyMIMEType, &mime));
@@ -296,6 +305,9 @@
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(
i, MediaExtractor::kIncludeExtensiveMetaData);
+ if (!trackMeta) {
+ return UNKNOWN_ERROR;
+ }
if (metaOnly) {
if (outFrame != NULL) {
@@ -529,6 +541,9 @@
String8 timedTextLang;
for (size_t i = 0; i < numTracks; ++i) {
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(i);
+ if (!trackMeta) {
+ continue;
+ }
int64_t durationUs;
if (trackMeta->findInt64(kKeyDuration, &durationUs)) {
@@ -667,8 +682,9 @@
!strcasecmp(fileMIME, "video/x-matroska")) {
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(0);
const char *trackMIME;
- CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));
-
+ if (trackMeta == nullptr) {
+ return;
+ }
+ CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));
if (!strncasecmp("audio/", trackMIME, 6)) {
// The matroska file only contains a single audio track,
// rewrite its mime type.
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 09424b8..16b3319 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -588,6 +588,7 @@
{ "genre", kKeyGenre },
{ "location", kKeyLocation },
{ "lyricist", kKeyWriter },
+ { "manufacturer", kKeyManufacturer },
{ "title", kKeyTitle },
{ "year", kKeyYear },
}
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index 5c2d96d..e20174f 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -107,6 +107,15 @@
<Limit name="bitrate" range="1-12000000" />
<Feature name="intra-refresh" />
</MediaCodec>
+ <MediaCodec name="c2.android.hevc.encoder" type="video/hevc">
+ <!-- profiles and levels: ProfileMain : MainTierLevel51 -->
+ <Limit name="size" min="320x128" max="512x512" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="8x8" />
+ <Limit name="block-count" range="1-4096" /> <!-- max 512x512 -->
+ <Limit name="blocks-per-second" range="1-122880" />
+ <Limit name="bitrate" range="1-10000000" />
+ </MediaCodec>
<MediaCodec name="c2.android.mpeg4.encoder" type="video/mp4v-es">
<Alias name="OMX.google.mpeg4.encoder" />
<!-- profiles and levels: ProfileCore : Level2 -->
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 2ecfa43..5e7f90a 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -1234,7 +1234,7 @@
const AString &uri, uint32_t streamMask, int64_t timeUs, bool newUri) {
ssize_t index = mFetcherInfos.indexOfKey(uri);
if (index < 0) {
- ALOGE("did not find fetcher for uri: %s", uri.c_str());
+ ALOGE("did not find fetcher for uri: %s", uriDebugString(uri).c_str());
return false;
}
@@ -2005,7 +2005,7 @@
if ((mNewStreamMask & stream) && mStreams[idx].mNewUri.empty()) {
ALOGW("swapping stream type %d %s to empty stream",
- stream, mStreams[idx].mUri.c_str());
+ stream, uriDebugString(mStreams[idx].mUri).c_str());
}
mStreams[idx].mUri = mStreams[idx].mNewUri;
mStreams[idx].mNewUri.clear();
@@ -2033,7 +2033,7 @@
CHECK(idx >= 0);
if (mStreams[idx].mNewUri.empty()) {
ALOGW("swapping extra stream type %d %s to empty stream",
- stream, mStreams[idx].mUri.c_str());
+ stream, uriDebugString(mStreams[idx].mUri).c_str());
}
mStreams[idx].mUri = mStreams[idx].mNewUri;
mStreams[idx].mNewUri.clear();
@@ -2138,7 +2138,7 @@
ALOGV("stopping newUri = %s", newUri.c_str());
ssize_t index = mFetcherInfos.indexOfKey(newUri);
if (index < 0) {
- ALOGE("did not find fetcher for newUri: %s", newUri.c_str());
+ ALOGE("did not find fetcher for newUri: %s", uriDebugString(newUri).c_str());
continue;
}
FetcherInfo &info = mFetcherInfos.editValueAt(index);
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index 4392799..b2361b8 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -1205,8 +1205,7 @@
if (val.size() < 2
|| val.c_str()[0] != '"'
|| val.c_str()[val.size() - 1] != '"') {
- ALOGE("Expected quoted string for URI, got '%s' instead.",
- val.c_str());
+ ALOGE("Expected quoted string for URI.");
return ERROR_MALFORMED;
}
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 562c625..d153598 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -365,10 +365,10 @@
if (err == ERROR_NOT_CONNECTED) {
return ERROR_NOT_CONNECTED;
} else if (err < 0) {
- ALOGE("failed to fetch cipher key from '%s'.", keyURI.c_str());
+ ALOGE("failed to fetch cipher key from '%s'.", uriDebugString(keyURI).c_str());
return ERROR_IO;
} else if (key->size() != 16) {
- ALOGE("key file '%s' wasn't 16 bytes in size.", keyURI.c_str());
+ ALOGE("key file '%s' wasn't 16 bytes in size.", uriDebugString(keyURI).c_str());
return ERROR_MALFORMED;
}
@@ -1366,7 +1366,7 @@
}
if (bytesRead < 0) {
status_t err = bytesRead;
- ALOGE("failed to fetch .ts segment at url '%s'", uri.c_str());
+ ALOGE("failed to fetch .ts segment at url '%s'", uriDebugString(uri).c_str());
notifyError(err);
return;
}
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 437bdb7..a0407af 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -144,6 +144,9 @@
// The language code for this media
kKeyMediaLanguage = 'lang', // cstring
+ // The manufacturer code for this media
+ kKeyManufacturer = 'manu', // cstring
+
// To store the timed text format data
kKeyTextFormatData = 'text', // raw data
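[Editor's sketch, not part of the patch] The new key follows the same cstring conventions as the neighbouring keys, so a consumer holding a track MetaData (here a hypothetical `trackMeta`) would read it with findCString():

const char *manufacturer = nullptr;
if (trackMeta->findCString(kKeyManufacturer, &manufacturer)) {
    // Surface the value to the app layer as the "manufacturer" format key.
}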
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index 20cb415..789e62a 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -255,7 +255,7 @@
struct hostent *ent = gethostbyname(host.c_str());
if (ent == NULL) {
- ALOGE("Unknown host %s", host.c_str());
+ ALOGE("Unknown host %s", uriDebugString(host).c_str());
reply->setInt32("result", -ENOENT);
reply->post();
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index c581e9d..9263565 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -80,7 +80,7 @@
return false;
}
- ALOGI("%s", line.c_str());
+ ALOGV("%s", line.c_str());
switch (line.c_str()[0]) {
case 'v':
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 8454ca1..b4515e4 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -345,8 +345,7 @@
struct hostent *ent = gethostbyname(mSessionHost.c_str());
if (ent == NULL) {
- ALOGE("Failed to look up address of session host '%s'",
- mSessionHost.c_str());
+ ALOGE("Failed to look up address of session host");
return false;
}
@@ -531,7 +530,7 @@
mSessionURL.append(AStringPrintf("%u", port));
mSessionURL.append(path);
- ALOGI("rewritten session url: '%s'", mSessionURL.c_str());
+ ALOGV("rewritten session url: '%s'", mSessionURL.c_str());
}
sp<AMessage> reply = new AMessage('conn', this);
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index cd8ecb5..26a6238 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -342,6 +342,7 @@
EXPORT const char* AMEDIAFORMAT_KEY_LOCATION = "location";
EXPORT const char* AMEDIAFORMAT_KEY_LOOP = "loop";
EXPORT const char* AMEDIAFORMAT_KEY_LYRICIST = "lyricist";
+EXPORT const char* AMEDIAFORMAT_KEY_MANUFACTURER = "manufacturer";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_BIT_RATE = "max-bitrate";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_HEIGHT = "max-height";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE = "max-input-size";
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index cc1d9ef..ddf5291 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -214,6 +214,7 @@
extern const char* AMEDIAFORMAT_KEY_LOCATION __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_LOOP __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_LYRICIST __INTRODUCED_IN(29);
+extern const char* AMEDIAFORMAT_KEY_MANUFACTURER __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_MAX_BIT_RATE __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_MPEG2_STREAM_HEADER __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN __INTRODUCED_IN(29);
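[Editor's sketch, not part of the patch] Minimal usage of the new NDK key; `format` is presumed to be an AMediaFormat* obtained elsewhere (e.g. from an extractor), and AMediaFormat_getString() is the existing getter for string keys:

const char *manufacturer = nullptr;
if (AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MANUFACTURER, &manufacturer)) {
    // "manufacturer" metadata was present; the returned pointer is owned by the format.
}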
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 171167d..7bdd3ad 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -105,6 +105,7 @@
AMEDIAFORMAT_KEY_LOCATION; # var introduced=29
AMEDIAFORMAT_KEY_LOOP; # var introduced=29
AMEDIAFORMAT_KEY_LYRICIST; # var introduced=29
+ AMEDIAFORMAT_KEY_MANUFACTURER; # var introduced=29
AMEDIAFORMAT_KEY_MAX_BIT_RATE; # var introduced=29
AMEDIAFORMAT_KEY_MAX_HEIGHT; # var introduced=21
AMEDIAFORMAT_KEY_MAX_INPUT_SIZE; # var introduced=21
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 1c54aec..599c446 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -85,7 +85,7 @@
return false;
}
} else {
- if (appOps.noteOp(op, uid, resolvedOpPackageName) != AppOpsManager::MODE_ALLOWED) {
+ if (appOps.checkOp(op, uid, resolvedOpPackageName) != AppOpsManager::MODE_ALLOWED) {
ALOGE("Request denied by app op: %d", op);
return false;
}
diff --git a/services/audiopolicy/common/include/Volume.h b/services/audiopolicy/common/include/Volume.h
index 5ccc8fd..a3b6b36 100644
--- a/services/audiopolicy/common/include/Volume.h
+++ b/services/audiopolicy/common/include/Volume.h
@@ -20,6 +20,22 @@
#include <utils/Log.h>
#include <math.h>
+namespace android {
+/**
+ * VolumeSource is the discriminant for volume management on an output.
+ * Historically it was the legacy stream type; it may become the volume group, or a volume
+ * curve if more than one curve per volume group is allowed.
+ */
+enum VolumeSource : std::underlying_type<audio_stream_type_t>::type;
+static const VolumeSource VOLUME_SOURCE_NONE = static_cast<VolumeSource>(AUDIO_STREAM_DEFAULT);
+
+static inline VolumeSource streamToVolumeSource(audio_stream_type_t stream) {
+ return static_cast<VolumeSource>(stream);
+}
+
+
+} // namespace android
+
// Absolute min volume in dB (can be represented in single precision normal float value)
#define VOLUME_MIN_DB (-758)
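[Editor's sketch, not part of the patch] For now legacy stream types map one-to-one onto VolumeSource, so call sites convert at the boundary and index their volume state by the returned value:

VolumeSource vs = streamToVolumeSource(AUDIO_STREAM_MUSIC);
if (vs != VOLUME_SOURCE_NONE) {
    // Use vs (rather than the raw stream type) as the key for activity, mute and volume state.
}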
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index c84636e..cf9519b 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -73,7 +73,7 @@
virtual void dump(String8 *dst, int spaces) const
{
- dst->appendFormat("%*s- ActivityCount: %d, StopTime: %" PRId64 " \n", spaces, "",
+ dst->appendFormat("%*s- ActivityCount: %d, StopTime: %" PRId64 ", ", spaces, "",
getActivityCount(), getStopTime());
}
private:
@@ -82,6 +82,37 @@
};
/**
+ * @brief VolumeActivity: tracks the activity of a volume source for volume policy
+ * (current volume in dB, mute count, and the time of the previous stop).
+ */
+class VolumeActivity : public ActivityTracking
+{
+public:
+ bool isMuted() const { return mMuteCount > 0; }
+ int getMuteCount() const { return mMuteCount; }
+ int incMuteCount() { return ++mMuteCount; }
+ int decMuteCount() { return mMuteCount > 0 ? --mMuteCount : -1; }
+
+ void dump(String8 *dst, int spaces) const override
+ {
+ ActivityTracking::dump(dst, spaces);
+ dst->appendFormat(", Volume: %.03f, MuteCount: %02d\n", mCurVolumeDb, mMuteCount);
+ }
+ void setVolume(float volume) { mCurVolumeDb = volume; }
+ float getVolume() const { return mCurVolumeDb; }
+
+private:
+ int mMuteCount = 0; /**< mute request counter */
+ float mCurVolumeDb = NAN; /**< current volume in dB. */
+};
+/**
+ * Note: volume activities shall be indexed by CurvesId if we want to allow multiple
+ * curves per volume group, which implies that mute management or volume balancing
+ * between HW and SW is handled.
+ */
+using VolumeActivities = std::map<VolumeSource, VolumeActivity>;
+
+/**
* @brief The Activity class: it tracks the activity for volume policy (volume index, mute,
* memorize previous stop, and store mute if incompatible device with another strategy.
* Having this class prevents from looping on all attributes (legacy streams) of the strategy
@@ -92,6 +123,10 @@
void setMutedByDevice( bool isMuted) { mIsMutedByDevice = isMuted; }
bool isMutedByDevice() const { return mIsMutedByDevice; }
+ void dump(String8 *dst, int spaces) const override {
+ ActivityTracking::dump(dst, spaces);
+ dst->appendFormat("\n");
+ }
private:
/**
* strategies muted because of incompatible device selection.
@@ -128,15 +163,6 @@
bool force);
/**
- * Changes the stream active count and mActiveClients only.
- * This does not change the client->active() state or the output descriptor's
- * global active count.
- */
- virtual void changeStreamActiveCount(const sp<TrackClientDescriptor>& client, int delta);
- uint32_t streamActiveCount(audio_stream_type_t stream) const
- { return mActiveCount[stream]; }
-
- /**
* @brief setStopTime set the stop time due to the client stoppage or a re routing of this
* client
* @param client to be considered
@@ -148,13 +174,61 @@
* Changes the client->active() state and the output descriptor's global active count,
* along with the stream active count and mActiveClients.
* The client must be previously added by the base class addClient().
+ * In the case of a duplicating thread, the client shall be added on the duplicated thread,
+ * not on the involved outputs, but setClientActive will be called on each output to track
+ * the strategy and active clients for that output.
+ * The active ref count of the client is incremented/decremented through the setActive API.
*/
- void setClientActive(const sp<TrackClientDescriptor>& client, bool active);
+ virtual void setClientActive(const sp<TrackClientDescriptor>& client, bool active);
- bool isActive(uint32_t inPastMs = 0) const;
- bool isStreamActive(audio_stream_type_t stream,
- uint32_t inPastMs = 0,
- nsecs_t sysTime = 0) const;
+ bool isActive(uint32_t inPastMs) const;
+ bool isActive(VolumeSource volumeSource = VOLUME_SOURCE_NONE,
+ uint32_t inPastMs = 0,
+ nsecs_t sysTime = 0) const;
+ bool isAnyActive(VolumeSource volumeSourceToIgnore) const;
+
+ std::vector<VolumeSource> getActiveVolumeSources() const {
+ std::vector<VolumeSource> activeList;
+ for (const auto &iter : mVolumeActivities) {
+ if (iter.second.isActive()) {
+ activeList.push_back(iter.first);
+ }
+ }
+ return activeList;
+ }
+ uint32_t getActivityCount(VolumeSource vs) const
+ {
+ return mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
+ mVolumeActivities.at(vs).getActivityCount() : 0;
+ }
+ bool isMuted(VolumeSource vs) const
+ {
+ return mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
+ mVolumeActivities.at(vs).isMuted() : false;
+ }
+ int getMuteCount(VolumeSource vs) const
+ {
+ return mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
+ mVolumeActivities.at(vs).getMuteCount() : 0;
+ }
+ int incMuteCount(VolumeSource vs)
+ {
+ return mVolumeActivities[vs].incMuteCount();
+ }
+ int decMuteCount(VolumeSource vs)
+ {
+ return mVolumeActivities[vs].decMuteCount();
+ }
+ void setCurVolume(VolumeSource vs, float volume)
+ {
+ // Even if no activity is registered for this group yet, create the entry anyway.
+ mVolumeActivities[vs].setVolume(volume);
+ }
+ float getCurVolume(VolumeSource vs) const
+ {
+ return mVolumeActivities.find(vs) != std::end(mVolumeActivities) ?
+ mVolumeActivities.at(vs).getVolume() : NAN;
+ }
bool isStrategyActive(product_strategy_t ps, uint32_t inPastMs = 0, nsecs_t sysTime = 0) const
{
@@ -195,40 +269,36 @@
// it is possible that when a client is removed, we could remove its
// associated active count by calling changeStreamActiveCount(),
// but that would be hiding a problem, so we log fatal instead.
- auto it2 = mActiveClients.find(client);
- LOG_ALWAYS_FATAL_IF(it2 != mActiveClients.end(),
- "%s(%d) removing client portId %d which is active (count %zu)",
- __func__, mId, portId, it2->second);
+ auto clientIter = std::find(begin(mActiveClients), end(mActiveClients), client);
+ LOG_ALWAYS_FATAL_IF(clientIter != mActiveClients.end(),
+ "%s(%d) removing client portId %d which is active (count %d)",
+ __func__, mId, portId, client->getActivityCount());
ClientMapHandler<TrackClientDescriptor>::removeClient(portId);
}
- using ActiveClientMap = std::map<sp<TrackClientDescriptor>, size_t /* count */>;
- // required for duplicating thread
- const ActiveClientMap& getActiveClients() const {
+ const TrackClientVector& getActiveClients() const {
return mActiveClients;
}
DeviceVector mDevices; /**< current devices this output is routed to */
- nsecs_t mStopTime[AUDIO_STREAM_CNT];
- int mMuteCount[AUDIO_STREAM_CNT]; // mute request counter
AudioMix *mPolicyMix = nullptr; // non NULL when used by a dynamic policy
protected:
const sp<AudioPort> mPort;
AudioPolicyClientInterface * const mClientInterface;
- float mCurVolume[AUDIO_STREAM_CNT]; // current stream volume in dB
- uint32_t mActiveCount[AUDIO_STREAM_CNT]; // number of streams of each type active on this output
uint32_t mGlobalActiveCount = 0; // non-client-specific active count
audio_patch_handle_t mPatchHandle = AUDIO_PATCH_HANDLE_NONE;
audio_port_handle_t mId = AUDIO_PORT_HANDLE_NONE;
- // The ActiveClientMap shows the clients that contribute to the streams counts
+ // The ActiveClients shows the clients that contribute to the @VolumeSource counts
// and may include upstream clients from a duplicating thread.
// Compare with the ClientMap (mClients) which are external AudioTrack clients of the
// output descriptor (and do not count internal PatchTracks).
- ActiveClientMap mActiveClients;
+ TrackClientVector mActiveClients;
RoutingActivities mRoutingActivities; /**< track routing activity on this ouput.*/
+
+ VolumeActivities mVolumeActivities; /**< track volume activity on this output.*/
};
// Audio output driven by a software mixer in audio flinger.
@@ -250,8 +320,13 @@
virtual bool isFixedVolume(audio_devices_t device);
sp<SwAudioOutputDescriptor> subOutput1() { return mOutput1; }
sp<SwAudioOutputDescriptor> subOutput2() { return mOutput2; }
- void changeStreamActiveCount(
- const sp<TrackClientDescriptor>& client, int delta) override;
+ void setClientActive(const sp<TrackClientDescriptor>& client, bool active) override;
+ void setAllClientsInactive()
+ {
+ for (const auto &client : clientsList(true)) {
+ setClientActive(client, false);
+ }
+ }
virtual bool setVolume(float volume,
audio_stream_type_t stream,
audio_devices_t device,
@@ -344,25 +419,27 @@
public DefaultKeyedVector< audio_io_handle_t, sp<SwAudioOutputDescriptor> >
{
public:
- bool isStreamActive(audio_stream_type_t stream, uint32_t inPastMs = 0) const;
+ bool isActive(VolumeSource volumeSource, uint32_t inPastMs = 0) const;
/**
- * return whether a stream is playing remotely, override to change the definition of
+ * return whether any source contributing to VolumeSource is playing remotely, override
+ * to change the definition of
* local/remote playback, used for instance by notification manager to not make
* media players lose audio focus when not playing locally
* For the base implementation, "remotely" means playing during screen mirroring which
* uses an output for playback with a non-empty, non "0" address.
*/
- bool isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs = 0) const;
+ bool isActiveRemotely(VolumeSource volumeSource, uint32_t inPastMs = 0) const;
/**
- * return whether a stream is playing, but not on a "remote" device.
+ * return whether any source contributing to VolumeSource is playing, but not on a "remote"
+ * device.
* Override to change the definition of a local/remote playback.
* Used for instance by policy manager to alter the speaker playback ("speaker safe" behavior)
* when media plays or not locally.
* For the base implementation, "remotely" means playing during screen mirroring.
*/
- bool isStreamActiveLocally(audio_stream_type_t stream, uint32_t inPastMs = 0) const;
+ bool isActiveLocally(VolumeSource volumeSource, uint32_t inPastMs = 0) const;
/**
* @brief isStrategyActiveOnSameModule checks if the given strategy is active (or was active
@@ -409,9 +486,21 @@
sp<SwAudioOutputDescriptor> getPrimaryOutput() const;
/**
- * return true if any output is playing anything besides the stream to ignore
+ * @brief isAnyOutputActive checks if any output is active (aka playing) except the one(s) that
+ * hold the volume source to be ignored
+ * @param volumeSourceToIgnore source not considered in the activity detection
+ * @return true if any output is active for any source except the one to be ignored
*/
- bool isAnyOutputActive(audio_stream_type_t streamToIgnore) const;
+ bool isAnyOutputActive(VolumeSource volumeSourceToIgnore) const
+ {
+ for (size_t i = 0; i < size(); i++) {
+ const sp<AudioOutputDescriptor> &outputDesc = valueAt(i);
+ if (outputDesc->isAnyActive(volumeSourceToIgnore)) {
+ return true;
+ }
+ }
+ return false;
+ }
audio_devices_t getSupportedDevices(audio_io_handle_t handle) const;
@@ -424,12 +513,24 @@
public DefaultKeyedVector< audio_io_handle_t, sp<HwAudioOutputDescriptor> >
{
public:
- bool isStreamActive(audio_stream_type_t stream, uint32_t inPastMs = 0) const;
+ bool isActive(VolumeSource volumeSource, uint32_t inPastMs = 0) const;
/**
- * return true if any output is playing anything besides the stream to ignore
+ * @brief isAnyOutputActive checks if any output is active (aka playing) except the one(s) that
+ * hold the volume source to be ignored
+ * @param volumeSourceToIgnore source not considered in the activity detection
+ * @return true if any output is active for any source except the one to be ignored
*/
- bool isAnyOutputActive(audio_stream_type_t streamToIgnore) const;
+ bool isAnyOutputActive(VolumeSource volumeSourceToIgnore) const
+ {
+ for (size_t i = 0; i < size(); i++) {
+ const sp<AudioOutputDescriptor> &outputDesc = valueAt(i);
+ if (outputDesc->isAnyActive(volumeSourceToIgnore)) {
+ return true;
+ }
+ }
+ return false;
+ }
void dump(String8 *dst) const;
};
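[Editor's sketch, not part of the patch] How the policy engine might consult the new per-VolumeSource state; `outputDesc` is an assumed sp<SwAudioOutputDescriptor> and `vs` a VolumeSource obtained via streamToVolumeSource():

if (outputDesc->isActive(vs, 0 /* inPastMs */) && !outputDesc->isMuted(vs)) {
    float curDb = outputDesc->getCurVolume(vs);  // NAN until a volume has been applied
    // Re-apply or adjust the volume for this source only, leaving other sources untouched.
    outputDesc->setCurVolume(vs, curDb);
}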
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 2e44a60..4bb225d 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -28,6 +28,7 @@
#include <utils/RefBase.h>
#include <utils/String8.h>
#include <policy.h>
+#include <Volume.h>
#include "AudioPatch.h"
#include "EffectDescriptor.h"
@@ -62,7 +63,7 @@
mPreferredDeviceId = preferredDeviceId;
}
bool isPreferredDeviceForExclusiveUse() const { return mPreferredDeviceForExclusiveUse; }
- void setActive(bool active) { mActive = active; }
+ virtual void setActive(bool active) { mActive = active; }
bool active() const { return mActive; }
bool hasPreferredDevice(bool activeOnly = false) const {
return mPreferredDeviceId != AUDIO_PORT_HANDLE_NONE && (!activeOnly || mActive);
@@ -85,12 +86,13 @@
TrackClientDescriptor(audio_port_handle_t portId, uid_t uid, audio_session_t sessionId,
audio_attributes_t attributes, audio_config_base_t config,
audio_port_handle_t preferredDeviceId, audio_stream_type_t stream,
- product_strategy_t strategy, audio_output_flags_t flags,
+ product_strategy_t strategy, VolumeSource volumeSource,
+ audio_output_flags_t flags,
bool isPreferredDeviceForExclusiveUse,
std::vector<wp<SwAudioOutputDescriptor>> secondaryOutputs) :
ClientDescriptor(portId, uid, sessionId, attributes, config, preferredDeviceId,
isPreferredDeviceForExclusiveUse),
- mStream(stream), mStrategy(strategy), mFlags(flags),
+ mStream(stream), mStrategy(strategy), mVolumeSource(volumeSource), mFlags(flags),
mSecondaryOutputs(std::move(secondaryOutputs)) {}
~TrackClientDescriptor() override = default;
@@ -104,12 +106,41 @@
const std::vector<wp<SwAudioOutputDescriptor>>& getSecondaryOutputs() const {
return mSecondaryOutputs;
};
+ VolumeSource volumeSource() const { return mVolumeSource; }
+
+ void setActive(bool active) override
+ {
+ int delta = active ? 1 : -1;
+ changeActivityCount(delta);
+ }
+ void changeActivityCount(int delta)
+ {
+ if (delta > 0) {
+ mActivityCount += delta;
+ } else {
+ LOG_ALWAYS_FATAL_IF(!mActivityCount, "%s(%s) invalid delta %d, inactive client",
+ __func__, toShortString().c_str(), delta);
+ LOG_ALWAYS_FATAL_IF(static_cast<int>(mActivityCount) < -delta,
+ "%s(%s) invalid delta %d, active client count %d",
+ __func__, toShortString().c_str(), delta, mActivityCount);
+ mActivityCount += delta;
+ }
+ ClientDescriptor::setActive(mActivityCount > 0);
+ }
+ uint32_t getActivityCount() const { return mActivityCount; }
private:
const audio_stream_type_t mStream;
const product_strategy_t mStrategy;
+ const VolumeSource mVolumeSource;
const audio_output_flags_t mFlags;
const std::vector<wp<SwAudioOutputDescriptor>> mSecondaryOutputs;
+
+ /**
+ * Required for duplicating threads; prevents removing an active client from an output
+ * involved in a duplication.
+ */
+ uint32_t mActivityCount = 0;
};
class RecordClientDescriptor: public ClientDescriptor
@@ -148,7 +179,8 @@
public:
SourceClientDescriptor(audio_port_handle_t portId, uid_t uid, audio_attributes_t attributes,
const sp<AudioPatch>& patchDesc, const sp<DeviceDescriptor>& srcDevice,
- audio_stream_type_t stream, product_strategy_t strategy);
+ audio_stream_type_t stream, product_strategy_t strategy,
+ VolumeSource volumeSource);
~SourceClientDescriptor() override = default;
sp<AudioPatch> patchDesc() const { return mPatchDesc; }
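[Editor's sketch, not part of the patch] The ref-counting behaviour this enables for duplicating threads; `client`, `outputA` and `outputB` are assumed objects:

outputA->setClientActive(client, true);   // client activity count -> 1, client->active() == true
outputB->setClientActive(client, true);   // duplicated output     -> 2
outputA->setClientActive(client, false);  // -> 1, client stays active()
outputB->setClientActive(client, false);  // -> 0, client->active() == false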
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 77e7add..7293bc4 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -34,16 +34,8 @@
AudioOutputDescriptor::AudioOutputDescriptor(const sp<AudioPort>& port,
AudioPolicyClientInterface *clientInterface)
- : mPort(port)
- , mClientInterface(clientInterface)
+ : mPort(port), mClientInterface(clientInterface)
{
- // clear usage count for all stream types
- for (int i = 0; i < AUDIO_STREAM_CNT; i++) {
- mActiveCount[i] = 0;
- mCurVolume[i] = -1.0;
- mMuteCount[i] = 0;
- mStopTime[i] = 0;
- }
if (mPort.get() != nullptr) {
mPort->pickAudioProfile(mSamplingRate, mChannelMask, mFormat);
if (mPort->mGains.size() > 0) {
@@ -85,124 +77,73 @@
return hasSameHwModuleAs(outputDesc);
}
-void AudioOutputDescriptor::changeStreamActiveCount(const sp<TrackClientDescriptor>& client,
- int delta)
-{
- if (delta == 0) return;
- const audio_stream_type_t stream = client->stream();
- if ((delta + (int)mActiveCount[stream]) < 0) {
- // any mismatched active count will abort.
- LOG_ALWAYS_FATAL("%s(%s) invalid delta %d, active stream count %d",
- __func__, client->toShortString().c_str(), delta, mActiveCount[stream]);
- // mActiveCount[stream] = 0;
- // return;
- }
- mActiveCount[stream] += delta;
- mRoutingActivities[client->strategy()].changeActivityCount(delta);
-
- if (delta > 0) {
- mActiveClients[client] += delta;
- } else {
- auto it = mActiveClients.find(client);
- if (it == mActiveClients.end()) { // client not found!
- LOG_ALWAYS_FATAL("%s(%s) invalid delta %d, inactive client",
- __func__, client->toShortString().c_str(), delta);
- } else if (it->second < -delta) { // invalid delta!
- LOG_ALWAYS_FATAL("%s(%s) invalid delta %d, active client count %zu",
- __func__, client->toShortString().c_str(), delta, it->second);
- }
- it->second += delta;
- if (it->second == 0) {
- (void)mActiveClients.erase(it);
- }
- }
-
- ALOGV("%s stream %d, count %d", __FUNCTION__, stream, mActiveCount[stream]);
-}
-
void AudioOutputDescriptor::setStopTime(const sp<TrackClientDescriptor>& client, nsecs_t sysTime)
{
- mStopTime[client->stream()] = sysTime;
+ mVolumeActivities[client->volumeSource()].setStopTime(sysTime);
mRoutingActivities[client->strategy()].setStopTime(sysTime);
}
void AudioOutputDescriptor::setClientActive(const sp<TrackClientDescriptor>& client, bool active)
{
- LOG_ALWAYS_FATAL_IF(getClient(client->portId()) == nullptr,
- "%s(%d) does not exist on output descriptor", __func__, client->portId());
-
- if (active == client->active()) {
- ALOGW("%s(%s): ignored active: %d, current stream count %d",
- __func__, client->toShortString().c_str(),
- active, mActiveCount[client->stream()]);
+ auto clientIter = std::find(begin(mActiveClients), end(mActiveClients), client);
+ if (active == (clientIter != end(mActiveClients))) {
+ ALOGW("%s(%s): ignored active: %d, current stream count %d", __func__,
+ client->toShortString().c_str(), active,
+ mRoutingActivities.at(client->strategy()).getActivityCount());
return;
}
+ if (active) {
+ mActiveClients.push_back(client);
+ } else {
+ mActiveClients.erase(clientIter);
+ }
const int delta = active ? 1 : -1;
- changeStreamActiveCount(client, delta);
+ // If ps is unknown, it is time to track it!
+ mRoutingActivities[client->strategy()].changeActivityCount(delta);
+ mVolumeActivities[client->volumeSource()].changeActivityCount(delta);
// Handle non-client-specific activity ref count
int32_t oldGlobalActiveCount = mGlobalActiveCount;
if (!active && mGlobalActiveCount < 1) {
ALOGW("%s(%s): invalid deactivation with globalRefCount %d",
- __func__, client->toShortString().c_str(), mGlobalActiveCount);
+ __func__, client->toShortString().c_str(), mGlobalActiveCount);
mGlobalActiveCount = 1;
}
mGlobalActiveCount += delta;
- if ((oldGlobalActiveCount == 0) && (mGlobalActiveCount > 0)) {
- if ((mPolicyMix != NULL) && ((mPolicyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0))
- {
+ if ((mPolicyMix != NULL) && ((mPolicyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
+ if ((oldGlobalActiveCount == 0) || (mGlobalActiveCount == 0)) {
mClientInterface->onDynamicPolicyMixStateUpdate(mPolicyMix->mDeviceAddress,
- MIX_STATE_MIXING);
- }
- } else if ((oldGlobalActiveCount > 0) && (mGlobalActiveCount == 0)) {
- if ((mPolicyMix != NULL) && ((mPolicyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0))
- {
- mClientInterface->onDynamicPolicyMixStateUpdate(mPolicyMix->mDeviceAddress,
- MIX_STATE_IDLE);
+ mGlobalActiveCount > 0 ? MIX_STATE_MIXING : MIX_STATE_IDLE);
}
}
-
client->setActive(active);
}
+bool AudioOutputDescriptor::isActive(VolumeSource vs, uint32_t inPastMs, nsecs_t sysTime) const
+{
+ return (vs == VOLUME_SOURCE_NONE) ?
+ isActive(inPastMs) : (mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
+ mVolumeActivities.at(vs).isActive(inPastMs, sysTime) : false);
+}
+
bool AudioOutputDescriptor::isActive(uint32_t inPastMs) const
{
nsecs_t sysTime = 0;
if (inPastMs != 0) {
sysTime = systemTime();
}
- for (int i = 0; i < (int)AUDIO_STREAM_CNT; i++) {
- if (i == AUDIO_STREAM_PATCH) {
+ for (const auto &iter : mVolumeActivities) {
+ if (iter.first == streamToVolumeSource(AUDIO_STREAM_PATCH)) {
continue;
}
- if (isStreamActive((audio_stream_type_t)i, inPastMs, sysTime)) {
+ if (iter.second.isActive(inPastMs, sysTime)) {
return true;
}
}
return false;
}
-bool AudioOutputDescriptor::isStreamActive(audio_stream_type_t stream,
- uint32_t inPastMs,
- nsecs_t sysTime) const
-{
- if (mActiveCount[stream] != 0) {
- return true;
- }
- if (inPastMs == 0) {
- return false;
- }
- if (sysTime == 0) {
- sysTime = systemTime();
- }
- if (ns2ms(sysTime - mStopTime[stream]) < inPastMs) {
- return true;
- }
- return false;
-}
-
-
bool AudioOutputDescriptor::isFixedVolume(audio_devices_t device __unused)
{
return false;
@@ -217,9 +158,9 @@
// We actually change the volume if:
// - the float value returned by computeVolume() changed
// - the force flag is set
- if (volume != mCurVolume[stream] || force) {
+ if (volume != getCurVolume(static_cast<VolumeSource>(stream)) || force) {
ALOGV("setVolume() for stream %d, volume %f, delay %d", stream, volume, delayMs);
- mCurVolume[stream] = volume;
+ setCurVolume(static_cast<VolumeSource>(stream), volume);
return true;
}
return false;
@@ -266,6 +207,13 @@
return clients;
}
+bool AudioOutputDescriptor::isAnyActive(VolumeSource volumeSourceToIgnore) const
+{
+ return std::find_if(begin(mActiveClients), end(mActiveClients),
+ [&volumeSourceToIgnore](const auto &client) {
+ return client->volumeSource() != volumeSourceToIgnore; }) != end(mActiveClients);
+}
+
void AudioOutputDescriptor::dump(String8 *dst) const
{
dst->appendFormat(" ID: %d\n", mId);
@@ -274,20 +222,22 @@
dst->appendFormat(" Channels: %08x\n", mChannelMask);
dst->appendFormat(" Devices: %s\n", devices().toString().c_str());
dst->appendFormat(" Global active count: %u\n", mGlobalActiveCount);
- dst->append(" Stream volume activeCount muteCount\n");
- for (int i = 0; i < (int)AUDIO_STREAM_CNT; i++) {
- dst->appendFormat(" %02d %.03f %02d %02d\n",
- i, mCurVolume[i], streamActiveCount((audio_stream_type_t)i), mMuteCount[i]);
+ for (const auto &iter : mRoutingActivities) {
+ dst->appendFormat(" Product Strategy id: %d", iter.first);
+ iter.second.dump(dst, 4);
+ }
+ for (const auto &iter : mVolumeActivities) {
+ dst->appendFormat(" Volume Activities id: %d", iter.first);
+ iter.second.dump(dst, 4);
}
dst->append(" AudioTrack Clients:\n");
ClientMapHandler<TrackClientDescriptor>::dump(dst);
dst->append("\n");
- if (mActiveClients.size() > 0) {
+ if (!mActiveClients.empty()) {
dst->append(" AudioTrack active (stream) clients:\n");
size_t index = 0;
- for (const auto& clientPair : mActiveClients) {
- dst->appendFormat(" Refcount: %zu", clientPair.second);
- clientPair.first->dump(dst, 2, index++);
+ for (const auto& client : mActiveClients) {
+ client->dump(dst, 2, index++);
}
dst->append(" \n");
}
@@ -388,15 +338,14 @@
}
}
-void SwAudioOutputDescriptor::changeStreamActiveCount(const sp<TrackClientDescriptor>& client,
- int delta)
+void SwAudioOutputDescriptor::setClientActive(const sp<TrackClientDescriptor>& client, bool active)
{
// forward usage count change to attached outputs
if (isDuplicated()) {
- mOutput1->changeStreamActiveCount(client, delta);
- mOutput2->changeStreamActiveCount(client, delta);
+ mOutput1->setClientActive(client, active);
+ mOutput2->setClientActive(client, active);
}
- AudioOutputDescriptor::changeStreamActiveCount(client, delta);
+ AudioOutputDescriptor::setClientActive(client, active);
}
bool SwAudioOutputDescriptor::isFixedVolume(audio_devices_t device)
@@ -445,19 +394,16 @@
uint32_t delayMs,
bool force)
{
- bool changed = AudioOutputDescriptor::setVolume(volume, stream, device, delayMs, force);
-
- if (changed) {
- // Force VOICE_CALL to track BLUETOOTH_SCO stream volume when bluetooth audio is
- // enabled
- float volume = Volume::DbToAmpl(mCurVolume[stream]);
- if (stream == AUDIO_STREAM_BLUETOOTH_SCO) {
- mClientInterface->setStreamVolume(
- AUDIO_STREAM_VOICE_CALL, volume, mIoHandle, delayMs);
- }
- mClientInterface->setStreamVolume(stream, volume, mIoHandle, delayMs);
+ if (!AudioOutputDescriptor::setVolume(volume, stream, device, delayMs, force)) {
+ return false;
}
- return changed;
+ // Force VOICE_CALL to track BLUETOOTH_SCO stream volume when bluetooth audio is enabled
+ float volumeAmpl = Volume::DbToAmpl(getCurVolume(static_cast<VolumeSource>(stream)));
+ if (stream == AUDIO_STREAM_BLUETOOTH_SCO) {
+ mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mIoHandle, delayMs);
+ }
+ mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+ return true;
}
status_t SwAudioOutputDescriptor::open(const audio_config_t *config,
@@ -660,24 +606,24 @@
}
// SwAudioOutputCollection implementation
-bool SwAudioOutputCollection::isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const
+bool SwAudioOutputCollection::isActive(VolumeSource volumeSource, uint32_t inPastMs) const
{
nsecs_t sysTime = systemTime();
for (size_t i = 0; i < this->size(); i++) {
const sp<SwAudioOutputDescriptor> outputDesc = this->valueAt(i);
- if (outputDesc->isStreamActive(stream, inPastMs, sysTime)) {
+ if (outputDesc->isActive(volumeSource, inPastMs, sysTime)) {
return true;
}
}
return false;
}
-bool SwAudioOutputCollection::isStreamActiveLocally(audio_stream_type_t stream, uint32_t inPastMs) const
+bool SwAudioOutputCollection::isActiveLocally(VolumeSource volumeSource, uint32_t inPastMs) const
{
nsecs_t sysTime = systemTime();
for (size_t i = 0; i < this->size(); i++) {
const sp<SwAudioOutputDescriptor> outputDesc = this->valueAt(i);
- if (outputDesc->isStreamActive(stream, inPastMs, sysTime)
+ if (outputDesc->isActive(volumeSource, inPastMs, sysTime)
&& ((outputDesc->devices().types() & APM_AUDIO_OUT_DEVICE_REMOTE_ALL) == 0)) {
return true;
}
@@ -685,14 +631,13 @@
return false;
}
-bool SwAudioOutputCollection::isStreamActiveRemotely(audio_stream_type_t stream,
- uint32_t inPastMs) const
+bool SwAudioOutputCollection::isActiveRemotely(VolumeSource volumeSource, uint32_t inPastMs) const
{
nsecs_t sysTime = systemTime();
for (size_t i = 0; i < size(); i++) {
const sp<SwAudioOutputDescriptor> outputDesc = valueAt(i);
if (((outputDesc->devices().types() & APM_AUDIO_OUT_DEVICE_REMOTE_ALL) != 0) &&
- outputDesc->isStreamActive(stream, inPastMs, sysTime)) {
+ outputDesc->isActive(volumeSource, inPastMs, sysTime)) {
// do not consider re routing (when the output is going to a dynamic policy)
// as "remote playback"
if (outputDesc->mPolicyMix == NULL) {
@@ -775,22 +720,6 @@
return NULL;
}
-bool SwAudioOutputCollection::isAnyOutputActive(audio_stream_type_t streamToIgnore) const
-{
- for (size_t s = 0 ; s < AUDIO_STREAM_CNT ; s++) {
- if (s == (size_t) streamToIgnore) {
- continue;
- }
- for (size_t i = 0; i < size(); i++) {
- const sp<SwAudioOutputDescriptor> outputDesc = valueAt(i);
- if (outputDesc->streamActiveCount((audio_stream_type_t)s)!= 0) {
- return true;
- }
- }
- }
- return false;
-}
-
sp<SwAudioOutputDescriptor> SwAudioOutputCollection::getOutputForClient(audio_port_handle_t portId)
{
for (size_t i = 0; i < size(); i++) {
@@ -825,34 +754,18 @@
}
// HwAudioOutputCollection implementation
-bool HwAudioOutputCollection::isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const
+bool HwAudioOutputCollection::isActive(VolumeSource volumeSource, uint32_t inPastMs) const
{
nsecs_t sysTime = systemTime();
for (size_t i = 0; i < this->size(); i++) {
const sp<HwAudioOutputDescriptor> outputDesc = this->valueAt(i);
- if (outputDesc->isStreamActive(stream, inPastMs, sysTime)) {
+ if (outputDesc->isActive(volumeSource, inPastMs, sysTime)) {
return true;
}
}
return false;
}
-bool HwAudioOutputCollection::isAnyOutputActive(audio_stream_type_t streamToIgnore) const
-{
- for (size_t s = 0 ; s < AUDIO_STREAM_CNT ; s++) {
- if (s == (size_t) streamToIgnore) {
- continue;
- }
- for (size_t i = 0; i < size(); i++) {
- const sp<HwAudioOutputDescriptor> outputDesc = valueAt(i);
- if (outputDesc->streamActiveCount((audio_stream_type_t)s) != 0) {
- return true;
- }
- }
- }
- return false;
-}
-
void HwAudioOutputCollection::dump(String8 *dst) const
{
dst->append("\nOutputs dump:\n");
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 633c40e..ad07ab1 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -20,6 +20,7 @@
#include <sstream>
#include <utils/Log.h>
#include <utils/String8.h>
+#include <TypeConverter.h>
#include "AudioGain.h"
#include "AudioOutputDescriptor.h"
#include "AudioPatch.h"
@@ -45,6 +46,7 @@
mPortId, mSessionId, mUid);
dst->appendFormat("%*s- Format: %08x Sampling rate: %d Channels: %08x\n", spaces, "",
mConfig.format, mConfig.sample_rate, mConfig.channel_mask);
+ dst->appendFormat("%*s- Attributes: %s\n", spaces, "", toString(mAttributes).c_str());
dst->appendFormat("%*s- Preferred Device Id: %08x\n", spaces, "", mPreferredDeviceId);
dst->appendFormat("%*s- State: %s\n", spaces, "", mActive ? "Active" : "Inactive");
}
@@ -53,6 +55,7 @@
{
ClientDescriptor::dump(dst, spaces, index);
dst->appendFormat("%*s- Stream: %d flags: %08x\n", spaces, "", mStream, mFlags);
+ dst->appendFormat("%*s- Refcount: %d\n", spaces, "", mActivityCount);
}
std::string TrackClientDescriptor::toShortString() const
@@ -82,10 +85,10 @@
SourceClientDescriptor::SourceClientDescriptor(audio_port_handle_t portId, uid_t uid,
audio_attributes_t attributes, const sp<AudioPatch>& patchDesc,
const sp<DeviceDescriptor>& srcDevice, audio_stream_type_t stream,
- product_strategy_t strategy) :
+ product_strategy_t strategy, VolumeSource volumeSource) :
TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
AUDIO_CONFIG_BASE_INITIALIZER, AUDIO_PORT_HANDLE_NONE,
- stream, strategy, AUDIO_OUTPUT_FLAG_NONE, false,
+ stream, strategy, volumeSource, AUDIO_OUTPUT_FLAG_NONE, false,
{} /* Sources do not support secondary outputs*/),
mPatchDesc(patchDesc), mSrcDevice(srcDevice)
{
diff --git a/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml b/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml
index 3c48e88..e6e6bdb 100644
--- a/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/hearing_aid_audio_policy_configuration.xml
@@ -2,7 +2,7 @@
<!-- Hearing aid Audio HAL Audio Policy Configuration file -->
<module name="hearing_aid" halVersion="2.0">
<mixPorts>
- <mixPort name="hearing aid output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <mixPort name="hearing aid output" role="source">
<profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
samplingRates="24000,16000"
channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index f486dca..719ef01 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -224,16 +224,16 @@
audio_devices_t devices = AUDIO_DEVICE_NONE;
if (ps == getProductStrategyForStream(AUDIO_STREAM_NOTIFICATION) &&
!is_state_in_call(getPhoneState()) &&
- !outputs.isStreamActiveRemotely(AUDIO_STREAM_MUSIC,
- SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY) &&
- outputs.isStreamActive(AUDIO_STREAM_MUSIC,
- SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) {
+ !outputs.isActiveRemotely(streamToVolumeSource(AUDIO_STREAM_MUSIC),
+ SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY) &&
+ outputs.isActive(streamToVolumeSource(AUDIO_STREAM_MUSIC),
+ SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) {
product_strategy_t strategyForMedia =
getProductStrategyForStream(AUDIO_STREAM_MUSIC);
devices = productStrategies.getDeviceTypesForProductStrategy(strategyForMedia);
} else if (ps == getProductStrategyForStream(AUDIO_STREAM_ACCESSIBILITY) &&
- (outputs.isStreamActive(AUDIO_STREAM_RING) ||
- outputs.isStreamActive(AUDIO_STREAM_ALARM))) {
+ (outputs.isActive(streamToVolumeSource(AUDIO_STREAM_RING)) ||
+ outputs.isActive(streamToVolumeSource(AUDIO_STREAM_ALARM)))) {
// do not route accessibility prompts to a digital output currently configured with a
// compressed format as they would likely not be mixed and dropped.
// Device For Sonification conf file has HDMI, SPDIF and HDMI ARC unreachable.
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 93af8a6..43023a8 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -182,16 +182,17 @@
break;
case STRATEGY_SONIFICATION_RESPECTFUL:
- if (isInCall() || outputs.isStreamActiveLocally(AUDIO_STREAM_VOICE_CALL)) {
+ if (isInCall() || outputs.isActiveLocally(streamToVolumeSource(AUDIO_STREAM_VOICE_CALL))) {
device = getDeviceForStrategyInt(
STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs,
outputDeviceTypesToIgnore);
} else {
bool media_active_locally =
- outputs.isStreamActiveLocally(
- AUDIO_STREAM_MUSIC, SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)
- || outputs.isStreamActiveLocally(
- AUDIO_STREAM_ACCESSIBILITY, SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY);
+ outputs.isActiveLocally(streamToVolumeSource(AUDIO_STREAM_MUSIC),
+ SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)
+ || outputs.isActiveLocally(
+ streamToVolumeSource(AUDIO_STREAM_ACCESSIBILITY),
+ SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY);
// routing is same as media without the "remote" device
device = getDeviceForStrategyInt(STRATEGY_MEDIA,
availableOutputDevices,
@@ -324,7 +325,8 @@
case STRATEGY_SONIFICATION:
// If incall, just select the STRATEGY_PHONE device
- if (isInCall() || outputs.isStreamActiveLocally(AUDIO_STREAM_VOICE_CALL)) {
+ if (isInCall() ||
+ outputs.isActiveLocally(streamToVolumeSource(AUDIO_STREAM_VOICE_CALL))) {
device = getDeviceForStrategyInt(
STRATEGY_PHONE, availableOutputDevices, availableInputDevices, outputs,
outputDeviceTypesToIgnore);
@@ -397,8 +399,8 @@
}
availableOutputDevices =
availableOutputDevices.getDevicesFromTypeMask(availableOutputDevicesType);
- if (outputs.isStreamActive(AUDIO_STREAM_RING) ||
- outputs.isStreamActive(AUDIO_STREAM_ALARM)) {
+ if (outputs.isActive(streamToVolumeSource(AUDIO_STREAM_RING)) ||
+ outputs.isActive(streamToVolumeSource(AUDIO_STREAM_ALARM))) {
return getDeviceForStrategyInt(
STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs,
outputDeviceTypesToIgnore);
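The engine changes above systematically replace isStreamActive(stream, ...) with isActive(streamToVolumeSource(stream), ...). Assuming, as the static_cast<VolumeSource>(stream) calls in setVolume() earlier suggest, that VolumeSource still maps one-to-one onto audio_stream_type_t at this stage of the refactor, a minimal stand-in for the conversion looks like the following (the enum here is a trimmed, illustrative subset, not the full audio_stream_type_t definition):

// Illustrative stand-in only: assumes a 1:1 stream-to-volume-source mapping.
#include <cstdint>
#include <iostream>

enum audio_stream_type_t : int32_t {  // trimmed subset for the sketch
    AUDIO_STREAM_VOICE_CALL = 0,
    AUDIO_STREAM_RING = 2,
    AUDIO_STREAM_MUSIC = 3,
    AUDIO_STREAM_ALARM = 4,
};

using VolumeSource = int32_t;

static VolumeSource streamToVolumeSource(audio_stream_type_t stream) {
    return static_cast<VolumeSource>(stream);
}

int main() {
    std::cout << streamToVolumeSource(AUDIO_STREAM_MUSIC) << "\n";  // prints 3
    return 0;
}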
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 32cc380..5b752ea 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -209,7 +209,26 @@
return BAD_VALUE;
}
- checkForDeviceAndOutputChanges([&]() {
+ // No need to evaluate playback routing when connecting a remote submix
+ // output device used by a dynamic policy of type recorder as no
+ // playback use case is affected.
+ bool doCheckForDeviceAndOutputChanges = true;
+ if (device->type() == AUDIO_DEVICE_OUT_REMOTE_SUBMIX
+ && strncmp(device_address, "0", AUDIO_DEVICE_MAX_ADDRESS_LEN) != 0) {
+ for (audio_io_handle_t output : outputs) {
+ sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
+ if (desc->mPolicyMix != nullptr
+ && desc->mPolicyMix->mMixType == MIX_TYPE_RECORDERS
+ && strncmp(device_address,
+ desc->mPolicyMix->mDeviceAddress.string(),
+ AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0) {
+ doCheckForDeviceAndOutputChanges = false;
+ break;
+ }
+ }
+ }
+
+ auto checkCloseOutputs = [&]() {
// outputs must be closed after checkOutputForAllStrategies() is executed
if (!outputs.isEmpty()) {
for (audio_io_handle_t output : outputs) {
@@ -218,7 +237,7 @@
// been opened by checkOutputsForDevice() to query dynamic parameters
if ((state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) ||
(((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) &&
- (desc->mDirectOpenCount == 0))) {
+ (desc->mDirectOpenCount == 0))) {
closeOutput(output);
}
}
@@ -226,7 +245,13 @@
return true;
}
return false;
- });
+ };
+
+ if (doCheckForDeviceAndOutputChanges) {
+ checkForDeviceAndOutputChanges(checkCloseOutputs);
+ } else {
+ checkCloseOutputs();
+ }
if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
@@ -1078,6 +1103,7 @@
new TrackClientDescriptor(*portId, uid, session, resultAttr, clientConfig,
sanitizedRequestedPortId, *stream,
mEngine->getProductStrategyForAttributes(resultAttr),
+ streamToVolumeSource(*stream),
*flags, isRequestedDeviceForExclusiveUse,
std::move(weakSecondaryOutputDescs));
sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(*output);
@@ -1569,11 +1595,13 @@
*delayMs = 0;
audio_stream_type_t stream = client->stream();
+ auto clientVolSrc = client->volumeSource();
auto clientStrategy = client->strategy();
auto clientAttr = client->attributes();
if (stream == AUDIO_STREAM_TTS) {
ALOGV("\t found BEACON stream");
- if (!mTtsOutputAvailable && mOutputs.isAnyOutputActive(AUDIO_STREAM_TTS /*streamToIgnore*/)) {
+ if (!mTtsOutputAvailable && mOutputs.isAnyOutputActive(
+ streamToVolumeSource(AUDIO_STREAM_TTS) /*sourceToIgnore*/)) {
return INVALID_OPERATION;
} else {
beaconMuteLatency = handleEventForBeacon(STARTING_BEACON);
@@ -1628,7 +1656,7 @@
selectOutputForMusicEffects();
}
- if (outputDesc->streamActiveCount(stream) == 1 || !devices.isEmpty()) {
+ if (outputDesc->getActivityCount(clientVolSrc) == 1 || !devices.isEmpty()) {
// starting an output being rerouted?
if (devices.isEmpty()) {
devices = getNewOutputDevices(outputDesc, false /*fromCache*/);
@@ -1754,11 +1782,12 @@
{
// always handle stream stop, check which stream type is stopping
audio_stream_type_t stream = client->stream();
+ auto clientVolSrc = client->volumeSource();
handleEventForBeacon(stream == AUDIO_STREAM_TTS ? STOPPING_BEACON : STOPPING_OUTPUT);
- if (outputDesc->streamActiveCount(stream) > 0) {
- if (outputDesc->streamActiveCount(stream) == 1) {
+ if (outputDesc->getActivityCount(clientVolSrc) > 0) {
+ if (outputDesc->getActivityCount(clientVolSrc) == 1) {
// Automatically disable the remote submix input when output is stopped on a
// re routing mix of type MIX_TYPE_RECORDERS
if (audio_is_remote_submix_device(outputDesc->devices().types()) &&
@@ -1780,7 +1809,7 @@
outputDesc->setClientActive(client, false);
// store time at which the stream was stopped - see isStreamActive()
- if (outputDesc->streamActiveCount(stream) == 0 || forceDeviceUpdate) {
+ if (outputDesc->getActivityCount(clientVolSrc) == 0 || forceDeviceUpdate) {
outputDesc->setStopTime(client, systemTime());
DeviceVector newDevices = getNewOutputDevices(outputDesc, false /*fromCache*/);
// delay the device switch by twice the latency because stopOutput() is executed when
@@ -2411,7 +2440,7 @@
if (!(streamsMatchForvolume(stream, (audio_stream_type_t)curStream))) {
continue;
}
- if (!(desc->isStreamActive((audio_stream_type_t)curStream) || isInCall())) {
+ if (!(desc->isActive(streamToVolumeSource((audio_stream_type_t)curStream)) || isInCall())) {
continue;
}
audio_devices_t curStreamDevice = Volume::getDeviceForVolume(
@@ -2499,7 +2528,7 @@
for (audio_io_handle_t output : outputs) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
- if (activeOnly && !desc->isStreamActive(AUDIO_STREAM_MUSIC)) {
+ if (activeOnly && !desc->isActive(streamToVolumeSource(AUDIO_STREAM_MUSIC))) {
continue;
}
ALOGV("selectOutputForMusicEffects activeOnly %d output %d flags 0x%08x",
@@ -2593,14 +2622,14 @@
if (!streamsMatchForvolume(stream, (audio_stream_type_t)curStream)) {
continue;
}
- active = mOutputs.isStreamActive((audio_stream_type_t)curStream, inPastMs);
+ active = mOutputs.isActive(streamToVolumeSource((audio_stream_type_t)curStream), inPastMs);
}
return active;
}
bool AudioPolicyManager::isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs) const
{
- return mOutputs.isStreamActiveRemotely(stream, inPastMs);
+ return mOutputs.isActiveRemotely(streamToVolumeSource((audio_stream_type_t)stream), inPastMs);
}
bool AudioPolicyManager::isSourceActive(audio_source_t source) const
@@ -3630,10 +3659,11 @@
struct audio_patch dummyPatch = {};
sp<AudioPatch> patchDesc = new AudioPatch(&dummyPatch, uid);
- sp<SourceClientDescriptor> sourceDesc =
- new SourceClientDescriptor(*portId, uid, *attributes, patchDesc, srcDevice,
- mEngine->getStreamTypeForAttributes(*attributes),
- mEngine->getProductStrategyForAttributes(*attributes));
+ sp<SourceClientDescriptor> sourceDesc = new SourceClientDescriptor(
+ *portId, uid, *attributes, patchDesc, srcDevice,
+ mEngine->getStreamTypeForAttributes(*attributes),
+ mEngine->getProductStrategyForAttributes(*attributes),
+ streamToVolumeSource(mEngine->getStreamTypeForAttributes(*attributes)));
status_t status = connectAudioSource(sourceDesc);
if (status == NO_ERROR) {
@@ -4048,14 +4078,18 @@
std::vector<const char*> fileNames;
status_t ret;
- if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false) &&
- property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
- // A2DP offload supported but disabled: try to use special XML file
- if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.enabled", false)) {
- fileNames.push_back(AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME);
- } else {
+ if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false)) {
+ if (property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
fileNames.push_back(AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME);
+ } else if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.enabled", false)) {
+ // The property persist.bluetooth.bluetooth_audio_hal.enabled is temporary only.
+ // Although the xml file AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME has
+ // the same name in both cases, the offload and non-offload variants must differ in
+ // the device-specific configuration.
+ fileNames.push_back(AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME);
}
+ } else if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.enabled", false)) {
+ fileNames.push_back(AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME);
}
fileNames.push_back(AUDIO_POLICY_XML_CONFIG_FILE_NAME);
@@ -4704,37 +4738,31 @@
{
ALOGV("closeOutput(%d)", output);
- sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
- if (outputDesc == NULL) {
+ sp<SwAudioOutputDescriptor> closingOutput = mOutputs.valueFor(output);
+ if (closingOutput == NULL) {
ALOGW("closeOutput() unknown output %d", output);
return;
}
- mPolicyMixes.closeOutput(outputDesc);
+ mPolicyMixes.closeOutput(closingOutput);
// look for duplicated outputs connected to the output being removed.
for (size_t i = 0; i < mOutputs.size(); i++) {
- sp<SwAudioOutputDescriptor> dupOutputDesc = mOutputs.valueAt(i);
- if (dupOutputDesc->isDuplicated() &&
- (dupOutputDesc->mOutput1 == outputDesc ||
- dupOutputDesc->mOutput2 == outputDesc)) {
- sp<SwAudioOutputDescriptor> outputDesc2;
- if (dupOutputDesc->mOutput1 == outputDesc) {
- outputDesc2 = dupOutputDesc->mOutput2;
- } else {
- outputDesc2 = dupOutputDesc->mOutput1;
- }
+ sp<SwAudioOutputDescriptor> dupOutput = mOutputs.valueAt(i);
+ if (dupOutput->isDuplicated() &&
+ (dupOutput->mOutput1 == closingOutput || dupOutput->mOutput2 == closingOutput)) {
+ sp<SwAudioOutputDescriptor> remainingOutput =
+ dupOutput->mOutput1 == closingOutput ? dupOutput->mOutput2 : dupOutput->mOutput1;
// As all active tracks on duplicated output will be deleted,
// and as they were also referenced on the other output, the reference
// count for their stream type must be adjusted accordingly on
// the other output.
- const bool wasActive = outputDesc2->isActive();
- for (const auto &clientPair : dupOutputDesc->getActiveClients()) {
- outputDesc2->changeStreamActiveCount(clientPair.first, -clientPair.second);
- }
+ const bool wasActive = remainingOutput->isActive();
+ // Note: no-op on the closing output where all clients have already been set inactive
+ dupOutput->setAllClientsInactive();
// stop() will be a no op if the output is still active but is needed in case all
// active streams refcounts where cleared above
if (wasActive) {
- outputDesc2->stop();
+ remainingOutput->stop();
}
audio_io_handle_t duplicatedOutput = mOutputs.keyAt(i);
ALOGV("closeOutput() closing also duplicated output %d", duplicatedOutput);
@@ -4746,7 +4774,7 @@
nextAudioPortGeneration();
- ssize_t index = mAudioPatches.indexOfKey(outputDesc->getPatchHandle());
+ ssize_t index = mAudioPatches.indexOfKey(closingOutput->getPatchHandle());
if (index >= 0) {
sp<AudioPatch> patchDesc = mAudioPatches.valueAt(index);
(void) /*status_t status*/ mpClientInterface->releaseAudioPatch(patchDesc->mAfPatchHandle, 0);
@@ -4754,7 +4782,7 @@
mpClientInterface->onAudioPatchListUpdate();
}
- outputDesc->close();
+ closingOutput->close();
removeOutput(output);
mPreviousOutputs = mOutputs;
@@ -5105,7 +5133,7 @@
devices.merge(curDevices);
for (audio_io_handle_t output : getOutputsForDevices(curDevices, mOutputs)) {
sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
- if (outputDesc->isStreamActive((audio_stream_type_t)curStream)) {
+ if (outputDesc->isActive(streamToVolumeSource((audio_stream_type_t)curStream))) {
activeDevices.merge(outputDesc->devices());
}
}
@@ -5519,7 +5547,7 @@
// in-call: always cap volume by voice volume + some low headroom
if ((stream != AUDIO_STREAM_VOICE_CALL) &&
- (isInCall() || mOutputs.isStreamActiveLocally(AUDIO_STREAM_VOICE_CALL))) {
+ (isInCall() || mOutputs.isActiveLocally(streamToVolumeSource(AUDIO_STREAM_VOICE_CALL)))) {
switch (stream) {
case AUDIO_STREAM_SYSTEM:
case AUDIO_STREAM_RING:
@@ -5637,9 +5665,8 @@
bool force)
{
// do not change actual stream volume if the stream is muted
- if (outputDesc->mMuteCount[stream] != 0) {
- ALOGVV("checkAndSetVolume() stream %d muted count %d",
- stream, outputDesc->mMuteCount[stream]);
+ if (outputDesc->isMuted(streamToVolumeSource(stream))) {
+ ALOGVV("%s() stream %d muted count %d", __func__, stream, outputDesc->getMuteCount(stream));
return NO_ERROR;
}
audio_policy_forced_cfg_t forceUseForComm =
@@ -5726,10 +5753,10 @@
}
ALOGVV("setStreamMute() stream %d, mute %d, mMuteCount %d device %04x",
- stream, on, outputDesc->mMuteCount[stream], device);
+ stream, on, outputDesc->getMuteCount(stream), device);
if (on) {
- if (outputDesc->mMuteCount[stream] == 0) {
+ if (!outputDesc->isMuted(streamToVolumeSource(stream))) {
if (mVolumeCurves->canBeMuted(stream) &&
((stream != AUDIO_STREAM_ENFORCED_AUDIBLE) ||
(mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_NONE))) {
@@ -5737,13 +5764,13 @@
}
}
// increment mMuteCount after calling checkAndSetVolume() so that volume change is not ignored
- outputDesc->mMuteCount[stream]++;
+ outputDesc->incMuteCount(streamToVolumeSource(stream));
} else {
- if (outputDesc->mMuteCount[stream] == 0) {
+ if (!outputDesc->isMuted(streamToVolumeSource(stream))) {
ALOGV("setStreamMute() unmuting non muted stream!");
return;
}
- if (--outputDesc->mMuteCount[stream] == 0) {
+ if (outputDesc->decMuteCount(streamToVolumeSource(stream)) == 0) {
checkAndSetVolume(stream,
mVolumeCurves->getVolumeIndex(stream, device),
outputDesc,
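The setStreamMute() hunks above swap direct mMuteCount[stream] accesses for isMuted()/incMuteCount()/decMuteCount() keyed by VolumeSource. A small standalone sketch of that refcounted mute pattern (names are illustrative, not the AOSP API): the stream is only effectively unmuted when the count returns to zero, so nested mute requests from independent callers compose correctly.

// Standalone sketch (not the AOSP API) of the mute refcount pattern.
#include <cassert>
#include <iostream>
#include <map>

using VolumeSource = int;

class MuteBook {
public:
    bool isMuted(VolumeSource vs) const {
        auto it = mMuteCount.find(vs);
        return it != mMuteCount.end() && it->second > 0;
    }
    int incMuteCount(VolumeSource vs) { return ++mMuteCount[vs]; }
    int decMuteCount(VolumeSource vs) {
        assert(isMuted(vs) && "unmuting a non muted source");
        return --mMuteCount[vs];
    }

private:
    std::map<VolumeSource, int> mMuteCount;
};

int main() {
    MuteBook book;
    // Mute twice (two independent callers), unmute twice: volume is only
    // restored once the count drops back to zero, mirroring setStreamMute().
    book.incMuteCount(2);
    book.incMuteCount(2);
    book.decMuteCount(2);
    std::cout << "still muted: " << book.isMuted(2) << "\n";  // 1
    book.decMuteCount(2);
    std::cout << "muted: " << book.isMuted(2) << "\n";        // 0
    return 0;
}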
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 2ca8356..7ec0e4c 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -94,6 +94,7 @@
"libsensorprivacy",
"libstagefright",
"libstagefright_foundation",
+ "libyuv",
"android.frameworks.cameraservice.common@2.0",
"android.frameworks.cameraservice.service@2.0",
"android.frameworks.cameraservice.device@2.0",
@@ -137,6 +138,7 @@
name: "libdepthphoto",
srcs: [
+ "utils/ExifUtils.cpp",
"common/DepthPhotoProcessor.cpp",
],
@@ -150,6 +152,8 @@
"libcutils",
"libjpeg",
"libmemunreachable",
+ "libexif",
+ "libcamera_client",
],
include_dirs: [
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index e002e18..162b50f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1767,14 +1767,13 @@
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
ALOGW("%s: Received recoverable error %d from HAL - ignoring, requestId %" PRId32,
__FUNCTION__, errorCode, resultExtras.requestId);
+ mCaptureSequencer->notifyError(errorCode, resultExtras);
return;
default:
err = CAMERA_ERROR_UNKNOWN;
break;
}
- mCaptureSequencer->notifyError(errorCode, resultExtras);
-
ALOGE("%s: Error condition %d reported by HAL, requestId %" PRId32, __FUNCTION__, errorCode,
resultExtras.requestId);
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 2eec0f7..9525ad2 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -339,6 +339,21 @@
} else {
depthPhoto.mIsLensDistortionValid = 0;
}
+ entry = inputFrame.result.find(ANDROID_JPEG_ORIENTATION);
+ if (entry.count > 0) {
+ // The camera jpeg orientation value must be one of {0, 90, 180, 270}.
+ switch (entry.data.i32[0]) {
+ case 0:
+ case 90:
+ case 180:
+ case 270:
+ depthPhoto.mOrientation = static_cast<DepthPhotoOrientation> (entry.data.i32[0]);
+ break;
+ default:
+ ALOGE("%s: Unexpected jpeg orientation value: %d, default to 0 degrees",
+ __FUNCTION__, entry.data.i32[0]);
+ }
+ }
size_t actualJpegSize = 0;
res = mDepthPhotoProcess(depthPhoto, finalJpegBufferSize, dstBuffer, &actualJpegSize);
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index a61cdee..9fd0e8b 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -23,6 +23,7 @@
#include <sys/syscall.h>
#include <android/hardware/camera/device/3.5/types.h>
+#include <libyuv.h>
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -192,6 +193,7 @@
return res;
}
+ initCopyRowFunction(width);
return res;
}
@@ -1373,7 +1375,7 @@
for (auto row = top; row < top+height; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
- memcpy(dst, yuvBuffer.data+row*yuvBuffer.stride+left, width);
+ mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
}
// U is Cb, V is Cr
@@ -1406,24 +1408,25 @@
for (auto row = top/2; row < (top+height)/2; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
- memcpy(dst, src+row*yuvBuffer.chromaStride+left, width);
+ mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
}
} else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
// U plane
for (auto row = top/2; row < (top+height)/2; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
- memcpy(dst, yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, width/2);
+ mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
}
// V plane
for (auto row = top/2; row < (top+height)/2; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
- memcpy(dst, yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, width/2);
+ mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
}
} else {
- // Convert between semiplannar and plannar
+ // Convert between semiplannar and plannar, or when UV orders are
+ // different.
uint8_t *dst = codecBuffer->data();
for (auto row = top/2; row < (top+height)/2; row++) {
for (auto col = left/2; col < (left+width)/2; col++) {
@@ -1446,6 +1449,38 @@
return OK;
}
+void HeicCompositeStream::initCopyRowFunction(int32_t width)
+{
+ using namespace libyuv;
+
+ mFnCopyRow = CopyRow_C;
+#if defined(HAS_COPYROW_SSE2)
+ if (TestCpuFlag(kCpuHasSSE2)) {
+ mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
+ }
+#endif
+#if defined(HAS_COPYROW_AVX)
+ if (TestCpuFlag(kCpuHasAVX)) {
+ mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
+ }
+#endif
+#if defined(HAS_COPYROW_ERMS)
+ if (TestCpuFlag(kCpuHasERMS)) {
+ mFnCopyRow = CopyRow_ERMS;
+ }
+#endif
+#if defined(HAS_COPYROW_NEON)
+ if (TestCpuFlag(kCpuHasNEON)) {
+ mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
+ }
+#endif
+#if defined(HAS_COPYROW_MIPS)
+ if (TestCpuFlag(kCpuHasMIPS)) {
+ mFnCopyRow = CopyRow_MIPS;
+ }
+#endif
+}
+
size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
size_t maxAppsSegment = 1;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 4cd9af0..2aa3c38 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -195,6 +195,7 @@
status_t copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
const CpuConsumer::LockedBuffer& yuvBuffer,
size_t top, size_t left, size_t width, size_t height);
+ void initCopyRowFunction(int32_t width);
static size_t calcAppSegmentMaxSize(const CameraMetadata& info);
static const nsecs_t kWaitDuration = 10000000; // 10 ms
@@ -244,6 +245,9 @@
// In most common use case, entries are accessed in order.
std::map<int64_t, InputFrame> mPendingInputFrames;
+
+ // Function pointer of libyuv row copy.
+ void (*mFnCopyRow)(const uint8_t* src, uint8_t* dst, int width);
};
}; // namespace camera3
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
index a945aca..6d96163 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
@@ -32,9 +32,12 @@
#include <dynamic_depth/profile.h>
#include <dynamic_depth/profiles.h>
#include <jpeglib.h>
+#include <libexif/exif-data.h>
+#include <libexif/exif-system.h>
#include <math.h>
#include <sstream>
#include <utils/Errors.h>
+#include <utils/ExifUtils.h>
#include <utils/Log.h>
#include <xmpmeta/xmp_data.h>
#include <xmpmeta/xmp_writer.h>
@@ -61,8 +64,44 @@
namespace android {
namespace camera3 {
+ExifOrientation getExifOrientation(const unsigned char *jpegBuffer, size_t jpegBufferSize) {
+ if ((jpegBuffer == nullptr) || (jpegBufferSize == 0)) {
+ return ExifOrientation::ORIENTATION_UNDEFINED;
+ }
+
+ auto exifData = exif_data_new();
+ exif_data_load_data(exifData, jpegBuffer, jpegBufferSize);
+ ExifEntry *orientation = exif_content_get_entry(exifData->ifd[EXIF_IFD_0],
+ EXIF_TAG_ORIENTATION);
+ if ((orientation == nullptr) || (orientation->size != sizeof(ExifShort))) {
+ ALOGV("%s: Orientation EXIF entry invalid!", __FUNCTION__);
+ exif_data_unref(exifData);
+ return ExifOrientation::ORIENTATION_0_DEGREES;
+ }
+
+ auto orientationValue = exif_get_short(orientation->data, exif_data_get_byte_order(exifData));
+ ExifOrientation ret;
+ switch (orientationValue) {
+ case ExifOrientation::ORIENTATION_0_DEGREES:
+ case ExifOrientation::ORIENTATION_90_DEGREES:
+ case ExifOrientation::ORIENTATION_180_DEGREES:
+ case ExifOrientation::ORIENTATION_270_DEGREES:
+ ret = static_cast<ExifOrientation> (orientationValue);
+ break;
+ default:
+ ALOGE("%s: Unexpected EXIF orientation value: %d, defaulting to 0 degrees",
+ __FUNCTION__, orientationValue);
+ ret = ExifOrientation::ORIENTATION_0_DEGREES;
+ }
+
+ exif_data_unref(exifData);
+
+ return ret;
+}
+
status_t encodeGrayscaleJpeg(size_t width, size_t height, uint8_t *in, void *out,
- const size_t maxOutSize, uint8_t jpegQuality, size_t &actualSize) {
+ const size_t maxOutSize, uint8_t jpegQuality, ExifOrientation exifOrientation,
+ size_t &actualSize) {
status_t ret;
// libjpeg is a C library so we use C-style "inheritance" by
// putting libjpeg's jpeg_destination_mgr first in our custom
@@ -151,6 +190,23 @@
jpeg_start_compress(&cinfo, TRUE);
+ if (exifOrientation != ExifOrientation::ORIENTATION_UNDEFINED) {
+ std::unique_ptr<ExifUtils> utils(ExifUtils::create());
+ utils->initializeEmpty();
+ utils->setImageWidth(width);
+ utils->setImageHeight(height);
+ utils->setOrientationValue(exifOrientation);
+
+ if (utils->generateApp1()) {
+ const uint8_t* exifBuffer = utils->getApp1Buffer();
+ size_t exifBufferSize = utils->getApp1Length();
+ jpeg_write_marker(&cinfo, JPEG_APP0 + 1, static_cast<const JOCTET*>(exifBuffer),
+ exifBufferSize);
+ } else {
+ ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
+ }
+ }
+
for (size_t i = 0; i < cinfo.image_height; i++) {
auto currentRow = static_cast<JSAMPROW>(in + i*width);
jpeg_write_scanlines(&cinfo, &currentRow, /*num_lines*/1);
@@ -168,8 +224,106 @@
return ret;
}
+inline void unpackDepth16(uint16_t value, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ // Android densely packed depth map. The units for the range are in
+ // millimeters and need to be scaled to meters.
+ // The confidence value is encoded in the 3 most significant bits.
+ // The confidence data needs to be additionally normalized with
+ // values 1.0f, 0.0f representing maximum and minimum confidence
+ // respectively.
+ auto point = static_cast<float>(value & 0x1FFF) / 1000.f;
+ points->push_back(point);
+
+ auto conf = (value >> 13) & 0x7;
+ float normConfidence = (conf == 0) ? 1.f : (static_cast<float>(conf) - 1) / 7.f;
+ confidence->push_back(normConfidence);
+
+ if (*near > point) {
+ *near = point;
+ }
+ if (*far < point) {
+ *far = point;
+ }
+}
+
+// Trivial case: read forward from the top-left corner.
+void rotate0AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (size_t i = 0; i < inputFrame.mDepthMapHeight; i++) {
+ for (size_t j = 0; j < inputFrame.mDepthMapWidth; j++) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j], points,
+ confidence, near, far);
+ }
+ }
+}
+
+// A 90 degrees CW rotation can be applied by starting to read from the bottom-left
+// corner and transposing rows and columns.
+void rotate90AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (size_t i = 0; i < inputFrame.mDepthMapWidth; i++) {
+ for (ssize_t j = inputFrame.mDepthMapHeight-1; j >= 0; j--) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[j*inputFrame.mDepthMapStride + i], points,
+ confidence, near, far);
+ }
+ }
+}
+
+// A 180 degrees CW rotation can be applied by reading backwards from the bottom-right corner.
+void rotate180AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (ssize_t i = inputFrame.mDepthMapHeight-1; i >= 0; i--) {
+ for (ssize_t j = inputFrame.mDepthMapWidth-1; j >= 0; j--) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j], points,
+ confidence, near, far);
+ }
+ }
+}
+
+// A 270 degrees CW rotation can be applied by starting to read from the top-right
+// corner and transposing rows and columns.
+void rotate270AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ for (ssize_t i = inputFrame.mDepthMapWidth-1; i >= 0; i--) {
+ for (size_t j = 0; j < inputFrame.mDepthMapHeight; j++) {
+ unpackDepth16(inputFrame.mDepthMapBuffer[j*inputFrame.mDepthMapStride + i], points,
+ confidence, near, far);
+ }
+ }
+}
+
+bool rotateAndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
+ std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
+ switch (inputFrame.mOrientation) {
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES:
+ rotate0AndUnpack(inputFrame, points, confidence, near, far);
+ return false;
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES:
+ rotate90AndUnpack(inputFrame, points, confidence, near, far);
+ return true;
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_180_DEGREES:
+ rotate180AndUnpack(inputFrame, points, confidence, near, far);
+ return false;
+ case DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES:
+ rotate270AndUnpack(inputFrame, points, confidence, near, far);
+ return true;
+ default:
+ ALOGE("%s: Unsupported depth photo rotation: %d, default to 0", __FUNCTION__,
+ inputFrame.mOrientation);
+ rotate0AndUnpack(inputFrame, points, confidence, near, far);
+ }
+
+ return false;
+}
+
std::unique_ptr<dynamic_depth::DepthMap> processDepthMapFrame(DepthPhotoInputFrame inputFrame,
- std::vector<std::unique_ptr<Item>> *items /*out*/) {
+ ExifOrientation exifOrientation, std::vector<std::unique_ptr<Item>> *items /*out*/,
+ bool *switchDimensions /*out*/) {
+ if ((items == nullptr) || (switchDimensions == nullptr)) {
+ return nullptr;
+ }
+
std::vector<float> points, confidence;
size_t pointCount = inputFrame.mDepthMapWidth * inputFrame.mDepthMapHeight;
@@ -177,29 +331,21 @@
confidence.reserve(pointCount);
float near = UINT16_MAX;
float far = .0f;
- for (size_t i = 0; i < inputFrame.mDepthMapHeight; i++) {
- for (size_t j = 0; j < inputFrame.mDepthMapWidth; j++) {
- // Android densely packed depth map. The units for the range are in
- // millimeters and need to be scaled to meters.
- // The confidence value is encoded in the 3 most significant bits.
- // The confidence data needs to be additionally normalized with
- // values 1.0f, 0.0f representing maximum and minimum confidence
- // respectively.
- auto value = inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j];
- auto point = static_cast<float>(value & 0x1FFF) / 1000.f;
- points.push_back(point);
+ *switchDimensions = false;
+ // Physical rotation of depth and confidence maps may be needed in case
+ // the EXIF orientation is set to 0 degrees and the depth photo orientation
+ // (source color image) has a different value.
+ if (exifOrientation == ExifOrientation::ORIENTATION_0_DEGREES) {
+ *switchDimensions = rotateAndUnpack(inputFrame, &points, &confidence, &near, &far);
+ } else {
+ rotate0AndUnpack(inputFrame, &points, &confidence, &near, &far);
+ }
- auto conf = (value >> 13) & 0x7;
- float normConfidence = (conf == 0) ? 1.f : (static_cast<float>(conf) - 1) / 7.f;
- confidence.push_back(normConfidence);
-
- if (near > point) {
- near = point;
- }
- if (far < point) {
- far = point;
- }
- }
+ size_t width = inputFrame.mDepthMapWidth;
+ size_t height = inputFrame.mDepthMapHeight;
+ if (*switchDimensions) {
+ width = inputFrame.mDepthMapHeight;
+ height = inputFrame.mDepthMapWidth;
}
if (near == far) {
@@ -225,18 +371,18 @@
depthParams.depth_image_data.resize(inputFrame.mMaxJpegSize);
depthParams.confidence_data.resize(inputFrame.mMaxJpegSize);
size_t actualJpegSize;
- auto ret = encodeGrayscaleJpeg(inputFrame.mDepthMapWidth, inputFrame.mDepthMapHeight,
- pointsQuantized.data(), depthParams.depth_image_data.data(), inputFrame.mMaxJpegSize,
- inputFrame.mJpegQuality, actualJpegSize);
+ auto ret = encodeGrayscaleJpeg(width, height, pointsQuantized.data(),
+ depthParams.depth_image_data.data(), inputFrame.mMaxJpegSize,
+ inputFrame.mJpegQuality, exifOrientation, actualJpegSize);
if (ret != NO_ERROR) {
ALOGE("%s: Depth map compression failed!", __FUNCTION__);
return nullptr;
}
depthParams.depth_image_data.resize(actualJpegSize);
- ret = encodeGrayscaleJpeg(inputFrame.mDepthMapWidth, inputFrame.mDepthMapHeight,
- confidenceQuantized.data(), depthParams.confidence_data.data(), inputFrame.mMaxJpegSize,
- inputFrame.mJpegQuality, actualJpegSize);
+ ret = encodeGrayscaleJpeg(width, height, confidenceQuantized.data(),
+ depthParams.confidence_data.data(), inputFrame.mMaxJpegSize,
+ inputFrame.mJpegQuality, exifOrientation, actualJpegSize);
if (ret != NO_ERROR) {
ALOGE("%s: Confidence map compression failed!", __FUNCTION__);
return nullptr;
@@ -262,7 +408,12 @@
return BAD_VALUE;
}
- cameraParams->depth_map = processDepthMapFrame(inputFrame, &items);
+ ExifOrientation exifOrientation = getExifOrientation(
+ reinterpret_cast<const unsigned char*> (inputFrame.mMainJpegBuffer),
+ inputFrame.mMainJpegSize);
+ bool switchDimensions;
+ cameraParams->depth_map = processDepthMapFrame(inputFrame, exifOrientation, &items,
+ &switchDimensions);
if (cameraParams->depth_map == nullptr) {
ALOGE("%s: Depth map processing failed!", __FUNCTION__);
return BAD_VALUE;
@@ -274,7 +425,13 @@
// [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
const dynamic_depth::Point<double> focalLength(inputFrame.mInstrinsicCalibration[0],
inputFrame.mInstrinsicCalibration[1]);
- const Dimension imageSize(inputFrame.mMainJpegWidth, inputFrame.mMainJpegHeight);
+ size_t width = inputFrame.mMainJpegWidth;
+ size_t height = inputFrame.mMainJpegHeight;
+ if (switchDimensions) {
+ width = inputFrame.mMainJpegHeight;
+ height = inputFrame.mMainJpegWidth;
+ }
+ const Dimension imageSize(width, height);
ImagingModelParams imagingParams(focalLength, imageSize);
imagingParams.principal_point.x = inputFrame.mInstrinsicCalibration[2];
imagingParams.principal_point.y = inputFrame.mInstrinsicCalibration[3];
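As a quick sanity check of the DEPTH16 unpacking rule introduced in unpackDepth16() above (low 13 bits carry the range in millimeters, the top 3 bits carry confidence, with 0 meaning maximum confidence and 1..7 normalizing to [0, 6/7]), here is a small standalone worked example; the sample values are made up for illustration:

// Standalone check of the DEPTH16 unpacking rule (illustrative values).
#include <cstdint>
#include <cstdio>

struct Sample { float meters; float confidence; };

static Sample unpack(uint16_t value) {
    Sample s;
    s.meters = static_cast<float>(value & 0x1FFF) / 1000.f;           // millimeters -> meters
    uint16_t conf = (value >> 13) & 0x7;                               // top 3 bits
    s.confidence = (conf == 0) ? 1.f : (static_cast<float>(conf) - 1) / 7.f;
    return s;
}

int main() {
    // 0xE5DC = confidence bits 7, range 1500 mm.
    Sample a = unpack(0xE5DC);
    std::printf("%.3f m, conf %.3f\n", a.meters, a.confidence);  // 1.500 m, conf 0.857
    // 0x05DC = confidence bits 0 (treated as full confidence), range 1500 mm.
    Sample b = unpack(0x05DC);
    std::printf("%.3f m, conf %.3f\n", b.meters, b.confidence);  // 1.500 m, conf 1.000
    return 0;
}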
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.h b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
index 19889a1..6a2fbff 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.h
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
@@ -23,19 +23,27 @@
namespace android {
namespace camera3 {
+enum DepthPhotoOrientation {
+ DEPTH_ORIENTATION_0_DEGREES = 0,
+ DEPTH_ORIENTATION_90_DEGREES = 90,
+ DEPTH_ORIENTATION_180_DEGREES = 180,
+ DEPTH_ORIENTATION_270_DEGREES = 270,
+};
+
struct DepthPhotoInputFrame {
- const char* mMainJpegBuffer;
- size_t mMainJpegSize;
- size_t mMainJpegWidth, mMainJpegHeight;
- uint16_t* mDepthMapBuffer;
- size_t mDepthMapWidth, mDepthMapHeight, mDepthMapStride;
- size_t mMaxJpegSize;
- uint8_t mJpegQuality;
- uint8_t mIsLogical;
- float mInstrinsicCalibration[5];
- uint8_t mIsInstrinsicCalibrationValid;
- float mLensDistortion[5];
- uint8_t mIsLensDistortionValid;
+ const char* mMainJpegBuffer;
+ size_t mMainJpegSize;
+ size_t mMainJpegWidth, mMainJpegHeight;
+ uint16_t* mDepthMapBuffer;
+ size_t mDepthMapWidth, mDepthMapHeight, mDepthMapStride;
+ size_t mMaxJpegSize;
+ uint8_t mJpegQuality;
+ uint8_t mIsLogical;
+ float mInstrinsicCalibration[5];
+ uint8_t mIsInstrinsicCalibrationValid;
+ float mLensDistortion[5];
+ uint8_t mIsLensDistortionValid;
+ DepthPhotoOrientation mOrientation;
DepthPhotoInputFrame() :
mMainJpegBuffer(nullptr),
@@ -52,7 +60,8 @@
mInstrinsicCalibration{0.f},
mIsInstrinsicCalibrationValid(0),
mLensDistortion{0.f},
- mIsLensDistortionValid(0) {}
+ mIsLensDistortionValid(0),
+ mOrientation(DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES) {}
};
static const char *kDepthPhotoLibrary = "libdepthphoto.so";
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index f9ef996..923d17a 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -2151,7 +2151,11 @@
// Pause to reconfigure
status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration) {
- mRequestThread->setPaused(true);
+ if (mRequestThread.get() != nullptr) {
+ mRequestThread->setPaused(true);
+ } else {
+ return NO_INIT;
+ }
ALOGV("%s: Camera %s: Internal wait until idle (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
maxExpectedDuration);
@@ -4558,7 +4562,7 @@
return;
}
- auto err = mHidlSession_3_5->signalStreamFlush(streamIds, mNextStreamConfigCounter);
+ auto err = mHidlSession_3_5->signalStreamFlush(streamIds, mNextStreamConfigCounter - 1);
if (!err.isOk()) {
ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
return;
@@ -5890,16 +5894,16 @@
if (mPaused == false) {
ALOGV("%s: RequestThread: Going idle", __FUNCTION__);
mPaused = true;
- // Let the tracker know
- sp<StatusTracker> statusTracker = mStatusTracker.promote();
- if (statusTracker != 0) {
- statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
- }
if (mNotifyPipelineDrain) {
mInterface->signalPipelineDrain(mStreamIdsToBeDrained);
mNotifyPipelineDrain = false;
mStreamIdsToBeDrained.clear();
}
+ // Let the tracker know
+ sp<StatusTracker> statusTracker = mStatusTracker.promote();
+ if (statusTracker != 0) {
+ statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
+ }
sp<Camera3Device> parent = mParent.promote();
if (parent != nullptr) {
parent->mRequestBufferSM.onRequestThreadPaused();
@@ -5983,16 +5987,16 @@
if (mPaused == false) {
mPaused = true;
ALOGV("%s: RequestThread: Paused", __FUNCTION__);
- // Let the tracker know
- sp<StatusTracker> statusTracker = mStatusTracker.promote();
- if (statusTracker != 0) {
- statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
- }
if (mNotifyPipelineDrain) {
mInterface->signalPipelineDrain(mStreamIdsToBeDrained);
mNotifyPipelineDrain = false;
mStreamIdsToBeDrained.clear();
}
+ // Let the tracker know
+ sp<StatusTracker> statusTracker = mStatusTracker.promote();
+ if (statusTracker != 0) {
+ statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
+ }
sp<Camera3Device> parent = mParent.promote();
if (parent != nullptr) {
parent->mRequestBufferSM.onRequestThreadPaused();
diff --git a/services/camera/libcameraservice/tests/Android.mk b/services/camera/libcameraservice/tests/Android.mk
index d777ca1..b4e7c32 100644
--- a/services/camera/libcameraservice/tests/Android.mk
+++ b/services/camera/libcameraservice/tests/Android.mk
@@ -27,6 +27,8 @@
libcamera_client \
libcamera_metadata \
libutils \
+ libjpeg \
+ libexif \
android.hardware.camera.common@1.0 \
android.hardware.camera.provider@2.4 \
android.hardware.camera.provider@2.5 \
@@ -36,6 +38,8 @@
LOCAL_C_INCLUDES += \
system/media/private/camera/include \
+ external/dynamic_depth/includes \
+ external/dynamic_depth/internal \
LOCAL_CFLAGS += -Wall -Wextra -Werror
diff --git a/services/camera/libcameraservice/tests/DepthProcessorTest.cpp b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
new file mode 100644
index 0000000..2162514
--- /dev/null
+++ b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
@@ -0,0 +1,382 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "DepthProcessorTest"
+
+#include <array>
+#include <random>
+
+#include <dlfcn.h>
+#include <gtest/gtest.h>
+
+#include "../common/DepthPhotoProcessor.h"
+#include "../utils/ExifUtils.h"
+#include "NV12Compressor.h"
+
+using namespace android;
+using namespace android::camera3;
+
+static const size_t kTestBufferWidth = 640;
+static const size_t kTestBufferHeight = 480;
+static const size_t kTestBufferNV12Size ((((kTestBufferWidth) * (kTestBufferHeight)) * 3) / 2);
+static const size_t kTestBufferDepthSize (kTestBufferWidth * kTestBufferHeight);
+static const size_t kSeed = 1234;
+
+void linkToDepthPhotoLibrary(void **libHandle /*out*/,
+ process_depth_photo_frame *processFrameFunc /*out*/) {
+ ASSERT_NE(libHandle, nullptr);
+ ASSERT_NE(processFrameFunc, nullptr);
+
+ *libHandle = dlopen(kDepthPhotoLibrary, RTLD_NOW | RTLD_LOCAL);
+ if (*libHandle != nullptr) {
+ *processFrameFunc = reinterpret_cast<camera3::process_depth_photo_frame> (
+ dlsym(*libHandle, kDepthPhotoProcessFunction));
+ ASSERT_NE(*processFrameFunc, nullptr);
+ }
+}
+
+void generateColorJpegBuffer(int jpegQuality, ExifOrientation orientationValue, bool includeExif,
+ bool switchDimensions, std::vector<uint8_t> *colorJpegBuffer /*out*/) {
+ ASSERT_NE(colorJpegBuffer, nullptr);
+
+ std::array<uint8_t, kTestBufferNV12Size> colorSourceBuffer;
+ std::default_random_engine gen(kSeed);
+ std::uniform_int_distribution<int> uniDist(0, UINT8_MAX - 1);
+ for (size_t i = 0; i < colorSourceBuffer.size(); i++) {
+ colorSourceBuffer[i] = uniDist(gen);
+ }
+
+ size_t width = kTestBufferWidth;
+ size_t height = kTestBufferHeight;
+ if (switchDimensions) {
+ width = kTestBufferHeight;
+ height = kTestBufferWidth;
+ }
+
+ NV12Compressor jpegCompressor;
+ if (includeExif) {
+ ASSERT_TRUE(jpegCompressor.compressWithExifOrientation(
+ reinterpret_cast<const unsigned char*> (colorSourceBuffer.data()), width, height,
+ jpegQuality, orientationValue));
+ } else {
+ ASSERT_TRUE(jpegCompressor.compress(
+ reinterpret_cast<const unsigned char*> (colorSourceBuffer.data()), width, height,
+ jpegQuality));
+ }
+
+ *colorJpegBuffer = jpegCompressor.getCompressedData();
+ ASSERT_FALSE(colorJpegBuffer->empty());
+}
+
+void generateDepth16Buffer(std::array<uint16_t, kTestBufferDepthSize> *depth16Buffer /*out*/) {
+ ASSERT_NE(depth16Buffer, nullptr);
+ std::default_random_engine gen(kSeed+1);
+ std::uniform_int_distribution<int> uniDist(0, UINT16_MAX - 1);
+ for (size_t i = 0; i < depth16Buffer->size(); i++) {
+ (*depth16Buffer)[i] = uniDist(gen);
+ }
+}
+
+TEST(DepthProcessorTest, LinkToLibrary) {
+ void *libHandle;
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle != nullptr) {
+ dlclose(libHandle);
+ }
+}
+
+TEST(DepthProcessorTest, BadInput) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ DepthPhotoInputFrame inputFrame;
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+
+ std::vector<uint8_t> colorJpegBuffer;
+ generateColorJpegBuffer(jpegQuality, ExifOrientation::ORIENTATION_UNDEFINED,
+ /*includeExif*/ false, /*switchDimensions*/ false, &colorJpegBuffer);
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), nullptr,
+ &actualDepthPhotoSize), 0);
+
+ ASSERT_NE(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(), nullptr),
+ 0);
+
+ dlclose(libHandle);
+}
+
+TEST(DepthProcessorTest, BasicDepthPhotoValidation) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ std::vector<uint8_t> colorJpegBuffer;
+ generateColorJpegBuffer(jpegQuality, ExifOrientation::ORIENTATION_UNDEFINED,
+ /*includeExif*/ false, /*switchDimensions*/ false, &colorJpegBuffer);
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ DepthPhotoInputFrame inputFrame;
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+ ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+ ASSERT_TRUE((actualDepthPhotoSize > 0) && (depthPhotoBuffer.size() >= actualDepthPhotoSize));
+
+ // The final depth photo must consist of three jpeg images:
+ // - the main color image
+ // - the depth map image
+ // - the confidence map image
+ size_t mainJpegSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data(), actualDepthPhotoSize,
+ &mainJpegSize), OK);
+ ASSERT_TRUE((mainJpegSize > 0) && (mainJpegSize < actualDepthPhotoSize));
+ size_t depthMapSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data() + mainJpegSize,
+ actualDepthPhotoSize - mainJpegSize, &depthMapSize), OK);
+ ASSERT_TRUE((depthMapSize > 0) && (depthMapSize < (actualDepthPhotoSize - mainJpegSize)));
+
+ dlclose(libHandle);
+}
+
+TEST(DepthProcessorTest, TestDepthPhotoExifOrientation) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ ExifOrientation exifOrientations[] = { ExifOrientation::ORIENTATION_UNDEFINED,
+ ExifOrientation::ORIENTATION_0_DEGREES, ExifOrientation::ORIENTATION_90_DEGREES,
+ ExifOrientation::ORIENTATION_180_DEGREES, ExifOrientation::ORIENTATION_270_DEGREES };
+ for (auto exifOrientation : exifOrientations) {
+ std::vector<uint8_t> colorJpegBuffer;
+ generateColorJpegBuffer(jpegQuality, exifOrientation, /*includeExif*/ true,
+ /*switchDimensions*/ false, &colorJpegBuffer);
+ if (exifOrientation != ExifOrientation::ORIENTATION_UNDEFINED) {
+ auto jpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(colorJpegBuffer.data(),
+ colorJpegBuffer.size(), &jpegExifOrientation), OK);
+ ASSERT_EQ(exifOrientation, jpegExifOrientation);
+ }
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ DepthPhotoInputFrame inputFrame;
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+ ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+ ASSERT_TRUE((actualDepthPhotoSize > 0) &&
+ (depthPhotoBuffer.size() >= actualDepthPhotoSize));
+
+ size_t mainJpegSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data(), actualDepthPhotoSize,
+ &mainJpegSize), OK);
+ ASSERT_TRUE((mainJpegSize > 0) && (mainJpegSize < actualDepthPhotoSize));
+ size_t depthMapSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data() + mainJpegSize,
+ actualDepthPhotoSize - mainJpegSize, &depthMapSize), OK);
+ ASSERT_TRUE((depthMapSize > 0) && (depthMapSize < (actualDepthPhotoSize - mainJpegSize)));
+ size_t confidenceMapSize = actualDepthPhotoSize - (mainJpegSize + depthMapSize);
+
+ // Depth and confidence images must have the same EXIF orientation as the source.
+ auto depthJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(depthPhotoBuffer.data() + mainJpegSize,
+ depthMapSize, &depthJpegExifOrientation), OK);
+ if (exifOrientation == ORIENTATION_UNDEFINED) {
+ // In case of undefined or missing EXIF orientation, always expect 0 degrees in the
+ // depth map.
+ ASSERT_EQ(depthJpegExifOrientation, ExifOrientation::ORIENTATION_0_DEGREES);
+ } else {
+ ASSERT_EQ(depthJpegExifOrientation, exifOrientation);
+ }
+
+ auto confidenceJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(
+ depthPhotoBuffer.data() + mainJpegSize + depthMapSize,
+ confidenceMapSize, &confidenceJpegExifOrientation), OK);
+ if (exifOrientation == ORIENTATION_UNDEFINED) {
+ // In case of undefined or missing EXIF orientation, always expect 0 degrees in the
+ // confidence map.
+ ASSERT_EQ(confidenceJpegExifOrientation, ExifOrientation::ORIENTATION_0_DEGREES);
+ } else {
+ ASSERT_EQ(confidenceJpegExifOrientation, exifOrientation);
+ }
+ }
+
+ dlclose(libHandle);
+}
+
+TEST(DepthProcessorTest, TestDepthPhotoPhysicalRotation) {
+ void *libHandle;
+ int jpegQuality = 95;
+
+ process_depth_photo_frame processFunc;
+ linkToDepthPhotoLibrary(&libHandle, &processFunc);
+ if (libHandle == nullptr) {
+ // Depth library not present, nothing more to test.
+ return;
+ }
+
+ // In case of physical rotation, the EXIF orientation must always be 0.
+ auto exifOrientation = ExifOrientation::ORIENTATION_0_DEGREES;
+ DepthPhotoOrientation depthOrientations[] = {
+ DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES,
+ DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES,
+ DepthPhotoOrientation::DEPTH_ORIENTATION_180_DEGREES,
+ DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES };
+ for (auto depthOrientation : depthOrientations) {
+ std::vector<uint8_t> colorJpegBuffer;
+ bool switchDimensions = false;
+ size_t expectedWidth = kTestBufferWidth;
+ size_t expectedHeight = kTestBufferHeight;
+ if ((depthOrientation == DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES) ||
+ (depthOrientation == DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES)) {
+ switchDimensions = true;
+ expectedWidth = kTestBufferHeight;
+ expectedHeight = kTestBufferWidth;
+ }
+ generateColorJpegBuffer(jpegQuality, exifOrientation, /*includeExif*/ true,
+ switchDimensions, &colorJpegBuffer);
+ auto jpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(colorJpegBuffer.data(), colorJpegBuffer.size(),
+ &jpegExifOrientation), OK);
+ ASSERT_EQ(exifOrientation, jpegExifOrientation);
+
+ std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
+ generateDepth16Buffer(&depth16Buffer);
+
+ DepthPhotoInputFrame inputFrame;
+ inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
+ inputFrame.mMainJpegSize = colorJpegBuffer.size();
+ // Worst case both depth and confidence maps have the same size as the main color image.
+ inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+ inputFrame.mMainJpegWidth = kTestBufferWidth;
+ inputFrame.mMainJpegHeight = kTestBufferHeight;
+ inputFrame.mJpegQuality = jpegQuality;
+ inputFrame.mDepthMapBuffer = depth16Buffer.data();
+ inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = kTestBufferWidth;
+ inputFrame.mDepthMapHeight = kTestBufferHeight;
+ inputFrame.mOrientation = depthOrientation;
+
+ std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
+ size_t actualDepthPhotoSize = 0;
+ ASSERT_EQ(processFunc(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+ &actualDepthPhotoSize), 0);
+ ASSERT_TRUE((actualDepthPhotoSize > 0) &&
+ (depthPhotoBuffer.size() >= actualDepthPhotoSize));
+
+ size_t mainJpegSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data(), actualDepthPhotoSize,
+ &mainJpegSize), OK);
+ ASSERT_TRUE((mainJpegSize > 0) && (mainJpegSize < actualDepthPhotoSize));
+ size_t depthMapSize = 0;
+ ASSERT_EQ(NV12Compressor::findJpegSize(depthPhotoBuffer.data() + mainJpegSize,
+ actualDepthPhotoSize - mainJpegSize, &depthMapSize), OK);
+ ASSERT_TRUE((depthMapSize > 0) && (depthMapSize < (actualDepthPhotoSize - mainJpegSize)));
+ size_t confidenceMapSize = actualDepthPhotoSize - (mainJpegSize + depthMapSize);
+
+ // Depth and confidence images must have the same EXIF orientation as the source.
+ auto depthJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(depthPhotoBuffer.data() + mainJpegSize,
+ depthMapSize, &depthJpegExifOrientation), OK);
+ ASSERT_EQ(depthJpegExifOrientation, exifOrientation);
+ size_t depthMapWidth, depthMapHeight;
+ ASSERT_EQ(NV12Compressor::getJpegImageDimensions(depthPhotoBuffer.data() + mainJpegSize,
+ depthMapSize, &depthMapWidth, &depthMapHeight), OK);
+ ASSERT_EQ(depthMapWidth, expectedWidth);
+ ASSERT_EQ(depthMapHeight, expectedHeight);
+
+ auto confidenceJpegExifOrientation = ExifOrientation::ORIENTATION_UNDEFINED;
+ ASSERT_EQ(NV12Compressor::getExifOrientation(
+ depthPhotoBuffer.data() + mainJpegSize + depthMapSize, confidenceMapSize,
+ &confidenceJpegExifOrientation), OK);
+ ASSERT_EQ(confidenceJpegExifOrientation, exifOrientation);
+ size_t confidenceMapWidth, confidenceMapHeight;
+ ASSERT_EQ(NV12Compressor::getJpegImageDimensions(
+ depthPhotoBuffer.data() + mainJpegSize + depthMapSize, confidenceMapSize,
+ &confidenceMapWidth, &confidenceMapHeight), OK);
+ ASSERT_EQ(confidenceMapWidth, expectedWidth);
+ ASSERT_EQ(confidenceMapHeight, expectedHeight);
+ }
+
+ dlclose(libHandle);
+}
diff --git a/services/camera/libcameraservice/tests/NV12Compressor.cpp b/services/camera/libcameraservice/tests/NV12Compressor.cpp
new file mode 100644
index 0000000..0a41a1f
--- /dev/null
+++ b/services/camera/libcameraservice/tests/NV12Compressor.cpp
@@ -0,0 +1,379 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "Test_NV12Compressor"
+
+#include "NV12Compressor.h"
+
+#include <libexif/exif-data.h>
+#include <netinet/in.h>
+
+using namespace android;
+using namespace android::camera3;
+
+namespace std {
+template <>
+struct default_delete<ExifEntry> {
+ inline void operator()(ExifEntry* entry) const { exif_entry_unref(entry); }
+};
+
+template <>
+struct default_delete<ExifData> {
+ inline void operator()(ExifData* data) const { exif_data_unref(data); }
+};
+
+} // namespace std
+
+bool NV12Compressor::compress(const unsigned char* data, int width, int height, int quality) {
+ if (!configureCompressor(width, height, quality)) {
+ // the method will have logged a more detailed error message than we can
+ // provide here so just return.
+ return false;
+ }
+
+ return compressData(data, /*exifData*/ nullptr);
+}
+
+bool NV12Compressor::compressWithExifOrientation(const unsigned char* data, int width, int height,
+ int quality, android::camera3::ExifOrientation exifValue) {
+ std::unique_ptr<ExifData> exifData(exif_data_new());
+ if (exifData.get() == nullptr) {
+ return false;
+ }
+
+ exif_data_set_option(exifData.get(), EXIF_DATA_OPTION_FOLLOW_SPECIFICATION);
+ exif_data_set_data_type(exifData.get(), EXIF_DATA_TYPE_COMPRESSED);
+ exif_data_set_byte_order(exifData.get(), EXIF_BYTE_ORDER_INTEL);
+ std::unique_ptr<ExifEntry> exifEntry(exif_entry_new());
+ if (exifEntry.get() == nullptr) {
+ return false;
+ }
+
+ exifEntry->tag = EXIF_TAG_ORIENTATION;
+ exif_content_add_entry(exifData->ifd[EXIF_IFD_0], exifEntry.get());
+ exif_entry_initialize(exifEntry.get(), exifEntry->tag);
+ exif_set_short(exifEntry->data, EXIF_BYTE_ORDER_INTEL, exifValue);
+
+ if (!configureCompressor(width, height, quality)) {
+ return false;
+ }
+
+ return compressData(data, exifData.get());
+}
+
+const std::vector<uint8_t>& NV12Compressor::getCompressedData() const {
+ return mDestManager.mBuffer;
+}
+
+bool NV12Compressor::configureCompressor(int width, int height, int quality) {
+ mCompressInfo.err = jpeg_std_error(&mErrorManager);
+ // NOTE! DANGER! Do not construct any non-trivial objects below setjmp!
+ // The compiler will not generate code to destroy them during the return
+ // below so they will leak. Additionally, do not place any calls to libjpeg
+ // that can fail above this line or any error will cause undefined behavior.
+ if (setjmp(mErrorManager.mJumpBuffer)) {
+ // This is where the error handler will jump in case setup fails
+ // The error manager will ALOG an appropriate error message
+ return false;
+ }
+
+ jpeg_create_compress(&mCompressInfo);
+
+ mCompressInfo.image_width = width;
+ mCompressInfo.image_height = height;
+ mCompressInfo.input_components = 3;
+ mCompressInfo.in_color_space = JCS_YCbCr;
+ jpeg_set_defaults(&mCompressInfo);
+
+ jpeg_set_quality(&mCompressInfo, quality, TRUE);
+ // It may seem weird to set color space here again but this will also set
+ // other fields. These fields might be overwritten by jpeg_set_defaults
+ jpeg_set_colorspace(&mCompressInfo, JCS_YCbCr);
+ mCompressInfo.raw_data_in = TRUE;
+ mCompressInfo.dct_method = JDCT_IFAST;
+ // Set sampling factors
+ mCompressInfo.comp_info[0].h_samp_factor = 2;
+ mCompressInfo.comp_info[0].v_samp_factor = 2;
+ mCompressInfo.comp_info[1].h_samp_factor = 1;
+ mCompressInfo.comp_info[1].v_samp_factor = 1;
+ mCompressInfo.comp_info[2].h_samp_factor = 1;
+ mCompressInfo.comp_info[2].v_samp_factor = 1;
+
+ mCompressInfo.dest = &mDestManager;
+
+ return true;
+}
+
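+// Split the interleaved chroma plane into separate U and V row buffers so libjpeg
+// can consume them as planar raw data; at most 8 chroma rows (16 luma rows) are
+// prepared per call, matching the jpeg_write_raw_data() batch size below.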
+static void deinterleave(const uint8_t* vuPlanar, std::vector<uint8_t>& uRows,
+ std::vector<uint8_t>& vRows, int rowIndex, int width, int height, int stride) {
+ int numRows = (height - rowIndex) / 2;
+ if (numRows > 8) numRows = 8;
+ for (int row = 0; row < numRows; ++row) {
+ int offset = ((rowIndex >> 1) + row) * stride;
+ const uint8_t* vu = vuPlanar + offset;
+ for (int i = 0; i < (width >> 1); ++i) {
+ int index = row * (width >> 1) + i;
+ uRows[index] = vu[1];
+ vRows[index] = vu[0];
+ vu += 2;
+ }
+ }
+}
+
+bool NV12Compressor::compressData(const unsigned char* data, ExifData* exifData) {
+ const uint8_t* y[16];
+ const uint8_t* cb[8];
+ const uint8_t* cr[8];
+ const uint8_t** planes[3] = { y, cb, cr };
+
+ int i, offset;
+ int width = mCompressInfo.image_width;
+ int height = mCompressInfo.image_height;
+ const uint8_t* yPlanar = data;
+ const uint8_t* vuPlanar = data + (width * height);
+ std::vector<uint8_t> uRows(8 * (width >> 1));
+ std::vector<uint8_t> vRows(8 * (width >> 1));
+
+ // NOTE! DANGER! Do not construct any non-trivial objects below setjmp!
+ // The compiler will not generate code to destroy them during the return
+ // below so they will leak. Additionally, do not place any calls to libjpeg
+ // that can fail above this line or any error will cause undefined behavior.
+ if (setjmp(mErrorManager.mJumpBuffer)) {
+ // This is where the error handler will jump in case compression fails
+ // The error manager will ALOG an appropriate error message
+ return false;
+ }
+
+ jpeg_start_compress(&mCompressInfo, TRUE);
+
+ attachExifData(exifData);
+
+ // process 16 lines of Y and 8 lines of U/V each time.
+ while (mCompressInfo.next_scanline < mCompressInfo.image_height) {
+ // deinterleave U and V
+ deinterleave(vuPlanar, uRows, vRows, mCompressInfo.next_scanline,
+ width, height, width);
+
+ // Jpeg library ignores the rows whose indices are greater than height.
+ for (i = 0; i < 16; i++) {
+ // y row
+ y[i] = yPlanar + (mCompressInfo.next_scanline + i) * width;
+
+ // construct u row and v row
+ if ((i & 1) == 0) {
+ // height and width are both halved because of downsampling
+ offset = (i >> 1) * (width >> 1);
+ cb[i/2] = &uRows[offset];
+ cr[i/2] = &vRows[offset];
+ }
+ }
+ jpeg_write_raw_data(&mCompressInfo, const_cast<JSAMPIMAGE>(planes), 16);
+ }
+
+ jpeg_finish_compress(&mCompressInfo);
+ jpeg_destroy_compress(&mCompressInfo);
+
+ return true;
+}
+
+bool NV12Compressor::attachExifData(ExifData* exifData) {
+ if (exifData == nullptr) {
+ // This is not an error, we don't require EXIF data
+ return true;
+ }
+
+ // Save the EXIF data to memory
+ unsigned char* rawData = nullptr;
+ unsigned int size = 0;
+ exif_data_save_data(exifData, &rawData, &size);
+ if (rawData == nullptr) {
+ ALOGE("Failed to create EXIF data block");
+ return false;
+ }
+
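+ // EXIF data is carried in an APP1 segment, hence the JPEG_APP0 + 1 marker.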
+ jpeg_write_marker(&mCompressInfo, JPEG_APP0 + 1, rawData, size);
+ free(rawData);
+ return true;
+}
+
+NV12Compressor::ErrorManager::ErrorManager() {
+ error_exit = &onJpegError;
+}
+
+void NV12Compressor::ErrorManager::onJpegError(j_common_ptr cinfo) {
+ // NOTE! Do not construct any non-trivial objects in this method at the top
+ // scope. Their destructors will not be called. If you do need such an
+ // object create a local scope that does not include the longjmp call,
+ // that ensures the object is destroyed before longjmp is called.
+ ErrorManager* errorManager = reinterpret_cast<ErrorManager*>(cinfo->err);
+
+ // Format and log error message
+ char errorMessage[JMSG_LENGTH_MAX];
+ (*errorManager->format_message)(cinfo, errorMessage);
+ errorMessage[sizeof(errorMessage) - 1] = '\0';
+ ALOGE("JPEG compression error: %s", errorMessage);
+ jpeg_destroy(cinfo);
+
+ // And through the looking glass we go
+ longjmp(errorManager->mJumpBuffer, 1);
+}
+
+NV12Compressor::DestinationManager::DestinationManager() {
+ init_destination = &initDestination;
+ empty_output_buffer = &emptyOutputBuffer;
+ term_destination = &termDestination;
+}
+
+void NV12Compressor::DestinationManager::initDestination(j_compress_ptr cinfo) {
+ auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
+
+ // Start out with some arbitrary but not too large buffer size
+ manager->mBuffer.resize(16 * 1024);
+ manager->next_output_byte = &manager->mBuffer[0];
+ manager->free_in_buffer = manager->mBuffer.size();
+}
+
+boolean NV12Compressor::DestinationManager::emptyOutputBuffer(
+ j_compress_ptr cinfo) {
+ auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
+
+ // Keep doubling the size of the buffer for a very low, amortized
+ // performance cost of the allocations
+ size_t oldSize = manager->mBuffer.size();
+ manager->mBuffer.resize(oldSize * 2);
+ manager->next_output_byte = &manager->mBuffer[oldSize];
+ manager->free_in_buffer = manager->mBuffer.size() - oldSize;
+ return manager->free_in_buffer != 0;
+}
+
+void NV12Compressor::DestinationManager::termDestination(j_compress_ptr cinfo) {
+ auto manager = reinterpret_cast<DestinationManager*>(cinfo->dest);
+
+ // Resize down to the exact size of the output, that is remove as many
+ // bytes as there are left in the buffer
+ manager->mBuffer.resize(manager->mBuffer.size() - manager->free_in_buffer);
+}
+
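+// Walk the JFIF segment headers via their length fields, then scan for the End of
+// Image marker. The tests rely on this to split the JPEGs that are concatenated
+// inside a depth photo container.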
+status_t NV12Compressor::findJpegSize(uint8_t *jpegBuffer, size_t maxSize, size_t *size /*out*/) {
+ if ((size == nullptr) || (jpegBuffer == nullptr)) {
+ return BAD_VALUE;
+ }
+
+ if (checkJpegStart(jpegBuffer) == 0) {
+ return BAD_VALUE;
+ }
+
+ // Read JFIF segment markers, skip over segment data
+ *size = kMarkerLength; // skip past the Start Of Image marker
+ while (*size <= maxSize - kMarkerLength) {
+ segment_t *segment = (segment_t*)(jpegBuffer + *size);
+ uint8_t type = checkJpegMarker(segment->marker);
+ if (type == 0) { // invalid marker, no more segments, begin JPEG data
+ break;
+ }
+ if (type == kEndOfImage || *size > maxSize - sizeof(segment_t)) {
+ return BAD_VALUE;
+ }
+
+ size_t length = ntohs(segment->length);
+ *size += length + kMarkerLength;
+ }
+
+ // Find End of Image
+ // Scan JPEG buffer until End of Image
+ bool foundEnd = false;
+ for ( ; *size <= maxSize - kMarkerLength; (*size)++) {
+ if (checkJpegEnd(jpegBuffer + *size)) {
+ foundEnd = true;
+ *size += kMarkerLength;
+ break;
+ }
+ }
+
+ if (!foundEnd) {
+ return BAD_VALUE;
+ }
+
+ if (*size > maxSize) {
+ *size = maxSize;
+ }
+
+ return OK;
+}
+
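+// Locate the first Start Of Frame marker and read the big-endian dimensions that
+// follow it (see the thumbnail caveat in NV12Compressor.h).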
+status_t NV12Compressor::getJpegImageDimensions(uint8_t *jpegBuffer,
+ size_t jpegBufferSize, size_t *width /*out*/, size_t *height /*out*/) {
+ if ((jpegBuffer == nullptr) || (width == nullptr) || (height == nullptr) ||
+ (jpegBufferSize == 0u)) {
+ return BAD_VALUE;
+ }
+
+ // Scan JPEG buffer until Start of Frame
+ bool foundSOF = false;
+ size_t currentPos;
+ for (currentPos = 0; currentPos <= jpegBufferSize - kMarkerLength; currentPos++) {
+ if (checkStartOfFrame(jpegBuffer + currentPos)) {
+ foundSOF = true;
+ currentPos += kMarkerLength;
+ break;
+ }
+ }
+
+ if (!foundSOF) {
+ ALOGE("%s: Start of Frame not found", __func__);
+ return BAD_VALUE;
+ }
+
+ sof_t *startOfFrame = reinterpret_cast<sof_t *> (jpegBuffer + currentPos);
+ *width = ntohs(startOfFrame->width);
+ *height = ntohs(startOfFrame->height);
+
+ return OK;
+}
+
+status_t NV12Compressor::getExifOrientation(uint8_t *jpegBuffer, size_t jpegBufferSize,
+ ExifOrientation *exifValue /*out*/) {
+ if ((jpegBuffer == nullptr) || (exifValue == nullptr) || (jpegBufferSize == 0u)) {
+ return BAD_VALUE;
+ }
+
+ std::unique_ptr<ExifData> exifData(exif_data_new());
+ if (exifData.get() == nullptr) {
+ return NO_MEMORY;
+ }
+
+ exif_data_load_data(exifData.get(), jpegBuffer, jpegBufferSize);
+ ExifEntry *orientation = exif_content_get_entry(exifData->ifd[EXIF_IFD_0],
+ EXIF_TAG_ORIENTATION);
+ if ((orientation == nullptr) || (orientation->size != sizeof(ExifShort))) {
+ return BAD_VALUE;
+ }
+
+ auto orientationValue = exif_get_short(orientation->data,
+ exif_data_get_byte_order(exifData.get()));
+ status_t ret;
+ switch (orientationValue) {
+ case ExifOrientation::ORIENTATION_0_DEGREES:
+ case ExifOrientation::ORIENTATION_90_DEGREES:
+ case ExifOrientation::ORIENTATION_180_DEGREES:
+ case ExifOrientation::ORIENTATION_270_DEGREES:
+ *exifValue = static_cast<ExifOrientation> (orientationValue);
+ ret = OK;
+ break;
+ default:
+ ALOGE("%s: Unexpected EXIF orientation value: %u", __FUNCTION__, orientationValue);
+ ret = BAD_VALUE;
+ }
+
+ return ret;
+}
diff --git a/services/camera/libcameraservice/tests/NV12Compressor.h b/services/camera/libcameraservice/tests/NV12Compressor.h
new file mode 100644
index 0000000..ee22d5e
--- /dev/null
+++ b/services/camera/libcameraservice/tests/NV12Compressor.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TEST_CAMERA_JPEG_STUB_NV12_COMPRESSOR_H
+#define TEST_CAMERA_JPEG_STUB_NV12_COMPRESSOR_H
+
+#include <setjmp.h>
+#include <stdlib.h>
+extern "C" {
+#include <jpeglib.h>
+#include <jerror.h>
+}
+
+#include <utils/Errors.h>
+#include <vector>
+
+#include "../utils/ExifUtils.h"
+
+struct _ExifData;
+typedef _ExifData ExifData;
+
+class NV12Compressor {
+public:
+ NV12Compressor() {}
+
+ /* Compress |data| which represents raw NV21 encoded data of dimensions
+ * |width| * |height|.
+ */
+ bool compress(const unsigned char* data, int width, int height, int quality);
+ bool compressWithExifOrientation(const unsigned char* data, int width, int height, int quality,
+ android::camera3::ExifOrientation exifValue);
+
+ /* Get a reference to the compressed data; this returns an empty vector
+ * if compress has not been called yet.
+ */
+ const std::vector<unsigned char>& getCompressedData() const;
+
+ // Utility methods
+ static android::status_t findJpegSize(uint8_t *jpegBuffer, size_t maxSize,
+ size_t *size /*out*/);
+
+ static android::status_t getExifOrientation(uint8_t *jpegBuffer,
+ size_t jpegBufferSize, android::camera3::ExifOrientation *exifValue /*out*/);
+
+ /* Get JPEG image dimensions from the first Start Of Frame. Note that, because of
+ * the way the JPEG buffer is scanned, if the image contains a thumbnail the
+ * returned dimensions will be those of the thumbnail, not the main image.
+ */
+ static android::status_t getJpegImageDimensions(uint8_t *jpegBuffer, size_t jpegBufferSize,
+ size_t *width /*out*/, size_t *height /*out*/);
+
+private:
+
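+ // libjpeg destination manager that writes the compressed stream into a growable
+ // in-memory buffer instead of a file.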
+ struct DestinationManager : jpeg_destination_mgr {
+ DestinationManager();
+
+ static void initDestination(j_compress_ptr cinfo);
+ static boolean emptyOutputBuffer(j_compress_ptr cinfo);
+ static void termDestination(j_compress_ptr cinfo);
+
+ std::vector<unsigned char> mBuffer;
+ };
+
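+ // libjpeg error handler that longjmp()s back to the caller's setjmp() point
+ // instead of letting the default handler terminate the process.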
+ struct ErrorManager : jpeg_error_mgr {
+ ErrorManager();
+
+ static void onJpegError(j_common_ptr cinfo);
+
+ jmp_buf mJumpBuffer;
+ };
+
+ static const size_t kMarkerLength = 2; // length of a marker
+ static const uint8_t kMarker = 0xFF; // First byte of marker
+ static const uint8_t kStartOfImage = 0xD8; // Start of Image
+ static const uint8_t kEndOfImage = 0xD9; // End of Image
+ static const uint8_t kStartOfFrame = 0xC0; // Start of Frame
+
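+ // Generic JFIF segment header: two marker bytes followed by a big-endian payload
+ // length that includes the length field itself but not the marker.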
+ struct __attribute__((packed)) segment_t {
+ uint8_t marker[kMarkerLength];
+ uint16_t length;
+ };
+
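+ // Start Of Frame payload as it appears right after the SOF0 marker; height and
+ // width are big-endian, hence the ntohs() calls in getJpegImageDimensions().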
+ struct __attribute__((packed)) sof_t {
+ uint16_t length;
+ uint8_t precision;
+ uint16_t height;
+ uint16_t width;
+ };
+
+ // check for Start Of Frame marker
+ static bool checkStartOfFrame(uint8_t* buf) {
+ return buf[0] == kMarker && buf[1] == kStartOfFrame;
+ }
+
+ // check for start of image marker
+ static bool checkJpegStart(uint8_t* buf) {
+ return buf[0] == kMarker && buf[1] == kStartOfImage;
+ }
+
+ // check for End of Image marker
+ static bool checkJpegEnd(uint8_t *buf) {
+ return buf[0] == kMarker && buf[1] == kEndOfImage;
+ }
+
+ // check for arbitrary marker, returns marker type (second byte)
+ // returns 0 if no marker found. Note: 0x00 is not a valid marker type
+ static uint8_t checkJpegMarker(uint8_t *buf) {
+ return (buf[0] == kMarker) ? buf[1] : 0;
+ }
+
+ jpeg_compress_struct mCompressInfo;
+ DestinationManager mDestManager;
+ ErrorManager mErrorManager;
+
+ bool configureCompressor(int width, int height, int quality);
+ bool compressData(const unsigned char* data, ExifData* exifData);
+ bool attachExifData(ExifData* exifData);
+};
+
+#endif // TEST_CAMERA_JPEG_STUB_NV12_COMPRESSOR_H
+
diff --git a/services/camera/libcameraservice/utils/ExifUtils.cpp b/services/camera/libcameraservice/utils/ExifUtils.cpp
index 4dea8b5..c0afdc1 100644
--- a/services/camera/libcameraservice/utils/ExifUtils.cpp
+++ b/services/camera/libcameraservice/utils/ExifUtils.cpp
@@ -55,6 +55,7 @@
// Initialize() can be called multiple times. The setting of Exif tags will be
// cleared.
virtual bool initialize(const unsigned char *app1Segment, size_t app1SegmentSize);
+ virtual bool initializeEmpty();
// set all known fields from a metadata structure
virtual bool setFromMetadata(const CameraMetadata& metadata,
@@ -150,7 +151,11 @@
// sets image orientation.
// Returns false if memory allocation fails.
- virtual bool setOrientation(uint16_t orientation);
+ virtual bool setOrientation(uint16_t degrees);
+
+ // sets image orientation.
+ // Returns false if memory allocation fails.
+ virtual bool setOrientationValue(ExifOrientation orientationValue);
// sets the shutter speed.
// Returns false if memory allocation fails.
@@ -314,6 +319,26 @@
return true;
}
+bool ExifUtilsImpl::initializeEmpty() {
+ reset();
+ exif_data_ = exif_data_new();
+ if (exif_data_ == nullptr) {
+ ALOGE("%s: allocate memory for exif_data_ failed", __FUNCTION__);
+ return false;
+ }
+ // set the image options.
+ exif_data_set_option(exif_data_, EXIF_DATA_OPTION_FOLLOW_SPECIFICATION);
+ exif_data_set_data_type(exif_data_, EXIF_DATA_TYPE_COMPRESSED);
+ exif_data_set_byte_order(exif_data_, EXIF_BYTE_ORDER_INTEL);
+
+ // set exif version to 2.2.
+ if (!setExifVersion("0220")) {
+ return false;
+ }
+
+ return true;
+}
+
bool ExifUtilsImpl::setAperture(float aperture) {
float apexValue = convertToApex(aperture);
SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_APERTURE_VALUE,
@@ -609,32 +634,26 @@
return true;
}
-bool ExifUtilsImpl::setOrientation(uint16_t orientation) {
- /*
- * Orientation value:
- * 1 2 3 4 5 6 7 8
- *
- * 888888 888888 88 88 8888888888 88 88 8888888888
- * 88 88 88 88 88 88 88 88 88 88 88 88
- * 8888 8888 8888 8888 88 8888888888 8888888888 88
- * 88 88 88 88
- * 88 88 888888 888888
- */
- int value = 1;
- switch (orientation) {
+bool ExifUtilsImpl::setOrientation(uint16_t degrees) {
+ ExifOrientation value = ExifOrientation::ORIENTATION_0_DEGREES;
+ switch (degrees) {
case 90:
- value = 6;
+ value = ExifOrientation::ORIENTATION_90_DEGREES;
break;
case 180:
- value = 3;
+ value = ExifOrientation::ORIENTATION_180_DEGREES;
break;
case 270:
- value = 8;
+ value = ExifOrientation::ORIENTATION_270_DEGREES;
break;
default:
break;
}
- SET_SHORT(EXIF_IFD_0, EXIF_TAG_ORIENTATION, value);
+ return setOrientationValue(value);
+}
+
+bool ExifUtilsImpl::setOrientationValue(ExifOrientation orientationValue) {
+ SET_SHORT(EXIF_IFD_0, EXIF_TAG_ORIENTATION, orientationValue);
return true;
}
diff --git a/services/camera/libcameraservice/utils/ExifUtils.h b/services/camera/libcameraservice/utils/ExifUtils.h
index c78bab9..f1d0205 100644
--- a/services/camera/libcameraservice/utils/ExifUtils.h
+++ b/services/camera/libcameraservice/utils/ExifUtils.h
@@ -22,6 +22,24 @@
namespace android {
namespace camera3 {
+/*
+ * Orientation value:
+ * 1 2 3 4 5 6 7 8
+ *
+ * 888888 888888 88 88 8888888888 88 88 8888888888
+ * 88 88 88 88 88 88 88 88 88 88 88 88
+ * 8888 8888 8888 8888 88 8888888888 8888888888 88
+ * 88 88 88 88
+ * 88 88 888888 888888
+ */
+enum ExifOrientation : uint16_t {
+ ORIENTATION_UNDEFINED = 0x0,
+ ORIENTATION_0_DEGREES = 0x1,
+ ORIENTATION_90_DEGREES = 0x6,
+ ORIENTATION_180_DEGREES = 0x3,
+ ORIENTATION_270_DEGREES = 0x8,
+};
+
// This is based on the camera HIDL shim implementation, which was in turned
// based on original ChromeOS ARC implementation of a V4L2 HAL
@@ -49,6 +67,7 @@
// Initialize() can be called multiple times. The setting of Exif tags will be
// cleared.
virtual bool initialize(const unsigned char *app1Segment, size_t app1SegmentSize) = 0;
+ virtual bool initializeEmpty() = 0;
// Set all known fields from a metadata structure
virtual bool setFromMetadata(const CameraMetadata& metadata,
@@ -142,7 +161,11 @@
// Sets image orientation.
// Returns false if memory allocation fails.
- virtual bool setOrientation(uint16_t orientation) = 0;
+ virtual bool setOrientation(uint16_t degrees) = 0;
+
+ // Sets image orientation.
+ // Returns false if memory allocation fails.
+ virtual bool setOrientationValue(ExifOrientation orientationValue) = 0;
// Sets the shutter speed.
// Returns false if memory allocation fails.
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 80d3630..1470de2 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -28,6 +28,7 @@
"libcodec2_soft_amrwbdec",
"libcodec2_soft_amrwbenc",
"libcodec2_soft_hevcdec",
+ "libcodec2_soft_hevcenc",
"libcodec2_soft_g711alawdec",
"libcodec2_soft_g711mlawdec",
"libcodec2_soft_mpeg2dec",