Merge "media: update on concurrent codec usage" into udc-dev
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 95610fa..703033b 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -265,14 +265,16 @@
needsUpdate = true;
}
}
- // If not found, set to the highest supported level.
- if (!found) {
+ // If not found or exceeds max level, set to the highest supported level.
#ifdef MPEG4
+ if (!found || me.v.level > LEVEL_MP4V_2) {
me.set().level = LEVEL_MP4V_2;
-#else
- me.set().level = LEVEL_H263_40;
-#endif
}
+#else
+ if (!found || (me.v.level != LEVEL_H263_45 && me.v.level > LEVEL_H263_40)) {
+ me.set().level = LEVEL_H263_40;
+ }
+#endif
return C2R::Ok();
}
@@ -288,18 +290,6 @@
return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}
- ProfileLevelType getProfileLevel_l() const {
-#ifdef MPEG4
- if (mProfileLevel->level == LEVEL_MP4V_0) return SIMPLE_PROFILE_LEVEL0;
- else if (mProfileLevel->level == LEVEL_MP4V_1) return SIMPLE_PROFILE_LEVEL1;
- return SIMPLE_PROFILE_LEVEL2; // level == LEVEL_MP4V_2
-#else
- // library does not export h263 specific levels. No way to map C2 enums to
- // library specific constants. Return max supported level.
- return CORE_PROFILE_LEVEL2;
-#endif
- }
-
private:
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
@@ -416,7 +406,7 @@
mEncParams->encFrameRate[0] = mFrameRate->value + 0.5;
mEncParams->rcType = VBR_1;
mEncParams->vbvDelay = VBV_DELAY;
- mEncParams->profile_level = mProfileLevel;
+ mEncParams->profile_level = CORE_PROFILE_LEVEL2;
mEncParams->packetSize = 32;
mEncParams->rvlcEnable = PV_OFF;
mEncParams->numLayers = 1;
@@ -457,7 +447,6 @@
mSize = mIntf->getSize_l();
mBitrate = mIntf->getBitrate_l();
mFrameRate = mIntf->getFrameRate_l();
- mProfileLevel = mIntf->getProfileLevel_l();
}
c2_status_t err = initEncParams();
if (C2_OK != err) {
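
Note on the setter change above: the reworked validation no longer just handles the "level not found" case, it also clamps requested levels that exceed what the encoder supports, and the library-specific profile/level mapping is dropped in favor of always configuring CORE_PROFILE_LEVEL2. A minimal standalone sketch of the clamping rule follows, assuming the C2 level constants compare by capability as the diff's `>` comparison implies; the enum values and helper name here are illustrative placeholders, not the real C2Config.h definitions:

    #include <cstdint>

    // Placeholder ordering; real constants live in C2Config.h.
    enum Level : uint32_t {
        LEVEL_MP4V_0, LEVEL_MP4V_1, LEVEL_MP4V_2,
        LEVEL_H263_10, LEVEL_H263_20, LEVEL_H263_30, LEVEL_H263_40, LEVEL_H263_45,
    };

    // Mirrors the setter logic in the hunk above: unknown or too-high levels
    // fall back to the highest level the soft encoder supports.
    Level clampLevel(bool found, Level requested, bool isMpeg4) {
        if (isMpeg4) {
            // MPEG-4: anything not found or above Simple Profile Level 2 is clamped.
            if (!found || requested > LEVEL_MP4V_2) return LEVEL_MP4V_2;
        } else {
            // H.263: Level 45 is accepted as-is; anything else not found or above
            // Level 40 falls back to Level 40.
            if (!found || (requested != LEVEL_H263_45 && requested > LEVEL_H263_40)) {
                return LEVEL_H263_40;
            }
        }
        return requested;
    }
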
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
index e5c8ea6..43461fc 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
@@ -65,7 +65,6 @@
std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
- ProfileLevelType mProfileLevel;
int64_t mNumInputFrames;
MP4EncodingMode mEncodeMode;
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index e818759..a45365a 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -144,7 +144,7 @@
"libhidlbase",
"libimage_io",
"libjpeg",
- "libjpegrecoverymap",
+ "libultrahdr",
"libmedia_codeclist",
"libmedia_omx",
"libmemunreachable",
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index 8a65a67..8223371 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -26,7 +26,7 @@
#include "common/CameraProviderManager.h"
#include <gui/Surface.h>
-#include <jpegrecoverymap/jpegr.h>
+#include <ultrahdr/jpegr.h>
#include <utils/ExifUtils.h>
#include <utils/Log.h>
#include "utils/SessionConfigurationUtils.h"
@@ -292,13 +292,13 @@
}
size_t actualJpegRSize = 0;
- jpegrecoverymap::jpegr_uncompressed_struct p010;
- jpegrecoverymap::jpegr_compressed_struct jpegR;
- jpegrecoverymap::JpegR jpegREncoder;
+ ultrahdr::jpegr_uncompressed_struct p010;
+ ultrahdr::jpegr_compressed_struct jpegR;
+ ultrahdr::JpegR jpegREncoder;
p010.height = inputFrame.p010Buffer.height;
p010.width = inputFrame.p010Buffer.width;
- p010.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_BT2100;
+ p010.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
p010.data = inputFrame.p010Buffer.data;
p010.chroma_data = inputFrame.p010Buffer.dataCb;
// Strides are expected to be in pixels not bytes
@@ -308,18 +308,18 @@
jpegR.data = dstBuffer;
jpegR.maxLength = maxJpegRBufferSize;
- jpegrecoverymap::jpegr_transfer_function transferFunction;
+ ultrahdr::ultrahdr_transfer_function transferFunction;
switch (mP010DynamicRange) {
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
- transferFunction = jpegrecoverymap::jpegr_transfer_function::JPEGR_TF_PQ;
+ transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_PQ;
break;
default:
- transferFunction = jpegrecoverymap::jpegr_transfer_function::JPEGR_TF_HLG;
+ transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;
}
if (mSupportInternalJpeg) {
- jpegrecoverymap::jpegr_compressed_struct jpeg;
+ ultrahdr::jpegr_compressed_struct jpeg;
jpeg.data = inputFrame.jpegBuffer.data;
jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
@@ -331,9 +331,9 @@
}
if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
- jpeg.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_P3;
+ jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_P3;
} else {
- jpeg.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_BT709;
+ jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT709;
}
res = jpegREncoder.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR);
@@ -351,7 +351,7 @@
ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
}
- jpegrecoverymap::jpegr_exif_struct exif;
+ ultrahdr::jpegr_exif_struct exif;
exif.data = reinterpret_cast<void*>(const_cast<uint8_t*>(exifBuffer));
exif.length = exifBufferSize;
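
For reference, the rename from jpegrecoverymap to ultrahdr keeps the same call shape. Below is a condensed sketch of the encode path as it reads after this change, using only the ultrahdr/jpegr.h types and fields that appear in the diff; the helper name, buffer parameters, gamut choices, and the fixed HLG transfer function are illustrative assumptions, not part of the change itself:

    #include <ultrahdr/jpegr.h>

    // Returns whatever status type libultrahdr's encodeJPEGR reports (seen as `res` above).
    auto encodeJpegRSketch(void* p010Data, void* p010ChromaData, int width, int height,
                           void* sdrJpegData, int sdrJpegLength,
                           void* outBuffer, int outCapacity) {
        ultrahdr::jpegr_uncompressed_struct p010{};
        p010.width = width;
        p010.height = height;
        p010.data = p010Data;
        p010.chroma_data = p010ChromaData;
        p010.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
        // Strides (in pixels, not bytes, per the comment in the diff) are also set here.

        ultrahdr::jpegr_compressed_struct jpeg{};   // already-encoded SDR JPEG
        jpeg.data = sdrJpegData;
        jpeg.length = sdrJpegLength;
        jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT709;

        ultrahdr::jpegr_compressed_struct jpegR{};  // output JPEG/R container
        jpegR.data = outBuffer;
        jpegR.maxLength = outCapacity;

        // HDR10/HDR10+ inputs use PQ; everything else uses HLG, mirroring the
        // switch on mP010DynamicRange above. HLG is assumed here for brevity.
        ultrahdr::ultrahdr_transfer_function tf =
                ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;

        ultrahdr::JpegR jpegREncoder;
        return jpegREncoder.encodeJPEGR(&p010, &jpeg, tf, &jpegR);
    }
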