Merge "Bufferpool: Check hidl call return value." into qt-dev
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index efeab6c..0d3357f 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -86,8 +86,8 @@
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(320, 1920, 2),
- C2F(mSize, height).inRange(128, 1088, 2),
+ C2F(mSize, width).inRange(2, 1920, 2),
+ C2F(mSize, height).inRange(2, 1088, 2),
})
.withSetter(SizeSetter)
.build());
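
The encoder's advertised picture-size range is widened here from 320x128 minimum down to 2x2 (the third inRange() argument keeps the 2-pixel step), matching the media_codecs XML limit updates further down in this patch. A minimal client-side sketch of the new bounds, purely illustrative and not part of the patch:

    #include <cstdint>

    // Mirrors the updated C2F(mSize, ...) ranges above; treat the exact numbers as
    // assumptions if the component is configured differently on a given device.
    static bool isSizeInHevcEncRange(uint32_t width, uint32_t height) {
        if (width < 2 || width > 1920 || height < 2 || height > 1088) return false;
        // inRange(min, max, 2) also constrains the step: both dimensions must be even.
        return (width % 2 == 0) && (height % 2 == 0);
    }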
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 42f507f..a52ca15 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -18,6 +18,8 @@
#define LOG_TAG "C2SoftVpxDec"
#include <log/log.h>
+#include <algorithm>
+
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
@@ -303,13 +305,43 @@
#endif
};
+C2SoftVpxDec::ConverterThread::ConverterThread(
+ const std::shared_ptr<Mutexed<ConversionQueue>> &queue)
+ : Thread(false), mQueue(queue) {}
+
+bool C2SoftVpxDec::ConverterThread::threadLoop() {
+ Mutexed<ConversionQueue>::Locked queue(*mQueue);
+ if (queue->entries.empty()) {
+ queue.waitForCondition(queue->cond);
+ if (queue->entries.empty()) {
+ return true;
+ }
+ }
+ std::function<void()> convert = queue->entries.front();
+ queue->entries.pop_front();
+ if (!queue->entries.empty()) {
+ queue->cond.signal();
+ }
+ queue.unlock();
+
+ convert();
+
+ queue.lock();
+ if (--queue->numPending == 0u) {
+ queue->cond.broadcast();
+ }
+ return true;
+}
+
C2SoftVpxDec::C2SoftVpxDec(
const char *name,
c2_node_id_t id,
const std::shared_ptr<IntfImpl> &intfImpl)
: SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
mIntf(intfImpl),
- mCodecCtx(nullptr) {
+ mCodecCtx(nullptr),
+ mCoreCount(1),
+ mQueue(new Mutexed<ConversionQueue>) {
}
C2SoftVpxDec::~C2SoftVpxDec() {
@@ -399,7 +431,7 @@
vpx_codec_dec_cfg_t cfg;
memset(&cfg, 0, sizeof(vpx_codec_dec_cfg_t));
- cfg.threads = GetCPUCoreCount();
+ cfg.threads = mCoreCount = GetCPUCoreCount();
vpx_codec_flags_t flags;
memset(&flags, 0, sizeof(vpx_codec_flags_t));
@@ -413,6 +445,18 @@
return UNKNOWN_ERROR;
}
+ if (mMode == MODE_VP9) {
+ using namespace std::string_literals;
+ for (int i = 0; i < mCoreCount; ++i) {
+ sp<ConverterThread> thread(new ConverterThread(mQueue));
+ mConverterThreads.push_back(thread);
+ if (thread->run(("vp9conv #"s + std::to_string(i)).c_str(),
+ ANDROID_PRIORITY_AUDIO) != OK) {
+ return UNKNOWN_ERROR;
+ }
+ }
+ }
+
return OK;
}
@@ -422,6 +466,21 @@
delete mCodecCtx;
mCodecCtx = nullptr;
}
+ bool running = true;
+ for (const sp<ConverterThread> &thread : mConverterThreads) {
+ thread->requestExit();
+ }
+ while (running) {
+ mQueue->lock()->cond.broadcast();
+ running = false;
+ for (const sp<ConverterThread> &thread : mConverterThreads) {
+ if (thread->isRunning()) {
+ running = true;
+ break;
+ }
+ }
+ }
+ mConverterThreads.clear();
return OK;
}
@@ -759,15 +818,35 @@
const uint16_t *srcV = (const uint16_t *)img->planes[VPX_PLANE_V];
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
- convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2,
- dstYStride / sizeof(uint32_t),
- mWidth, mHeight);
+ Mutexed<ConversionQueue>::Locked queue(*mQueue);
+ size_t i = 0;
+ constexpr size_t kHeight = 64;
+ for (; i < mHeight; i += kHeight) {
+ queue->entries.push_back(
+ [dst, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride, dstYStride,
+ width = mWidth, height = std::min(mHeight - i, kHeight)] {
+ convertYUV420Planar16ToY410(
+ (uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
+ width, height);
+ });
+ srcY += srcYStride / 2 * kHeight;
+ srcU += srcUStride / 2 * (kHeight / 2);
+ srcV += srcVStride / 2 * (kHeight / 2);
+ dst += dstYStride * kHeight;
+ }
+ CHECK_EQ(0u, queue->numPending);
+ queue->numPending = queue->entries.size();
+ while (queue->numPending > 0) {
+ queue->cond.signal();
+ queue.waitForCondition(queue->cond);
+ }
} else {
convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2,
- dstYStride, dstUVStride,
- mWidth, mHeight);
+ srcUStride / 2, srcVStride / 2,
+ dstYStride, dstUVStride,
+ mWidth, mHeight);
}
} else {
const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
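
The new ConverterThread/ConversionQueue pair fans the 10-bit-to-Y410 conversion out over GetCPUCoreCount() worker threads in 64-row bands: the caller queues one closure per band, wakes the workers, and blocks until numPending drops back to zero. A minimal sketch of the same pattern using only standard C++ primitives (std::mutex/std::condition_variable rather than Android's Mutexed<> and Thread classes; all names below are illustrative):

    #include <algorithm>
    #include <condition_variable>
    #include <cstddef>
    #include <deque>
    #include <functional>
    #include <mutex>

    struct ConversionQueue {
        std::mutex lock;
        std::condition_variable cond;
        std::deque<std::function<void()>> entries;
        size_t numPending = 0;
        bool exiting = false;
    };

    // Body of each worker, e.g. started with std::thread(converterLoop, &queue).
    static void converterLoop(ConversionQueue *q) {
        for (;;) {
            std::function<void()> job;
            {
                std::unique_lock<std::mutex> l(q->lock);
                q->cond.wait(l, [q] { return q->exiting || !q->entries.empty(); });
                if (q->exiting && q->entries.empty()) return;
                job = std::move(q->entries.front());
                q->entries.pop_front();
            }
            job();  // run the conversion for one band outside the lock
            std::lock_guard<std::mutex> l(q->lock);
            if (--q->numPending == 0) q->cond.notify_all();  // wake the waiting producer
        }
    }

    // Producer side: queue one job per 64-row band, then wait for all of them.
    static void convertInBands(ConversionQueue *q, size_t height,
                               const std::function<void(size_t row, size_t rows)> &convertBand) {
        constexpr size_t kBand = 64;
        std::unique_lock<std::mutex> l(q->lock);
        for (size_t row = 0; row < height; row += kBand) {
            size_t rows = std::min(height - row, kBand);
            q->entries.push_back([convertBand, row, rows] { convertBand(row, rows); });
        }
        q->numPending = q->entries.size();
        q->cond.notify_all();
        q->cond.wait(l, [q] { return q->numPending == 0; });
    }

Because the bands are independent, the only shared state is the job list and the pending counter; the patch's version signals the condition once per dequeued entry so idle converter threads keep draining the queue.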
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index 60c8484..e51bcee 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -50,6 +50,19 @@
MODE_VP9,
} mMode;
+ struct ConversionQueue;
+
+ class ConverterThread : public Thread {
+ public:
+ explicit ConverterThread(
+ const std::shared_ptr<Mutexed<ConversionQueue>> &queue);
+ ~ConverterThread() override = default;
+ bool threadLoop() override;
+
+ private:
+ std::shared_ptr<Mutexed<ConversionQueue>> mQueue;
+ };
+
std::shared_ptr<IntfImpl> mIntf;
vpx_codec_ctx_t *mCodecCtx;
bool mFrameParallelMode; // Frame parallel is only supported by VP9 decoder.
@@ -59,6 +72,15 @@
bool mSignalledOutputEos;
bool mSignalledError;
+ int mCoreCount;
+ struct ConversionQueue {
+ std::list<std::function<void()>> entries;
+ Condition cond;
+ size_t numPending{0u};
+ };
+ std::shared_ptr<Mutexed<ConversionQueue>> mQueue;
+ std::vector<sp<ConverterThread>> mConverterThreads;
+
status_t initDecoder();
status_t destroyDecoder();
void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work,
diff --git a/media/extractors/mkv/MatroskaExtractor.cpp b/media/extractors/mkv/MatroskaExtractor.cpp
index ab76edc..b1eb301 100644
--- a/media/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/extractors/mkv/MatroskaExtractor.cpp
@@ -1993,6 +1993,12 @@
}
} else if (!strcmp("V_AV1", codecID)) {
AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
+ if (codecPrivateSize > 0) {
+ // 'csd-0' for AV1 is the Blob of Codec Private data as
+ // specified in https://aomediacodec.github.io/av1-isobmff/.
+ AMediaFormat_setBuffer(
+ meta, AMEDIAFORMAT_KEY_CSD_0, codecPrivate, codecPrivateSize);
+ }
} else if (!strcmp("V_MPEG2", codecID) || !strcmp("V_MPEG1", codecID)) {
AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME,
MEDIA_MIMETYPE_VIDEO_MPEG2);
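
With this change the Matroska CodecPrivate blob for AV1 tracks (the av1C / AV1CodecConfigurationRecord contents) is surfaced to clients as csd-0. An illustrative consumer-side check, not part of the patch, using the public NDK format API:

    #include <cstddef>
    #include <cstdint>
    #include <media/NdkMediaFormat.h>

    static bool hasAv1ConfigRecord(AMediaFormat *format) {
        void *data = nullptr;
        size_t size = 0;
        if (!AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
            return false;  // the track carried no CodecPrivate, so nothing was exported
        }
        // Per https://aomediacodec.github.io/av1-isobmff/, byte 0 of the record is
        // marker (1 bit, must be 1) plus version (7 bits, must be 1), i.e. 0x81.
        return size >= 4 && static_cast<const uint8_t *>(data)[0] == 0x81;
    }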
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index e01fc90..f54690d 100755
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1772,7 +1772,7 @@
int64_t codecDelay = pre_skip * 1000000000ll / kOpusSampleRate;
AMediaFormat_setBuffer(mLastTrack->meta,
- AMEDIAFORMAT_KEY_CSD_0, opusInfo, sizeof(opusInfo));
+ AMEDIAFORMAT_KEY_CSD_0, opusInfo, opusInfoSize);
AMediaFormat_setBuffer(mLastTrack->meta,
AMEDIAFORMAT_KEY_CSD_1, &codecDelay, sizeof(codecDelay));
AMediaFormat_setBuffer(mLastTrack->meta,
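
The MPEG4Extractor fix replaces sizeof(opusInfo), the capacity of the fixed-size scratch array, with opusInfoSize, the number of bytes actually populated, so csd-0 no longer carries trailing garbage. The bug pattern in miniature (hypothetical names, not code from the patch):

    #include <algorithm>
    #include <cstdint>
    #include <cstring>

    static size_t fillCsd(const uint8_t *src, size_t srcLen,
                          uint8_t *opusInfo, size_t capacity) {
        // Only the first opusInfoSize bytes are meaningful; in the original code the
        // destination was a fixed-size array, so sizeof() reported its capacity
        // rather than the bytes written and over-reported the CSD length.
        size_t opusInfoSize = std::min(srcLen, capacity);
        memcpy(opusInfo, src, opusInfoSize);
        return opusInfoSize;
    }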
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 3de934f..135151f 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -39,6 +39,7 @@
#include <media/stagefright/foundation/ByteUtils.h>
#include <media/stagefright/foundation/OpusHeader.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaDefs.h>
#include <media/AudioSystem.h>
#include <media/MediaPlayerInterface.h>
@@ -573,6 +574,68 @@
}
}
+static void parseAV1ProfileLevelFromCsd(const sp<ABuffer> &csd, sp<AMessage> &format) {
+ // Parse CSD structure to extract profile level information
+ // https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox
+ const uint8_t *data = csd->data();
+ size_t remaining = csd->size();
+ if (remaining < 4 || data[0] != 0x81) { // configurationVersion == 1
+ return;
+ }
+ uint8_t profileData = (data[1] & 0xE0) >> 5;
+ uint8_t levelData = data[1] & 0x1F;
+ uint8_t highBitDepth = (data[2] & 0x40) >> 6;
+
+ const static ALookup<std::pair<uint8_t, uint8_t>, int32_t> profiles {
+ { { 0, 0 }, AV1ProfileMain8 },
+ { { 1, 0 }, AV1ProfileMain10 },
+ };
+
+ int32_t profile;
+ if (profiles.map(std::make_pair(highBitDepth, profileData), &profile)) {
+ // bump to HDR profile
+ if (isHdr(format) && profile == AV1ProfileMain10) {
+ if (format->contains("hdr10-plus-info")) {
+ profile = AV1ProfileMain10HDR10Plus;
+ } else {
+ profile = AV1ProfileMain10HDR10;
+ }
+ }
+ format->setInt32("profile", profile);
+ }
+ const static ALookup<uint8_t, int32_t> levels {
+ { 0, AV1Level2 },
+ { 1, AV1Level21 },
+ { 2, AV1Level22 },
+ { 3, AV1Level23 },
+ { 4, AV1Level3 },
+ { 5, AV1Level31 },
+ { 6, AV1Level32 },
+ { 7, AV1Level33 },
+ { 8, AV1Level4 },
+ { 9, AV1Level41 },
+ { 10, AV1Level42 },
+ { 11, AV1Level43 },
+ { 12, AV1Level5 },
+ { 13, AV1Level51 },
+ { 14, AV1Level52 },
+ { 15, AV1Level53 },
+ { 16, AV1Level6 },
+ { 17, AV1Level61 },
+ { 18, AV1Level62 },
+ { 19, AV1Level63 },
+ { 20, AV1Level7 },
+ { 21, AV1Level71 },
+ { 22, AV1Level72 },
+ { 23, AV1Level73 },
+ };
+
+ int32_t level;
+ if (levels.map(levelData, &level)) {
+ format->setInt32("level", level);
+ }
+}
+
static std::vector<std::pair<const char *, uint32_t>> stringMappings {
{
@@ -1234,6 +1297,7 @@
buffer->meta()->setInt32("csd", true);
buffer->meta()->setInt64("timeUs", 0);
msg->setBuffer("csd-0", buffer);
+ parseAV1ProfileLevelFromCsd(buffer, msg);
} else if (meta->findData(kKeyESDS, &type, &data, &size)) {
ESDS esds((const char *)data, size);
if (esds.InitCheck() != (status_t)OK) {
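
parseAV1ProfileLevelFromCsd() reads the first bytes of the AV1CodecConfigurationRecord attached as csd-0 and maps them onto the MediaCodecConstants profile/level values. For reference, the bit layout it relies on, as a standalone extraction sketch (struct and function names here are illustrative):

    #include <cstddef>
    #include <cstdint>

    struct Av1ConfigBits {
        uint8_t seqProfile;     // seq_profile: top 3 bits of byte 1
        uint8_t seqLevelIdx0;   // seq_level_idx_0: low 5 bits of byte 1
        bool highBitDepth;      // high_bitdepth: bit 6 of byte 2
    };

    static bool parseAv1ConfigBits(const uint8_t *data, size_t size, Av1ConfigBits *out) {
        // Byte 0 is marker (1) | version (7); version 1 gives 0x81, the same check as above.
        if (size < 4 || data[0] != 0x81) return false;
        out->seqProfile   = (data[1] & 0xE0) >> 5;
        out->seqLevelIdx0 =  data[1] & 0x1F;
        out->highBitDepth = ((data[2] & 0x40) >> 6) != 0;
        return true;
    }

In the lookup table above, high_bitdepth 0 with seq_profile 0 maps to AV1ProfileMain8 and high_bitdepth 1 with seq_profile 0 to AV1ProfileMain10; HDR metadata in the format then bumps Main10 to the HDR10 or HDR10+ variant.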
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index f785bfa..04041eb 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -109,11 +109,12 @@
</MediaCodec>
<MediaCodec name="c2.android.hevc.encoder" type="video/hevc">
<!-- profiles and levels: ProfileMain : MainTierLevel51 -->
- <Limit name="size" min="320x128" max="512x512" />
+ <Limit name="size" min="2x2" max="512x512" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="8x8" />
<Limit name="block-count" range="1-4096" /> <!-- max 512x512 -->
<Limit name="blocks-per-second" range="1-122880" />
+ <Limit name="frame-rate" range="1-120" />
<Limit name="bitrate" range="1-10000000" />
<Limit name="complexity" range="0-10" default="0" />
<Limit name="quality" range="0-100" default="80" />
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 7ee1f4d..37f3f61 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -296,11 +296,12 @@
</MediaCodec>
<MediaCodec name="c2.android.hevc.encoder" type="video/hevc" variant="!slow-cpu">
<!-- profiles and levels: ProfileMain : MainTierLevel51 -->
- <Limit name="size" min="320x128" max="512x512" />
+ <Limit name="size" min="2x2" max="512x512" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="8x8" />
<Limit name="block-count" range="1-4096" /> <!-- max 512x512 -->
<Limit name="blocks-per-second" range="1-122880" />
+ <Limit name="frame-rate" range="1-120" />
<Limit name="bitrate" range="1-10000000" />
<Limit name="complexity" range="0-10" default="0" />
<Limit name="quality" range="0-100" default="80" />
diff --git a/media/libstagefright/foundation/OpusHeader.cpp b/media/libstagefright/foundation/OpusHeader.cpp
index acb9ccf..513e41f 100644
--- a/media/libstagefright/foundation/OpusHeader.cpp
+++ b/media/libstagefright/foundation/OpusHeader.cpp
@@ -208,7 +208,7 @@
headerLen += AOPUS_LENGTH_SIZE;
int headerSize = WriteOpusHeader(header, inputSampleRate, output + headerLen,
- outputSize);
+ outputSize - headerLen);
if (headerSize < 0) {
ALOGD("%s: WriteOpusHeader failed", __func__);
return -1;
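
The OpusHeader fix accounts for the bytes already reserved at the front of the output buffer: the header is written at offset headerLen, so the writer must be told the remaining capacity rather than the total size. The general shape of the fix (hypothetical helper, not from the patch):

    #include <cstddef>
    #include <cstdint>

    static int writeAtOffset(uint8_t *output, size_t outputSize, size_t headerLen,
                             int (*writer)(uint8_t *dst, size_t capacity)) {
        if (headerLen > outputSize) return -1;
        // Pass the space left after the offset; passing outputSize here would let
        // the writer run past the end of the buffer, which is what the patch prevents.
        return writer(output + headerLen, outputSize - headerLen);
    }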
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 7008cee..a093893 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -31,6 +31,7 @@
private:
OpPlayAudioMonitor(uid_t uid, audio_usage_t usage, int id);
void onFirstRef() override;
+ static void getPackagesForUid(uid_t uid, Vector<String16>& packages);
AppOpsManager mAppOpsManager;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index b0817ed..78db80c 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -389,9 +389,16 @@
AudioFlinger::PlaybackThread::OpPlayAudioMonitor::createIfNeeded(
uid_t uid, const audio_attributes_t& attr, int id, audio_stream_type_t streamType)
{
- if (isAudioServerOrRootUid(uid)) {
- ALOGD("OpPlayAudio: not muting track:%d usage:%d root or audioserver", id, attr.usage);
- return nullptr;
+ if (isServiceUid(uid)) {
+ Vector <String16> packages;
+ getPackagesForUid(uid, packages);
+ if (packages.isEmpty()) {
+ ALOGD("OpPlayAudio: not muting track:%d usage:%d for service UID %d",
+ id,
+ attr.usage,
+ uid);
+ return nullptr;
+ }
}
// stream type has been filtered by audio policy to indicate whether it can be muted
if (streamType == AUDIO_STREAM_ENFORCED_AUDIBLE) {
@@ -423,8 +430,7 @@
void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::onFirstRef()
{
- PermissionController permissionController;
- permissionController.getPackagesForUid(mUid, mPackages);
+ getPackagesForUid(mUid, mPackages);
checkPlayAudioForUsage();
if (!mPackages.isEmpty()) {
mOpCallback = new PlayAudioOpCallback(this);
@@ -475,6 +481,14 @@
}
}
+// static
+void AudioFlinger::PlaybackThread::OpPlayAudioMonitor::getPackagesForUid(
+ uid_t uid, Vector<String16>& packages)
+{
+ PermissionController permissionController;
+ permissionController.getPackagesForUid(uid, packages);
+}
+
// ----------------------------------------------------------------------------
#undef LOG_TAG
#define LOG_TAG "AF::Track"
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 743c816..5a87134 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -330,9 +330,9 @@
}
bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall) {
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
- return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall);
+ return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}
bool HeicCompositeStream::isInMemoryTempFileSupported() {
@@ -1115,8 +1115,9 @@
ALOGV("%s", __FUNCTION__);
bool useGrid = false;
+ AString hevcName;
bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
- &mUseHeic, &useGrid, nullptr);
+ &mUseHeic, &useGrid, nullptr, &hevcName);
if (!isSizeSupported) {
ALOGE("%s: Encoder doesnt' support size %u x %u!",
__FUNCTION__, width, height);
@@ -1138,7 +1139,11 @@
}
// Create HEIC/HEVC codec.
- mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
+ if (mUseHeic) {
+ mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
+ } else {
+ mCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
+ }
if (mCodec == nullptr) {
ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
return NO_INIT;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 2aa3c38..260c68e 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -71,7 +71,7 @@
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall);
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr);
static bool isInMemoryTempFileSupported();
protected:
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
index ed9be6e..d7cc2bf 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
@@ -49,7 +49,7 @@
}
bool HeicEncoderInfoManager::isSizeSupported(int32_t width, int32_t height, bool* useHeic,
- bool* useGrid, int64_t* stall) const {
+ bool* useGrid, int64_t* stall, AString* hevcName) const {
if (useHeic == nullptr || useGrid == nullptr) {
ALOGE("%s: invalid parameters: useHeic %p, useGrid %p",
__FUNCTION__, useHeic, useGrid);
@@ -72,6 +72,9 @@
(width <= 1920 && height <= 1080))) {
enableGrid = false;
}
+ if (hevcName != nullptr) {
+ *hevcName = mHevcName;
+ }
} else {
// No encoder available for the requested size.
return false;
@@ -113,9 +116,8 @@
}
sp<AMessage> heicDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
- sp<AMessage> hevcDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC);
- if (hevcDetails == nullptr) {
+ if (!getHevcCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC)) {
if (heicDetails != nullptr) {
ALOGE("%s: Device must support HEVC codec if HEIC codec is available!",
__FUNCTION__);
@@ -123,22 +125,7 @@
}
return OK;
}
-
- // Check CQ mode for HEVC codec
- {
- AString bitrateModes;
- auto hasItem = hevcDetails->findString("feature-bitrate-modes", &bitrateModes);
- if (!hasItem) {
- ALOGE("%s: Failed to query bitrate modes for HEVC codec", __FUNCTION__);
- return BAD_VALUE;
- }
- ALOGV("%s: HEVC codec's feature-bitrate-modes value is %d, %s",
- __FUNCTION__, hasItem, bitrateModes.c_str());
- std::regex pattern("(^|,)CQ($|,)", std::regex_constants::icase);
- if (!std::regex_search(bitrateModes.c_str(), pattern)) {
- return OK;
- }
- }
+ mHasHEVC = true;
// HEIC size range
if (heicDetails != nullptr) {
@@ -152,19 +139,6 @@
mHasHEIC = true;
}
- // HEVC size range
- {
- auto res = getCodecSizeRange(MEDIA_MIMETYPE_VIDEO_HEVC,
- hevcDetails, &mMinSizeHevc, &mMaxSizeHevc, &mHevcFrameRateMaps);
- if (res != OK) {
- ALOGE("%s: Failed to get HEVC codec size range: %s (%d)", __FUNCTION__,
- strerror(-res), res);
- return BAD_VALUE;
- }
-
- mHasHEVC = true;
- }
-
return OK;
}
@@ -290,5 +264,80 @@
return details;
}
+
+bool HeicEncoderInfoManager::getHevcCodecDetails(
+ sp<IMediaCodecList> codecsList, const char* mime) {
+ bool found = false;
+ ssize_t idx = 0;
+ while ((idx = codecsList->findCodecByType(mime, true /*encoder*/, idx)) >= 0) {
+ const sp<MediaCodecInfo> info = codecsList->getCodecInfo(idx++);
+ if (info == nullptr) {
+ ALOGE("%s: Failed to get codec info for %s", __FUNCTION__, mime);
+ break;
+ }
+
+ // Filter out software ones as they may be too slow
+ if (!(info->getAttributes() & MediaCodecInfo::kFlagIsHardwareAccelerated)) {
+ continue;
+ }
+
+ const sp<MediaCodecInfo::Capabilities> caps =
+ info->getCapabilitiesFor(mime);
+ if (caps == nullptr) {
+ ALOGE("%s: [%s] Failed to get capabilities", __FUNCTION__,
+ info->getCodecName());
+ break;
+ }
+ const sp<AMessage> details = caps->getDetails();
+ if (details == nullptr) {
+ ALOGE("%s: [%s] Failed to get details", __FUNCTION__,
+ info->getCodecName());
+ break;
+ }
+
+ // Check CQ mode
+ AString bitrateModes;
+ auto hasItem = details->findString("feature-bitrate-modes", &bitrateModes);
+ if (!hasItem) {
+ ALOGE("%s: [%s] Failed to query bitrate modes", __FUNCTION__,
+ info->getCodecName());
+ break;
+ }
+ ALOGV("%s: [%s] feature-bitrate-modes value is %d, %s",
+ __FUNCTION__, info->getCodecName(), hasItem, bitrateModes.c_str());
+ std::regex pattern("(^|,)CQ($|,)", std::regex_constants::icase);
+ if (!std::regex_search(bitrateModes.c_str(), pattern)) {
+ continue; // move on to next encoder
+ }
+
+ std::pair<int32_t, int32_t> minSizeHevc, maxSizeHevc;
+ FrameRateMaps hevcFrameRateMaps;
+ auto res = getCodecSizeRange(MEDIA_MIMETYPE_VIDEO_HEVC,
+ details, &minSizeHevc, &maxSizeHevc, &hevcFrameRateMaps);
+ if (res != OK) {
+ ALOGE("%s: [%s] Failed to get size range: %s (%d)", __FUNCTION__,
+ info->getCodecName(), strerror(-res), res);
+ break;
+ }
+ if (kGridWidth < minSizeHevc.first
+ || kGridWidth > maxSizeHevc.first
+ || kGridHeight < minSizeHevc.second
+ || kGridHeight > maxSizeHevc.second) {
+ continue; // move on to next encoder
+ }
+
+ // Found: save name, size, frame rate
+ mHevcName = info->getCodecName();
+ mMinSizeHevc = minSizeHevc;
+ mMaxSizeHevc = maxSizeHevc;
+ mHevcFrameRateMaps = hevcFrameRateMaps;
+
+ found = true;
+ break;
+ }
+
+ return found;
+}
+
} //namespace camera3
} // namespace android
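
getHevcCodecDetails() now walks the codec list directly, skipping software encoders and requiring CQ (constant-quality) bitrate mode plus grid-size support before recording the encoder's name for the later CreateByComponentName() call. The CQ check isolated into a standalone snippet (a sketch with illustrative names; the patch applies the same regex to the feature-bitrate-modes string):

    #include <regex>
    #include <string>

    static bool supportsConstantQuality(const std::string &bitrateModes) {
        // "feature-bitrate-modes" is a comma-separated list; match CQ as a whole
        // token, case-insensitively, so "VBR,CQ,CBR" matches but "CQP" does not.
        static const std::regex kPattern("(^|,)CQ($|,)", std::regex_constants::icase);
        return std::regex_search(bitrateModes, kPattern);
    }

    // supportsConstantQuality("VBR,CBR")    -> false
    // supportsConstantQuality("VBR,CQ,CBR") -> true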
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
index fb0b914..58edba2 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
@@ -36,7 +36,7 @@
}
bool isSizeSupported(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall) const;
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) const;
static const auto kGridWidth = 512;
static const auto kGridHeight = 512;
@@ -61,11 +61,13 @@
FrameRateMaps::const_iterator findClosestSize(const FrameRateMaps& maps,
int32_t width, int32_t height) const;
sp<AMessage> getCodecDetails(sp<IMediaCodecList> codecsList, const char* name);
+ bool getHevcCodecDetails(sp<IMediaCodecList> codecsList, const char* mime);
bool mIsInited;
std::pair<int32_t, int32_t> mMinSizeHeic, mMaxSizeHeic;
std::pair<int32_t, int32_t> mMinSizeHevc, mMaxSizeHevc;
bool mHasHEVC, mHasHEIC;
+ AString mHevcName;
FrameRateMaps mHeicFrameRateMaps, mHevcFrameRateMaps;
bool mDisableGrid;