Merge "AudioMixer: Enable 5.1 based for fallback downmix" into udc-dev
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 95610fa..703033b 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -265,14 +265,16 @@
needsUpdate = true;
}
}
- // If not found, set to the highest supported level.
- if (!found) {
+ // If not found or exceeds max level, set to the highest supported level.
#ifdef MPEG4
+ if (!found || me.v.level > LEVEL_MP4V_2) {
me.set().level = LEVEL_MP4V_2;
-#else
- me.set().level = LEVEL_H263_40;
-#endif
}
+#else
+ if (!found || (me.v.level != LEVEL_H263_45 && me.v.level > LEVEL_H263_40)) {
+ me.set().level = LEVEL_H263_40;
+ }
+#endif
return C2R::Ok();
}
@@ -288,18 +290,6 @@
return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}
- ProfileLevelType getProfileLevel_l() const {
-#ifdef MPEG4
- if (mProfileLevel->level == LEVEL_MP4V_0) return SIMPLE_PROFILE_LEVEL0;
- else if (mProfileLevel->level == LEVEL_MP4V_1) return SIMPLE_PROFILE_LEVEL1;
- return SIMPLE_PROFILE_LEVEL2; // level == LEVEL_MP4V_2
-#else
- // library does not export h263 specific levels. No way to map C2 enums to
- // library specific constants. Return max supported level.
- return CORE_PROFILE_LEVEL2;
-#endif
- }
-
private:
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
@@ -416,7 +406,7 @@
mEncParams->encFrameRate[0] = mFrameRate->value + 0.5;
mEncParams->rcType = VBR_1;
mEncParams->vbvDelay = VBV_DELAY;
- mEncParams->profile_level = mProfileLevel;
+ mEncParams->profile_level = CORE_PROFILE_LEVEL2;
mEncParams->packetSize = 32;
mEncParams->rvlcEnable = PV_OFF;
mEncParams->numLayers = 1;
@@ -457,7 +447,6 @@
mSize = mIntf->getSize_l();
mBitrate = mIntf->getBitrate_l();
mFrameRate = mIntf->getFrameRate_l();
- mProfileLevel = mIntf->getProfileLevel_l();
}
c2_status_t err = initEncParams();
if (C2_OK != err) {
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
index e5c8ea6..43461fc 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
@@ -65,7 +65,6 @@
std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
- ProfileLevelType mProfileLevel;
int64_t mNumInputFrames;
MP4EncodingMode mEncodeMode;
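Review note on the encoder change above: the level setter now clamps out-of-range values instead of only handling the "not found" case, and since getProfileLevel_l() is removed the library is always configured with CORE_PROFILE_LEVEL2. A minimal sketch of the clamping rule, assuming the C2Config level constants used in the hunk (the helper name clampEncoderLevel is hypothetical, not part of the patch):

    // Sketch only: mirrors the validation added to the level setter.
    static C2Config::level_t clampEncoderLevel(bool found, C2Config::level_t level) {
    #ifdef MPEG4
        // Anything unknown or above MPEG-4 Simple Profile Level 2 is clamped
        // to the highest level the library supports.
        if (!found || level > LEVEL_MP4V_2) {
            return LEVEL_MP4V_2;
        }
    #else
        // H.263 Level 45 is kept as a special case; anything else unknown or
        // above Level 40 falls back to Level 40.
        if (!found || (level != LEVEL_H263_45 && level > LEVEL_H263_40)) {
            return LEVEL_H263_40;
        }
    #endif
        return level;
    }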
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index d2df4f3..3b29c57 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -147,6 +147,8 @@
mCCodecCallback(callback),
mFrameIndex(0u),
mFirstValidFrameIndex(0u),
+ mIsSurfaceToDisplay(false),
+ mHasPresentFenceTimes(false),
mMetaMode(MODE_NONE),
mInputMetEos(false),
mSendEncryptedInfoBuffer(false) {
@@ -988,20 +990,36 @@
int64_t mediaTimeUs = 0;
(void)buffer->meta()->findInt64("timeUs", &mediaTimeUs);
- trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
- processRenderedFrames(qbo.frameTimestamps);
+ if (mIsSurfaceToDisplay) {
+ trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
+ processRenderedFrames(qbo.frameTimestamps);
+ } else {
+ // When the surface is an intermediate surface, onFrameRendered is triggered immediately
+ // when the frame is queued to the non-display surface
+ mCCodecCallback->onOutputFramesRendered(mediaTimeUs, timestampNs);
+ }
return OK;
}
void CCodecBufferChannel::initializeFrameTrackingFor(ANativeWindow * window) {
+ mTrackedFrames.clear();
+
+ int isSurfaceToDisplay = 0;
+ window->query(window, NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &isSurfaceToDisplay);
+ mIsSurfaceToDisplay = isSurfaceToDisplay == 1;
+ // No frame tracking is needed if we're not sending frames to the display
+ if (!mIsSurfaceToDisplay) {
+ // Return early so we don't call into SurfaceFlinger (requiring permissions)
+ return;
+ }
+
int hasPresentFenceTimes = 0;
window->query(window, NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT, &hasPresentFenceTimes);
mHasPresentFenceTimes = hasPresentFenceTimes == 1;
if (!mHasPresentFenceTimes) {
ALOGI("Using latch times for frame rendered signals - present fences not supported");
}
- mTrackedFrames.clear();
}
void CCodecBufferChannel::trackReleasedFrame(const IGraphicBufferProducer::QueueBufferOutput& qbo,
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 20dca2b..0d25d6d 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -331,6 +331,7 @@
sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
std::deque<TrackedFrame> mTrackedFrames;
+ bool mIsSurfaceToDisplay;
bool mHasPresentFenceTimes;
struct OutputSurface {
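Review note on the CCodecBufferChannel change: mIsSurfaceToDisplay is derived from the output window itself, so frame tracking (and the SurfaceFlinger-backed present-fence probe) is only set up when queued buffers actually reach the compositor; for intermediate surfaces onFrameRendered is signalled at queue time instead. A standalone sketch of the detection, assuming only the public ANativeWindow query API:

    #include <system/window.h>  // ANativeWindow, NATIVE_WINDOW_* query keys

    // Sketch: true when queued buffers go to the system compositor, i.e. the
    // surface is display-backed rather than an intermediate consumer.
    static bool surfaceQueuesToDisplay(ANativeWindow* window) {
        int queuesToComposer = 0;
        // query() returns non-zero on failure; treat that as "not a display surface".
        if (window->query(window, NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER,
                          &queuesToComposer) != 0) {
            return false;
        }
        return queuesToComposer == 1;
    }

Only when this is true does initializeFrameTrackingFor() go on to probe NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT.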
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index c9287e5..09c5d64 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -296,6 +296,7 @@
void notifyClientCreated();
void notifyClientStarted(ClientConfigParcel& clientConfig);
void notifyClientStopped(ClientConfigParcel& clientConfig);
+ void notifyClientConfigChanged(ClientConfigParcel& clientConfig);
inline void setCodecName(const char* name) {
mCodecName = name;
@@ -483,7 +484,7 @@
}
void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
- ClientConfigParcel& clientConfig) {
+ ClientConfigParcel& clientConfig) {
clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
clientConfig.clientInfo.id = getId(mClient);
@@ -492,7 +493,7 @@
}
void MediaCodec::ResourceManagerServiceProxy::notifyClientStopped(
- ClientConfigParcel& clientConfig) {
+ ClientConfigParcel& clientConfig) {
clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
clientConfig.clientInfo.id = getId(mClient);
@@ -500,6 +501,15 @@
mService->notifyClientStopped(clientConfig);
}
+void MediaCodec::ResourceManagerServiceProxy::notifyClientConfigChanged(
+ ClientConfigParcel& clientConfig) {
+ clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
+ clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
+ clientConfig.clientInfo.id = getId(mClient);
+ clientConfig.clientInfo.name = mCodecName;
+ mService->notifyClientConfigChanged(clientConfig);
+}
+
////////////////////////////////////////////////////////////////////////////////
MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}
@@ -5241,15 +5251,28 @@
postActivityNotificationIfPossible();
}
- // Notify mCrypto of video resolution changes
- if (mCrypto != NULL) {
- int32_t left, top, right, bottom, width, height;
- if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
- mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
- } else if (mOutputFormat->findInt32("width", &width)
- && mOutputFormat->findInt32("height", &height)) {
- mCrypto->notifyResolution(width, height);
+ // Update the width and the height.
+ int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
+ bool resolutionChanged = false;
+ if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ mWidth = right - left + 1;
+ mHeight = bottom - top + 1;
+ resolutionChanged = true;
+ } else if (mOutputFormat->findInt32("width", &width) &&
+ mOutputFormat->findInt32("height", &height)) {
+ mWidth = width;
+ mHeight = height;
+ resolutionChanged = true;
+ }
+
+ // Notify mCrypto and the RM of video resolution changes
+ if (resolutionChanged) {
+ if (mCrypto != NULL) {
+ mCrypto->notifyResolution(mWidth, mHeight);
}
+ ClientConfigParcel clientConfig;
+ initClientConfigParcel(clientConfig);
+ mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
}
updateHdrMetrics(false /* isConfig */);
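Review note on the MediaCodec change: the crop rect takes precedence over the plain width/height keys, and because the crop rect is inclusive the dimensions are (right - left + 1) by (bottom - top + 1); the result is cached in mWidth/mHeight and forwarded both to mCrypto and, via notifyClientConfigChanged, to the resource manager. A self-contained sketch of the resolution extraction (helper name hypothetical):

    #include <media/stagefright/foundation/AMessage.h>

    using android::AMessage;
    using android::sp;

    // Sketch: resolve the effective video resolution from an output format,
    // with the same precedence as the hunk above (crop rect wins).
    static bool getOutputResolution(const sp<AMessage>& format,
                                    int32_t* outWidth, int32_t* outHeight) {
        int32_t left, top, right, bottom;
        if (format->findRect("crop", &left, &top, &right, &bottom)) {
            *outWidth = right - left + 1;    // inclusive rect
            *outHeight = bottom - top + 1;
            return true;
        }
        int32_t width, height;
        if (format->findInt32("width", &width) && format->findInt32("height", &height)) {
            *outWidth = width;
            *outHeight = height;
            return true;
        }
        return false;  // resolution unknown; nothing to notify
    }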
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 077fa26..323ce0e 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -425,6 +425,8 @@
bool btNrecIsOff() const { return mBtNrecIsOff.load(); }
+ void lock() ACQUIRE(mLock) { mLock.lock(); }
+ void unlock() RELEASE(mLock) { mLock.unlock(); }
private:
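Review note on the AudioFlinger.h change: the lock()/unlock() wrappers let other classes (here PatchPanel) release AudioFlinger::mLock across a blocking call while keeping clang's thread-safety analysis informed through the ACQUIRE/RELEASE annotations. A generic sketch of the same pattern, assuming android::Mutex and the android-base thread_annotations.h macros:

    #include <utils/Mutex.h>
    #include <android-base/thread_annotations.h>

    class Owner {
    public:
        // Annotated wrappers: the analysis treats mLock as held between
        // lock() and unlock(), even though callers never touch the mutex directly.
        void lock() ACQUIRE(mLock) { mLock.lock(); }
        void unlock() RELEASE(mLock) { mLock.unlock(); }

        // Must be called with mLock held; checked at compile time.
        void touch() REQUIRES(mLock) { ++mCount; }

    private:
        mutable android::Mutex mLock;
        int mCount GUARDED_BY(mLock) = 0;
    };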
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 3b428bb..d25d46f 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -135,6 +135,10 @@
status_t AudioFlinger::PatchPanel::createAudioPatch(const struct audio_patch *patch,
audio_patch_handle_t *handle,
bool endpointPatch)
+ //unlocks AudioFlinger::mLock when calling ThreadBase::sendCreateAudioPatchConfigEvent
+ //to avoid deadlocks if the thread loop needs to acquire AudioFlinger::mLock
+ //before processing the create patch request.
+ NO_THREAD_SAFETY_ANALYSIS
{
if (handle == NULL || patch == NULL) {
return BAD_VALUE;
@@ -245,7 +249,6 @@
status = INVALID_OPERATION;
goto exit;
}
-
sp<ThreadBase> thread =
mAudioFlinger.checkPlaybackThread_l(patch->sources[1].ext.mix.handle);
if (thread == 0) {
@@ -356,11 +359,12 @@
goto exit;
}
}
+ mAudioFlinger.unlock();
status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
+ mAudioFlinger.lock();
if (status == NO_ERROR) {
newPatch.setThread(thread);
}
-
// remove stale audio patch with same input as sink if any
for (auto& iter : mPatches) {
if (iter.second.mAudioPatch.sinks[0].ext.mix.handle == thread->id()) {
@@ -422,7 +426,9 @@
mAudioFlinger.updateOutDevicesForRecordThreads_l(devices);
}
+ mAudioFlinger.unlock();
status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
+ mAudioFlinger.lock();
if (status == NO_ERROR) {
newPatch.setThread(thread);
}
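Review note on the PatchPanel change: AudioFlinger::mLock is dropped around sendCreateAudioPatchConfigEvent() because the playback/record thread loop may itself need mLock before it can service the config event, and holding it here would deadlock; NO_THREAD_SAFETY_ANALYSIS is required since the analysis cannot model a lock released and re-acquired mid-function. The pattern in isolation, as a sketch (the Owner type and helper name are hypothetical; the thread call is the one used above):

    // Sketch of the lock-drop pattern used in createAudioPatch(). The caller
    // holds owner's lock on entry and exit, but releases it across the blocking
    // request so the worker thread can take the lock while handling it.
    status_t sendPatchRequestLocked(Owner& owner, const sp<ThreadBase>& thread,
                                    const struct audio_patch* patch,
                                    audio_patch_handle_t* halHandle)
            NO_THREAD_SAFETY_ANALYSIS {  // lock released/re-acquired mid-function
        owner.unlock();
        status_t status = thread->sendCreateAudioPatchConfigEvent(patch, halHandle);
        owner.lock();
        return status;
    }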
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 76c9ad8..de0abf0 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -4976,7 +4976,7 @@
const NBAIO_Format offersFast[1] = {format};
size_t numCounterOffersFast = 0;
#if !LOG_NDEBUG
- ssize_t index =
+ index =
#else
(void)
#endif
@@ -7874,15 +7874,15 @@
Pipe *pipe = new Pipe(pipeFramesP2, format, pipeBuffer);
const NBAIO_Format offersFast[1] = {format};
size_t numCounterOffersFast = 0;
- [[maybe_unused]] ssize_t index = pipe->negotiate(offersFast, std::size(offersFast),
+ [[maybe_unused]] ssize_t index2 = pipe->negotiate(offersFast, std::size(offersFast),
nullptr /* counterOffers */, numCounterOffersFast);
- ALOG_ASSERT(index == 0);
+ ALOG_ASSERT(index2 == 0);
mPipeSink = pipe;
PipeReader *pipeReader = new PipeReader(*pipe);
numCounterOffersFast = 0;
- index = pipeReader->negotiate(offersFast, std::size(offersFast),
+ index2 = pipeReader->negotiate(offersFast, std::size(offersFast),
nullptr /* counterOffers */, numCounterOffersFast);
- ALOG_ASSERT(index == 0);
+ ALOG_ASSERT(index2 == 0);
mPipeSource = pipeReader;
mPipeFramesP2 = pipeFramesP2;
mPipeMemory = pipeMemory;
@@ -9297,7 +9297,7 @@
if (stepCount == 0) {
return;
}
- ALOG_ASSERT(stepCount <= mRsmpInUnrel);
+ ALOG_ASSERT(stepCount <= (int32_t)mRsmpInUnrel);
mRsmpInUnrel -= stepCount;
mRsmpInFront = audio_utils::safe_add_overflow(mRsmpInFront, stepCount);
buffer->raw = NULL;
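Review note on the Threads.cpp changes: the first two hunks stop reusing the name index for a second local, and the added (int32_t) cast keeps the ALOG_ASSERT comparison signed: without it, stepCount is implicitly converted to the unsigned type of mRsmpInUnrel, so a negative value would wrap instead of comparing naturally (and -Wsign-compare warns). A tiny self-contained illustration of the pitfall, assuming mRsmpInUnrel is an unsigned count as the cast implies:

    #include <cassert>
    #include <cstdint>

    int main() {
        int32_t stepCount = -1;        // e.g. a bogus negative frame count
        uint32_t unreleased = 100;     // stand-in for mRsmpInUnrel

        // Mixed signed/unsigned: stepCount converts to 4294967295u, so the
        // "obvious" relation does not hold.
        bool mixed = (stepCount <= unreleased);            // false
        // Casting the unsigned side, as the patch does, compares as signed.
        bool casted = (stepCount <= (int32_t)unreleased);  // true

        assert(!mixed && casted);
        return 0;
    }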
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index e818759..a45365a 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -144,7 +144,7 @@
"libhidlbase",
"libimage_io",
"libjpeg",
- "libjpegrecoverymap",
+ "libultrahdr",
"libmedia_codeclist",
"libmedia_omx",
"libmemunreachable",
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index 8a65a67..8223371 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -26,7 +26,7 @@
#include "common/CameraProviderManager.h"
#include <gui/Surface.h>
-#include <jpegrecoverymap/jpegr.h>
+#include <ultrahdr/jpegr.h>
#include <utils/ExifUtils.h>
#include <utils/Log.h>
#include "utils/SessionConfigurationUtils.h"
@@ -292,13 +292,13 @@
}
size_t actualJpegRSize = 0;
- jpegrecoverymap::jpegr_uncompressed_struct p010;
- jpegrecoverymap::jpegr_compressed_struct jpegR;
- jpegrecoverymap::JpegR jpegREncoder;
+ ultrahdr::jpegr_uncompressed_struct p010;
+ ultrahdr::jpegr_compressed_struct jpegR;
+ ultrahdr::JpegR jpegREncoder;
p010.height = inputFrame.p010Buffer.height;
p010.width = inputFrame.p010Buffer.width;
- p010.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_BT2100;
+ p010.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
p010.data = inputFrame.p010Buffer.data;
p010.chroma_data = inputFrame.p010Buffer.dataCb;
// Strides are expected to be in pixels not bytes
@@ -308,18 +308,18 @@
jpegR.data = dstBuffer;
jpegR.maxLength = maxJpegRBufferSize;
- jpegrecoverymap::jpegr_transfer_function transferFunction;
+ ultrahdr::ultrahdr_transfer_function transferFunction;
switch (mP010DynamicRange) {
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
- transferFunction = jpegrecoverymap::jpegr_transfer_function::JPEGR_TF_PQ;
+ transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_PQ;
break;
default:
- transferFunction = jpegrecoverymap::jpegr_transfer_function::JPEGR_TF_HLG;
+ transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;
}
if (mSupportInternalJpeg) {
- jpegrecoverymap::jpegr_compressed_struct jpeg;
+ ultrahdr::jpegr_compressed_struct jpeg;
jpeg.data = inputFrame.jpegBuffer.data;
jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
@@ -331,9 +331,9 @@
}
if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
- jpeg.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_P3;
+ jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_P3;
} else {
- jpeg.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_BT709;
+ jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT709;
}
res = jpegREncoder.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR);
@@ -351,7 +351,7 @@
ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
}
- jpegrecoverymap::jpegr_exif_struct exif;
+ ultrahdr::jpegr_exif_struct exif;
exif.data = reinterpret_cast<void*>(const_cast<uint8_t*>(exifBuffer));
exif.length = exifBufferSize;
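Review note on the camera changes: these two files only track the library rename from libjpegrecoverymap to libultrahdr; the namespace becomes ultrahdr and the color-gamut/transfer-function enums move to the ultrahdr_* names, while the jpegr_*_struct types and encodeJPEGR() keep their shapes. A condensed sketch of the updated encode call, using only the types and the overload that appear in the hunks, and assuming the same namespace context as the file above (buffer setup and error handling elided):

    #include <ultrahdr/jpegr.h>

    // Sketch: encode a P010 frame plus a compressed SDR JPEG into a JPEG/R
    // bitstream with the renamed ultrahdr types; returns the library status code.
    static auto encodeJpegR(ultrahdr::jpegr_uncompressed_struct* p010,
                            ultrahdr::jpegr_compressed_struct* sdrJpeg,
                            ultrahdr::jpegr_compressed_struct* out,
                            bool isPq) {
        ultrahdr::JpegR encoder;
        ultrahdr::ultrahdr_transfer_function tf = isPq
                ? ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_PQ
                : ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;
        // Same overload JpegRCompositeStream uses: P010 input + SDR JPEG input.
        return encoder.encodeJPEGR(p010, sdrJpeg, tf, out);
    }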
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index 8d591df..f8cdb80 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -175,6 +175,22 @@
}
}
+void ResourceManagerMetrics::notifyClientConfigChanged(const ClientConfigParcel& clientConfig) {
+ std::scoped_lock lock(mLock);
+ ClientConfigMap::iterator entry = mClientConfigMap.find(clientConfig.clientInfo.id);
+ if (entry != mClientConfigMap.end() &&
+ (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
+ clientConfig.codecType == MediaResourceSubType::kImageCodec)) {
+ int pid = clientConfig.clientInfo.pid;
+ // Update the pixel count for this process
+ updatePixelCount(pid, clientConfig.width * (long)clientConfig.height,
+ entry->second.width * (long)entry->second.height);
+ // Update the resolution in the record.
+ entry->second.width = clientConfig.width;
+ entry->second.height = clientConfig.height;
+ }
+}
+
void ResourceManagerMetrics::notifyClientStarted(const ClientConfigParcel& clientConfig) {
std::scoped_lock lock(mLock);
int pid = clientConfig.clientInfo.pid;
@@ -197,9 +213,15 @@
}
// System concurrent codec usage
- int systemConcurrentCodecCount = mConcurrentCodecsMap[codecBucket];
+ int systemConcurrentCodecs = mConcurrentCodecsMap[codecBucket];
// Process/Application concurrent codec usage for this type of codec
- int appConcurrentCodecCount = mProcessConcurrentCodecsMap[pid].mCurrent[codecBucket];
+ const ConcurrentCodecs& concurrentCodecs = mProcessConcurrentCodecsMap[pid];
+ int appConcurrentCodecs = concurrentCodecs.mCurrent[codecBucket];
+ int hwVideoCodecs = concurrentCodecs.mHWVideoCodecs;
+ int swVideoCodecs = concurrentCodecs.mSWVideoCodecs;
+ int videoCodecs = concurrentCodecs.mVideoCodecs;
+ int audioCodecs = concurrentCodecs.mAudioCodecs;
+ int imageCodecs = concurrentCodecs.mImageCodecs;
// Process/Application's current pixel count.
long pixelCount = 0;
std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
@@ -216,9 +238,14 @@
clientConfig.isEncoder,
clientConfig.isHardware,
clientConfig.width, clientConfig.height,
- systemConcurrentCodecCount,
- appConcurrentCodecCount,
- pixelCount);
+ systemConcurrentCodecs,
+ appConcurrentCodecs,
+ pixelCount,
+ hwVideoCodecs,
+ swVideoCodecs,
+ videoCodecs,
+ audioCodecs,
+ imageCodecs);
ALOGV("%s: Pushed MEDIA_CODEC_STARTED atom: "
"Process[pid(%d): uid(%d)] "
@@ -226,6 +253,7 @@
"Timestamp: %jd "
"Resolution: %d x %d "
"ConcurrentCodec[%d]={System: %d App: %d} "
+ "AppConcurrentCodecs{Video: %d(HW[%d] SW[%d]) Audio: %d Image: %d} "
"result: %d",
__func__,
pid, clientConfig.clientInfo.uid,
@@ -236,7 +264,8 @@
clientConfig.isEncoder? "encoder" : "decoder",
clientConfig.timeStamp,
clientConfig.width, clientConfig.height,
- codecBucket, systemConcurrentCodecCount, appConcurrentCodecCount,
+ codecBucket, systemConcurrentCodecs, appConcurrentCodecs,
+ videoCodecs, hwVideoCodecs, swVideoCodecs, audioCodecs, imageCodecs,
result);
}
@@ -256,12 +285,12 @@
}
// System concurrent codec usage
- int systemConcurrentCodecCount = mConcurrentCodecsMap[codecBucket];
+ int systemConcurrentCodecs = mConcurrentCodecsMap[codecBucket];
// Process/Application concurrent codec usage for this type of codec
- int appConcurrentCodecCount = 0;
+ int appConcurrentCodecs = 0;
std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
if (found != mProcessConcurrentCodecsMap.end()) {
- appConcurrentCodecCount = found->second.mCurrent[codecBucket];
+ appConcurrentCodecs = found->second.mCurrent[codecBucket];
}
// Process/Application's current pixel count.
long pixelCount = 0;
@@ -292,8 +321,8 @@
clientConfig.isEncoder,
clientConfig.isHardware,
clientConfig.width, clientConfig.height,
- systemConcurrentCodecCount,
- appConcurrentCodecCount,
+ systemConcurrentCodecs,
+ appConcurrentCodecs,
pixelCount,
usageTime);
ALOGV("%s: Pushed MEDIA_CODEC_STOPPED atom: "
@@ -312,7 +341,7 @@
clientConfig.isEncoder? "encoder" : "decoder",
clientConfig.timeStamp, usageTime,
clientConfig.width, clientConfig.height,
- codecBucket, systemConcurrentCodecCount, appConcurrentCodecCount,
+ codecBucket, systemConcurrentCodecs, appConcurrentCodecs,
result);
}
@@ -484,7 +513,8 @@
ConcurrentCodecs codecs;
codecs.mCurrent[codecBucket] = 1;
codecs.mPeak[codecBucket] = 1;
- mProcessConcurrentCodecsMap.emplace(pid, codecs);
+ auto added = mProcessConcurrentCodecsMap.emplace(pid, codecs);
+ found = added.first;
} else {
found->second.mCurrent[codecBucket]++;
// Check if it's the peak count for this slot.
@@ -492,6 +522,34 @@
found->second.mPeak[codecBucket] = found->second.mCurrent[codecBucket];
}
}
+
+ switch (codecBucket) {
+ case HwVideoEncoder:
+ case HwVideoDecoder:
+ case SwVideoEncoder:
+ case SwVideoDecoder:
+ if (codecBucket == HwVideoEncoder || codecBucket == HwVideoDecoder) {
+ found->second.mHWVideoCodecs++;
+ } else {
+ found->second.mSWVideoCodecs++;
+ }
+ found->second.mVideoCodecs++;
+ break;
+ case HwAudioEncoder:
+ case HwAudioDecoder:
+ case SwAudioEncoder:
+ case SwAudioDecoder:
+ found->second.mAudioCodecs++;
+ break;
+ case HwImageEncoder:
+ case HwImageDecoder:
+ case SwImageEncoder:
+ case SwImageDecoder:
+ found->second.mImageCodecs++;
+ break;
+ default:
+ break;
+ }
}
void ResourceManagerMetrics::decreaseConcurrentCodecs(int32_t pid,
@@ -507,6 +565,34 @@
if (found->second.mCurrent[codecBucket] > 0) {
found->second.mCurrent[codecBucket]--;
}
+
+ switch (codecBucket) {
+ case HwVideoEncoder:
+ case HwVideoDecoder:
+ case SwVideoEncoder:
+ case SwVideoDecoder:
+ if (codecBucket == HwVideoEncoder || codecBucket == HwVideoDecoder) {
+ found->second.mHWVideoCodecs--;
+ } else {
+ found->second.mSWVideoCodecs--;
+ }
+ found->second.mVideoCodecs--;
+ break;
+ case HwAudioEncoder:
+ case HwAudioDecoder:
+ case SwAudioEncoder:
+ case SwAudioDecoder:
+ found->second.mAudioCodecs--;
+ break;
+ case HwImageEncoder:
+ case HwImageDecoder:
+ case SwImageEncoder:
+ case SwImageDecoder:
+ found->second.mImageCodecs--;
+ break;
+ default:
+ break;
+ }
}
}
@@ -528,6 +614,13 @@
}
}
+void ResourceManagerMetrics::updatePixelCount(int32_t pid, long newPixels, long lastPixels) {
+ // Since there is change in resolution, decrease it by last pixels and
+ // increase it by new pixels.
+ decreasePixelCount(pid, lastPixels);
+ increasePixelCount(pid, newPixels);
+}
+
void ResourceManagerMetrics::decreasePixelCount(int32_t pid, long pixels) {
// Now update the current pixel usage for this (pid) process.
std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
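Review note on the metrics change: notifyClientConfigChanged() only acts for video/image codecs and adjusts the per-process pixel accounting by removing the previous resolution's contribution and adding the new one, which is what the new updatePixelCount() helper expresses. A minimal stand-alone sketch of that accounting (the map is a simplified stand-in for mProcessPixelsMap; names hypothetical):

    #include <cstdint>
    #include <map>

    // Simplified per-process "current pixels" accounting on a resolution change.
    static std::map<int32_t, long> sCurrentPixels;

    static void updatePixelCount(int32_t pid, long newPixels, long lastPixels) {
        long& current = sCurrentPixels[pid];
        current -= lastPixels;              // drop the old resolution's share
        if (current < 0) current = 0;       // guard against underflow
        current += newPixels;               // add the new resolution's share
    }

    // e.g. a 1920x1080 -> 3840x2160 reconfiguration:
    //   updatePixelCount(pid, 3840L * 2160, 1920L * 1080);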
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index b7810e5..3124aa2 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -77,6 +77,16 @@
struct ConcurrentCodecs {
ConcurrentCodecsMap mCurrent;
ConcurrentCodecsMap mPeak;
+ // concurrent HW Video codecs.
+ int mHWVideoCodecs = 0;
+ // concurrent SW Video codecs.
+ int mSWVideoCodecs = 0;
+ // concurrent Video codecs.
+ int mVideoCodecs = 0;
+ // concurrent Audio codecs.
+ int mAudioCodecs = 0;
+ // concurrent Image codecs.
+ int mImageCodecs = 0;
};
// Current and Peak pixel count for a process.
@@ -119,6 +129,9 @@
// To be called when a client is stopped.
void notifyClientStopped(const ClientConfigParcel& clientConfig);
+ // To be called when a client's configuration has changed.
+ void notifyClientConfigChanged(const ClientConfigParcel& clientConfig);
+
// To be called after a reclaim event.
void pushReclaimAtom(const ClientInfoParcel& clientInfo,
const std::vector<int>& priorities,
@@ -143,8 +156,9 @@
void increaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
void decreaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
- // To increase/decrease the concurrent pixels usage for a process.
+ // To increase/update/decrease the concurrent pixels usage for a process.
void increasePixelCount(int32_t pid, long pixels);
+ void updatePixelCount(int32_t pid, long newPixels, long lastPixels);
void decreasePixelCount(int32_t pid, long pixels);
// Issued when the process/application with given pid/uid is terminated.
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 6822b06..53cc431 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -1208,6 +1208,11 @@
return Status::ok();
}
+Status ResourceManagerService::notifyClientConfigChanged(const ClientConfigParcel& clientConfig) {
+ mResourceManagerMetrics->notifyClientConfigChanged(clientConfig);
+ return Status::ok();
+}
+
long ResourceManagerService::getPeakConcurrentPixelCount(int pid) const {
return mResourceManagerMetrics->getPeakConcurrentPixelCount(pid);
}
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index b9756ae..1519e0e 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -127,6 +127,8 @@
Status notifyClientStopped(const ClientConfigParcel& clientConfig) override;
+ Status notifyClientConfigChanged(const ClientConfigParcel& clientConfig) override;
+
private:
friend class ResourceManagerServiceTest;
friend class DeathNotifier;
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
index fcade38..5071fa3 100644
--- a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
@@ -156,4 +156,16 @@
* @param clientConfig Configuration information of the client.
*/
void notifyClientStopped(in ClientConfigParcel clientConfig);
+
+ /**
+ * Notify that the client's configuration has changed.
+ *
+ * This call is made to collect the (concurrent) metrics about the
+ * resources associated with the Codec (and also DRM sessions).
+ * This is called after notifyClientStarted (and before notifyClientStopped)
+ * to report changes to any of the configurations associated with the client.
+ *
+ * @param clientConfig Configuration information of the client.
+ */
+ void notifyClientConfigChanged(in ClientConfigParcel clientConfig);
}
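Review note on the AIDL change: MediaCodec reaches this method through ResourceManagerServiceProxy (shown earlier), which stamps the client identity before forwarding, and the service simply hands the parcel to ResourceManagerMetrics. A hedged sketch of a direct caller, assuming the generated NDK-backend bindings and the ClientConfigParcel fields used elsewhere in this change:

    // Sketch: reporting a resolution change straight through the AIDL interface.
    #include <aidl/android/media/IResourceManagerService.h>

    using ::aidl::android::media::ClientConfigParcel;
    using ::aidl::android::media::IResourceManagerService;

    void reportResolutionChange(const std::shared_ptr<IResourceManagerService>& service,
                                int64_t clientId, int32_t pid, int32_t uid,
                                int32_t width, int32_t height) {
        ClientConfigParcel config;
        config.clientInfo.id = clientId;
        config.clientInfo.pid = pid;
        config.clientInfo.uid = uid;
        config.width = width;
        config.height = height;
        // Valid only between notifyClientStarted() and notifyClientStopped().
        (void)service->notifyClientConfigChanged(config);
    }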