/*
**
** Copyright 2023, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/

//#define LOG_NDEBUG 0
#define LOG_TAG "ResourceManagerMetrics"
#include <utils/Log.h>
#include <mediautils/ProcessInfo.h>

#include <stats_media_metrics.h>

#include "UidObserver.h"
#include "ResourceManagerMetrics.h"

#include <cmath>
#include <sstream>

namespace android {

using stats::media_metrics::stats_write;
using stats::media_metrics::MEDIA_CODEC_STARTED;
using stats::media_metrics::MEDIA_CODEC_STOPPED;
// Disabling this for now.
#ifdef ENABLE_MEDIA_CODEC_CONCURRENT_USAGE_REPORTED
using stats::media_metrics::MEDIA_CODEC_CONCURRENT_USAGE_REPORTED;
#endif
using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED;
using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
using stats::media_metrics::\
        MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
using stats::media_metrics::\
        MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_AUDIO;
using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_VIDEO;
using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_IMAGE;

// Map MediaResourceSubType to stats::media_metrics::CodecType
inline int32_t getMetricsCodecType(MediaResourceSubType codecType) {
    switch (codecType) {
        case MediaResourceSubType::kHwAudioCodec:
        case MediaResourceSubType::kSwAudioCodec:
            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_AUDIO;
        case MediaResourceSubType::kHwVideoCodec:
        case MediaResourceSubType::kSwVideoCodec:
            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_VIDEO;
        case MediaResourceSubType::kHwImageCodec:
        case MediaResourceSubType::kSwImageCodec:
            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_IMAGE;
        case MediaResourceSubType::kUnspecifiedSubType:
            return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
    }
    return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
}

inline const char* getCodecType(MediaResourceSubType codecType) {
    switch (codecType) {
        case MediaResourceSubType::kHwAudioCodec: return "Hw Audio";
        case MediaResourceSubType::kSwAudioCodec: return "Sw Audio";
        case MediaResourceSubType::kHwVideoCodec: return "Hw Video";
        case MediaResourceSubType::kSwVideoCodec: return "Sw Video";
        case MediaResourceSubType::kHwImageCodec: return "Hw Image";
        case MediaResourceSubType::kSwImageCodec: return "Sw Image";
        case MediaResourceSubType::kUnspecifiedSubType:
        default:
            return "Unspecified";
    }
    return "Unspecified";
}

inline bool isHardwareCodec(MediaResourceSubType codecType) {
    return (codecType == MediaResourceSubType::kHwAudioCodec ||
            codecType == MediaResourceSubType::kHwVideoCodec ||
            codecType == MediaResourceSubType::kHwImageCodec);
}

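// Map (isEncoder, codec subtype) to the CodecBucket slot used by the
// per-process and system-wide concurrent codec counters.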
static CodecBucket getCodecBucket(bool isEncoder, MediaResourceSubType codecType) {
    switch (codecType) {
        case MediaResourceSubType::kHwAudioCodec:
            return isEncoder? HwAudioEncoder : HwAudioDecoder;
        case MediaResourceSubType::kSwAudioCodec:
            return isEncoder? SwAudioEncoder : SwAudioDecoder;
        case MediaResourceSubType::kHwVideoCodec:
            return isEncoder? HwVideoEncoder : HwVideoDecoder;
        case MediaResourceSubType::kSwVideoCodec:
            return isEncoder? SwVideoEncoder : SwVideoDecoder;
        case MediaResourceSubType::kHwImageCodec:
            return isEncoder? HwImageEncoder : HwImageDecoder;
        case MediaResourceSubType::kSwImageCodec:
            return isEncoder? SwImageEncoder : SwImageDecoder;
        case MediaResourceSubType::kUnspecifiedSubType:
        default:
            return CodecBucketUnspecified;
    }

    return CodecBucketUnspecified;
}

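// Build a "<key><value>" log fragment for the two counters, including each
// key only when its value is positive, so empty buckets stay out of the logs.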
static std::string getLogMessage(const std::string& firstKey, const long& firstValue,
                                 const std::string& secondKey, const long& secondValue) {

    std::stringstream logMsg;
    if (firstValue > 0) {
        logMsg << firstKey << firstValue;
    }
    if (secondValue > 0) {
        logMsg << secondKey << secondValue;
    }
    return logMsg.str();
}

ResourceManagerMetrics::ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo) {
    // Create a process termination watcher, with a polling frequency of 5 seconds.
    mUidObserver = sp<UidObserver>::make(processInfo,
            [this] (int32_t pid, uid_t uid) {
                onProcessTerminated(pid, uid);
            });
    mUidObserver->start();
}

ResourceManagerMetrics::~ResourceManagerMetrics() {
    mUidObserver->stop();
}

void ResourceManagerMetrics::addPid(int pid, uid_t uid) {
    if (uid != 0) {
        std::scoped_lock lock(mLock);
        mUidObserver->add(pid, uid);
    }
}

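// Tracks a newly created codec client by bumping the concurrent instance
// count for its codec name.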
void ResourceManagerMetrics::notifyClientCreated(const ClientInfoParcel& clientInfo) {
    std::scoped_lock lock(mLock);
    // Update the resource instance count.
    std::map<std::string, int>::iterator found = mConcurrentResourceCountMap.find(clientInfo.name);
    if (found == mConcurrentResourceCountMap.end()) {
        mConcurrentResourceCountMap[clientInfo.name] = 1;
    } else {
        found->second++;
    }
}

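// Handles a client release. If the client is still marked as started, a stop
// notification is synthesized first so the usage metrics stay balanced, and
// then the concurrent instance count for the codec name is decremented.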
void ResourceManagerMetrics::notifyClientReleased(const ClientInfoParcel& clientInfo) {
    bool stopCalled = true;
    ClientConfigParcel clientConfig;
    {
        std::scoped_lock lock(mLock);
        ClientConfigMap::iterator found = mClientConfigMap.find(clientInfo.id);
        if (found != mClientConfigMap.end()) {
            // Release is called without Stop!
            stopCalled = false;
            clientConfig = found->second;
            // Update the timestamp for stopping the codec.
            clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
        }
    }
    if (!stopCalled) {
        // call Stop to update the metrics.
        notifyClientStopped(clientConfig);
    }
    {
        std::scoped_lock lock(mLock);
        // Update the resource instance count also.
        std::map<std::string, int>::iterator found =
            mConcurrentResourceCountMap.find(clientInfo.name);
        if (found != mConcurrentResourceCountMap.end()) {
            if (found->second > 0) {
                found->second--;
            }
        }
    }
}

void ResourceManagerMetrics::notifyClientConfigChanged(const ClientConfigParcel& clientConfig) {
    std::scoped_lock lock(mLock);
    ClientConfigMap::iterator entry = mClientConfigMap.find(clientConfig.clientInfo.id);
    if (entry != mClientConfigMap.end() &&
        (clientConfig.codecType == MediaResourceSubType::kHwVideoCodec ||
         clientConfig.codecType == MediaResourceSubType::kSwVideoCodec ||
         clientConfig.codecType == MediaResourceSubType::kHwImageCodec ||
         clientConfig.codecType == MediaResourceSubType::kSwImageCodec)) {
        int pid = clientConfig.clientInfo.pid;
        // Update the pixel count for this process
        updatePixelCount(pid, clientConfig.width * (long)clientConfig.height,
                         entry->second.width * (long)entry->second.height);
        // Update the resolution in the record.
        entry->second.width = clientConfig.width;
        entry->second.height = clientConfig.height;
    }
}

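// Records a codec start: registers the process with the UID observer, caches
// the client config, updates the concurrent codec and pixel counters, and
// pushes the MEDIA_CODEC_STARTED atom.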
void ResourceManagerMetrics::notifyClientStarted(const ClientConfigParcel& clientConfig) {
    std::scoped_lock lock(mLock);
    int pid = clientConfig.clientInfo.pid;
    // We need to observe this process.
    mUidObserver->add(pid, clientConfig.clientInfo.uid);

    // Update the client config for this client.
    mClientConfigMap[clientConfig.clientInfo.id] = clientConfig;

    // Update the concurrent codec count for this process.
    CodecBucket codecBucket = getCodecBucket(clientConfig.isEncoder, clientConfig.codecType);
    increaseConcurrentCodecs(pid, codecBucket);

    if (clientConfig.codecType == MediaResourceSubType::kHwVideoCodec ||
        clientConfig.codecType == MediaResourceSubType::kSwVideoCodec ||
        clientConfig.codecType == MediaResourceSubType::kHwImageCodec ||
        clientConfig.codecType == MediaResourceSubType::kSwImageCodec) {
        // Update the pixel count for this process
        increasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
    }

    // System concurrent codec usage
    int systemConcurrentCodecs = mConcurrentCodecsMap[codecBucket];
    // Process/Application concurrent codec usage for this type of codec
    const ConcurrentCodecs& concurrentCodecs = mProcessConcurrentCodecsMap[pid];
    int appConcurrentCodecs = concurrentCodecs.mCurrent[codecBucket];
    int hwVideoCodecs = concurrentCodecs.mHWVideoCodecs;
    int swVideoCodecs = concurrentCodecs.mSWVideoCodecs;
    int videoCodecs = concurrentCodecs.mVideoCodecs;
    int audioCodecs = concurrentCodecs.mAudioCodecs;
    int imageCodecs = concurrentCodecs.mImageCodecs;
    // Process/Application's current pixel count.
    long pixelCount = 0;
    std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
    if (it != mProcessPixelsMap.end()) {
        pixelCount = it->second.mCurrent;
    }

    int result = stats_write(
        MEDIA_CODEC_STARTED,
        clientConfig.clientInfo.uid,
        clientConfig.id,
        clientConfig.clientInfo.name.c_str(),
        getMetricsCodecType(clientConfig.codecType),
        clientConfig.isEncoder,
        isHardwareCodec(clientConfig.codecType),
        clientConfig.width, clientConfig.height,
        systemConcurrentCodecs,
        appConcurrentCodecs,
        pixelCount,
        hwVideoCodecs,
        swVideoCodecs,
        videoCodecs,
        audioCodecs,
        imageCodecs);

    ALOGV("%s: Pushed MEDIA_CODEC_STARTED atom: "
          "Process[pid(%d): uid(%d)] "
          "Codec: [%s: %ju] is %s %s "
          "Timestamp: %jd "
          "Resolution: %d x %d "
          "ConcurrentCodec[%d]={System: %d App: %d} "
          "AppConcurrentCodecs{Video: %d(HW[%d] SW[%d]) Audio: %d Image: %d} "
          "result: %d",
          __func__,
          pid, clientConfig.clientInfo.uid,
          clientConfig.clientInfo.name.c_str(),
          clientConfig.id,
          getCodecType(clientConfig.codecType),
          clientConfig.isEncoder? "encoder" : "decoder",
          clientConfig.timeStamp,
          clientConfig.width, clientConfig.height,
          codecBucket, systemConcurrentCodecs, appConcurrentCodecs,
          videoCodecs, hwVideoCodecs, swVideoCodecs, audioCodecs, imageCodecs,
          result);
}

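// Records a codec stop: updates the concurrent codec and pixel counters,
// computes the usage time from the cached start timestamp, and pushes the
// MEDIA_CODEC_STOPPED atom.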
void ResourceManagerMetrics::notifyClientStopped(const ClientConfigParcel& clientConfig) {
    std::scoped_lock lock(mLock);
    int pid = clientConfig.clientInfo.pid;
    // Update the concurrent codec count for this process.
    CodecBucket codecBucket = getCodecBucket(clientConfig.isEncoder, clientConfig.codecType);
    decreaseConcurrentCodecs(pid, codecBucket);

    if (clientConfig.codecType == MediaResourceSubType::kHwVideoCodec ||
        clientConfig.codecType == MediaResourceSubType::kSwVideoCodec ||
        clientConfig.codecType == MediaResourceSubType::kHwImageCodec ||
        clientConfig.codecType == MediaResourceSubType::kSwImageCodec) {
        // Update the pixel count for this process
        decreasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
    }

    // System concurrent codec usage
    int systemConcurrentCodecs = mConcurrentCodecsMap[codecBucket];
    // Process/Application concurrent codec usage for this type of codec
    int appConcurrentCodecs = 0;
    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
    if (found != mProcessConcurrentCodecsMap.end()) {
        appConcurrentCodecs = found->second.mCurrent[codecBucket];
    }
    // Process/Application's current pixel count.
    long pixelCount = 0;
    std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
    if (it != mProcessPixelsMap.end()) {
        pixelCount = it->second.mCurrent;
    }

    // calculate the usageTime as:
    //  MediaCodecStopped.clientConfig.timeStamp -
    //  MediaCodecStarted.clientConfig.timeStamp
    int64_t usageTime = 0;
    ClientConfigMap::iterator entry = mClientConfigMap.find(clientConfig.clientInfo.id);
    if (entry != mClientConfigMap.end()) {
        usageTime = clientConfig.timeStamp - entry->second.timeStamp;
        // And we can erase this config now.
        mClientConfigMap.erase(entry);
    } else {
        ALOGW("%s: Start Config is missing!", __func__);
    }

    int result = stats_write(
        MEDIA_CODEC_STOPPED,
        clientConfig.clientInfo.uid,
        clientConfig.id,
        clientConfig.clientInfo.name.c_str(),
        getMetricsCodecType(clientConfig.codecType),
        clientConfig.isEncoder,
        isHardwareCodec(clientConfig.codecType),
        clientConfig.width, clientConfig.height,
        systemConcurrentCodecs,
        appConcurrentCodecs,
        pixelCount,
        usageTime);
    ALOGV("%s: Pushed MEDIA_CODEC_STOPPED atom: "
          "Process[pid(%d): uid(%d)] "
          "Codec: [%s: %ju] is %s %s "
          "Timestamp: %jd Usage time: %jd "
          "Resolution: %d x %d "
          "ConcurrentCodec[%d]={System: %d App: %d} "
          "result: %d",
          __func__,
          pid, clientConfig.clientInfo.uid,
          clientConfig.clientInfo.name.c_str(),
          clientConfig.id,
          getCodecType(clientConfig.codecType),
          clientConfig.isEncoder? "encoder" : "decoder",
          clientConfig.timeStamp, usageTime,
          clientConfig.width, clientConfig.height,
          codecBucket, systemConcurrentCodecs, appConcurrentCodecs,
          result);
}

void ResourceManagerMetrics::onProcessTerminated(int32_t pid, uid_t uid) {
    std::scoped_lock lock(mLock);
    // post MediaCodecConcurrentUsageReported for this terminated pid.
    pushConcurrentUsageReport(pid, uid);
    // Remove all the metrics associated with this process.
    std::map<int32_t, ConcurrentCodecs>::iterator it1 = mProcessConcurrentCodecsMap.find(pid);
    if (it1 != mProcessConcurrentCodecsMap.end()) {
        mProcessConcurrentCodecsMap.erase(it1);
    }
    std::map<int32_t, PixelCount>::iterator it2 = mProcessPixelsMap.find(pid);
    if (it2 != mProcessPixelsMap.end()) {
        mProcessPixelsMap.erase(it2);
    }
}

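// Summarizes a terminated process's peak concurrent codec usage and peak
// pixel count. When the MEDIA_CODEC_CONCURRENT_USAGE_REPORTED atom is enabled
// the summary is pushed to statsd; otherwise it is only logged.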
void ResourceManagerMetrics::pushConcurrentUsageReport(int32_t pid, uid_t uid) {
    // Process/Application peak concurrent codec usage
    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
    if (found == mProcessConcurrentCodecsMap.end()) {
        ALOGI("%s: No MEDIA_CODEC_CONCURRENT_USAGE_REPORTED atom Entry for: "
              "Application[pid(%d): uid(%d)]", __func__, pid, uid);
        return;
    }
    const ConcurrentCodecsMap& codecsMap = found->second.mPeak;
    int peakHwAudioEncoderCount = codecsMap[HwAudioEncoder];
    int peakHwAudioDecoderCount = codecsMap[HwAudioDecoder];
    int peakHwVideoEncoderCount = codecsMap[HwVideoEncoder];
    int peakHwVideoDecoderCount = codecsMap[HwVideoDecoder];
    int peakHwImageEncoderCount = codecsMap[HwImageEncoder];
    int peakHwImageDecoderCount = codecsMap[HwImageDecoder];
    int peakSwAudioEncoderCount = codecsMap[SwAudioEncoder];
    int peakSwAudioDecoderCount = codecsMap[SwAudioDecoder];
    int peakSwVideoEncoderCount = codecsMap[SwVideoEncoder];
    int peakSwVideoDecoderCount = codecsMap[SwVideoDecoder];
    int peakSwImageEncoderCount = codecsMap[SwImageEncoder];
    int peakSwImageDecoderCount = codecsMap[SwImageDecoder];

    long peakPixels = 0;
    std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
    if (it == mProcessPixelsMap.end()) {
        ALOGI("%s: No Video Codec Entry for Application[pid(%d): uid(%d)]",
              __func__, pid, uid);
    } else {
        peakPixels = it->second.mPeak;
    }
    std::string peakPixelsLog("Peak Pixels: " + std::to_string(peakPixels));

    std::stringstream peakCodecLog;
    peakCodecLog << "Peak { ";
    std::string logMsg;
    logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
    if (!logMsg.empty()) {
        peakCodecLog << "AudioEnc[" << logMsg << " ] ";
    }
    logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
    if (!logMsg.empty()) {
        peakCodecLog << "AudioDec[" << logMsg << " ] ";
    }
    logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
    if (!logMsg.empty()) {
        peakCodecLog << "VideoEnc[" << logMsg << " ] ";
    }
    logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
    if (!logMsg.empty()) {
        peakCodecLog << "VideoDec[" << logMsg << " ] ";
    }
    logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
    if (!logMsg.empty()) {
        peakCodecLog << "ImageEnc[" << logMsg << " ] ";
    }
    logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
    if (!logMsg.empty()) {
        peakCodecLog << "ImageDec[" << logMsg << " ] ";
    }
    peakCodecLog << "}";

#ifdef ENABLE_MEDIA_CODEC_CONCURRENT_USAGE_REPORTED
    int result = stats_write(
        MEDIA_CODEC_CONCURRENT_USAGE_REPORTED,
        uid,
        peakHwVideoDecoderCount,
        peakHwVideoEncoderCount,
        peakSwVideoDecoderCount,
        peakSwVideoEncoderCount,
        peakHwAudioDecoderCount,
        peakHwAudioEncoderCount,
        peakSwAudioDecoderCount,
        peakSwAudioEncoderCount,
        peakHwImageDecoderCount,
        peakHwImageEncoderCount,
        peakSwImageDecoderCount,
        peakSwImageEncoderCount,
        peakPixels);
    ALOGI("%s: Pushed MEDIA_CODEC_CONCURRENT_USAGE_REPORTED atom: "
          "Process[pid(%d): uid(%d)] %s %s result: %d",
          __func__, pid, uid, peakCodecLog.str().c_str(), peakPixelsLog.c_str(), result);
#else
    ALOGI("%s: Concurrent Codec Usage Report for the Process[pid(%d): uid(%d)] is %s %s",
          __func__, pid, uid, peakCodecLog.str().c_str(), peakPixelsLog.c_str());
#endif
}

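// Helper that pushes one MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom. The
// target-related parameters default to sentinel values so the same helper can
// report reclaim attempts that had no target clients.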
inline void pushReclaimStats(int32_t callingPid,
                             int32_t requesterUid,
                             int requesterPriority,
                             const std::string& clientName,
                             int32_t noOfConcurrentCodecs,
                             int32_t reclaimStatus,
                             int32_t noOfCodecsReclaimed = 0,
                             int32_t targetIndex = -1,
                             int32_t targetClientPid = -1,
                             int32_t targetClientUid = -1,
                             int32_t targetPriority = -1) {
    // Post the pushed atom
    int result = stats_write(
        MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
        requesterUid,
        requesterPriority,
        clientName.c_str(),
        noOfConcurrentCodecs,
        reclaimStatus,
        noOfCodecsReclaimed,
        targetIndex,
        targetClientUid,
        targetPriority);
    ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
          "Requester[pid(%d): uid(%d): priority(%d)] "
          "Codec: [%s] "
          "No of concurrent codecs: %d "
          "Reclaim Status: %d "
          "No of codecs reclaimed: %d "
          "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
          __func__, callingPid, requesterUid, requesterPriority,
          clientName.c_str(), noOfConcurrentCodecs,
          reclaimStatus, noOfCodecsReclaimed,
          targetIndex, targetClientPid, targetClientUid, targetPriority, result);
}

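// Builds the reclaim metrics for a reclaim request: the requester's identity
// and priority, the concurrent codec count for the requested codec name, the
// reclaim status, and one atom per target client that was considered.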
void ResourceManagerMetrics::pushReclaimAtom(const ClientInfoParcel& clientInfo,
                                             const std::vector<int>& priorities,
                                             const std::vector<ClientInfo>& targetClients,
                                             bool reclaimed) {
    // Construct the metrics for codec reclaim as a pushed atom.
    // 1. Information about the requester.
    //  - UID and the priority (oom score)
    int32_t callingPid = clientInfo.pid;
    int32_t requesterUid = clientInfo.uid;
    std::string clientName = clientInfo.name;
    int requesterPriority = priorities[0];

    // 2. Information about the codec.
    //  - Name of the codec requested
    //  - Number of concurrent codecs running.
    int32_t noOfConcurrentCodecs = 0;
    std::map<std::string, int>::iterator found = mConcurrentResourceCountMap.find(clientName);
    if (found != mConcurrentResourceCountMap.end()) {
        noOfConcurrentCodecs = found->second;
    }

    // 3. Information about the Reclaim:
    //  - Status of reclaim request
    //  - How many codecs are reclaimed
    //  - For each codec reclaimed, information about the process it belonged to:
    //    - UID and the Priority (oom score)
    int32_t reclaimStatus = MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
    if (!reclaimed) {
        if (targetClients.size() == 0) {
            // No clients to reclaim from
            reclaimStatus =
                MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
        } else {
            // Couldn't reclaim resources from the clients
            reclaimStatus =
                MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
        }
    }

    if (targetClients.empty()) {
        // Push the reclaim atom to stats.
        pushReclaimStats(callingPid,
                         requesterUid,
                         requesterPriority,
                         clientName,
                         noOfConcurrentCodecs,
                         reclaimStatus);
        return;
    }

    int32_t noOfCodecsReclaimed = targetClients.size();
    int32_t targetIndex = 1;
    for (const ClientInfo& targetClient : targetClients) {
        int targetPriority = priorities[targetIndex];
        // Push the reclaim atom to stats.
        pushReclaimStats(callingPid,
                         requesterUid,
                         requesterPriority,
                         clientName,
                         noOfConcurrentCodecs,
                         reclaimStatus,
                         noOfCodecsReclaimed,
                         targetIndex,
                         targetClient.mPid,
                         targetClient.mUid,
                         targetPriority);
        targetIndex++;
    }
}

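// Increments the concurrent codec count for the given bucket, both across the
// system and for the given process, tracking the per-process peak as well as
// the per-category (HW/SW video, audio, image) totals.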
void ResourceManagerMetrics::increaseConcurrentCodecs(int32_t pid,
                                                      CodecBucket codecBucket) {
    // Increase the codec usage across the system.
    mConcurrentCodecsMap[codecBucket]++;

    // Now update the codec usage for this (pid) process.
    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
    if (found == mProcessConcurrentCodecsMap.end()) {
        ConcurrentCodecs codecs;
        codecs.mCurrent[codecBucket] = 1;
        codecs.mPeak[codecBucket] = 1;
        auto added = mProcessConcurrentCodecsMap.emplace(pid, codecs);
        found = added.first;
    } else {
        found->second.mCurrent[codecBucket]++;
        // Check if it's the peak count for this slot.
        if (found->second.mPeak[codecBucket] < found->second.mCurrent[codecBucket]) {
            found->second.mPeak[codecBucket] = found->second.mCurrent[codecBucket];
        }
    }

    switch (codecBucket) {
        case HwVideoEncoder:
        case HwVideoDecoder:
        case SwVideoEncoder:
        case SwVideoDecoder:
            if (codecBucket == HwVideoEncoder || codecBucket == HwVideoDecoder) {
                found->second.mHWVideoCodecs++;
            } else {
                found->second.mSWVideoCodecs++;
            }
            found->second.mVideoCodecs++;
            break;
        case HwAudioEncoder:
        case HwAudioDecoder:
        case SwAudioEncoder:
        case SwAudioDecoder:
            found->second.mAudioCodecs++;
            break;
        case HwImageEncoder:
        case HwImageDecoder:
        case SwImageEncoder:
        case SwImageDecoder:
            found->second.mImageCodecs++;
            break;
        default:
            break;
    }
}

void ResourceManagerMetrics::decreaseConcurrentCodecs(int32_t pid,
                                                      CodecBucket codecBucket) {
    // Decrease the codec usage across the system.
    if (mConcurrentCodecsMap[codecBucket] > 0) {
        mConcurrentCodecsMap[codecBucket]--;
    }

    // Now update the codec usage for this (pid) process.
    std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
    if (found != mProcessConcurrentCodecsMap.end()) {
        if (found->second.mCurrent[codecBucket] > 0) {
            found->second.mCurrent[codecBucket]--;
        }

        switch (codecBucket) {
            case HwVideoEncoder:
            case HwVideoDecoder:
            case SwVideoEncoder:
            case SwVideoDecoder:
                if (codecBucket == HwVideoEncoder || codecBucket == HwVideoDecoder) {
                    found->second.mHWVideoCodecs--;
                } else {
                    found->second.mSWVideoCodecs--;
                }
                found->second.mVideoCodecs--;
                break;
            case HwAudioEncoder:
            case HwAudioDecoder:
            case SwAudioEncoder:
            case SwAudioDecoder:
                found->second.mAudioCodecs--;
                break;
            case HwImageEncoder:
            case HwImageDecoder:
            case SwImageEncoder:
            case SwImageDecoder:
                found->second.mImageCodecs--;
                break;
            default:
                break;
        }
    }
}

void ResourceManagerMetrics::increasePixelCount(int32_t pid, long pixels) {
    // Now update the current pixel usage for this (pid) process.
    std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
    if (found == mProcessPixelsMap.end()) {
        PixelCount pixelCount {pixels, pixels};
        mProcessPixelsMap.emplace(pid, pixelCount);
    } else {
        if (__builtin_add_overflow(found->second.mCurrent, pixels, &found->second.mCurrent)) {
            ALOGI("Pixel Count overflow");
            return;
        }
        // Check if it's the peak count for this slot.
        if (found->second.mPeak < found->second.mCurrent) {
            found->second.mPeak = found->second.mCurrent;
        }
    }
}

void ResourceManagerMetrics::updatePixelCount(int32_t pid, long newPixels, long lastPixels) {
    // Since there is a change in resolution, decrease the count by the last
    // pixels and increase it by the new pixels.
    decreasePixelCount(pid, lastPixels);
    increasePixelCount(pid, newPixels);
}

void ResourceManagerMetrics::decreasePixelCount(int32_t pid, long pixels) {
    // Now update the current pixel usage for this (pid) process.
    std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
    if (found != mProcessPixelsMap.end()) {
        if (found->second.mCurrent < pixels) {
            found->second.mCurrent = 0;
        } else {
            if (__builtin_sub_overflow(found->second.mCurrent, pixels, &found->second.mCurrent)) {
                ALOGI("Pixel Count overflow");
                return;
            }
        }
    }
}

long ResourceManagerMetrics::getPeakConcurrentPixelCount(int pid) const {
    std::map<int32_t, PixelCount>::const_iterator found = mProcessPixelsMap.find(pid);
    if (found != mProcessPixelsMap.end()) {
        return found->second.mPeak;
    }

    return 0;
}

long ResourceManagerMetrics::getCurrentConcurrentPixelCount(int pid) const {
    std::map<int32_t, PixelCount>::const_iterator found = mProcessPixelsMap.find(pid);
    if (found != mProcessPixelsMap.end()) {
        return found->second.mCurrent;
    }

    return 0;
}

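// The helpers below format the in-memory metrics (concurrent instance counts,
// per-process pixel usage, and per-process codec usage) for dump() below.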
static std::string getConcurrentInstanceCount(const std::map<std::string, int>& resourceMap) {
    if (resourceMap.empty()) {
        return "";
    }
    std::stringstream concurrentInstanceInfo;
    for (const auto& [name, count] : resourceMap) {
        if (count > 0) {
            concurrentInstanceInfo << " Name: " << name << " Instances: " << count << "\n";
        }
    }

    std::string info = concurrentInstanceInfo.str();
    if (info.empty()) {
        return "";
    }
    return " Current Concurrent Codec Instances:\n" + info;
}

static std::string getAppsPixelCount(const std::map<int32_t, PixelCount>& pixelMap) {
    if (pixelMap.empty()) {
        return "";
    }
    std::stringstream pixelInfo;
    for (const auto& [pid, pixelCount] : pixelMap) {
        std::string logMsg = getLogMessage(" Current Pixels: ", pixelCount.mCurrent,
                                           " Peak Pixels: ", pixelCount.mPeak);
        if (!logMsg.empty()) {
            pixelInfo << " PID[" << pid << "]: {" << logMsg << " }\n";
        }
    }

    return " Applications Pixel Usage:\n" + pixelInfo.str();
}

static std::string getCodecUsageMetrics(const ConcurrentCodecsMap& codecsMap) {
    int peakHwAudioEncoderCount = codecsMap[HwAudioEncoder];
    int peakHwAudioDecoderCount = codecsMap[HwAudioDecoder];
    int peakHwVideoEncoderCount = codecsMap[HwVideoEncoder];
    int peakHwVideoDecoderCount = codecsMap[HwVideoDecoder];
    int peakHwImageEncoderCount = codecsMap[HwImageEncoder];
    int peakHwImageDecoderCount = codecsMap[HwImageDecoder];
    int peakSwAudioEncoderCount = codecsMap[SwAudioEncoder];
    int peakSwAudioDecoderCount = codecsMap[SwAudioDecoder];
    int peakSwVideoEncoderCount = codecsMap[SwVideoEncoder];
    int peakSwVideoDecoderCount = codecsMap[SwVideoDecoder];
    int peakSwImageEncoderCount = codecsMap[SwImageEncoder];
    int peakSwImageDecoderCount = codecsMap[SwImageDecoder];
    std::stringstream usageMetrics;
    std::string logMsg;
    logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
    if (!logMsg.empty()) {
        usageMetrics << "AudioEnc[" << logMsg << " ] ";
    }
    logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
    if (!logMsg.empty()) {
        usageMetrics << "AudioDec[" << logMsg << " ] ";
    }
    logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
    if (!logMsg.empty()) {
        usageMetrics << "VideoEnc[" << logMsg << " ] ";
    }
    logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
    if (!logMsg.empty()) {
        usageMetrics << "VideoDec[" << logMsg << " ] ";
    }
    logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
    if (!logMsg.empty()) {
        usageMetrics << "ImageEnc[" << logMsg << " ] ";
    }
    logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
    if (!logMsg.empty()) {
        usageMetrics << "ImageDec[" << logMsg << " ] ";
    }

    return usageMetrics.str();
}

static std::string getAppsCodecUsageMetrics(
        const std::map<int32_t, ConcurrentCodecs>& processCodecsMap) {
    if (processCodecsMap.empty()) {
        return "";
    }
    std::stringstream codecUsage;
    std::string info;
    for (const auto& [pid, codecMap] : processCodecsMap) {
        codecUsage << " PID[" << pid << "]: ";
        info = getCodecUsageMetrics(codecMap.mCurrent);
        if (!info.empty()) {
            codecUsage << "Current Codec Usage: { " << info << "} ";
        }
        info = getCodecUsageMetrics(codecMap.mPeak);
        if (!info.empty()) {
            codecUsage << "Peak Codec Usage: { " << info << "}";
        }
        codecUsage << "\n";
    }

    return " Applications Codec Usage:\n" + codecUsage.str();
}


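// Returns a human-readable snapshot of the collected metrics.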
std::string ResourceManagerMetrics::dump() const {
    std::string metricsLog(" Metrics logs:\n");
    metricsLog += getConcurrentInstanceCount(mConcurrentResourceCountMap);
    metricsLog += getAppsPixelCount(mProcessPixelsMap);
    metricsLog += getAppsCodecUsageMetrics(mProcessConcurrentCodecsMap);

    return metricsLog;
}

} // namespace android