Merge "CSD: Add method to check if HAL supports CSD" into udc-dev
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 52b2041..8da09c4 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -1104,14 +1104,14 @@
static_cast<MediaBufferHolder*>(holder.get())->mediaBuffer() : nullptr;
}
if (mediaBuf != NULL) {
- if (mediaBuf->size() > codecBuffer->capacity()) {
+ if (mediaBuf->range_length() > codecBuffer->capacity()) {
handleError(ERROR_BUFFER_TOO_SMALL);
mDequeuedInputBuffers.push_back(bufferIx);
return false;
}
- codecBuffer->setRange(0, mediaBuf->size());
- memcpy(codecBuffer->data(), mediaBuf->data(), mediaBuf->size());
+ codecBuffer->setRange(0, mediaBuf->range_length());
+ memcpy(codecBuffer->data(), mediaBuf->data(), mediaBuf->range_length());
MetaDataBase &meta_data = mediaBuf->meta_data();
cryptInfo = NuPlayerDrm::getSampleCryptoInfo(meta_data);
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 2370a7b..6e97bf7 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -542,7 +542,7 @@
if (dstFormat() == COLOR_Format32bitABGR2101010) {
videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
} else {
- videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+ videoFormat->setInt32("color-format", COLOR_FormatYUV420Flexible);
}
// For the thumbnail extraction case, try to allocate single buffer in both
@@ -685,7 +685,6 @@
if (mCaptureLayer != nullptr) {
return captureSurface();
}
-
ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
uint32_t standard, range, transfer;
@@ -698,8 +697,18 @@
if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
transfer = 0;
}
+ sp<ABuffer> imgObj;
+ if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+ MediaImage2 *imageData = nullptr;
+ imageData = (MediaImage2 *)(imgObj.get()->data());
+ if (imageData != nullptr) {
+ converter.setSrcMediaImage2(*imageData);
+ }
+ }
+ if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
+ return ERROR_UNSUPPORTED;
+ }
converter.setSrcColorSpace(standard, range, transfer);
-
if (converter.isValid()) {
converter.convert(
(const uint8_t *)videoFrameBuffer->data(),
@@ -864,7 +873,7 @@
if (dstFormat() == COLOR_Format32bitABGR2101010) {
videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
} else {
- videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+ videoFormat->setInt32("color-format", COLOR_FormatYUV420Flexible);
}
if ((mGridRows == 1) && (mGridCols == 1)) {
@@ -967,6 +976,17 @@
if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
transfer = 0;
}
+ sp<ABuffer> imgObj;
+ if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+ MediaImage2 *imageData = nullptr;
+ imageData = (MediaImage2 *)(imgObj.get()->data());
+ if (imageData != nullptr) {
+ converter.setSrcMediaImage2(*imageData);
+ }
+ }
+ if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
+ return ERROR_UNSUPPORTED;
+ }
converter.setSrcColorSpace(standard, range, transfer);
int32_t crop_left, crop_top, crop_right, crop_bottom;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index c4a29c0..e5cc991 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -20,6 +20,7 @@
#include <utils/Log.h>
#include <set>
+#include <random>
#include <stdlib.h>
#include <inttypes.h>
@@ -99,6 +100,7 @@
// These must be kept synchronized with the constants there.
static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
static const char *kCodecCodec = "android.media.mediacodec.codec"; /* e.g. OMX.google.aac.decoder */
+static const char *kCodecId = "android.media.mediacodec.id";
static const char *kCodecMime = "android.media.mediacodec.mime"; /* e.g. audio/mime */
static const char *kCodecMode = "android.media.mediacodec.mode"; /* audio, video */
static const char *kCodecModeVideo = "video"; /* values returned for kCodecMode */
@@ -218,7 +220,7 @@
sp<MediaCodec> codec = mMediaCodec.promote();
if (codec == NULL) {
// Codec is already gone, so remove the resources as well
- ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+ ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
std::shared_ptr<IResourceManagerService> service =
IResourceManagerService::fromBinder(binder);
if (service == nullptr) {
@@ -290,6 +292,9 @@
void removeClient();
void markClientForPendingRemoval();
bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
+ void notifyClientCreated();
+ void notifyClientStarted(ClientConfigParcel& clientConfig);
+ void notifyClientStopped(ClientConfigParcel& clientConfig);
inline void setCodecName(const char* name) {
mCodecName = name;
@@ -331,7 +336,7 @@
}
status_t MediaCodec::ResourceManagerServiceProxy::init() {
- ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+ ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
mService = IResourceManagerService::fromBinder(binder);
if (mService == nullptr) {
ALOGE("Failed to get ResourceManagerService");
@@ -468,6 +473,32 @@
return status.isOk() && success;
}
+void MediaCodec::ResourceManagerServiceProxy::notifyClientCreated() {
+ ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+ .uid = static_cast<int32_t>(mUid),
+ .id = getId(mClient),
+ .name = mCodecName};
+ mService->notifyClientCreated(clientInfo);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
+ ClientConfigParcel& clientConfig) {
+ clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
+ clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
+ clientConfig.clientInfo.id = getId(mClient);
+ clientConfig.clientInfo.name = mCodecName;
+ mService->notifyClientStarted(clientConfig);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::notifyClientStopped(
+ ClientConfigParcel& clientConfig) {
+ clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
+ clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
+ clientConfig.clientInfo.id = getId(mClient);
+ clientConfig.clientInfo.name = mCodecName;
+ mService->notifyClientStopped(clientConfig);
+}
+
////////////////////////////////////////////////////////////////////////////////
MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}
@@ -860,6 +891,23 @@
return new PersistentSurface(bufferProducer, bufferSource);
}
+// GenerateCodecId generates a 64bit Random ID for each codec that is created.
+// The Codec ID is generated as:
+// - A process-unique random high 32bits
+// - An atomic sequence low 32bits
+//
+static uint64_t GenerateCodecId() {
+ static std::atomic_uint64_t sId = [] {
+ std::random_device rd;
+ std::mt19937 gen(rd());
+ std::uniform_int_distribution<uint32_t> distrib(0, UINT32_MAX);
+ uint32_t randomID = distrib(gen);
+ uint64_t id = randomID;
+ return id << 32;
+ }();
+ return sId++;
+}
+
MediaCodec::MediaCodec(
const sp<ALooper> &looper, pid_t pid, uid_t uid,
std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
@@ -902,6 +950,7 @@
mInputBufferCounter(0),
mGetCodecBase(getCodecBase),
mGetCodecInfo(getCodecInfo) {
+ mCodecId = GenerateCodecId();
mResourceManagerProxy = new ResourceManagerServiceProxy(pid, uid,
::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid, uid));
if (!mGetCodecBase) {
@@ -1235,12 +1284,14 @@
// ensure mutex while we do our own work
Mutex::Autolock _lock(mMetricsLock);
if (mMetricsHandle != 0) {
- if (mediametrics_count(mMetricsHandle) > 0) {
+ if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
mediametrics_selfRecord(mMetricsHandle);
}
mediametrics_delete(mMetricsHandle);
mMetricsHandle = 0;
}
+ // we no longer have anything pending upload
+ mMetricsToUpload = false;
}
void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
@@ -1793,6 +1844,12 @@
break;
}
}
+
+ if (OK == err) {
+ // Notify the ResourceManager that, this codec has been created
+ // (initialized) successfully.
+ mResourceManagerProxy->notifyClientCreated();
+ }
return err;
}
@@ -1839,6 +1896,7 @@
const sp<ICrypto> &crypto,
const sp<IDescrambler> &descrambler,
uint32_t flags) {
+
sp<AMessage> msg = new AMessage(kWhatConfigure, this);
mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
@@ -1846,6 +1904,7 @@
format->findString("log-session-id", &mLogSessionId);
if (nextMetricsHandle != 0) {
+ mediametrics_setInt64(nextMetricsHandle, kCodecId, mCodecId);
int32_t profile = 0;
if (format->findInt32("profile", &profile)) {
mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
@@ -3340,6 +3399,17 @@
return DequeueOutputResult::kRepliedWithError;
}
+
+inline void MediaCodec::initClientConfigParcel(ClientConfigParcel& clientConfig) {
+ clientConfig.codecType = toMediaResourceSubType(mDomain);
+ clientConfig.isEncoder = mFlags & kFlagIsEncoder;
+ clientConfig.isHardware = !MediaCodecList::isSoftwareCodec(mComponentName);
+ clientConfig.width = mWidth;
+ clientConfig.height = mHeight;
+ clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
+ clientConfig.id = mCodecId;
+}
+
void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatCodecNotify:
@@ -3586,14 +3656,8 @@
mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
}
- MediaCodecInfo::Attributes attr = mCodecInfo
- ? mCodecInfo->getAttributes()
- : MediaCodecInfo::Attributes(0);
- if (mDomain == DOMAIN_VIDEO || !(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
- // software audio codecs are currently ignored.
- mResourceManagerProxy->addResource(MediaResource::CodecResource(
+ mResourceManagerProxy->addResource(MediaResource::CodecResource(
mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
- }
postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
break;
@@ -3763,6 +3827,11 @@
mResourceManagerProxy->addResource(
MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
}
+ // Notify the RM that the codec is in use (has been started).
+ ClientConfigParcel clientConfig;
+ initClientConfigParcel(clientConfig);
+ mResourceManagerProxy->notifyClientStarted(clientConfig);
+
setState(STARTED);
postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
@@ -3993,6 +4062,11 @@
mState, stateString(mState).c_str());
break;
}
+ // Notify the RM that the codec has been stopped.
+ ClientConfigParcel clientConfig;
+ initClientConfigParcel(clientConfig);
+ mResourceManagerProxy->notifyClientStopped(clientConfig);
+
setState(INITIALIZED);
if (mReplyID) {
postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
@@ -4175,6 +4249,10 @@
initMediametrics();
}
+ // from this point forward, in this configure/use/release lifecycle, we want to
+ // upload our data
+ mMetricsToUpload = true;
+
int32_t push;
if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
mFlags |= kFlagPushBlankBuffersOnShutdown;
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 485759c..4ad3276 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -405,7 +405,7 @@
// a suitable codec.
//
int profile = -1;
- if (matches->empty() && format->findInt32(KEY_PROFILE, &profile)) {
+ if (matches->empty() && format != nullptr && format->findInt32(KEY_PROFILE, &profile)) {
ALOGV("no matching codec found, retrying without profile");
sp<AMessage> formatNoProfile = format->dup();
formatNoProfile->removeEntryByName(KEY_PROFILE);
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 92c5eb3..06d5680 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -33,10 +33,8 @@
#include <functional>
#include <sys/time.h>
-#define USE_LIBYUV
#define PERF_PROFILING 0
-
#if defined(__aarch64__) || defined(__ARM_NEON__)
#define USE_NEON_Y410 1
#else
@@ -48,6 +46,48 @@
#endif
namespace android {
+typedef const struct libyuv::YuvConstants LibyuvConstants;
+
+struct LibyuvConstPair {
+ const LibyuvConstants *yuv;
+ const LibyuvConstants *yvu;
+};
+
+// Function to resolve YUV Matrices defined in libyuv
+static LibyuvConstPair getLibYUVMatrix(
+ const ColorConverter::ColorSpace &colorSpace, bool is10Bit) {
+ LibyuvConstPair matrix = {nullptr, nullptr};
+ const bool isFullRange = (colorSpace.mRange == ColorUtils::kColorRangeFull);
+ if (colorSpace.isI601()) {
+ matrix.yuv = &libyuv::kYuvI601Constants;
+ matrix.yvu = &libyuv::kYvuI601Constants;
+ } else if (colorSpace.isJ601()) {
+ matrix.yuv = &libyuv::kYuvJPEGConstants;
+ matrix.yvu = &libyuv::kYvuJPEGConstants;
+ } else if (colorSpace.isH709()) {
+ matrix.yuv = &libyuv::kYuvH709Constants;
+ matrix.yvu = &libyuv::kYvuH709Constants;
+ } else if (colorSpace.isF709()) {
+ matrix.yuv = &libyuv::kYuvF709Constants;
+ matrix.yvu = &libyuv::kYvuF709Constants;
+ } else if (colorSpace.isBt2020()) {
+ matrix.yuv = &libyuv::kYuv2020Constants;
+ matrix.yvu = &libyuv::kYvu2020Constants;
+ } else if (colorSpace.isBtV2020()) {
+ matrix.yuv = &libyuv::kYuvV2020Constants;
+ matrix.yvu = &libyuv::kYvuV2020Constants;
+ } else {
+ // unspecified
+ if (isFullRange) {
+ matrix.yuv = is10Bit ? &libyuv::kYuvV2020Constants : &libyuv::kYuvJPEGConstants;
+ matrix.yvu = is10Bit ? &libyuv::kYvuV2020Constants : &libyuv::kYvuJPEGConstants;
+ } else {
+ matrix.yuv = is10Bit ? &libyuv::kYuv2020Constants : &libyuv::kYuvI601Constants;
+ matrix.yvu = is10Bit ? &libyuv::kYvu2020Constants : &libyuv::kYvuI601Constants;
+ }
+ }
+ return matrix;
+}
static bool isRGB(OMX_COLOR_FORMATTYPE colorFormat) {
return colorFormat == OMX_COLOR_Format16bitRGB565
@@ -56,28 +96,234 @@
|| colorFormat == COLOR_Format32bitABGR2101010;
}
-bool ColorConverter::ColorSpace::isBt2020() const {
- return (mStandard == ColorUtils::kColorStandardBT2020);
+// check for limited Range
+bool ColorConverter::ColorSpace::isLimitedRange() const {
+ return mRange == ColorUtils::kColorRangeLimited;
}
-bool ColorConverter::ColorSpace::isH420() const {
+// BT.2020 limited range YUV to RGB
+bool ColorConverter::ColorSpace::isBt2020() const {
+ return (mStandard == ColorUtils::kColorStandardBT2020
+ && mRange == ColorUtils::kColorRangeLimited);
+}
+
+// BT.2020 full range YUV to RGB
+bool ColorConverter::ColorSpace::isBtV2020() const {
+ return (mStandard == ColorUtils::kColorStandardBT2020
+ && mRange == ColorUtils::kColorRangeFull);
+}
+
+// BT.709 full range YUV to RGB
+bool ColorConverter::ColorSpace::isF709() const {
+ return (mStandard == ColorUtils::kColorStandardBT709
+ && mRange == ColorUtils::kColorRangeFull);
+}
+
+// BT.709 limited range YUV to RGB
+bool ColorConverter::ColorSpace::isH709() const {
return (mStandard == ColorUtils::kColorStandardBT709)
&& (mRange == ColorUtils::kColorRangeLimited);
}
+// BT.601 limited range YUV to RGB
// the matrix coefficients are the same for both 601.625 and 601.525 standards
-bool ColorConverter::ColorSpace::isI420() const {
+bool ColorConverter::ColorSpace::isI601() const {
return ((mStandard == ColorUtils::kColorStandardBT601_625)
|| (mStandard == ColorUtils::kColorStandardBT601_525))
&& (mRange == ColorUtils::kColorRangeLimited);
}
-bool ColorConverter::ColorSpace::isJ420() const {
+// BT.601 full range YUV to RGB
+bool ColorConverter::ColorSpace::isJ601() const {
return ((mStandard == ColorUtils::kColorStandardBT601_625)
|| (mStandard == ColorUtils::kColorStandardBT601_525))
&& (mRange == ColorUtils::kColorRangeFull);
}
+// Utility functions for MediaImage2
+static MediaImage2 CreateYUV420PlanarMediaImage2(
+ uint32_t width, uint32_t height, uint32_t stride,
+ uint32_t vstride, uint32_t bitDepth) {
+ const uint32_t componentBytes = (bitDepth + 7) / 8;
+ return MediaImage2 {
+ .mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV,
+ .mNumPlanes = 3,
+ .mWidth = width,
+ .mHeight = height,
+ .mBitDepth = bitDepth,
+ .mBitDepthAllocated = componentBytes * 8,
+ .mPlane = {
+ {
+ .mOffset = 0,
+ .mColInc = static_cast<int32_t>(componentBytes),
+ .mRowInc = static_cast<int32_t>(stride),
+ .mHorizSubsampling = 1,
+ .mVertSubsampling = 1,
+ },
+ {
+ .mOffset = stride * vstride,
+ .mColInc = static_cast<int32_t>(componentBytes),
+ .mRowInc = static_cast<int32_t>(stride / 2),
+ .mHorizSubsampling = 2,
+ .mVertSubsampling = 2,
+ },
+ {
+ .mOffset = stride * vstride * 5 / 4,
+ .mColInc = static_cast<int32_t>(componentBytes),
+ .mRowInc = static_cast<int32_t>(stride / 2),
+ .mHorizSubsampling = 2,
+ .mVertSubsampling = 2,
+ }
+ },
+ };
+}
+
+static MediaImage2 CreateYUV420SemiPlanarMediaImage2(
+ uint32_t width, uint32_t height, uint32_t stride,
+ uint32_t vstride, uint32_t bitDepth, bool uv = true /*nv12 or not*/) {
+ const uint32_t componentBytes = (bitDepth + 7) / 8;
+ return MediaImage2 {
+ .mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV,
+ .mNumPlanes = 3,
+ .mWidth = width,
+ .mHeight = height,
+ .mBitDepth = bitDepth,
+ .mBitDepthAllocated = componentBytes * 8,
+ .mPlane = {
+ {
+ .mOffset = 0,
+ .mColInc = static_cast<int32_t>(componentBytes),
+ .mRowInc = static_cast<int32_t>(stride),
+ .mHorizSubsampling = 1,
+ .mVertSubsampling = 1,
+ },
+ {
+ .mOffset = stride * vstride + (uv ? 0 : componentBytes),
+ .mColInc = static_cast<int32_t>(2 * componentBytes),
+ .mRowInc = static_cast<int32_t>(stride),
+ .mHorizSubsampling = 2,
+ .mVertSubsampling = 2,
+ },
+ {
+ .mOffset = stride * vstride + (uv ? componentBytes : 0),
+ .mColInc = static_cast<int32_t>(2 * componentBytes),
+ .mRowInc = static_cast<int32_t>(stride),
+ .mHorizSubsampling = 2,
+ .mVertSubsampling = 2,
+ }
+ },
+ };
+}
+
+ColorConverter::Image::Image(const MediaImage2& img)
+ :mImage(img),
+ mLayout(ImageLayoutUnknown),
+ mSampling(ImageSamplingUnknown) {
+ const MediaImage2::PlaneInfo &yPlane =
+ img.mPlane[MediaImage2::PlaneIndex::Y];
+ const MediaImage2::PlaneInfo &uPlane =
+ img.mPlane[MediaImage2::PlaneIndex::U];
+ const MediaImage2::PlaneInfo &vPlane =
+ img.mPlane[MediaImage2::PlaneIndex::V];
+
+ if (mImage.mNumPlanes != 3) {
+ ALOGE("Conversion error: MediaImage2 mNumPlanes != 3");
+ mLayout = ImageLayoutUnknown;
+ mSampling = ImageSamplingUnknown;
+ mBitDepth = ImageBitDepthInvalid;
+ return;
+ }
+
+ if (mImage.mBitDepth == 8
+ && yPlane.mColInc == 1
+ && uPlane.mColInc == 1
+ && vPlane.mColInc == 1
+ && yPlane.mVertSubsampling == 1
+ && uPlane.mVertSubsampling == 2
+ && vPlane.mVertSubsampling == 2) {
+ mLayout = ImageLayout420Planar;
+ mSampling = ImageSamplingYUV420;
+ } else if (mImage.mBitDepth == 8
+ && yPlane.mColInc == 1
+ && uPlane.mColInc == 2
+ && vPlane.mColInc == 2
+ && yPlane.mVertSubsampling == 1
+ && uPlane.mVertSubsampling == 2
+ && vPlane.mVertSubsampling == 2
+ && ((vPlane.mOffset == uPlane.mOffset + 1) ||
+ (uPlane.mOffset == vPlane.mOffset + 1))) {
+ mLayout = ImageLayout420SemiPlanar;
+ mSampling = ImageSamplingYUV420;
+ }
+
+ mBitDepth = ImageBitDepthInvalid;
+ switch (img.mBitDepth) {
+ case 8:
+ mBitDepth = ImageBitDepth8;
+ break;
+
+ case 10:
+ case 12:
+ case 16:
+ default:
+ // TODO: Implement 10b, 12b and 16b using MediaImage2
+ mBitDepth = ImageBitDepthInvalid;
+ }
+
+}
+
+status_t ColorConverter::Image::getYUVPlaneOffsetAndStride(
+ const BitmapParams &src,
+ uint32_t *y_offset,
+ uint32_t *u_offset,
+ uint32_t *v_offset,
+ size_t *y_stride,
+ size_t *u_stride,
+ size_t *v_stride) const {
+
+    if (y_offset == nullptr || u_offset == nullptr || v_offset == nullptr
+ || y_stride == nullptr || u_stride == nullptr || v_stride == nullptr) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ if (mImage.mNumPlanes != 3) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ const MediaImage2::PlaneInfo &yPlane = mImage.mPlane[MediaImage2::PlaneIndex::Y];
+ *y_offset = yPlane.mOffset
+ + src.mCropTop * yPlane.mRowInc
+ + src.mCropLeft * yPlane.mColInc;
+
+ const MediaImage2::PlaneInfo &uPlane = mImage.mPlane[MediaImage2::PlaneIndex::U];
+ *u_offset = uPlane.mOffset
+ + (src.mCropTop / uPlane.mVertSubsampling) * uPlane.mRowInc
+ + (src.mCropLeft / uPlane.mHorizSubsampling) * uPlane.mColInc;
+
+ const MediaImage2::PlaneInfo &vPlane = mImage.mPlane[MediaImage2::PlaneIndex::V];
+ *v_offset = vPlane.mOffset
+ + (src.mCropTop / vPlane.mVertSubsampling) * vPlane.mRowInc
+ + (src.mCropLeft / vPlane.mHorizSubsampling) * vPlane.mColInc;
+
+ *y_stride = yPlane.mRowInc;
+ *u_stride = uPlane.mRowInc;
+ *v_stride = vPlane.mRowInc;
+
+ return OK;
+}
+
+bool ColorConverter::Image::isNV21() const {
+ if (getLayout() == ImageLayout420SemiPlanar) {
+ const MediaImage2::PlaneInfo &uPlane = mImage.mPlane[MediaImage2::PlaneIndex::U];
+ const MediaImage2::PlaneInfo &vPlane = mImage.mPlane[MediaImage2::PlaneIndex::V];
+
+ int componentBytes = (mImage.mBitDepthAllocated) / 8;
+
+ return (((vPlane.mOffset + componentBytes) == uPlane.mOffset));
+ }
+ return false;
+}
+
/**
* This class approximates the standard YUV to RGB conversions by factoring the matrix
* coefficients to 1/256th-s (as dividing by 256 is easy to do with right shift). The chosen value
@@ -227,8 +473,42 @@
mClip10Bit = NULL;
}
+// Set MediaImage2 Flexible formats
+void ColorConverter::setSrcMediaImage2(MediaImage2 img) {
+ mSrcImage = Image(img);
+ }
+
+bool ColorConverter::isValidForMediaImage2() const {
+
+ if (!mSrcImage
+ || mSrcImage->getMediaImage2().mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
+ // TODO: support Yonly or RGB etc?
+ return false;
+ }
+ // try to identify the src format
+
+ BitDepth_t srcBitDepth = mSrcImage->getBitDepth();
+
+ //TODO: support 12b and 16b ?
+ if (srcBitDepth == ImageBitDepthInvalid) {
+ return false;
+ }
+
+ return ((srcBitDepth == ImageBitDepth8 &&
+ (mDstFormat == OMX_COLOR_Format16bitRGB565
+ || mDstFormat == OMX_COLOR_Format32BitRGBA8888
+ || mDstFormat == OMX_COLOR_Format32bitBGRA8888))
+
+ || (srcBitDepth == ImageBitDepth10
+ && (mDstFormat == COLOR_Format32bitABGR2101010)));
+}
+
bool ColorConverter::isValid() const {
switch ((int32_t)mSrcFormat) {
+ case COLOR_FormatYUV420Flexible:
+ return isValidForMediaImage2();
+ break;
+
case OMX_COLOR_FormatYUV420Planar16:
if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
return true;
@@ -240,22 +520,23 @@
|| mDstFormat == OMX_COLOR_Format32bitBGRA8888;
case OMX_COLOR_FormatCbYCrY:
- case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
- case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
return mDstFormat == OMX_COLOR_Format16bitRGB565;
case OMX_COLOR_FormatYUV420SemiPlanar:
-#ifdef USE_LIBYUV
+ case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+ case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+ if (mSrcImage) {
+ return isValidForMediaImage2();
+ }
return mDstFormat == OMX_COLOR_Format16bitRGB565
|| mDstFormat == OMX_COLOR_Format32BitRGBA8888
|| mDstFormat == OMX_COLOR_Format32bitBGRA8888;
-#else
- return mDstFormat == OMX_COLOR_Format16bitRGB565;
-#endif
+
case COLOR_FormatYUVP010:
return mDstFormat == COLOR_Format32bitABGR2101010;
default:
+ //TODO: Should this be enabled for MediaImage2?
return false;
}
}
@@ -320,6 +601,13 @@
mStride = mWidth;
break;
+ case COLOR_FormatYUV420Flexible:
+ // MediaImage2 should be used.
+ mBpp = 1;
+ mStride = mWidth;
+
+ break;
+
default:
ALOGE("Unsupported color format %d", mColorFormat);
mBpp = 1;
@@ -360,7 +648,8 @@
BitmapParams src(
const_cast<void *>(srcBits),
srcWidth, srcHeight, srcStride,
- srcCropLeft, srcCropTop, srcCropRight, srcCropBottom, mSrcFormat);
+ srcCropLeft, srcCropTop, srcCropRight, srcCropBottom,
+ mSrcFormat);
BitmapParams dst(
dstBits,
@@ -374,71 +663,68 @@
&& src.cropHeight() == dst.cropHeight())) {
return ERROR_UNSUPPORTED;
}
-
- status_t err;
-
- switch ((int32_t)mSrcFormat) {
- case OMX_COLOR_FormatYUV420Planar:
-#ifdef USE_LIBYUV
- err = convertYUV420PlanarUseLibYUV(src, dst);
-#else
- err = convertYUV420Planar(src, dst);
+#if PERF_PROFILING
+ int64_t startTimeUs = ALooper::GetNowUs();
#endif
+ status_t err;
+ switch ((int32_t)mSrcFormat) {
+ case COLOR_FormatYUV420Flexible:
+ err = convertYUVMediaImage(src, dst);
+ break;
+
+ case OMX_COLOR_FormatYUV420Planar:
+ if (!mSrcImage) {
+ mSrcImage = Image(CreateYUV420PlanarMediaImage2(
+ srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/));
+ }
+ err = convertYUVMediaImage(src, dst);
+
break;
case OMX_COLOR_FormatYUV420Planar16:
- {
-#if PERF_PROFILING
- int64_t startTimeUs = ALooper::GetNowUs();
-#endif
err = convertYUV420Planar16(src, dst);
-#if PERF_PROFILING
- int64_t endTimeUs = ALooper::GetNowUs();
- ALOGD("convertYUV420Planar16 took %lld us", (long long) (endTimeUs - startTimeUs));
-#endif
break;
- }
case COLOR_FormatYUVP010:
- {
-#if PERF_PROFILING
- int64_t startTimeUs = ALooper::GetNowUs();
-#endif
err = convertYUVP010(src, dst);
-#if PERF_PROFILING
- int64_t endTimeUs = ALooper::GetNowUs();
- ALOGD("convertYUVP010 took %lld us", (long long) (endTimeUs - startTimeUs));
-#endif
+
break;
- }
case OMX_COLOR_FormatCbYCrY:
err = convertCbYCrY(src, dst);
break;
case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
- err = convertQCOMYUV420SemiPlanar(src, dst);
+ if (!mSrcImage) {
+ mSrcImage = Image(CreateYUV420SemiPlanarMediaImage2(
+ srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/, false));
+ }
+ err = convertYUVMediaImage(src, dst);
+
break;
case OMX_COLOR_FormatYUV420SemiPlanar:
-#ifdef USE_LIBYUV
- err = convertYUV420SemiPlanarUseLibYUV(src, dst);
-#else
- err = convertYUV420SemiPlanar(src, dst);
-#endif
- break;
-
case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
- err = convertTIYUV420PackedSemiPlanar(src, dst);
+ if (!mSrcImage) {
+ mSrcImage = Image(CreateYUV420SemiPlanarMediaImage2(
+ srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/));
+ }
+ err = convertYUVMediaImage(src, dst);
+
break;
default:
- {
+
CHECK(!"Should not be here. Unknown color conversion.");
break;
- }
}
+#if PERF_PROFILING
+ int64_t endTimeUs = ALooper::GetNowUs();
+ ALOGD("%s image took %lld us", asString_ColorFormat(mSrcFormat,"Unknown"),
+ (long long) (endTimeUs - startTimeUs));
+#endif
+
return err;
}
@@ -547,67 +833,103 @@
return OK;
}
+status_t ColorConverter::getSrcYUVPlaneOffsetAndStride(
+ const BitmapParams &src,
+ uint32_t *y_offset, uint32_t *u_offset, uint32_t *v_offset,
+ size_t *y_stride, size_t *u_stride, size_t *v_stride) const {
+ if (y_offset == nullptr || u_offset == nullptr || v_offset == nullptr
+ || y_stride == nullptr || u_stride == nullptr || v_stride == nullptr) {
+ ALOGE("nullptrs given for yuv source offset / stride");
+ return ERROR_MALFORMED;
+ }
+
+ if (mSrcImage) {
+ // if we have MediaImage2; get the info from MediaImage2
+ return mSrcImage->getYUVPlaneOffsetAndStride(src, y_offset, u_offset, v_offset,
+ y_stride, u_stride, v_stride);
+ }
+ return ERROR_UNSUPPORTED;
+}
/*
libyuv supports the following color spaces:
- I420: BT.601 limited range
- J420: BT.601 full range (jpeg)
- H420: BT.709 limited range
+ I601: BT.601 limited range
+ J601: BT.601 full range (jpeg)
+ H709: BT.709 limited range
+ F709: BT.709 Full range
+ 2020: BT.2020 limited range
+ V2020: BT.2020 Full range
*/
-#define DECLARE_YUV2RGBFUNC(func, rgb) int (*func)( \
- const uint8_t*, int, const uint8_t*, int, \
- const uint8_t*, int, uint8_t*, int, int, int) \
- = mSrcColorSpace.isH420() ? libyuv::H420To##rgb \
- : mSrcColorSpace.isJ420() ? libyuv::J420To##rgb \
- : libyuv::I420To##rgb
-
status_t ColorConverter::convertYUV420PlanarUseLibYUV(
const BitmapParams &src, const BitmapParams &dst) {
- // Fall back to our conversion if libyuv does not support the color space.
- // I420 (BT.601 limited) is default, so don't fall back if we end up using it anyway.
- if (!mSrcColorSpace.isH420() && !mSrcColorSpace.isJ420()
- // && !mSrcColorSpace.isI420() /* same as line below */
- && getMatrix() != &BT601_LIMITED) {
- return convertYUV420Planar(src, dst);
+ LibyuvConstPair yuvConstants =
+ getLibYUVMatrix(mSrcColorSpace, false);
+
+ uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+ size_t src_stride_y =0, src_stride_u = 0, src_stride_v = 0;
+ if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+ &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+ return ERROR_UNSUPPORTED;
}
uint8_t *dst_ptr = (uint8_t *)dst.mBits
+ dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
- const uint8_t *src_y =
- (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
+ const uint8_t *src_y = (const uint8_t *)src.mBits + y_offset;
- const uint8_t *src_u =
- (const uint8_t *)src.mBits + src.mStride * src.mHeight
- + (src.mCropTop / 2) * (src.mStride / 2) + (src.mCropLeft / 2);
+ const uint8_t *src_u = (const uint8_t *)src.mBits + u_offset;
- const uint8_t *src_v =
- src_u + (src.mStride / 2) * (src.mHeight / 2);
+ const uint8_t *src_v = (const uint8_t *)src.mBits + v_offset;
switch (mDstFormat) {
case OMX_COLOR_Format16bitRGB565:
{
- DECLARE_YUV2RGBFUNC(func, RGB565);
- (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
- (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
- break;
- }
+ libyuv::I420ToRGB565Matrix(src_y,
+ src_stride_y,
+ src_u,
+ src_stride_u,
+ src_v,
+ src_stride_v,
+ dst_ptr,
+ dst.mStride,
+ yuvConstants.yuv,
+ src.cropWidth(),
+ src.cropHeight());
- case OMX_COLOR_Format32BitRGBA8888:
- {
- DECLARE_YUV2RGBFUNC(func, ABGR);
- (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
- (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
break;
}
case OMX_COLOR_Format32bitBGRA8888:
{
- DECLARE_YUV2RGBFUNC(func, ARGB);
- (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
- (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
+ libyuv::I420ToARGBMatrix(src_y,
+ src_stride_y,
+ src_u,
+ src_stride_u,
+ src_v,
+ src_stride_v,
+ (uint8_t*)dst_ptr,
+ dst.mStride,
+ yuvConstants.yuv,
+ src.cropWidth(),
+ src.cropHeight());
+ break;
+ }
+
+ case OMX_COLOR_Format32BitRGBA8888:
+ {
+ libyuv::I420ToARGBMatrix(src_y,
+ src_stride_y,
+ src_v,
+ src_stride_v,
+ src_u,
+ src_stride_u,
+ (uint8_t*)dst_ptr,
+ dst.mStride,
+ yuvConstants.yvu,
+ src.cropWidth(),
+ src.cropHeight());
break;
}
@@ -620,38 +942,90 @@
status_t ColorConverter::convertYUV420SemiPlanarUseLibYUV(
const BitmapParams &src, const BitmapParams &dst) {
- // Fall back to our conversion if libyuv does not support the color space.
- // libyuv only supports BT.601 limited range NV12. Don't fall back if we end up using it anyway.
- if (// !mSrcColorSpace.isI420() && /* same as below */
- getMatrix() != &BT601_LIMITED) {
- return convertYUV420SemiPlanar(src, dst);
- }
+ LibyuvConstPair yuvConstants =
+ getLibYUVMatrix(mSrcColorSpace, false);
+ uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+ size_t src_stride_y =0, src_stride_u = 0, src_stride_v = 0;
+ if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+ &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+ return ERROR_UNSUPPORTED;
+ }
+ (void)v_offset;
uint8_t *dst_ptr = (uint8_t *)dst.mBits
+ dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
- const uint8_t *src_y =
- (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
+ const uint8_t *src_y = (const uint8_t *)src.mBits + y_offset;
- const uint8_t *src_u =
- (const uint8_t *)src.mBits + src.mStride * src.mHeight
- + (src.mCropTop / 2) * src.mStride + src.mCropLeft;
+ const uint8_t *src_u = (const uint8_t *)src.mBits + u_offset;
+
+ const uint8_t *src_v = (const uint8_t *)src.mBits + v_offset;
+
+ bool isNV21 = (u_offset == (v_offset + 1)) ? true : false;
+
+ // libyuv function signature for semiplanar formats;
+ std::function<int(const uint8_t*, int,
+ const uint8_t*, int, uint8_t *, int,
+ LibyuvConstants *, int, int)> libyuvFunc;
switch (mDstFormat) {
case OMX_COLOR_Format16bitRGB565:
- libyuv::NV12ToRGB565(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
- dst.mStride, src.cropWidth(), src.cropHeight());
+ {
+ // Note: We don't seem to have similar function for NV21
+ libyuv::NV12ToRGB565Matrix(src_y,
+ src_stride_y,
+ src_u,
+ src_stride_u,
+ (uint8_t*)dst_ptr,
+ dst.mStride,
+ yuvConstants.yuv,
+ src.cropWidth(),
+ src.cropHeight());
break;
-
+ }
case OMX_COLOR_Format32bitBGRA8888:
- libyuv::NV12ToARGB(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
- dst.mStride, src.cropWidth(), src.cropHeight());
+ {
+ if (src_stride_u != src_stride_v) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ libyuvFunc = isNV21 ? libyuv:: NV21ToARGBMatrix : libyuv:: NV12ToARGBMatrix;
+
+ libyuvFunc(src_y,
+ src_stride_y,
+ isNV21 ? src_v: src_u,
+ // src_stride_v should be equal to src_stride_u
+ // but this is done like this for readability
+ isNV21 ? src_stride_v : src_stride_u,
+ (uint8_t*)dst_ptr,
+ dst.mStride,
+ yuvConstants.yuv,
+ src.cropWidth(),
+ src.cropHeight());
break;
+ }
case OMX_COLOR_Format32BitRGBA8888:
- libyuv::NV12ToABGR(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
- dst.mStride, src.cropWidth(), src.cropHeight());
+ {
+
+ if (src_stride_u != src_stride_v) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ libyuvFunc = isNV21 ? libyuv::NV12ToARGBMatrix : libyuv::NV21ToARGBMatrix;
+
+ libyuvFunc(src_y,
+ src_stride_y,
+ isNV21 ? src_v : src_u,
+ // src_stride_v should be equal to src_stride_u
+ isNV21 ? src_stride_v : src_stride_u,
+ (uint8_t*)dst_ptr,
+ dst.mStride,
+ yuvConstants.yvu,
+ src.cropWidth(),
+ src.cropHeight());
break;
+ }
default:
return ERROR_UNSUPPORTED;
@@ -661,20 +1035,40 @@
}
std::function<void (void *, void *, void *, size_t,
- signed *, signed *, signed *, signed *)>
-getReadFromSrc(OMX_COLOR_FORMATTYPE srcFormat) {
- switch(srcFormat) {
- case OMX_COLOR_FormatYUV420Planar:
- return [](void *src_y, void *src_u, void *src_v, size_t x,
- signed *y1, signed *y2, signed *u, signed *v) {
- *y1 = ((uint8_t*)src_y)[x];
- *y2 = ((uint8_t*)src_y)[x + 1];
- *u = ((uint8_t*)src_u)[x / 2] - 128;
- *v = ((uint8_t*)src_v)[x / 2] - 128;
- };
- // this format stores 10 bits content with 16 bits
- // converting it to 8 bits src
- case OMX_COLOR_FormatYUV420Planar16:
+ signed *, signed *, signed *, signed *)>
+getReadFromChromaHorizSubsampled2Image8b(std::optional<MediaImage2> image,
+ OMX_COLOR_FORMATTYPE srcFormat) {
+ // this function is for reading src only
+ // when both chromas are horizontally subsampled by 2
+ // this returns 2 luma for one chroma.
+ if (image) {
+ uint32_t uColInc =
+ image->mPlane[MediaImage2::PlaneIndex::U].mColInc;
+ uint32_t vColInc =
+ image->mPlane[MediaImage2::PlaneIndex::V].mColInc;
+ uint32_t uHorizSubsampling =
+ image->mPlane[MediaImage2::PlaneIndex::U].mHorizSubsampling;
+ uint32_t vHorizSubsampling =
+ image->mPlane[MediaImage2::PlaneIndex::V].mHorizSubsampling;
+
+ if (!(uHorizSubsampling == 2 && vHorizSubsampling == 2)) {
+ return nullptr;
+ }
+
+ if (image->mBitDepthAllocated == 8) {
+
+ return [uColInc, vColInc, uHorizSubsampling, vHorizSubsampling]
+ (void *src_y, void *src_u, void *src_v, size_t x,
+ signed *y1, signed *y2, signed *u, signed *v) {
+ *y1 = ((uint8_t *)src_y)[x];
+ *y2 = ((uint8_t *)src_y)[x + 1];
+ *u = ((uint8_t *)src_u)[(x / uHorizSubsampling) * uColInc] - 128;
+ *v = ((uint8_t *)src_v)[(x / vHorizSubsampling) * vColInc] - 128;
+ };
+ }
+ }
+ if (srcFormat == OMX_COLOR_FormatYUV420Planar16) {
+ // OMX_COLOR_FormatYUV420Planar16
return [](void *src_y, void *src_u, void *src_v, size_t x,
signed *y1, signed *y2, signed *u, signed *v) {
*y1 = (uint8_t)(((uint16_t*)src_y)[x] >> 2);
@@ -682,8 +1076,34 @@
*u = (uint8_t)(((uint16_t*)src_u)[x / 2] >> 2) - 128;
*v = (uint8_t)(((uint16_t*)src_v)[x / 2] >> 2) - 128;
};
- default:
- TRESPASS();
+ }
+ return nullptr;
+}
+
+std::function<void (void *, void *, void *, size_t,
+ signed *, signed *, signed *)>
+getReadFromImage(std::optional<MediaImage2> image, OMX_COLOR_FORMATTYPE &srcFormat) {
+ (void)srcFormat;
+ if (image) {
+ uint32_t uColInc =
+ image->mPlane[MediaImage2::PlaneIndex::U].mColInc;
+ uint32_t vColInc =
+ image->mPlane[MediaImage2::PlaneIndex::V].mColInc;
+ uint32_t uHorizSubsampling =
+ image->mPlane[MediaImage2::PlaneIndex::U].mHorizSubsampling;
+ uint32_t vHorizSubsampling =
+ image->mPlane[MediaImage2::PlaneIndex::V].mHorizSubsampling;
+
+ if (image->mBitDepthAllocated == 8) {
+
+ return [uColInc, vColInc, uHorizSubsampling, vHorizSubsampling]
+ (void *src_y, void *src_u, void *src_v, size_t x,
+ signed *y1, signed *u, signed *v) {
+ *y1 = ((uint8_t *)src_y)[x];
+ *u = ((uint8_t *)src_u)[(x / uHorizSubsampling) * uColInc] - 128;
+ *v = ((uint8_t *)src_v)[(x / vHorizSubsampling) * vColInc] - 128;
+ };
+ }
}
return nullptr;
}
@@ -782,8 +1202,178 @@
return nullptr;
}
-status_t ColorConverter::convertYUV420Planar(
+status_t ColorConverter::convertYUVMediaImage(
const BitmapParams &src, const BitmapParams &dst) {
+ // first see if we can do this as a 420Planar or 420SemiPlanar 8b
+
+ if(!mSrcImage ||
+ mSrcImage->getMediaImage2().mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV
+ || mSrcImage->getMediaImage2().mNumPlanes != 3) {
+ ALOGE("Cannot convert without MediaImage2 or MediaImage is not Valid YUV");
+ return ERROR_UNSUPPORTED;
+ }
+ if (mSrcImage->getBitDepth() == ImageBitDepth8
+ && mSrcImage->getSampling() == ImageSamplingYUV420) {
+ Layout_t layout = mSrcImage->getLayout();
+ switch (layout) {
+ case Layout_t::ImageLayout420Planar:
+ {
+ return convertYUV420PlanarUseLibYUV(src, dst);
+ break;
+ }
+
+ case Layout_t::ImageLayout420SemiPlanar:
+ {
+ // Note: libyuv doesn't support NV21 -> RGB565
+ if (!(mSrcImage->isNV21() && mDstFormat == OMX_COLOR_Format16bitRGB565)) {
+ status_t ret = convertYUV420SemiPlanarUseLibYUV(src, dst);
+ // This function may fail if some specific conditions are not
+ // met for semiPlanar formats like strideU != strideV.
+ // if failed, this will fail before attempting conversion, so
+ // no additional memcpy will be involved here.
+ // Upon failure, this will fall into pixel based processing below.
+ if (ret == OK) {
+ return ret;
+ }
+ }
+ break;
+ }
+ default:
+ // we will handle this case below.
+ break;
+ }
+ }
+ const struct Coeffs *matrix = getMatrix();
+ if (!matrix) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ signed _b_u = matrix->_b_u;
+ signed _neg_g_u = -matrix->_g_u;
+ signed _neg_g_v = -matrix->_g_v;
+ signed _r_v = matrix->_r_v;
+ signed _y = matrix->_y;
+
+ uint8_t *dst_ptr = (uint8_t *)dst.mBits
+ + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
+
+
+ uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+ size_t src_stride_y =0, src_stride_u = 0, src_stride_v = 0;
+ if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+ &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+ return ERROR_UNSUPPORTED;
+ }
+ uint32_t uVertSubsampling =
+ mSrcImage->getMediaImage2().mPlane[MediaImage2::PlaneIndex::U].mVertSubsampling;
+ uint32_t vVertSubsampling =
+ mSrcImage->getMediaImage2().mPlane[MediaImage2::PlaneIndex::V].mVertSubsampling;
+
+ //TODO: optimize for chroma sampling, reading and writing multiple pixels
+ // within the same loop
+ signed _c16 = 0;
+ void *kAdjustedClip = nullptr;
+ if (mSrcImage->getBitDepth() != ImageBitDepth8) {
+ ALOGE("BitDepth != 8 for MediaImage2");
+ return ERROR_UNSUPPORTED;
+ }
+ _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+ kAdjustedClip = initClip();
+
+ auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
+ uint8_t *src_y = (uint8_t *)src.mBits + y_offset;
+ uint8_t *src_u = (uint8_t *)src.mBits + u_offset;
+ uint8_t *src_v = (uint8_t *)src.mBits + v_offset;
+
+ switch (mSrcImage->getSampling()) {
+
+ case ImageSamplingYUV420:
+ {
+ // get read function that can read
+ // chroma sampling 2 with image
+ auto readFromSrcImage = getReadFromChromaHorizSubsampled2Image8b(
+ mSrcImage->getMediaImage2(), mSrcFormat);
+ if (readFromSrcImage == nullptr) {
+ ALOGE("Cannot get a read function for this MediaImage2");
+ return ERROR_UNSUPPORTED;
+ }
+ for (size_t y = 0; y < src.cropHeight(); ++y) {
+ for (size_t x = 0; x < src.cropWidth(); x += 2) {
+ signed y1, y2, u, v;
+ readFromSrcImage(src_y, src_u, src_v, x, &y1, &y2, &u, &v);
+
+ signed u_b = u * _b_u;
+ signed u_g = u * _neg_g_u;
+ signed v_g = v * _neg_g_v;
+ signed v_r = v * _r_v;
+
+ y1 = y1 - _c16;
+ signed tmp1 = y1 * _y + 128;
+ signed b1 = (tmp1 + u_b) / 256;
+ signed g1 = (tmp1 + v_g + u_g) / 256;
+ signed r1 = (tmp1 + v_r) / 256;
+
+ y2 = y2 - _c16;
+ signed tmp2 = y2 * _y + 128;
+ signed b2 = (tmp2 + u_b) / 256;
+ signed g2 = (tmp2 + v_g + u_g) / 256;
+ signed r2 = (tmp2 + v_r) / 256;
+
+ bool uncropped = x + 1 < src.cropWidth();
+ writeToDst(dst_ptr + x * dst.mBpp, uncropped, r1, g1, b1, r2, g2, b2);
+ }
+ src_y += src_stride_y;
+ src_u += (((y + 1) % uVertSubsampling) == 0) ? src_stride_u : 0;
+ src_v += (((y + 1) % vVertSubsampling) == 0) ? src_stride_v : 0;
+
+ dst_ptr += dst.mStride;
+ }
+ break;
+ }
+
+ default:
+ {
+ // Interleaved or any other formats.
+ auto readFromSrcImage = getReadFromImage(mSrcImage->getMediaImage2(), mSrcFormat);
+ if (readFromSrcImage == nullptr) {
+ ALOGE("Cannot get a read function for this MediaImage2");
+ return ERROR_UNSUPPORTED;
+ }
+ for (size_t y = 0; y < src.cropHeight(); ++y) {
+ for (size_t x = 0; x < src.cropWidth(); x += 1) {
+ signed y1, y2, u, v;
+ readFromSrcImage(src_y, src_u, src_v, x, &y1, &u, &v);
+
+ signed u_b = u * _b_u;
+ signed u_g = u * _neg_g_u;
+ signed v_g = v * _neg_g_v;
+ signed v_r = v * _r_v;
+
+ y1 = y1 - _c16;
+ signed tmp1 = y1 * _y + 128;
+ signed b1 = (tmp1 + u_b) / 256;
+ signed g1 = (tmp1 + v_g + u_g) / 256;
+ signed r1 = (tmp1 + v_r) / 256;
+
+ writeToDst(dst_ptr + x * dst.mBpp, false, r1, g1, b1, 0, 0, 0);
+ }
+ src_y += src_stride_y;
+ src_u += (((y + 1) % uVertSubsampling) == 0) ? src_stride_u : 0;
+ src_v += (((y + 1) % vVertSubsampling) == 0) ? src_stride_v : 0;
+
+ dst_ptr += dst.mStride;
+ }
+ }
+ }
+ return OK;
+}
+
+status_t ColorConverter::convertYUV420Planar16(
+ const BitmapParams &src, const BitmapParams &dst) {
+ if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
+ return convertYUV420Planar16ToY410(src, dst);
+ }
+
const struct Coeffs *matrix = getMatrix();
if (!matrix) {
return ERROR_UNSUPPORTED;
@@ -798,7 +1388,7 @@
uint8_t *kAdjustedClip = initClip();
- auto readFromSrc = getReadFromSrc(mSrcFormat);
+ auto readFromSrc = getReadFromChromaHorizSubsampled2Image8b(std::nullopt, mSrcFormat);
auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
uint8_t *dst_ptr = (uint8_t *)dst.mBits
@@ -845,19 +1435,9 @@
dst_ptr += dst.mStride;
}
-
return OK;
}
-status_t ColorConverter::convertYUV420Planar16(
- const BitmapParams &src, const BitmapParams &dst) {
- if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
- return convertYUV420Planar16ToY410(src, dst);
- }
-
- return convertYUV420Planar(src, dst);
-}
-
status_t ColorConverter::convertYUVP010(
const BitmapParams &src, const BitmapParams &dst) {
if (mDstFormat == COLOR_Format32bitABGR2101010) {
@@ -1136,102 +1716,6 @@
#endif // USE_NEON_Y410
-status_t ColorConverter::convertQCOMYUV420SemiPlanar(
- const BitmapParams &src, const BitmapParams &dst) {
- /* QCOMYUV420SemiPlanar is NV21, while MediaCodec uses NV12 */
- return convertYUV420SemiPlanarBase(
- src, dst, src.mWidth /* row_inc */, true /* isNV21 */);
-}
-
-status_t ColorConverter::convertTIYUV420PackedSemiPlanar(
- const BitmapParams &src, const BitmapParams &dst) {
- return convertYUV420SemiPlanarBase(
- src, dst, src.mWidth /* row_inc */);
-}
-
-status_t ColorConverter::convertYUV420SemiPlanar(
- const BitmapParams &src, const BitmapParams &dst) {
- return convertYUV420SemiPlanarBase(
- src, dst, src.mStride /* row_inc */);
-}
-
-status_t ColorConverter::convertYUV420SemiPlanarBase(const BitmapParams &src,
- const BitmapParams &dst, size_t row_inc, bool isNV21) {
- const struct Coeffs *matrix = getMatrix();
- if (!matrix) {
- return ERROR_UNSUPPORTED;
- }
-
- signed _b_u = matrix->_b_u;
- signed _neg_g_u = -matrix->_g_u;
- signed _neg_g_v = -matrix->_g_v;
- signed _r_v = matrix->_r_v;
- signed _y = matrix->_y;
- signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
-
- uint8_t *kAdjustedClip = initClip();
-
- uint16_t *dst_ptr = (uint16_t *)((uint8_t *)
- dst.mBits + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp);
-
- const uint8_t *src_y =
- (const uint8_t *)src.mBits + src.mCropTop * row_inc + src.mCropLeft;
-
- const uint8_t *src_u = (const uint8_t *)src.mBits + src.mHeight * row_inc +
- (src.mCropTop / 2) * row_inc + src.mCropLeft;
-
- for (size_t y = 0; y < src.cropHeight(); ++y) {
- for (size_t x = 0; x < src.cropWidth(); x += 2) {
- signed y1 = (signed)src_y[x] - _c16;
- signed y2 = (signed)src_y[x + 1] - _c16;
-
- signed u = (signed)src_u[(x & ~1) + isNV21] - 128;
- signed v = (signed)src_u[(x & ~1) + !isNV21] - 128;
-
- signed u_b = u * _b_u;
- signed u_g = u * _neg_g_u;
- signed v_g = v * _neg_g_v;
- signed v_r = v * _r_v;
-
- signed tmp1 = y1 * _y + 128;
- signed b1 = (tmp1 + u_b) / 256;
- signed g1 = (tmp1 + v_g + u_g) / 256;
- signed r1 = (tmp1 + v_r) / 256;
-
- signed tmp2 = y2 * _y + 128;
- signed b2 = (tmp2 + u_b) / 256;
- signed g2 = (tmp2 + v_g + u_g) / 256;
- signed r2 = (tmp2 + v_r) / 256;
-
- uint32_t rgb1 =
- ((kAdjustedClip[r1] >> 3) << 11)
- | ((kAdjustedClip[g1] >> 2) << 5)
- | (kAdjustedClip[b1] >> 3);
-
- uint32_t rgb2 =
- ((kAdjustedClip[r2] >> 3) << 11)
- | ((kAdjustedClip[g2] >> 2) << 5)
- | (kAdjustedClip[b2] >> 3);
-
- if (x + 1 < src.cropWidth()) {
- *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
- } else {
- dst_ptr[x] = rgb1;
- }
- }
-
- src_y += row_inc;
-
- if (y & 1) {
- src_u += row_inc;
- }
-
- dst_ptr = (uint16_t*)((uint8_t*)dst_ptr + dst.mStride);
- }
-
- return OK;
-}
-
uint8_t *ColorConverter::initClip() {
if (mClip == NULL) {
mClip = new uint8_t[CLIP_RANGE_MAX_8BIT - CLIP_RANGE_MIN_8BIT + 1];
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index da3267e..e8b89c7 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -23,7 +23,10 @@
#include <stdint.h>
#include <utils/Errors.h>
+#include <optional>
+
#include <OMX_Video.h>
+#include <media/hardware/VideoAPI.h>
namespace android {
@@ -35,6 +38,8 @@
bool isDstRGB() const;
+ void setSrcMediaImage2(MediaImage2 img);
+
void setSrcColorSpace(uint32_t standard, uint32_t range, uint32_t transfer);
status_t convert(
@@ -49,18 +54,91 @@
struct Coeffs; // matrix coefficients
-private:
struct ColorSpace {
uint32_t mStandard;
uint32_t mRange;
uint32_t mTransfer;
- bool isBt2020() const;
-
+ bool isLimitedRange() const;
// libyuv helper methods
- bool isH420() const;
- bool isI420() const;
- bool isJ420() const;
+ // BT.2020 limited Range
+ bool isBt2020() const;
+ // BT.2020 full range
+ bool isBtV2020() const;
+ // 709 limited range
+ bool isH709() const;
+ // 709 full range
+ bool isF709() const;
+ // 601 limited range
+ bool isI601() const;
+ // 601 full range
+ // also called "JPEG" in libyuv
+ bool isJ601() const;
+ };
+
+private:
+
+ typedef enum : uint8_t {
+ ImageLayoutUnknown = 0x0,
+ ImageLayout420SemiPlanar = 0x1,
+ ImageLayout420Planar = 0x2
+ } Layout_t;
+
+ typedef enum : uint8_t {
+ ImageSamplingUnknown = 0x0,
+ ImageSamplingYUV420 = 0x1,
+ } Sampling_t;
+
+ //this is the actual usable bit
+ typedef enum : uint8_t {
+ ImageBitDepthInvalid = 0x0,
+ ImageBitDepth8 = 0x1,
+ ImageBitDepth10 = 0x2,
+ ImageBitDepth12 = 0x3,
+ ImageBitDepth16 = 0x4
+ } BitDepth_t;
+
+ struct BitmapParams;
+
+
+ class Image {
+ public:
+ Image(const MediaImage2& img);
+ virtual ~Image() {}
+
+ const MediaImage2 getMediaImage2() const {
+ return mImage;
+ }
+
+ Layout_t getLayout() const {
+ return mLayout;
+ }
+ Sampling_t getSampling() const {
+ return mSampling;
+ }
+ BitDepth_t getBitDepth() const {
+ return mBitDepth;
+ }
+
+ // Returns the plane offset for this image
+ // after accounting for the src Crop offsets
+ status_t getYUVPlaneOffsetAndStride(
+ const BitmapParams &src,
+ uint32_t *y_offset,
+ uint32_t *u_offset,
+ uint32_t *v_offset,
+ size_t *y_stride,
+ size_t *u_stride,
+ size_t *v_stride
+ ) const;
+
+ bool isNV21() const;
+
+ private:
+ MediaImage2 mImage;
+ Layout_t mLayout;
+ Sampling_t mSampling;
+ BitDepth_t mBitDepth;
};
struct BitmapParams {
@@ -84,6 +162,7 @@
};
OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
+ std::optional<Image> mSrcImage;
ColorSpace mSrcColorSpace;
uint8_t *mClip;
uint16_t *mClip10Bit;
@@ -91,14 +170,30 @@
uint8_t *initClip();
uint16_t *initClip10Bit();
+ // resolve YUVFormat from YUV420Flexible
+ bool isValidForMediaImage2() const;
+
+ // get plane offsets from Formats
+ status_t getSrcYUVPlaneOffsetAndStride(
+ const BitmapParams &src,
+ uint32_t *y_offset,
+ uint32_t *u_offset,
+ uint32_t *v_offset,
+ size_t *y_stride,
+ size_t *u_stride,
+ size_t *v_stride) const;
+
+ status_t convertYUVMediaImage(
+ const BitmapParams &src, const BitmapParams &dst);
+
// returns the YUV2RGB matrix coefficients according to the color aspects and bit depth
const struct Coeffs *getMatrix() const;
status_t convertCbYCrY(
const BitmapParams &src, const BitmapParams &dst);
- status_t convertYUV420Planar(
- const BitmapParams &src, const BitmapParams &dst);
+ // status_t convertYUV420Planar(
+ // const BitmapParams &src, const BitmapParams &dst);
status_t convertYUV420PlanarUseLibYUV(
const BitmapParams &src, const BitmapParams &dst);
@@ -115,19 +210,6 @@
status_t convertYUV420Planar16ToRGB(
const BitmapParams &src, const BitmapParams &dst);
- status_t convertQCOMYUV420SemiPlanar(
- const BitmapParams &src, const BitmapParams &dst);
-
- status_t convertYUV420SemiPlanar(
- const BitmapParams &src, const BitmapParams &dst);
-
- status_t convertYUV420SemiPlanarBase(
- const BitmapParams &src, const BitmapParams &dst,
- size_t row_inc, bool isNV21 = false);
-
- status_t convertTIYUV420PackedSemiPlanar(
- const BitmapParams &src, const BitmapParams &dst);
-
status_t convertYUVP010(
const BitmapParams &src, const BitmapParams &dst);
@@ -135,6 +217,7 @@
const BitmapParams &src, const BitmapParams &dst);
ColorConverter(const ColorConverter &);
+
ColorConverter &operator=(const ColorConverter &);
};
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index ad04b1f..77394d5 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -39,6 +39,7 @@
namespace android {
namespace media {
class MediaResourceParcel;
+class ClientConfigParcel;
} // media
} // android
} // aidl
@@ -71,6 +72,7 @@
using hardware::cas::native::V1_0::IDescrambler;
using aidl::android::media::MediaResourceParcel;
+using aidl::android::media::ClientConfigParcel;
struct MediaCodec : public AHandler {
enum Domain {
@@ -442,6 +444,7 @@
Mutex mMetricsLock;
mediametrics_handle_t mMetricsHandle = 0;
+ bool mMetricsToUpload = false;
nsecs_t mLifetimeStartNs = 0;
void initMediametrics();
void updateMediametrics();
@@ -453,6 +456,8 @@
void updateTunnelPeek(const sp<AMessage> &msg);
void updatePlaybackDuration(const sp<AMessage> &msg);
+ inline void initClientConfigParcel(ClientConfigParcel& clientConfig);
+
sp<AMessage> mOutputFormat;
sp<AMessage> mInputFormat;
sp<AMessage> mCallback;
@@ -705,6 +710,8 @@
};
Histogram mLatencyHist;
+ // An unique ID for the codec - Used by the metrics.
+ uint64_t mCodecId = 0;
std::function<sp<CodecBase>(const AString &, const char *)> mGetCodecBase;
std::function<status_t(const AString &, sp<MediaCodecInfo> *)> mGetCodecInfo;
diff --git a/media/libstagefright/tests/HEVC/Android.bp b/media/libstagefright/tests/HEVC/Android.bp
index 7a0ba52..7f2ff12 100644
--- a/media/libstagefright/tests/HEVC/Android.bp
+++ b/media/libstagefright/tests/HEVC/Android.bp
@@ -56,4 +56,7 @@
"signed-integer-overflow",
],
},
+ data: [
+ ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/foundation/tests/HEVCUtils/HEVCUtilsUnitTest.zip?unzip=true",
+ ],
}
diff --git a/media/libstagefright/tests/extractorFactory/Android.bp b/media/libstagefright/tests/extractorFactory/Android.bp
index a067284..20ebe44 100644
--- a/media/libstagefright/tests/extractorFactory/Android.bp
+++ b/media/libstagefright/tests/extractorFactory/Android.bp
@@ -66,4 +66,7 @@
"signed-integer-overflow",
],
},
+ data: [
+ ":https://storage.googleapis.com/android_media/frameworks/av/media/extractors/tests/extractor.zip?unzip=true",
+ ],
}
diff --git a/media/libstagefright/timedtext/test/Android.bp b/media/libstagefright/timedtext/test/Android.bp
index ae97c50..953da79 100644
--- a/media/libstagefright/timedtext/test/Android.bp
+++ b/media/libstagefright/timedtext/test/Android.bp
@@ -62,4 +62,7 @@
"signed-integer-overflow",
],
},
+ data: [
+ ":https://storage.googleapis.com/android_media/frameworks/av/media/libstagefright/timedtext/test/TimedTextUnitTest.zip?unzip=true",
+ ],
}
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index cdbd745..7d1442b 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -336,7 +336,6 @@
}
void WebmFrameMediaSourceThread::run() {
- int32_t count = 0;
int64_t timestampUs = 0xdeadbeef;
int64_t lastTimestampUs = 0; // Previous sample time stamp
int64_t lastDurationUs = 0; // Previous sample duration
@@ -367,7 +366,6 @@
buffer = NULL;
continue;
}
- ++count;
// adjust time-stamps after pause/resume
if (mResumed) {
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index 13f16b1..6296351 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -30,10 +30,64 @@
static constexpr int32_t INVALID_ADJ = -10000;
static constexpr int32_t NATIVE_ADJ = -1000;
+/* Make sure this matches with ActivityManager::PROCESS_STATE_NONEXISTENT
+ * #include <binder/ActivityManager.h>
+ * using ActivityManager::PROCESS_STATE_NONEXISTENT;
+ */
+static constexpr int32_t PROCESS_STATE_NONEXISTENT = 20;
+
ProcessInfo::ProcessInfo() {}
+/*
+ * Checks whether the list of processes with given pids exist or not.
+ *
+ * Arguments:
+ * - pids (input): List of pids for which to check whether they are Existent or not.
+ * - existent (output): boolean vector corresponds to Existent state of each pids.
+ *
+ * On successful return:
+ * - existent[i] true corresponds to pids[i] still active and
+ * - existent[i] false corresponds to pids[i] already terminated (Nonexistent)
+ * On unsuccessful return, the output argument existent is invalid.
+ */
+bool ProcessInfo::checkProcessExistent(const std::vector<int32_t>& pids,
+ std::vector<bool>* existent) {
+ sp<IBinder> binder = defaultServiceManager()->waitForService(String16("processinfo"));
+ sp<IProcessInfoService> service = interface_cast<IProcessInfoService>(binder);
+
+ // Get the process state of the applications managed/tracked by the ActivityManagerService.
+ // Don't have to look into the native processes.
+ // If we really need the state of native process, then we can use ==> mOverrideMap
+ size_t count = pids.size();
+ std::vector<int32_t> states(count, PROCESS_STATE_NONEXISTENT);
+ status_t err = service->getProcessStatesFromPids(count,
+ const_cast<int32_t*>(pids.data()),
+ states.data());
+ if (err != OK) {
+ ALOGE("%s: IProcessInfoService::getProcessStatesFromPids failed with %d",
+ __func__, err);
+ return false;
+ }
+
+ existent->clear();
+ for (size_t index = 0; index < states.size(); index++) {
+ // If this process is not tracked by ActivityManagerService, look for overrides.
+ if (states[index] == PROCESS_STATE_NONEXISTENT) {
+ std::scoped_lock lock{mOverrideLock};
+ std::map<int, ProcessInfoOverride>::iterator it =
+ mOverrideMap.find(pids[index]);
+ if (it != mOverrideMap.end()) {
+ states[index] = it->second.procState;
+ }
+ }
+ existent->push_back(states[index] != PROCESS_STATE_NONEXISTENT);
+ }
+
+ return true;
+}
+
bool ProcessInfo::getPriority(int pid, int* priority) {
- sp<IBinder> binder = defaultServiceManager()->getService(String16("processinfo"));
+ sp<IBinder> binder = defaultServiceManager()->waitForService(String16("processinfo"));
sp<IProcessInfoService> service = interface_cast<IProcessInfoService>(binder);
size_t length = 1;
diff --git a/media/utils/include/mediautils/ProcessInfo.h b/media/utils/include/mediautils/ProcessInfo.h
index 9afa3df..c27c939 100644
--- a/media/utils/include/mediautils/ProcessInfo.h
+++ b/media/utils/include/mediautils/ProcessInfo.h
@@ -33,6 +33,8 @@
virtual bool isPidUidTrusted(int pid, int uid);
virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
virtual void removeProcessInfoOverride(int pid);
+ bool checkProcessExistent(const std::vector<int32_t>& pids,
+ std::vector<bool>* existent) override;
protected:
virtual ~ProcessInfo();
diff --git a/media/utils/include/mediautils/ProcessInfoInterface.h b/media/utils/include/mediautils/ProcessInfoInterface.h
index b6529fc..e3384ba 100644
--- a/media/utils/include/mediautils/ProcessInfoInterface.h
+++ b/media/utils/include/mediautils/ProcessInfoInterface.h
@@ -17,16 +17,73 @@
#ifndef PROCESS_INFO_INTERFACE_H_
#define PROCESS_INFO_INTERFACE_H_
+#include <vector>
#include <utils/RefBase.h>
namespace android {
struct ProcessInfoInterface : public RefBase {
+ /*
+ * Gets the priority of the process (with given pid) as oom score.
+ *
+ * @param[in] pid pid of the process.
+ * @param[out] priority of the process.
+ *
+ * @return true for successful return and false otherwise.
+ */
virtual bool getPriority(int pid, int* priority) = 0;
+ /*
+ * Check whether the given pid is trusted or not.
+ *
+ * @param[in] pid pid of the process.
+ *
+ * @return true for trusted process and false otherwise.
+ */
virtual bool isPidTrusted(int pid) = 0;
+ /*
+ * Check whether the given pid and uid is trusted or not.
+ *
+ * @param[in] pid pid of the process.
+ * @param[in] uid uid of the process.
+ *
+ * @return true for trusted process and false otherwise.
+ */
virtual bool isPidUidTrusted(int pid, int uid) = 0;
+ /*
+ * Override process state and oom score of the pid.
+ *
+ * @param[in] pid pid of the process.
+ * @param[in] procState new state of the process to override with.
+ * @param[in] oomScore new oom score of the process to override with.
+ *
+ * @return true upon success and false otherwise.
+ */
virtual bool overrideProcessInfo(int pid, int procState, int oomScore) = 0;
+ /*
+ * Remove the override info of the given process.
+ *
+ * @param[in] pid pid of the process.
+ */
virtual void removeProcessInfoOverride(int pid) = 0;
+ /*
+ * Checks whether the list of processes with given pids exist or not.
+ *
+ * @param[in] pids List of pids for which to check whether they are Existent or not.
+ * @param[out] existent boolean vector corresponds to Existent state of each pids.
+ *
+ * @return true for successful return and false otherwise.
+ * On successful return:
+ * - existent[i] true corresponds to pids[i] still active and
+ * - existent[i] false corresponds to pids[i] already terminated (Nonexistent)
+ * On unsuccessful return, the output argument existent is invalid.
+ */
+ virtual bool checkProcessExistent(const std::vector<int32_t>& pids,
+ std::vector<bool>* existent) {
+ // A default implementation.
+ (void)pids;
+ (void)existent;
+ return false;
+ }
protected:
virtual ~ProcessInfoInterface() {}
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index a6821c7..c7e2103 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -166,8 +166,12 @@
// - cannot route from voice call RX OR
// - audio HAL version is < 3.0 and TX device is on the primary HW module
if (getPhoneState() == AUDIO_MODE_IN_CALL) {
- audio_devices_t txDevice = getDeviceForInputSource(
- AUDIO_SOURCE_VOICE_COMMUNICATION)->type();
+ audio_devices_t txDevice = AUDIO_DEVICE_NONE;
+ sp<DeviceDescriptor> txDeviceDesc =
+ getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+ if (txDeviceDesc != nullptr) {
+ txDevice = txDeviceDesc->type();
+ }
sp<AudioOutputDescriptor> primaryOutput = outputs.getPrimaryOutput();
LOG_ALWAYS_FATAL_IF(primaryOutput == nullptr, "Primary output not found");
DeviceVector availPrimaryInputDevices =
@@ -594,22 +598,26 @@
}
}
switch (commDeviceType) {
- case AUDIO_DEVICE_OUT_BLE_HEADSET:
- device = availableDevices.getDevice(
- AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
- break;
case AUDIO_DEVICE_OUT_SPEAKER:
device = availableDevices.getFirstExistingDevice({
AUDIO_DEVICE_IN_BACK_MIC, AUDIO_DEVICE_IN_BUILTIN_MIC,
AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_USB_HEADSET});
break;
+ case AUDIO_DEVICE_OUT_BLE_HEADSET:
+ device = availableDevices.getDevice(
+ AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
+ if (device != nullptr) {
+ break;
+ }
+ ALOGE("%s LE Audio selected for communication but input device not available",
+ __func__);
+ FALLTHROUGH_INTENDED;
default: // FORCE_NONE
device = availableDevices.getFirstExistingDevice({
AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BLUETOOTH_BLE,
AUDIO_DEVICE_IN_BUILTIN_MIC});
break;
-
}
break;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 22e4686..f5c7a71 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -671,7 +671,10 @@
audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
- ALOG_ASSERT(txSourceDevice != 0, "%s() input selected device not available", __func__);
+ if (txSourceDevice == nullptr) {
+ ALOGE("%s() selected input device not available", __func__);
+ return INVALID_OPERATION;
+ }
ALOGV("%s device rxDevice %s txDevice %s", __func__,
rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
@@ -5327,7 +5330,11 @@
*session = (audio_session_t)mpClientInterface->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
*ioHandle = (audio_io_handle_t)mpClientInterface->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_INPUT);
audio_attributes_t attr = { .source = AUDIO_SOURCE_HOTWORD };
- *device = mEngine->getInputDeviceForAttributes(attr)->type();
+ sp<DeviceDescriptor> deviceDesc = mEngine->getInputDeviceForAttributes(attr);
+ if (deviceDesc == nullptr) {
+ return INVALID_OPERATION;
+ }
+ *device = deviceDesc->type();
return mSoundTriggerSessions.acquireSession(*session, *ioHandle);
}
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index c812cd7..0115023 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -3823,8 +3823,7 @@
// ----------------------------------------------------------------------------
void CameraService::Client::notifyError(int32_t errorCode,
- const CaptureResultExtras& resultExtras) {
- (void) resultExtras;
+ [[maybe_unused]] const CaptureResultExtras& resultExtras) {
if (mRemoteCallback != NULL) {
int32_t api1ErrorCode = CAMERA_ERROR_RELEASED;
if (errorCode == hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED) {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 23a70db..b33303e 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1359,21 +1359,18 @@
|| l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
}
-void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) {
- (void)mem;
+void Camera2Client::releaseRecordingFrame([[maybe_unused]] const sp<IMemory>& mem) {
ATRACE_CALL();
ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
}
-void Camera2Client::releaseRecordingFrameHandle(native_handle_t *handle) {
- (void)handle;
+void Camera2Client::releaseRecordingFrameHandle([[maybe_unused]] native_handle_t *handle) {
ATRACE_CALL();
ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
}
void Camera2Client::releaseRecordingFrameHandleBatch(
- const std::vector<native_handle_t*>& handles) {
- (void)handles;
+ [[maybe_unused]] const std::vector<native_handle_t*>& handles) {
ATRACE_CALL();
ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
}
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 3f5696d..d4ac88d 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -61,7 +61,7 @@
bool systemNativeClient,
const std::optional<String16>& clientFeatureId,
const String8& cameraId,
- int api1CameraId,
+ [[maybe_unused]] int api1CameraId,
int cameraFacing,
int sensorOrientation,
int clientPid,
@@ -81,8 +81,6 @@
servicePid,
overrideToPortrait),
mRemoteCallback(remoteCallback) {
- // We don't need it for API2 clients, but Camera2ClientBase requires it.
- (void) api1CameraId;
}
// Interface used by CameraService
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index fb4d2f7..d798632 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -325,26 +325,20 @@
finishCameraStreamingOps();
}
-void CameraOfflineSessionClient::notifyAutoFocus(uint8_t newState, int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoFocus([[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Autofocus state now %d, last trigger %d",
__FUNCTION__, newState, triggerId);
}
-void CameraOfflineSessionClient::notifyAutoExposure(uint8_t newState, int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoExposure([[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Autoexposure state now %d, last trigger %d",
__FUNCTION__, newState, triggerId);
}
-void CameraOfflineSessionClient::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoWhitebalance([[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", __FUNCTION__, newState,
triggerId);
}
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 0a2819c..f1fc815 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -420,50 +420,38 @@
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyShutter(const CaptureResultExtras& resultExtras,
- nsecs_t timestamp) {
- (void)resultExtras;
- (void)timestamp;
-
+void Camera2ClientBase<TClientBase>::notifyShutter(
+ [[maybe_unused]] const CaptureResultExtras& resultExtras,
+ [[maybe_unused]] nsecs_t timestamp) {
ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
__FUNCTION__, resultExtras.requestId, timestamp);
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoFocus(uint8_t newState,
- int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoFocus([[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Autofocus state now %d, last trigger %d",
__FUNCTION__, newState, triggerId);
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoExposure(uint8_t newState,
- int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoExposure([[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Autoexposure state now %d, last trigger %d",
__FUNCTION__, newState, triggerId);
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoWhitebalance(uint8_t newState,
- int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoWhitebalance(
+ [[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Auto-whitebalance state now %d, last trigger %d",
__FUNCTION__, newState, triggerId);
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyPrepared(int streamId) {
- (void)streamId;
-
+void Camera2ClientBase<TClientBase>::notifyPrepared([[maybe_unused]] int streamId) {
ALOGV("%s: Stream %d now prepared",
__FUNCTION__, streamId);
}
@@ -475,9 +463,8 @@
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(long lastFrameNumber) {
- (void)lastFrameNumber;
-
+void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(
+ [[maybe_unused]] long lastFrameNumber) {
ALOGV("%s: Repeating request was stopped. Last frame number is %ld",
__FUNCTION__, lastFrameNumber);
}
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 3b40da9..d2d1e38 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -2074,13 +2074,12 @@
CameraProviderManager::ProviderInfo::ProviderInfo(
const std::string &providerName,
const std::string &providerInstance,
- CameraProviderManager *manager) :
+ [[maybe_unused]] CameraProviderManager *manager) :
mProviderName(providerName),
mProviderInstance(providerInstance),
mProviderTagid(generateVendorTagId(providerName)),
mUniqueDeviceCount(0),
mManager(manager) {
- (void) mManager;
}
const std::string& CameraProviderManager::ProviderInfo::getType() const {
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index acf511b..aab6fd5 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -576,17 +576,15 @@
hardware::CameraInfo *info) const = 0;
virtual bool isAPI1Compatible() const = 0;
virtual status_t dumpState(int fd) = 0;
- virtual status_t getCameraCharacteristics(bool overrideForPerfClass,
- CameraMetadata *characteristics, bool overrideToPortrait) {
- (void) overrideForPerfClass;
- (void) characteristics;
- (void) overrideToPortrait;
+ virtual status_t getCameraCharacteristics(
+ [[maybe_unused]] bool overrideForPerfClass,
+ [[maybe_unused]] CameraMetadata *characteristics,
+ [[maybe_unused]] bool overrideToPortrait) {
return INVALID_OPERATION;
}
- virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
- CameraMetadata *characteristics) const {
- (void) physicalCameraId;
- (void) characteristics;
+ virtual status_t getPhysicalCameraCharacteristics(
+ [[maybe_unused]] const std::string& physicalCameraId,
+ [[maybe_unused]] CameraMetadata *characteristics) const {
return INVALID_OPERATION;
}
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index ae0cf51..a13b937 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -442,8 +442,7 @@
}
void HidlProviderInfo::serviceDied(uint64_t cookie,
- const wp<hidl::base::V1_0::IBase>& who) {
- (void) who;
+ [[maybe_unused]] const wp<hidl::base::V1_0::IBase>& who) {
ALOGI("Camera provider '%s' has died; removing it", mProviderInstance.c_str());
if (cookie != mId) {
ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index a556200..2ac38d5 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -451,10 +451,9 @@
return OK;
}
-void Camera3BufferManager::dump(int fd, const Vector<String16>& args) const {
+void Camera3BufferManager::dump(int fd, [[maybe_unused]] const Vector<String16>& args) const {
Mutex::Autolock l(mLock);
- (void) args;
String8 lines;
lines.appendFormat(" Total stream sets: %zu\n", mStreamSetMap.size());
for (size_t i = 0; i < mStreamSetMap.size(); i++) {
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index a8e64de..f2c74d0 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -499,9 +499,8 @@
return BAD_VALUE;
}
-status_t Camera3Device::dump(int fd, const Vector<String16> &args) {
+status_t Camera3Device::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
ATRACE_CALL();
- (void)args;
// Try to lock, but continue in case of failure (to avoid blocking in
// deadlocks)
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 19afd69..8c0ac71 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -67,8 +67,7 @@
return INVALID_OPERATION;
}
-void Camera3FakeStream::dump(int fd, const Vector<String16> &args) const {
- (void) args;
+void Camera3FakeStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
String8 lines;
lines.appendFormat(" Stream[%d]: Fake\n", mId);
write(fd, lines.string(), lines.size());
@@ -82,9 +81,8 @@
return OK;
}
-status_t Camera3FakeStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
- (void) buffer;
- (void) fenceFd;
+status_t Camera3FakeStream::detachBuffer([[maybe_unused]] sp<GraphicBuffer>* buffer,
+ [[maybe_unused]] int* fenceFd) {
// Do nothing
return OK;
}
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index a78d01e..fbaaf7b 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -74,8 +74,7 @@
return false;
}
-void Camera3IOStreamBase::dump(int fd, const Vector<String16> &args) const {
- (void) args;
+void Camera3IOStreamBase::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
String8 lines;
uint64_t consumerUsage = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 9a3f7ed..631bb43 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -104,17 +104,14 @@
status_t Camera3InputStream::returnBufferCheckedLocked(
const camera_stream_buffer &buffer,
- nsecs_t timestamp,
- nsecs_t readoutTimestamp,
- bool output,
+ [[maybe_unused]] nsecs_t timestamp,
+ [[maybe_unused]] nsecs_t readoutTimestamp,
+ [[maybe_unused]] bool output,
int32_t /*transform*/,
const std::vector<size_t>&,
/*out*/
sp<Fence> *releaseFenceOut) {
- (void)timestamp;
- (void)readoutTimestamp;
- (void)output;
ALOG_ASSERT(!output, "Expected output to be false");
status_t res;
@@ -218,8 +215,7 @@
return OK;
}
-void Camera3InputStream::dump(int fd, const Vector<String16> &args) const {
- (void) args;
+void Camera3InputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
String8 lines;
lines.appendFormat(" Stream[%d]: Input\n", mId);
write(fd, lines.string(), lines.size());
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 2227232..58db57a 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -394,13 +394,12 @@
const camera_stream_buffer &buffer,
nsecs_t timestamp,
nsecs_t readoutTimestamp,
- bool output,
+ [[maybe_unused]] bool output,
int32_t transform,
const std::vector<size_t>& surface_ids,
/*out*/
sp<Fence> *releaseFenceOut) {
- (void)output;
ALOG_ASSERT(output, "Expected output to be true");
status_t res;
@@ -519,8 +518,7 @@
return res;
}
-void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
- (void) args;
+void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
String8 lines;
lines.appendFormat(" Stream[%d]: Output\n", mId);
lines.appendFormat(" Consumer name: %s\n", mConsumerName.string());
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 4d8495f..4395455 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -955,9 +955,8 @@
}
}
-void Camera3Stream::dump(int fd, const Vector<String16> &args) const
+void Camera3Stream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const
{
- (void)args;
mBufferLimitLatency.dump(fd,
" Latency histogram for wait on max_buffers");
}
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 2f55def..1a6b2e0 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -185,9 +185,8 @@
using getCameraDeviceInterface_V1_x_cb = std::function<void(Status status,
const sp<device::V1_0::ICameraDevice>& device)>;
virtual hardware::Return<void> getCameraDeviceInterface_V1_x(
- const hardware::hidl_string& cameraDeviceName,
+ [[maybe_unused]] const hardware::hidl_string& cameraDeviceName,
getCameraDeviceInterface_V1_x_cb _hidl_cb) override {
- (void) cameraDeviceName;
_hidl_cb(Status::OK, nullptr); //TODO: impl. of ver. 1.0 device interface
// otherwise enumeration will fail.
return hardware::Void();
@@ -261,9 +260,8 @@
virtual ~TestInteractionProxy() {}
virtual bool registerForNotifications(
- const std::string &serviceName,
+ [[maybe_unused]] const std::string &serviceName,
const sp<hidl::manager::V1_0::IServiceNotification> ¬ification) override {
- (void) serviceName;
mManagerNotificationInterface = notification;
return true;
}
diff --git a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
index 8331136..b367571 100644
--- a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
@@ -355,8 +355,6 @@
#include "DistortionMapperTest_OpenCvData.h"
TEST(DistortionMapperTest, CompareToOpenCV) {
- status_t res;
-
float bigDistortion[] = {0.1, -0.003, 0.004, 0.02, 0.01};
// Expect to match within sqrt(2) radius pixels
@@ -370,7 +368,7 @@
using namespace openCvData;
DistortionMapperInfo *mapperInfo = m.getMapperInfo();
- res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
+ m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
/*simple*/false);
for (size_t i = 0; i < rawCoords.size(); i+=2) {
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index c5957e9..cb5e783 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -444,6 +444,12 @@
}
AStatsEvent_writeInt32(event, hdrFormat);
+ int64_t codecId = 0;
+ if (item->getInt64("android.media.mediacodec.id", &codecId)) {
+ metrics_proto.set_codec_id(codecId);
+ }
+ AStatsEvent_writeInt64(event, codecId);
+
int err = AStatsEvent_write(event);
if (err < 0) {
ALOGE("Failed to write codec metrics to statsd (%d)", err);
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index 2b8245e..a2bd5e1 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -17,6 +17,7 @@
"aidl/android/media/MediaResourceParcel.aidl",
"aidl/android/media/MediaResourcePolicyParcel.aidl",
"aidl/android/media/ClientInfoParcel.aidl",
+ "aidl/android/media/ClientConfigParcel.aidl",
],
path: "aidl",
}
@@ -73,9 +74,11 @@
name: "libresourcemanagerservice",
srcs: [
+ "ResourceManagerMetrics.cpp",
"ResourceManagerService.cpp",
"ResourceObserverService.cpp",
"ServiceLog.cpp",
+ "UidObserver.cpp",
// TODO: convert to AIDL?
"IMediaResourceMonitor.cpp",
@@ -92,6 +95,7 @@
"libstatspull",
"libstatssocket",
"libprotobuf-cpp-lite",
+ "libactivitymanager_aidl",
],
static_libs: [
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
new file mode 100644
index 0000000..b60e734
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -0,0 +1,561 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerMetrics"
+#include <utils/Log.h>
+#include <mediautils/ProcessInfo.h>
+
+#include <stats_media_metrics.h>
+
+#include "UidObserver.h"
+#include "ResourceManagerMetrics.h"
+
+#include <cmath>
+#include <sstream>
+
+namespace android {
+
+using stats::media_metrics::stats_write;
+using stats::media_metrics::MEDIA_CODEC_STARTED;
+using stats::media_metrics::MEDIA_CODEC_STOPPED;
+// Disabling this for now.
+#ifdef ENABLE_MEDIA_CODEC_CONCURRENT_USAGE_REPORTED
+using stats::media_metrics::MEDIA_CODEC_CONCURRENT_USAGE_REPORTED;
+#endif
+using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED;
+using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
+using stats::media_metrics::\
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
+using stats::media_metrics::\
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+
+inline const char* getCodecType(MediaResourceSubType codecType) {
+ switch (codecType) {
+ case MediaResourceSubType::kAudioCodec: return "Audio";
+ case MediaResourceSubType::kVideoCodec: return "Video";
+ case MediaResourceSubType::kImageCodec: return "Image";
+ case MediaResourceSubType::kUnspecifiedSubType:
+ default:
+ return "Unspecified";
+ }
+ return "Unspecified";
+}
+
+static CodecBucket getCodecBucket(bool isHardware,
+ bool isEncoder,
+ MediaResourceSubType codecType) {
+ if (isHardware) {
+ switch (codecType) {
+ case MediaResourceSubType::kAudioCodec:
+ if (isEncoder) return HwAudioEncoder;
+ return HwAudioDecoder;
+ case MediaResourceSubType::kVideoCodec:
+ if (isEncoder) return HwVideoEncoder;
+ return HwVideoDecoder;
+ case MediaResourceSubType::kImageCodec:
+ if (isEncoder) return HwImageEncoder;
+ return HwImageDecoder;
+ case MediaResourceSubType::kUnspecifiedSubType:
+ default:
+ return CodecBucketUnspecified;
+ }
+ } else {
+ switch (codecType) {
+ case MediaResourceSubType::kAudioCodec:
+ if (isEncoder) return SwAudioEncoder;
+ return SwAudioDecoder;
+ case MediaResourceSubType::kVideoCodec:
+ if (isEncoder) return SwVideoEncoder;
+ return SwVideoDecoder;
+ case MediaResourceSubType::kImageCodec:
+ if (isEncoder) return SwImageEncoder;
+ return SwImageDecoder;
+ case MediaResourceSubType::kUnspecifiedSubType:
+ default:
+ return CodecBucketUnspecified;
+ }
+ }
+
+ return CodecBucketUnspecified;
+}
+
+static bool getLogMessage(int hwCount, int swCount, std::stringstream& logMsg) {
+ bool update = false;
+ logMsg.clear();
+
+ if (hwCount > 0) {
+ logMsg << " HW: " << hwCount;
+ update = true;
+ }
+ if (swCount > 0) {
+ logMsg << " SW: " << swCount;
+ update = true;
+ }
+
+ if (update) {
+ logMsg << " ] ";
+ }
+ return update;
+}
+
+ResourceManagerMetrics::ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo) {
+ // Create a process termination watcher, with 5 seconds of polling frequency.
+ mUidObserver = sp<UidObserver>::make(processInfo,
+ [this] (int32_t pid, uid_t uid) {
+ onProcessTerminated(pid, uid);
+ });
+ mUidObserver->start();
+}
+
+ResourceManagerMetrics::~ResourceManagerMetrics() {
+ mUidObserver->stop();
+}
+
+void ResourceManagerMetrics::addPid(int pid, uid_t uid) {
+ if (uid != 0) {
+ std::scoped_lock lock(mLock);
+ mUidObserver->add(pid, uid);
+ }
+}
+
+void ResourceManagerMetrics::notifyClientCreated(const ClientInfoParcel& clientInfo) {
+ std::scoped_lock lock(mLock);
+ // Update the resource instance count.
+ std::map<std::string, int>::iterator found = mConcurrentResourceCountMap.find(clientInfo.name);
+ if (found == mConcurrentResourceCountMap.end()) {
+ mConcurrentResourceCountMap[clientInfo.name] = 1;
+ } else {
+ found->second++;
+ }
+}
+
+void ResourceManagerMetrics::notifyClientReleased(const ClientInfoParcel& clientInfo) {
+ bool stopCalled = true;
+ ClientConfigMap::iterator found;
+ {
+ std::scoped_lock lock(mLock);
+ found = mClientConfigMap.find(clientInfo.id);
+ if (found != mClientConfigMap.end()) {
+ // Release is called without Stop!
+ stopCalled = false;
+ }
+ }
+ if (!stopCalled) {
+ // call Stop to update the metrics.
+ notifyClientStopped(found->second);
+ }
+ {
+ std::scoped_lock lock(mLock);
+ // Update the resource instance count also.
+ std::map<std::string, int>::iterator found =
+ mConcurrentResourceCountMap.find(clientInfo.name);
+ if (found != mConcurrentResourceCountMap.end()) {
+ if (found->second > 0) {
+ found->second--;
+ }
+ }
+ }
+}
+
+void ResourceManagerMetrics::notifyClientStarted(const ClientConfigParcel& clientConfig) {
+ std::scoped_lock lock(mLock);
+ int pid = clientConfig.clientInfo.pid;
+ // We need to observe this process.
+ mUidObserver->add(pid, clientConfig.clientInfo.uid);
+
+ // Update the client config for this client.
+ mClientConfigMap[clientConfig.clientInfo.id] = clientConfig;
+
+ // Update the concurrent codec count for this process.
+ CodecBucket codecBucket = getCodecBucket(clientConfig.isHardware,
+ clientConfig.isEncoder,
+ clientConfig.codecType);
+ increaseConcurrentCodecs(pid, codecBucket);
+
+ if (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
+ clientConfig.codecType == MediaResourceSubType::kImageCodec) {
+ // Update the pixel count for this process
+ increasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
+ }
+
+ // System concurrent codec usage
+ int systemConcurrentCodecCount = mConcurrentCodecsMap[codecBucket];
+ // Process/Application concurrent codec usage for this type of codec
+ int appConcurrentCodecCount = mProcessConcurrentCodecsMap[pid].mCurrent[codecBucket];
+ // Process/Application's current pixel count.
+ long pixelCount = 0;
+ std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+ if (it != mProcessPixelsMap.end()) {
+ pixelCount = it->second.mCurrent;
+ }
+
+ int result = stats_write(
+ MEDIA_CODEC_STARTED,
+ clientConfig.clientInfo.uid,
+ clientConfig.id,
+ clientConfig.clientInfo.name.c_str(),
+ static_cast<int32_t>(clientConfig.codecType),
+ clientConfig.isEncoder,
+ clientConfig.isHardware,
+ clientConfig.width, clientConfig.height,
+ systemConcurrentCodecCount,
+ appConcurrentCodecCount,
+ pixelCount);
+
+ ALOGV("%s: Pushed MEDIA_CODEC_STARTED atom: "
+ "Process[pid(%d): uid(%d)] "
+ "Codec: [%s: %ju] is %s %s %s "
+ "Timestamp: %jd "
+ "Resolution: %d x %d "
+ "ConcurrentCodec[%d]={System: %d App: %d} "
+ "result: %d",
+ __func__,
+ pid, clientConfig.clientInfo.uid,
+ clientConfig.clientInfo.name.c_str(),
+ clientConfig.id,
+ clientConfig.isHardware? "hardware" : "software",
+ getCodecType(clientConfig.codecType),
+ clientConfig.isEncoder? "encoder" : "decoder",
+ clientConfig.timeStamp,
+ clientConfig.width, clientConfig.height,
+ codecBucket, systemConcurrentCodecCount, appConcurrentCodecCount,
+ result);
+}
+
+void ResourceManagerMetrics::notifyClientStopped(const ClientConfigParcel& clientConfig) {
+ std::scoped_lock lock(mLock);
+ int pid = clientConfig.clientInfo.pid;
+ // Update the concurrent codec count for this process.
+ CodecBucket codecBucket = getCodecBucket(clientConfig.isHardware,
+ clientConfig.isEncoder,
+ clientConfig.codecType);
+ decreaseConcurrentCodecs(pid, codecBucket);
+
+ if (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
+ clientConfig.codecType == MediaResourceSubType::kImageCodec) {
+ // Update the pixel count for this process
+ decreasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
+ }
+
+ // System concurrent codec usage
+ int systemConcurrentCodecCount = mConcurrentCodecsMap[codecBucket];
+ // Process/Application concurrent codec usage for this type of codec
+ int appConcurrentCodecCount = 0;
+ std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+ if (found != mProcessConcurrentCodecsMap.end()) {
+ appConcurrentCodecCount = found->second.mCurrent[codecBucket];
+ }
+ // Process/Application's current pixel count.
+ long pixelCount = 0;
+ std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+ if (it != mProcessPixelsMap.end()) {
+ pixelCount = it->second.mCurrent;
+ }
+
+ // calculate the usageTime as:
+ // MediaCodecStopped.clientConfig.timeStamp -
+ // MediaCodecStarted.clientConfig.timeStamp
+ int64_t usageTime = 0;
+ ClientConfigMap::iterator entry = mClientConfigMap.find(clientConfig.clientInfo.id);
+ if (entry != mClientConfigMap.end()) {
+ usageTime = clientConfig.timeStamp - entry->second.timeStamp;
+ // And we can erase this config now.
+ mClientConfigMap.erase(entry);
+ } else {
+ ALOGW("%s: Start Config is missing!", __func__);
+ }
+
+ int result = stats_write(
+ MEDIA_CODEC_STOPPED,
+ clientConfig.clientInfo.uid,
+ clientConfig.id,
+ clientConfig.clientInfo.name.c_str(),
+ static_cast<int32_t>(clientConfig.codecType),
+ clientConfig.isEncoder,
+ clientConfig.isHardware,
+ clientConfig.width, clientConfig.height,
+ systemConcurrentCodecCount,
+ appConcurrentCodecCount,
+ pixelCount,
+ usageTime);
+ ALOGV("%s: Pushed MEDIA_CODEC_STOPPED atom: "
+ "Process[pid(%d): uid(%d)] "
+ "Codec: [%s: %ju] is %s %s %s "
+ "Timestamp: %jd Usage time: %jd "
+ "Resolution: %d x %d "
+ "ConcurrentCodec[%d]={System: %d App: %d} "
+ "result: %d",
+ __func__,
+ pid, clientConfig.clientInfo.uid,
+ clientConfig.clientInfo.name.c_str(),
+ clientConfig.id,
+ clientConfig.isHardware? "hardware" : "software",
+ getCodecType(clientConfig.codecType),
+ clientConfig.isEncoder? "encoder" : "decoder",
+ clientConfig.timeStamp, usageTime,
+ clientConfig.width, clientConfig.height,
+ codecBucket, systemConcurrentCodecCount, appConcurrentCodecCount,
+ result);
+}
+
+void ResourceManagerMetrics::onProcessTerminated(int32_t pid, uid_t uid) {
+ std::scoped_lock lock(mLock);
+ // post MediaCodecConcurrentUsageReported for this terminated pid.
+ pushConcurrentUsageReport(pid, uid);
+}
+
+void ResourceManagerMetrics::pushConcurrentUsageReport(int32_t pid, uid_t uid) {
+ // Process/Application peak concurrent codec usage
+ std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+ if (found == mProcessConcurrentCodecsMap.end()) {
+ ALOGI("%s: No MEDIA_CODEC_CONCURRENT_USAGE_REPORTED atom Entry for: "
+ "Application[pid(%d): uid(%d)]", __func__, pid, uid);
+ return;
+ }
+ const ConcurrentCodecsMap& codecsMap = found->second.mPeak;
+ int peakHwAudioEncoderCount = codecsMap[HwAudioEncoder];
+ int peakHwAudioDecoderCount = codecsMap[HwAudioDecoder];
+ int peakHwVideoEncoderCount = codecsMap[HwVideoEncoder];
+ int peakHwVideoDecoderCount = codecsMap[HwVideoDecoder];
+ int peakHwImageEncoderCount = codecsMap[HwImageEncoder];
+ int peakHwImageDecoderCount = codecsMap[HwImageDecoder];
+ int peakSwAudioEncoderCount = codecsMap[SwAudioEncoder];
+ int peakSwAudioDecoderCount = codecsMap[SwAudioDecoder];
+ int peakSwVideoEncoderCount = codecsMap[SwVideoEncoder];
+ int peakSwVideoDecoderCount = codecsMap[SwVideoDecoder];
+ int peakSwImageEncoderCount = codecsMap[SwImageEncoder];
+ int peakSwImageDecoderCount = codecsMap[SwImageDecoder];
+
+ long peakPixels = 0;
+ std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+ if (it == mProcessPixelsMap.end()) {
+ ALOGI("%s: No Video Codec Entry for Application[pid(%d): uid(%d)]",
+ __func__, pid, uid);
+ } else {
+ peakPixels = it->second.mPeak;
+ }
+ std::string peakPixelsLog("Peak Pixels: " + std::to_string(peakPixels));
+
+ std::stringstream peakCodecLog;
+ peakCodecLog << "Peak { ";
+ std::stringstream logMsg;
+ if (getLogMessage(peakHwAudioEncoderCount, peakSwAudioEncoderCount, logMsg)) {
+ peakCodecLog << "AudioEnc[" << logMsg.str();
+ }
+ if (getLogMessage(peakHwAudioDecoderCount, peakSwAudioDecoderCount, logMsg)) {
+ peakCodecLog << "AudioDec[" << logMsg.str();
+ }
+ if (getLogMessage(peakHwVideoEncoderCount, peakSwVideoEncoderCount, logMsg)) {
+ peakCodecLog << "VideoEnc[" << logMsg.str();
+ }
+ if (getLogMessage(peakHwVideoDecoderCount, peakSwVideoDecoderCount, logMsg)) {
+ peakCodecLog << "VideoDec[" << logMsg.str();
+ }
+ if (getLogMessage(peakHwImageEncoderCount, peakSwImageEncoderCount, logMsg)) {
+ peakCodecLog << "ImageEnc[" << logMsg.str();
+ }
+ if (getLogMessage(peakHwImageDecoderCount, peakSwImageDecoderCount, logMsg)) {
+ peakCodecLog << "ImageDec[" << logMsg.str();
+ }
+ peakCodecLog << "}";
+
+#ifdef ENABLE_MEDIA_CODEC_CONCURRENT_USAGE_REPORTED
+ int result = stats_write(
+ MEDIA_CODEC_CONCURRENT_USAGE_REPORTED,
+ uid,
+ peakHwVideoDecoderCount,
+ peakHwVideoEncoderCount,
+ peakSwVideoDecoderCount,
+ peakSwVideoEncoderCount,
+ peakHwAudioDecoderCount,
+ peakHwAudioEncoderCount,
+ peakSwAudioDecoderCount,
+ peakSwAudioEncoderCount,
+ peakHwImageDecoderCount,
+ peakHwImageEncoderCount,
+ peakSwImageDecoderCount,
+ peakSwImageEncoderCount,
+ peakPixels);
+ ALOGI("%s: Pushed MEDIA_CODEC_CONCURRENT_USAGE_REPORTED atom: "
+ "Process[pid(%d): uid(%d)] %s %s result: %d",
+ __func__, pid, uid, peakCodecLog.str().c_str(), peakPixelsLog.c_str(), result);
+#else
+ ALOGI("%s: Concurrent Codec Usage Report for the Process[pid(%d): uid(%d)] is %s %s",
+ __func__, pid, uid, peakCodecLog.str().c_str(), peakPixelsLog.c_str());
+#endif
+}
+
+void ResourceManagerMetrics::pushReclaimAtom(const ClientInfoParcel& clientInfo,
+ const std::vector<int>& priorities,
+ const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+ const PidUidVector& idList, bool reclaimed) {
+ // Construct the metrics for codec reclaim as a pushed atom.
+ // 1. Information about the requester.
+ // - UID and the priority (oom score)
+ int32_t callingPid = clientInfo.pid;
+ int32_t requesterUid = clientInfo.uid;
+ std::string clientName = clientInfo.name;
+ int requesterPriority = priorities[0];
+
+ // 2. Information about the codec.
+ // - Name of the codec requested
+ // - Number of concurrent codecs running.
+ int32_t noOfConcurrentCodecs = 0;
+ std::map<std::string, int>::iterator found = mConcurrentResourceCountMap.find(clientName);
+ if (found != mConcurrentResourceCountMap.end()) {
+ noOfConcurrentCodecs = found->second;
+ }
+
+ // 3. Information about the Reclaim:
+ // - Status of reclaim request
+ // - How many codecs are reclaimed
+ // - For each codecs reclaimed, information of the process that it belonged to:
+ // - UID and the Priority (oom score)
+ int32_t reclaimStatus = MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
+ if (!reclaimed) {
+ if (clients.size() == 0) {
+ // No clients to reclaim from
+ reclaimStatus =
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
+ } else {
+ // Couldn't reclaim resources from the clients
+ reclaimStatus =
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+ }
+ }
+ int32_t noOfCodecsReclaimed = clients.size();
+ int32_t targetIndex = 1;
+ for (PidUidVector::const_reference id : idList) {
+ int32_t targetUid = id.second;
+ int targetPriority = priorities[targetIndex];
+ // Post the pushed atom
+ int result = stats_write(
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
+ requesterUid,
+ requesterPriority,
+ clientName.c_str(),
+ noOfConcurrentCodecs,
+ reclaimStatus,
+ noOfCodecsReclaimed,
+ targetIndex,
+ targetUid,
+ targetPriority);
+ ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
+ "Requester[pid(%d): uid(%d): priority(%d)] "
+ "Codec: [%s] "
+ "No of concurrent codecs: %d "
+ "Reclaim Status: %d "
+ "No of codecs reclaimed: %d "
+ "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
+ __func__, callingPid, requesterUid, requesterPriority,
+ clientName.c_str(), noOfConcurrentCodecs,
+ reclaimStatus, noOfCodecsReclaimed,
+ targetIndex, id.first, targetUid, targetPriority, result);
+ targetIndex++;
+ }
+}
+
+void ResourceManagerMetrics::increaseConcurrentCodecs(int32_t pid,
+ CodecBucket codecBucket) {
+ // Increase the codec usage across the system.
+ mConcurrentCodecsMap[codecBucket]++;
+
+ // Now update the codec usage for this (pid) process.
+ std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+ if (found == mProcessConcurrentCodecsMap.end()) {
+ ConcurrentCodecs codecs;
+ codecs.mCurrent[codecBucket] = 1;
+ codecs.mPeak[codecBucket] = 1;
+ mProcessConcurrentCodecsMap.emplace(pid, codecs);
+ } else {
+ found->second.mCurrent[codecBucket]++;
+ // Check if it's the peak count for this slot.
+ if (found->second.mPeak[codecBucket] < found->second.mCurrent[codecBucket]) {
+ found->second.mPeak[codecBucket] = found->second.mCurrent[codecBucket];
+ }
+ }
+}
+
+void ResourceManagerMetrics::decreaseConcurrentCodecs(int32_t pid,
+ CodecBucket codecBucket) {
+ // Decrease the codec usage across the system.
+ if (mConcurrentCodecsMap[codecBucket] > 0) {
+ mConcurrentCodecsMap[codecBucket]--;
+ }
+
+ // Now update the codec usage for this (pid) process.
+ std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+ if (found != mProcessConcurrentCodecsMap.end()) {
+ if (found->second.mCurrent[codecBucket] > 0) {
+ found->second.mCurrent[codecBucket]--;
+ }
+ }
+}
+
+void ResourceManagerMetrics::increasePixelCount(int32_t pid, long pixels) {
+ // Now update the current pixel usage for this (pid) process.
+ std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
+ if (found == mProcessPixelsMap.end()) {
+ PixelCount pixelCount {pixels, pixels};
+ mProcessPixelsMap.emplace(pid, pixelCount);
+ } else {
+ if (__builtin_add_overflow(found->second.mCurrent, pixels, &found->second.mCurrent)) {
+ ALOGI("Pixel Count overflow");
+ return;
+ }
+ // Check if it's the peak count for this slot.
+ if (found->second.mPeak < found->second.mCurrent) {
+ found->second.mPeak = found->second.mCurrent;
+ }
+ }
+}
+
+void ResourceManagerMetrics::decreasePixelCount(int32_t pid, long pixels) {
+ // Now update the current pixel usage for this (pid) process.
+ std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
+ if (found != mProcessPixelsMap.end()) {
+ if (found->second.mCurrent < pixels) {
+ found->second.mCurrent = 0;
+ } else {
+ if (__builtin_sub_overflow(found->second.mCurrent, pixels, &found->second.mCurrent)) {
+ ALOGI("Pixel Count overflow");
+ return;
+ }
+ }
+ }
+}
+
+long ResourceManagerMetrics::getPeakConcurrentPixelCount(int pid) const {
+ std::map<int32_t, PixelCount>::const_iterator found = mProcessPixelsMap.find(pid);
+ if (found != mProcessPixelsMap.end()) {
+ return found->second.mPeak;
+ }
+
+ return 0;
+}
+
+long ResourceManagerMetrics::getCurrentConcurrentPixelCount(int pid) const {
+ std::map<int32_t, PixelCount>::const_iterator found = mProcessPixelsMap.find(pid);
+ if (found != mProcessPixelsMap.end()) {
+ return found->second.mCurrent;
+ }
+
+ return 0;
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
new file mode 100644
index 0000000..b7810e5
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -0,0 +1,179 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
+#define ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
+
+#include "ResourceManagerService.h"
+
+namespace android {
+
+using ::aidl::android::media::ClientInfoParcel;
+using ::aidl::android::media::ClientConfigParcel;
+using ::aidl::android::media::IResourceManagerClient;
+
+struct ProcessInfoInterface;
+
+class UidObserver;
+
+//
+// Enumeration for Codec bucket based on:
+// - Encoder or Decoder
+// - hardware implementation or not
+// - Audio/Video/Image codec
+//
+enum CodecBucket {
+ CodecBucketUnspecified = 0,
+ HwAudioEncoder = 1,
+ HwAudioDecoder = 2,
+ HwVideoEncoder = 3,
+ HwVideoDecoder = 4,
+ HwImageEncoder = 5,
+ HwImageDecoder = 6,
+ SwAudioEncoder = 7,
+ SwAudioDecoder = 8,
+ SwVideoEncoder = 9,
+ SwVideoDecoder = 10,
+ SwImageEncoder = 11,
+ SwImageDecoder = 12,
+ CodecBucketMaxSize = 13,
+};
+
+// Map of client id and client configuration, when it was started last.
+typedef std::map<int64_t, ClientConfigParcel> ClientConfigMap;
+
+// Map of pid and the uid.
+typedef std::map<int32_t, uid_t> PidUidMap;
+
+// Map of concurrent codecs by Codec type bucket.
+struct ConcurrentCodecsMap {
+ int& operator[](CodecBucket index) {
+ return mCodec[index];
+ }
+
+ const int& operator[](CodecBucket index) const {
+ return mCodec[index];
+ }
+
+private:
+ int mCodec[CodecBucketMaxSize] = {0};
+};
+
+// Current and Peak ConcurrentCodecMap for a process.
+struct ConcurrentCodecs {
+ ConcurrentCodecsMap mCurrent;
+ ConcurrentCodecsMap mPeak;
+};
+
+// Current and Peak pixel count for a process.
+struct PixelCount {
+ long mCurrent = 0;
+ long mPeak = 0;
+};
+
+//
+// ResourceManagerMetrics class that maintains concurrent codec counts based on:
+//
+// 1. # of concurrent active codecs (initialized, but aren't released yet) of given
+// implementation (by codec name) across the system.
+//
+// 2. # of concurrent codec usage (started, but not stopped yet), which is
+// measured using codec type bucket (CodecBucket) for:
+// - each process/application.
+// - across the system.
+// Also the peak count of the same for each process/application is maintained.
+//
+// 3. # of Peak Concurrent Pixels for each process/application.
+// This should help with understanding the (video) memory usage per
+// application.
+//
+//
+class ResourceManagerMetrics {
+public:
+ ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo);
+ ~ResourceManagerMetrics();
+
+ // To be called when a client is created.
+ void notifyClientCreated(const ClientInfoParcel& clientInfo);
+
+ // To be called when a client is released.
+ void notifyClientReleased(const ClientInfoParcel& clientInfo);
+
+ // To be called when a client is started.
+ void notifyClientStarted(const ClientConfigParcel& clientConfig);
+
+ // To be called when a client is stopped.
+ void notifyClientStopped(const ClientConfigParcel& clientConfig);
+
+ // To be called after a reclaim event.
+ void pushReclaimAtom(const ClientInfoParcel& clientInfo,
+ const std::vector<int>& priorities,
+ const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+ const PidUidVector& idList, bool reclaimed);
+
+ // Add this pid/uid to the set of processes monitored for termination.
+ void addPid(int pid, uid_t uid = 0);
+
+ // Get the peak concurrent pixel count (associated with the video codecs) for the process.
+ long getPeakConcurrentPixelCount(int pid) const;
+ // Get the current concurrent pixel count (associated with the video codecs) for the process.
+ long getCurrentConcurrentPixelCount(int pid) const;
+
+private:
+ ResourceManagerMetrics(const ResourceManagerMetrics&) = delete;
+ ResourceManagerMetrics(ResourceManagerMetrics&&) = delete;
+ ResourceManagerMetrics& operator=(const ResourceManagerMetrics&) = delete;
+ ResourceManagerMetrics& operator=(ResourceManagerMetrics&&) = delete;
+
+ // To increase/decrease the concurrent codec usage for a given CodecBucket.
+ void increaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
+ void decreaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
+
+ // To increase/decrease the concurrent pixels usage for a process.
+ void increasePixelCount(int32_t pid, long pixels);
+ void decreasePixelCount(int32_t pid, long pixels);
+
+ // Issued when the process/application with given pid/uid is terminated.
+ void onProcessTerminated(int32_t pid, uid_t uid);
+
+ // To push concurrent codec usage of a process/application.
+ void pushConcurrentUsageReport(int32_t pid, uid_t uid);
+
+private:
+ std::mutex mLock;
+
+ // Map of client id and the configuration.
+ ClientConfigMap mClientConfigMap;
+
+ // Concurrent and Peak Pixel count for each process/application.
+ std::map<int32_t, PixelCount> mProcessPixelsMap;
+
+ // Map of resources (name) and number of concurrent instances
+ std::map<std::string, int> mConcurrentResourceCountMap;
+
+ // Map of concurrent codecs by CodecBucket across the system.
+ ConcurrentCodecsMap mConcurrentCodecsMap;
+ // Map of concurrent and peak codecs by CodecBucket for each process/application.
+ std::map<int32_t, ConcurrentCodecs> mProcessConcurrentCodecsMap;
+
+ // Uid Observer to monitor the application termination.
+ sp<UidObserver> mUidObserver;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 1cef9d5..6822b06 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -35,23 +35,15 @@
#include <sys/stat.h>
#include <sys/time.h>
#include <unistd.h>
-#include <stats_media_metrics.h>
#include "IMediaResourceMonitor.h"
+#include "ResourceManagerMetrics.h"
#include "ResourceManagerService.h"
#include "ResourceObserverService.h"
#include "ServiceLog.h"
namespace android {
-using stats::media_metrics::stats_write;
-using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED;
-using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
-using stats::media_metrics::\
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
-using stats::media_metrics::\
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
-
//static
std::mutex ResourceManagerService::sCookieLock;
//static
@@ -61,8 +53,8 @@
class DeathNotifier : public RefBase {
public:
- DeathNotifier(const std::shared_ptr<ResourceManagerService> &service, int pid,
- int64_t clientId);
+ DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
+ const ClientInfoParcel& clientInfo);
virtual ~DeathNotifier() {}
@@ -72,13 +64,12 @@
protected:
std::weak_ptr<ResourceManagerService> mService;
- int mPid;
- int64_t mClientId;
+ const ClientInfoParcel mClientInfo;
};
DeathNotifier::DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
- int pid, int64_t clientId)
- : mService(service), mPid(pid), mClientId(clientId) {}
+ const ClientInfoParcel& clientInfo)
+ : mService(service), mClientInfo(clientInfo) {}
//static
void DeathNotifier::BinderDiedCallback(void* cookie) {
@@ -105,16 +96,16 @@
return;
}
- service->overridePid(mPid, -1);
+ service->overridePid(mClientInfo.pid, -1);
// thiz is freed in the call below, so it must be last call referring thiz
- ClientInfoParcel clientInfo{.pid = mPid, .id = mClientId};
- service->removeResource(clientInfo, false /*checkValid*/);
+ service->removeResource(mClientInfo, false /*checkValid*/);
}
class OverrideProcessInfoDeathNotifier : public DeathNotifier {
public:
OverrideProcessInfoDeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
- int pid) : DeathNotifier(service, pid, 0) {}
+ const ClientInfoParcel& clientInfo)
+ : DeathNotifier(service, clientInfo) {}
virtual ~OverrideProcessInfoDeathNotifier() {}
@@ -129,7 +120,7 @@
return;
}
- service->removeProcessInfoOverride(mPid);
+ service->removeProcessInfoOverride(mClientInfo.pid);
}
template <typename T>
@@ -202,7 +193,11 @@
ResourceInfo info;
info.uid = uid;
info.clientId = clientId;
- info.name = name;
+ if (name.empty()) {
+ info.name = "<unknown client>";
+ } else {
+ info.name = name;
+ }
info.client = client;
info.cookie = 0;
info.pendingRemoval = false;
@@ -292,10 +287,7 @@
snprintf(buffer, SIZE, " Id: %lld\n", (long long)infos[j].clientId);
result.append(buffer);
- std::string clientName = "<unknown client>";
- if (infos[j].client != nullptr) {
- clientName = infos[j].name;
- }
+ std::string clientName = infos[j].name;
snprintf(buffer, SIZE, " Name: %s\n", clientName.c_str());
result.append(buffer);
@@ -357,6 +349,8 @@
mCpuBoostCount(0),
mDeathRecipient(AIBinder_DeathRecipient_new(DeathNotifier::BinderDiedCallback)) {
mSystemCB->noteResetVideo();
+ // Create ResourceManagerMetrics that handles all the metrics.
+ mResourceManagerMetrics = std::make_unique<ResourceManagerMetrics>(mProcessInfo);
}
//static
@@ -510,49 +504,16 @@
}
if (info.cookie == 0 && client != nullptr) {
info.cookie = addCookieAndLink_l(client,
- new DeathNotifier(ref<ResourceManagerService>(), pid, clientId));
+ new DeathNotifier(ref<ResourceManagerService>(), clientInfo));
}
if (mObserverService != nullptr && !resourceAdded.empty()) {
mObserverService->onResourceAdded(uid, pid, resourceAdded);
}
notifyResourceGranted(pid, resources);
- // Increase the instance count of the resource associated with this client.
- increaseResourceInstanceCount(clientId, name);
-
return Status::ok();
}
-void ResourceManagerService::increaseResourceInstanceCount(int64_t clientId,
- const std::string& name) {
- // Check whether this client has been looked into already.
- if (mClientIdSet.find(clientId) == mClientIdSet.end()) {
- mClientIdSet.insert(clientId);
- // Update the resource instance count.
- auto found = mConcurrentResourceCountMap.find(name);
- if (found == mConcurrentResourceCountMap.end()) {
- mConcurrentResourceCountMap[name] = 1;
- } else {
- found->second++;
- }
- }
-}
-
-void ResourceManagerService::decreaseResourceInstanceCount(int64_t clientId,
- const std::string& name) {
- // Since this client has been removed, remove it from mClientIdSet
- mClientIdSet.erase(clientId);
- // Update the resource instance count also.
- auto found = mConcurrentResourceCountMap.find(name);
- if (found != mConcurrentResourceCountMap.end()) {
- if (found->second == 1) {
- mConcurrentResourceCountMap.erase(found);
- } else {
- found->second--;
- }
- }
-}
-
Status ResourceManagerService::removeResource(const ClientInfoParcel& clientInfo,
const std::vector<MediaResourceParcel>& resources) {
int32_t pid = clientInfo.pid;
@@ -657,9 +618,8 @@
onLastRemoved(it->second, info);
}
- // Since this client has been removed, decrease the corresponding
- // resources instance count.
- decreaseResourceInstanceCount(clientId, info.name);
+ // Since this client has been removed, update the metrics collector.
+ mResourceManagerMetrics->notifyClientReleased(clientInfo);
removeCookieAndUnlink_l(info.client, info.cookie);
@@ -791,73 +751,19 @@
void ResourceManagerService::pushReclaimAtom(const ClientInfoParcel& clientInfo,
const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
const PidUidVector& idVector, bool reclaimed) {
- // Construct the metrics for codec reclaim as a pushed atom.
- // 1. Information about the requester.
- // - UID and the priority (oom score)
int32_t callingPid = clientInfo.pid;
- int32_t requesterUid = clientInfo.uid;
- std::string clientName = clientInfo.name;
int requesterPriority = -1;
getPriority_l(callingPid, &requesterPriority);
+ std::vector<int> priorities;
+ priorities.push_back(requesterPriority);
- // 2. Information about the codec.
- // - Name of the codec requested
- // - Number of concurrent codecs running.
- int32_t noOfConcurrentCodecs = 0;
- auto found = mConcurrentResourceCountMap.find(clientName);
- if (found != mConcurrentResourceCountMap.end()) {
- noOfConcurrentCodecs = found->second;
- }
-
- // 3. Information about the Reclaim:
- // - Status of reclaim request
- // - How many codecs are reclaimed
- // - For each codecs reclaimed, information of the process that it belonged to:
- // - UID and the Priority (oom score)
- int32_t reclaimStatus = MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
- if (!reclaimed) {
- if (clients.size() == 0) {
- // No clients to reclaim from
- reclaimStatus =
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
- } else {
- // Couldn't reclaim resources from the clients
- reclaimStatus =
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
- }
- }
- int32_t noOfCodecsReclaimed = clients.size();
- int32_t targetIndex = 1;
- for (const auto& id : idVector) {
- int32_t targetUid = id.second;
+ for (PidUidVector::const_reference id : idVector) {
int targetPriority = -1;
getPriority_l(id.first, &targetPriority);
- // Post the pushed atom
- int result = stats_write(
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
- requesterUid,
- requesterPriority,
- clientName.c_str(),
- noOfConcurrentCodecs,
- reclaimStatus,
- noOfCodecsReclaimed,
- targetIndex,
- targetUid,
- targetPriority);
- ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
- "Requester[pid(%d): uid(%d): priority(%d)] "
- "Codec: [%s] "
- "No of concurrent codecs: %d "
- "Reclaim Status: %d "
- "No of codecs reclaimed: %d "
- "Target[%d][pid(%d): uid(%d): priority(%d)] "
- "Atom Size: %d",
- __func__, callingPid, requesterUid, requesterPriority,
- clientName.c_str(), noOfConcurrentCodecs,
- reclaimStatus, noOfCodecsReclaimed,
- targetIndex, id.first, targetUid, targetPriority, result);
- targetIndex++;
+ priorities.push_back(targetPriority);
}
+ mResourceManagerMetrics->pushReclaimAtom(clientInfo, priorities, clients,
+ idVector, reclaimed);
}
bool ResourceManagerService::reclaimUnconditionallyFrom(
@@ -933,6 +839,7 @@
mOverridePidMap.erase(originalPid);
if (newPid != -1) {
mOverridePidMap.emplace(originalPid, newPid);
+ mResourceManagerMetrics->addPid(newPid);
}
}
@@ -966,8 +873,12 @@
return Status::fromServiceSpecificError(BAD_VALUE);
}
+ ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
+ .uid = 0,
+ .id = 0,
+ .name = "<unknown client>"};
uintptr_t cookie = addCookieAndLink_l(client,
- new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), pid));
+ new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), clientInfo));
mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{cookie, client});
@@ -1282,4 +1193,27 @@
return true;
}
+Status ResourceManagerService::notifyClientCreated(const ClientInfoParcel& clientInfo) {
+ mResourceManagerMetrics->notifyClientCreated(clientInfo);
+ return Status::ok();
+}
+
+Status ResourceManagerService::notifyClientStarted(const ClientConfigParcel& clientConfig) {
+ mResourceManagerMetrics->notifyClientStarted(clientConfig);
+ return Status::ok();
+}
+
+Status ResourceManagerService::notifyClientStopped(const ClientConfigParcel& clientConfig) {
+ mResourceManagerMetrics->notifyClientStopped(clientConfig);
+ return Status::ok();
+}
+
+long ResourceManagerService::getPeakConcurrentPixelCount(int pid) const {
+ return mResourceManagerMetrics->getPeakConcurrentPixelCount(pid);
+}
+
+long ResourceManagerService::getCurrentConcurrentPixelCount(int pid) const {
+ return mResourceManagerMetrics->getCurrentConcurrentPixelCount(pid);
+}
+
} // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 0016a19..b9756ae 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -39,6 +39,7 @@
class ResourceObserverService;
class ServiceLog;
struct ProcessInfoInterface;
+class ResourceManagerMetrics;
using Status = ::ndk::ScopedAStatus;
using ::aidl::android::media::IResourceManagerClient;
@@ -46,6 +47,7 @@
using ::aidl::android::media::MediaResourceParcel;
using ::aidl::android::media::MediaResourcePolicyParcel;
using ::aidl::android::media::ClientInfoParcel;
+using ::aidl::android::media::ClientConfigParcel;
typedef std::map<std::tuple<
MediaResource::Type, MediaResource::SubType, std::vector<uint8_t>>,
@@ -61,6 +63,7 @@
bool pendingRemoval{false};
};
+// vector of <PID, UID>
typedef std::vector<std::pair<int32_t, uid_t>> PidUidVector;
// TODO: convert these to std::map
@@ -118,6 +121,12 @@
Status removeResource(const ClientInfoParcel& clientInfo, bool checkValid);
+ Status notifyClientCreated(const ClientInfoParcel& clientInfo) override;
+
+ Status notifyClientStarted(const ClientConfigParcel& clientConfig) override;
+
+ Status notifyClientStopped(const ClientConfigParcel& clientConfig) override;
+
private:
friend class ResourceManagerServiceTest;
friend class DeathNotifier;
@@ -182,15 +191,15 @@
void removeCookieAndUnlink_l(const std::shared_ptr<IResourceManagerClient>& client,
uintptr_t cookie);
- // To increase/decrease the number of instances of a given resource
- // associated with a client.
- void increaseResourceInstanceCount(int64_t clientId, const std::string& name);
- void decreaseResourceInstanceCount(int64_t clientId, const std::string& name);
-
void pushReclaimAtom(const ClientInfoParcel& clientInfo,
const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
const PidUidVector& idList, bool reclaimed);
+ // Get the peak concurrent pixel count (associated with the video codecs) for the process.
+ long getPeakConcurrentPixelCount(int pid) const;
+ // Get the current concurrent pixel count (associated with the video codecs) for the process.
+ long getCurrentConcurrentPixelCount(int pid) const;
+
mutable Mutex mLock;
sp<ProcessInfoInterface> mProcessInfo;
sp<SystemCallbackInterface> mSystemCB;
@@ -211,11 +220,7 @@
static std::map<uintptr_t, sp<DeathNotifier> > sCookieToDeathNotifierMap
GUARDED_BY(sCookieLock);
std::shared_ptr<ResourceObserverService> mObserverService;
-
- // List of active clients
- std::set<int64_t> mClientIdSet;
- // Map of resources (name) and number of concurrent instances
- std::map<std::string, int> mConcurrentResourceCountMap;
+ std::unique_ptr<ResourceManagerMetrics> mResourceManagerMetrics;
};
// ----------------------------------------------------------------------------
diff --git a/services/mediaresourcemanager/UidObserver.cpp b/services/mediaresourcemanager/UidObserver.cpp
new file mode 100644
index 0000000..f321ebc
--- /dev/null
+++ b/services/mediaresourcemanager/UidObserver.cpp
@@ -0,0 +1,182 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerMetrics"
+
+#include <android/binder_process.h>
+#include <mediautils/ProcessInfo.h>
+#include "UidObserver.h"
+
+namespace {
+const char* kActivityServiceName = "activity";
+}; // namespace anonymous
+
+namespace android {
+
+UidObserver::UidObserver(const sp<ProcessInfoInterface>& processInfo,
+ OnProcessTerminated onProcessTerminated) :
+ mRegistered(false),
+ mOnProcessTerminated(std::move(onProcessTerminated)),
+ mProcessInfo(processInfo) {
+}
+
+UidObserver::~UidObserver() {
+ stop();
+}
+
+void UidObserver::start() {
+ // Use check service to see if the activity service is available
+ // If not available then register for notifications, instead of blocking
+ // till the service is ready
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(String16(kActivityServiceName));
+ if (!binder) {
+ sm->registerForNotifications(String16(kActivityServiceName), this);
+ } else {
+ registerWithActivityManager();
+ }
+}
+
+void UidObserver::stop() {
+ std::scoped_lock lock{mLock};
+
+ if (mRegistered) {
+ // Unregister from ActivityManager
+ mAm.unregisterUidObserver(this);
+ mAm.unlinkToDeath(this);
+ mRegistered = false;
+ }
+}
+
+void UidObserver::add(int pid, uid_t uid) {
+ bool needToRegister = false;
+ {
+ std::scoped_lock lock(mLock);
+ std::map<uid_t, std::set<int32_t>>::iterator found = mUids.find(uid);
+ if (found != mUids.end()) {
+ found->second.insert(pid);
+ } else {
+ std::set<int32_t> pids{pid};
+ mUids.emplace(uid, std::move(pids));
+ }
+ needToRegister = !mRegistered;
+ }
+ if (needToRegister) {
+ start();
+ }
+}
+
+void UidObserver::registerWithActivityManager() {
+ std::scoped_lock lock{mLock};
+
+ if (mRegistered) {
+ return;
+ }
+ status_t res = mAm.linkToDeath(this);
+ // Register for UID gone.
+ mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE,
+ ActivityManager::PROCESS_STATE_UNKNOWN,
+ String16("mediaserver"));
+ if (res == OK) {
+ mRegistered = true;
+ ALOGV("UidObserver: Registered with ActivityManager");
+ }
+}
+
+void UidObserver::onServiceRegistration(const String16& name, const sp<IBinder>&) {
+ if (name != String16(kActivityServiceName)) {
+ return;
+ }
+
+ registerWithActivityManager();
+}
+
+void UidObserver::getTerminatedProcesses(const std::vector<int32_t>& pids,
+ std::vector<int32_t>& terminatedPids) {
+ std::vector<bool> existent;
+ terminatedPids.clear();
+ if (mProcessInfo->checkProcessExistent(pids, &existent)) {
+ for (size_t index = 0; index < existent.size(); index++) {
+ if (!existent[index]) {
+ // This process has been terminated already.
+ terminatedPids.push_back(pids[index]);
+ }
+ }
+ }
+}
+
+// This callback will be issued for every UID that is gone/terminated.
+// Since one UID could have multiple PIDs, this callback can be issued
+// multiple times with that same UID for each activity/pid.
+// So, we need to check which one among the PIDs (that share the same UID)
+// is gone.
+void UidObserver::onUidGone(uid_t uid, bool /*disabled*/) {
+ std::vector<int32_t> terminatedPids;
+ {
+ std::scoped_lock lock{mLock};
+ std::map<uid_t, std::set<int32_t>>::iterator found = mUids.find(uid);
+ if (found != mUids.end()) {
+ if (found->second.size() == 1) {
+ terminatedPids.push_back(*(found->second.begin()));
+ // Only one PID. So we can remove this UID entry.
+ mUids.erase(found);
+ } else {
+ // There are multiple PIDs with the same UID.
+ // Get the list of all terminated PIDs (with the same UID)
+ std::vector<int32_t> pids;
+ std::copy(found->second.begin(), found->second.end(), std::back_inserter(pids));
+ getTerminatedProcesses(pids, terminatedPids);
+ for (int32_t pid : terminatedPids) {
+ // Remove all the terminated PIDs
+ found->second.erase(pid);
+ }
+ // If all PIDs under this UID have terminated, remove this UID entry.
+ if (found->second.size() == 0) {
+ mUids.erase(uid);
+ }
+ }
+ }
+ }
+
+ for (int32_t pid : terminatedPids) {
+ mOnProcessTerminated(pid, uid);
+ }
+}
+
+void UidObserver::onUidActive(uid_t /*uid*/) {
+}
+
+void UidObserver::onUidIdle(uid_t /*uid*/, bool /*disabled*/) {
+}
+
+void UidObserver::onUidStateChanged(uid_t /*uid*/,
+ int32_t /*procState*/,
+ int64_t /*procStateSeq*/,
+ int32_t /*capability*/) {
+}
+
+void UidObserver::onUidProcAdjChanged(uid_t /*uid*/) {
+}
+
+void UidObserver::binderDied(const wp<IBinder>& /*who*/) {
+ std::scoped_lock lock{mLock};
+ ALOGE("UidObserver: ActivityManager has died");
+ mRegistered = false;
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/UidObserver.h b/services/mediaresourcemanager/UidObserver.h
new file mode 100644
index 0000000..ed76839
--- /dev/null
+++ b/services/mediaresourcemanager/UidObserver.h
@@ -0,0 +1,116 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_UIDOBSERVER_H_
+#define ANDROID_MEDIA_UIDOBSERVER_H_
+
+#include <map>
+#include <set>
+#include <mutex>
+#include <functional>
+#include <binder/ActivityManager.h>
+#include <binder/IUidObserver.h>
+#include <binder/BinderService.h>
+
+namespace android {
+
+using OnProcessTerminated = std::function<void(int32_t pid, uid_t)>;
+
+struct ProcessInfoInterface;
+
+//
+// UidObserver class
+//
+// This class implements a callback mechanism to notify the termination of the
+// process/applications that are registered with this class.
+//
+** It uses ActivityManager to get notified when a UID no longer
+** exists.
+// Since one UID could have multiple PIDs, it uses ActivityManager
+// (through ProcessInfoInterface) to query for the process/application
+// state for the pids.
+//
+class UidObserver :
+ public BnUidObserver,
+ public virtual IBinder::DeathRecipient,
+ public virtual IServiceManager::LocalRegistrationCallback {
+public:
+ explicit UidObserver(const sp<ProcessInfoInterface>& processInfo,
+ OnProcessTerminated onProcessTerminated);
+ virtual ~UidObserver();
+
+ // Start registration (with ActivityManager)
+ void start();
+ // Stop registration (with ActivityManager)
+ void stop();
+
+ // Add this pid/uid to set of Uid to be observed.
+ void add(int pid, uid_t uid);
+
+private:
+ UidObserver() = delete;
+ UidObserver(const UidObserver&) = delete;
+ UidObserver(UidObserver&&) = delete;
+ UidObserver& operator=(const UidObserver&) = delete;
+ UidObserver& operator=(UidObserver&&) = delete;
+
+ // IUidObserver implementation.
+ void onUidGone(uid_t uid, bool disabled) override;
+ void onUidActive(uid_t uid) override;
+ void onUidIdle(uid_t uid, bool disabled) override;
+ void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
+ int32_t capability) override;
+ void onUidProcAdjChanged(uid_t uid) override;
+
+ // IServiceManager::LocalRegistrationCallback implementation.
+ void onServiceRegistration(const String16& name,
+ const sp<IBinder>& binder) override;
+
+ // IBinder::DeathRecipient implementation.
+ void binderDied(const wp<IBinder> &who) override;
+
+ // Registers with ActivityManager for UID gone event
+ // to track the termination of Applications.
+ void registerWithActivityManager();
+
+ /*
+ * For a list of input pids, it will check whether the corresponding
+ * processes are already terminated or not.
+ *
+ * @param[in] pids List of pids to check whether they are terminated.
+ * @param[out] terminatedPids List of pid of terminated processes.
+ *
+ * Upon return, terminatedPids returns list of all the terminated pids
+ * that will be a subset of input pids (in that order).
+ * If none of the input pids have terminated, terminatedPids will be empty.
+ */
+ void getTerminatedProcesses(const std::vector<int32_t>& pids,
+ std::vector<int32_t>& terminatedPids);
+
+ bool mRegistered = false;
+ std::mutex mLock;
+ ActivityManager mAm;
+ // map of UID and all the PIDs associated with it
+ // as one UID could have multiple PIDs.
+ std::map<uid_t, std::set<int32_t>> mUids;
+ OnProcessTerminated mOnProcessTerminated;
+ sp<ProcessInfoInterface> mProcessInfo;
+};
+
+} // namespace android
+
+#endif //ANDROID_MEDIA_UIDOBSERVER_H_
diff --git a/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl b/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl
new file mode 100644
index 0000000..3c9c8c7
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl
@@ -0,0 +1,65 @@
+/**
+ * Copyright (c) 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.ClientInfoParcel;
+import android.media.MediaResourceSubType;
+
+/**
+ * Description of a Client(codec) configuration.
+ *
+ * {@hide}
+ */
+parcelable ClientConfigParcel {
+ /**
+ * Client info.
+ */
+ ClientInfoParcel clientInfo;
+
+ /**
+ * Type of codec (Audio/Video/Image).
+ */
+ MediaResourceSubType codecType;
+
+ /**
+ * true if this is an encoder, false if this is a decoder.
+ */
+ boolean isEncoder;
+
+ /**
+ * true if this is hardware codec, false otherwise.
+ */
+ boolean isHardware;
+
+ /*
+ * Video Resolution of the codec when it was configured, as width and height (in pixels).
+ */
+ int width;
+ int height;
+
+ /*
+ * Timestamp (in microseconds) when this configuration is created.
+ */
+ long timeStamp;
+ /*
+ * ID associated with the Codec.
+ * This will be used by the metrics:
+ * - Associate MediaCodecStarted with MediaCodecStopped Atom.
+ * - Correlate MediaCodecReported Atom for codec configuration parameters.
+ */
+ long id;
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
index 30ad41b..fcade38 100644
--- a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
@@ -20,6 +20,7 @@
import android.media.MediaResourceParcel;
import android.media.MediaResourcePolicyParcel;
import android.media.ClientInfoParcel;
+import android.media.ClientConfigParcel;
/**
* ResourceManagerService interface that keeps track of media resource
@@ -125,4 +126,34 @@
* @param pid pid from which resources will be reclaimed.
*/
void reclaimResourcesFromClientsPendingRemoval(int pid);
+
+ /**
+ * Notify that the client has been created.
+ *
+ * This call is made to collect the (concurrent) metrics about the
+ * resources associated with the Codec (and also DRM sessions).
+ *
+ * @param clientInfo Information of the client.
+ */
+ void notifyClientCreated(in ClientInfoParcel clientInfo);
+
+ /**
+ * Notify that the client has been started.
+ *
+ * This call is made to collect the (concurrent) metrics about the
+ * resources associated with the Codec (and also DRM sessions).
+ *
+ * @param clientConfig Configuration information of the client.
+ */
+ void notifyClientStarted(in ClientConfigParcel clientConfig);
+
+ /**
+ * Notify that the client has been stopped.
+ *
+ * This call is made to collect the (concurrent) metrics about the
+ * resources associated with the Codec (and also DRM sessions).
+ *
+ * @param clientConfig Configuration information of the client.
+ */
+ void notifyClientStopped(in ClientConfigParcel clientConfig);
}
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 27d45d5..d98974f 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -45,6 +45,7 @@
"libstats_media_metrics",
"libstatspull",
"libstatssocket",
+ "libactivitymanager_aidl",
],
fuzz_config: {
cc: [
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 16c5a4c..f903c62 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -23,6 +23,7 @@
"libstats_media_metrics",
"libstatspull",
"libstatssocket",
+ "libactivitymanager_aidl",
],
include_dirs: [
"frameworks/av/include",
@@ -72,6 +73,7 @@
"libstats_media_metrics",
"libstatspull",
"libstatssocket",
+ "libactivitymanager_aidl",
],
include_dirs: [
"frameworks/av/include",
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 8fe2505..474ff0f 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -15,6 +15,7 @@
*/
#include <gtest/gtest.h>
+#include <android/binder_process.h>
#include "ResourceManagerService.h"
#include <aidl/android/media/BnResourceManagerClient.h>
@@ -197,13 +198,20 @@
return static_cast<TestClient*>(testClient.get());
}
- ResourceManagerServiceTestBase()
- : mSystemCB(new TestSystemCallback()),
- mService(::ndk::SharedRefBase::make<ResourceManagerService>(
- new TestProcessInfo, mSystemCB)),
- mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, kTestUid1, mService)),
- mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService)),
- mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService)) {
+ ResourceManagerServiceTestBase() {
+ ALOGI("ResourceManagerServiceTestBase created");
+ }
+
+ void SetUp() override {
+ // Need thread pool to receive callbacks, otherwise oneway callbacks are
+ // silently ignored.
+ ABinderProcess_startThreadPool();
+ mSystemCB = new TestSystemCallback();
+ mService = ::ndk::SharedRefBase::make<ResourceManagerService>(
+ new TestProcessInfo, mSystemCB);
+ mTestClient1 = ::ndk::SharedRefBase::make<TestClient>(kTestPid1, kTestUid1, mService);
+ mTestClient2 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
+ mTestClient3 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
}
std::shared_ptr<IResourceManagerClient> createTestClient(int pid, int uid) {
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index 41cccb8..4e575f0 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -1367,6 +1367,143 @@
// CPU boost is not expected to be reclaimed when marked as pending removal
EXPECT_FALSE(toTestClient(cpuBoostMarkedClient)->checkIfReclaimedAndReset());
}
+
+ inline void initClientConfigParcel(bool encoder, bool hw,
+ int32_t width, int32_t height,
+ int64_t id,
+ const ClientInfoParcel& clientInfo,
+ ClientConfigParcel& clientConfig) {
+ clientConfig.codecType = MediaResource::SubType::kVideoCodec;
+ clientConfig.isEncoder = encoder;
+ clientConfig.isHardware = hw;
+ clientConfig.width = width;
+ clientConfig.height = height;
+ clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
+ clientConfig.id = id;
+ clientConfig.clientInfo = clientInfo;
+ }
+
+ void testConcurrentCodecs() {
+ std::shared_ptr<IResourceManagerClient> testClient4 =
+ createTestClient(kTestPid1, kTestUid1);
+ ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+ .uid = static_cast<int32_t>(kTestUid1),
+ .id = getId(mTestClient1),
+ .name = "none"};
+ ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+ .uid = static_cast<int32_t>(kTestUid2),
+ .id = getId(mTestClient2),
+ .name = "none"};
+ ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+ .uid = static_cast<int32_t>(kTestUid2),
+ .id = getId(mTestClient3),
+ .name = "none"};
+ ClientInfoParcel client4Info{.pid = static_cast<int32_t>(kTestPid1),
+ .uid = static_cast<int32_t>(kTestUid1),
+ .id = getId(testClient4),
+ .name = "none"};
+ ClientConfigParcel client1Config;
+ ClientConfigParcel client2Config;
+ ClientConfigParcel client3Config;
+ ClientConfigParcel client4Config;
+
+ // HW Video Encoder @ 1080P.
+ initClientConfigParcel(true, true, 1920, 1080, 11111111,
+ client1Info, client1Config);
+ // HW Video Decoder @ 4K.
+ initClientConfigParcel(false, true, 3840, 2160, 22222222,
+ client2Info, client2Config);
+ // SW Video Encoder @ 1080P.
+ initClientConfigParcel(true, false, 1920, 1080, 33333333,
+ client3Info, client3Config);
+ // SW Video Decoder @ 4K.
+ initClientConfigParcel(false, false, 3840, 2160, 44444444,
+ client4Info, client4Config);
+
+ // Start client1 at 1080P.
+ mService->notifyClientStarted(client1Config);
+ long peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+ long currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+ EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+
+ // Stop client1.
+ mService->notifyClientStopped(client1Config);
+ peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+ currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+ EXPECT_TRUE(currentPixelCountP1 == 0);
+
+ // Start client1 at 1080P.
+ mService->notifyClientStarted(client1Config);
+ // Start client2 at 4K.
+ mService->notifyClientStarted(client2Config);
+
+ // Verify the Peak and Current Concurrent pixel count for both the process
+ // (kTestPid1, kTestPid2)
+ peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+ currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ long peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+ long currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+ EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+ EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+ EXPECT_TRUE(peakPixelCountP2 == client2Config.width * client2Config.height);
+ EXPECT_TRUE(currentPixelCountP2 == client2Config.width * client2Config.height);
+
+ // Start client3 at 1080P.
+ mService->notifyClientStarted(client3Config);
+ // Start client4 at 4K.
+ mService->notifyClientStarted(client4Config);
+
+ // Verify the Peak and Current Concurrent pixel count for both the process
+ // (kTestPid1, kTestPid2)
+ peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+ currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+ currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+ EXPECT_TRUE(peakPixelCountP1 ==
+ (client1Config.width * client1Config.height +
+ client4Config.width * client4Config.height));
+ EXPECT_TRUE(currentPixelCountP1 ==
+ (client1Config.width * client1Config.height +
+ client4Config.width * client4Config.height));
+ EXPECT_TRUE(peakPixelCountP2 ==
+ (client2Config.width * client2Config.height +
+ client3Config.width * client3Config.height));
+ EXPECT_TRUE(currentPixelCountP2 ==
+ (client2Config.width * client2Config.height +
+ client3Config.width * client3Config.height));
+
+ // Stop client4
+ mService->notifyClientStopped(client4Config);
+ currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+
+ // Stop client1
+ mService->notifyClientStopped(client1Config);
+
+ // Stop client2
+ mService->notifyClientStopped(client2Config);
+ currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+ EXPECT_TRUE(currentPixelCountP2 == client3Config.width * client3Config.height);
+ // Stop client3
+ mService->notifyClientStopped(client3Config);
+
+ // Verify the Peak and Current Concurrent pixel count for both the process
+ // (kTestPid1, kTestPid2)
+ peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+ currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+ currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+ EXPECT_TRUE(peakPixelCountP1 ==
+ (client1Config.width * client1Config.height +
+ client4Config.width * client4Config.height));
+ EXPECT_TRUE(currentPixelCountP1 == 0);
+ EXPECT_TRUE(peakPixelCountP2 ==
+ (client2Config.width * client2Config.height +
+ client3Config.width * client3Config.height));
+ EXPECT_TRUE(currentPixelCountP2 == 0);
+ }
};
TEST_F(ResourceManagerServiceTest, config) {
@@ -1451,4 +1588,8 @@
testReclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources();
}
+TEST_F(ResourceManagerServiceTest, concurrentCodecs) {
+ testConcurrentCodecs();
+}
+
} // namespace android
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
index a0d728c..85769d5 100644
--- a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -166,11 +166,14 @@
class ResourceObserverServiceTest : public ResourceManagerServiceTestBase {
public:
- ResourceObserverServiceTest() : ResourceManagerServiceTestBase(),
- mObserverService(::ndk::SharedRefBase::make<ResourceObserverService>()),
- mTestObserver1(::ndk::SharedRefBase::make<TestObserver>("observer1")),
- mTestObserver2(::ndk::SharedRefBase::make<TestObserver>("observer2")),
- mTestObserver3(::ndk::SharedRefBase::make<TestObserver>("observer3")) {
+ ResourceObserverServiceTest() : ResourceManagerServiceTestBase() {}
+
+ void SetUp() override {
+ ResourceManagerServiceTestBase::SetUp();
+ mObserverService = ::ndk::SharedRefBase::make<ResourceObserverService>();
+ mTestObserver1 = ::ndk::SharedRefBase::make<TestObserver>("observer1");
+ mTestObserver2 = ::ndk::SharedRefBase::make<TestObserver>("observer2");
+ mTestObserver3 = ::ndk::SharedRefBase::make<TestObserver>("observer3");
mService->setObserverService(mObserverService);
}
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 51ef2d9..65854c8 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -418,7 +418,16 @@
while (mThreadEnabled.load()) {
loopCount++;
int64_t timeoutNanos = -1;
- if (isRunning() || (isIdle_l() && !isStandby_l())) {
+ if (isDisconnected_l()) {
+ if (!isStandby_l()) {
+ // If the stream is disconnected but not in standby mode, wait until standby time.
+ timeoutNanos = standbyTime - AudioClock::getNanoseconds();
+ timeoutNanos = std::max<int64_t>(0, timeoutNanos);
+ } // else {
+ // If the stream is disconnected and in standby mode, keep `timeoutNanos` as
+ // -1 to wait forever until next command as the stream can only be closed.
+ // }
+ } else if (isRunning() || (isIdle_l() && !isStandby_l())) {
timeoutNanos = (isRunning() ? std::min(nextTimestampReportTime, nextDataReportTime)
: standbyTime) - AudioClock::getNanoseconds();
timeoutNanos = std::max<int64_t>(0, timeoutNanos);
@@ -430,7 +439,7 @@
break;
}
- if (isRunning()) {
+ if (isRunning() && !isDisconnected_l()) {
auto currentTimestamp = AudioClock::getNanoseconds();
if (currentTimestamp >= nextDataReportTime) {
reportData_l();
@@ -441,12 +450,11 @@
if (sendCurrentTimestamp_l() != AAUDIO_OK) {
ALOGE("Failed to send current timestamp, stop updating timestamp");
disconnect_l();
- } else {
- nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
}
+ nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
}
}
- if (isIdle_l() && AudioClock::getNanoseconds() >= standbyTime) {
+ if ((isIdle_l() || isDisconnected_l()) && AudioClock::getNanoseconds() >= standbyTime) {
aaudio_result_t result = standby_l();
if (result != AAUDIO_OK) {
// If standby failed because of the function is not implemented, there is no