Merge "codec2: unlock GraphicBuffer if lockYCbCr succeeded but could not process it" into sc-dev
diff --git a/apex/manifest.json b/apex/manifest.json
index c7e56be..5d72031 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,6 +1,6 @@
{
"name": "com.android.media",
- "version": 309999910,
+ "version": 319999900,
"requireNativeLibs": [
"libandroid.so",
"libbinder_ndk.so",
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index d36e914..b0d962d 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,6 +1,6 @@
{
"name": "com.android.media.swcodec",
- "version": 309999910,
+ "version": 319999900,
"requireNativeLibs": [
":sphal"
]
diff --git a/apex/mediatranscoding.rc b/apex/mediatranscoding.rc
index 24306a2..ae9f8ba 100644
--- a/apex/mediatranscoding.rc
+++ b/apex/mediatranscoding.rc
@@ -8,4 +8,5 @@
ioprio rt 4
# Restrict to little cores only with system-background cpuset.
writepid /dev/cpuset/system-background/tasks
+ interface aidl media.transcoding
disabled
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 3d93ba5..1cde4c6 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -1988,6 +1988,16 @@
* ACAMERA_CONTROL_ZOOM_RATIO is not 1.0, and ACAMERA_SCALER_CROP_REGION is set to be
* windowboxing, the camera framework will override the ACAMERA_SCALER_CROP_REGION to be
* the active array.</p>
+ * <p>In the capture request, if the application sets ACAMERA_CONTROL_ZOOM_RATIO to a
+ * value != 1.0, the ACAMERA_CONTROL_ZOOM_RATIO tag in the capture result reflects the
+ * effective zoom ratio achieved by the camera device, and the ACAMERA_SCALER_CROP_REGION
+ * adjusts for additional crops that are not zoom related. Otherwise, if the application
+ * sets ACAMERA_CONTROL_ZOOM_RATIO to 1.0, or does not set it at all, the
+ * ACAMERA_CONTROL_ZOOM_RATIO tag in the result metadata will also be 1.0.</p>
+ * <p>When the application requests a physical stream for a logical multi-camera, the
+ * ACAMERA_CONTROL_ZOOM_RATIO in the physical camera result metadata will be 1.0, and
+ * the ACAMERA_SCALER_CROP_REGION tag reflects the amount of zoom and crop done by the
+ * physical camera device.</p>
*
* @see ACAMERA_CONTROL_AE_REGIONS
* @see ACAMERA_CONTROL_ZOOM_RATIO
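For reference, a minimal NDK sketch of the behavior documented above: the request carries ACAMERA_CONTROL_ZOOM_RATIO and the capture result reports the effective value. ACaptureRequest_setEntry_float, ACameraMetadata_getConstEntry, and the tag itself are the public camera2 NDK API; the two helper functions are hypothetical.

#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCaptureRequest.h>

// Sketch only: assumes a valid ACaptureRequest* and the ACameraMetadata* delivered to the
// onCaptureCompleted callback.
static void setZoom(ACaptureRequest *request, float zoom) {
    // request the zoom via CONTROL_ZOOM_RATIO instead of shrinking SCALER_CROP_REGION
    ACaptureRequest_setEntry_float(request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &zoom);
}

static float effectiveZoom(const ACameraMetadata *result) {
    ACameraMetadata_const_entry entry = {};
    // per the documentation above, this reflects the zoom ratio actually achieved by the device
    if (ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_ZOOM_RATIO, &entry) == ACAMERA_OK
            && entry.count == 1) {
        return entry.data.f[0];
    }
    return 1.0f;  // tag absent: treat as no zoom
}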
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 2fa4f25..f857e87 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -26,6 +26,11 @@
#include <media/stagefright/foundation/MediaDefs.h>
namespace android {
+namespace {
+
+constexpr uint8_t NEUTRAL_UV_VALUE = 128;
+
+} // namespace
// codecname set and passed in as a compile flag from Android.bp
constexpr char COMPONENT_NAME[] = CODECNAME;
@@ -51,8 +56,8 @@
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 4096, 2),
+ C2F(mSize, height).inRange(2, 4096, 2),
})
.withSetter(SizeSetter)
.build());
@@ -464,7 +469,8 @@
const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
size_t dstYStride, size_t dstUVStride,
- uint32_t width, uint32_t height) {
+ uint32_t width, uint32_t height,
+ bool isMonochrome) {
for (size_t i = 0; i < height; ++i) {
memcpy(dstY, srcY, width);
@@ -472,6 +478,17 @@
dstY += dstYStride;
}
+ if (isMonochrome) {
+ // Fill with neutral U/V values.
+ for (size_t i = 0; i < height / 2; ++i) {
+ memset(dstV, NEUTRAL_UV_VALUE, width / 2);
+ memset(dstU, NEUTRAL_UV_VALUE, width / 2);
+ dstV += dstUVStride;
+ dstU += dstUVStride;
+ }
+ return;
+ }
+
for (size_t i = 0; i < height / 2; ++i) {
memcpy(dstV, srcV, width / 2);
srcV += srcVStride;
@@ -557,7 +574,7 @@
const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
size_t dstYStride, size_t dstUVStride,
- size_t width, size_t height) {
+ size_t width, size_t height, bool isMonochrome) {
for (size_t y = 0; y < height; ++y) {
for (size_t x = 0; x < width; ++x) {
@@ -568,6 +585,17 @@
dstY += dstYStride;
}
+ if (isMonochrome) {
+ // Fill with neutral U/V values.
+ for (size_t y = 0; y < (height + 1) / 2; ++y) {
+ memset(dstV, NEUTRAL_UV_VALUE, (width + 1) / 2);
+ memset(dstU, NEUTRAL_UV_VALUE, (width + 1) / 2);
+ dstV += dstUVStride;
+ dstU += dstUVStride;
+ }
+ return;
+ }
+
for (size_t y = 0; y < (height + 1) / 2; ++y) {
for (size_t x = 0; x < (width + 1) / 2; ++x) {
dstU[x] = (uint8_t)(srcU[x] >> 2);
@@ -623,8 +651,16 @@
}
}
- // TODO(vigneshv): Add support for monochrome videos since AV1 supports it.
- CHECK(buffer->image_format == libgav1::kImageFormatYuv420);
+ if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
+ buffer->image_format == libgav1::kImageFormatMonochrome400)) {
+ ALOGE("image_format %d not supported", buffer->image_format);
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return false;
+ }
+ const bool isMonochrome =
+ buffer->image_format == libgav1::kImageFormatMonochrome400;
std::shared_ptr<C2GraphicBlock> block;
uint32_t format = HAL_PIXEL_FORMAT_YV12;
@@ -636,6 +672,13 @@
if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+ if (buffer->image_format != libgav1::kImageFormatYuv420) {
+ ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+ mSignalledError = true;
+ work->result = C2_OMITTED;
+ work->workletsProcessed = 1u;
+ return false;
+ }
format = HAL_PIXEL_FORMAT_RGBA_1010102;
}
}
@@ -682,21 +725,18 @@
(uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
} else {
- convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
- srcY, srcU, srcV,
- srcYStride / 2, srcUStride / 2, srcVStride / 2,
- dstYStride, dstUVStride,
- mWidth, mHeight);
+ convertYUV420Planar16ToYUV420Planar(
+ dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+ srcVStride / 2, dstYStride, dstUVStride, mWidth, mHeight,
+ isMonochrome);
}
} else {
const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
- copyOutputBufferToYV12Frame(dstY, dstU, dstV,
- srcY, srcU, srcV,
- srcYStride, srcUStride, srcVStride,
- dstYStride, dstUVStride,
- mWidth, mHeight);
+ copyOutputBufferToYV12Frame(
+ dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+ dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
}
finishWork(buffer->user_private_data, work, std::move(block));
block = nullptr;
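The NEUTRAL_UV_VALUE of 128 used above works because chroma samples are stored with a +128 offset: with U = V = 128 the chroma terms of the YUV-to-RGB conversion vanish and the output is gray, which is exactly what a monochrome (4:0:0) frame needs. A small illustration using approximate BT.601 full-range coefficients (not part of the decoder):

struct Rgb { float r, g, b; };

// With u == v == 128 every chroma term is zero, so r == g == b == y (grayscale).
static Rgb yuvToRgb601(float y, float u, float v) {
    return {
        y + 1.402f * (v - 128.f),                          // R
        y - 0.344f * (u - 128.f) - 0.714f * (v - 128.f),   // G
        y + 1.772f * (u - 128.f),                          // B
    };
}
// yuvToRgb601(Y, 128.f, 128.f) == {Y, Y, Y} for any luma value Y.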
diff --git a/media/codec2/core/include/C2Work.h b/media/codec2/core/include/C2Work.h
index 67084cc..794402f 100644
--- a/media/codec2/core/include/C2Work.h
+++ b/media/codec2/core/include/C2Work.h
@@ -145,10 +145,35 @@
*/
FLAG_INCOMPLETE = (1 << 3),
/**
+ * This frame has been corrected due to a bitstream error. This is a hint, and in most cases
+ * can be ignored. This flag can be set by components on their output to signal the clients
+ * that errors may be present but the frame should be used nonetheless. It can also be set
+ * by clients to signal that the input frame has been corrected, but nonetheless should be
+ * processed.
+ */
+ FLAG_CORRECTED = (1 << 4),
+ /**
+ * This frame is corrupt due to a bitstream error. This is similar to FLAG_CORRECTED,
+ * with the exception that this is a hint that downstream components should not process this
+ * frame.
+ * <p>
+ * If set on the input by the client, the input is likely non-processable and should be
+ * handled similarly to a detected uncorrectable bitstream error. For components that operate
+ * on whole access units, this flag can be propagated to the output. Other components should
+ * aim to detect access unit boundaries to determine if any part of the input frame can be
+ * processed.
+ * <p>
+ * If set by the component, this signals to the client that the output is non-usable,
+ * possibly including its metadata; however, the component will try to recover on
+ * successive input frames.
+ */
+ FLAG_CORRUPT = (1 << 5),
+
+ /**
* This frame contains only codec-specific configuration data, and no actual access unit.
*
- * \deprecated pass codec configuration with using the \todo codec-specific configuration
- * info together with the access unit.
+ * \deprecated pass codec configuration using the C2InitData info parameter together
+ * with the access unit.
*/
FLAG_CODEC_CONFIG = (1u << 31),
};
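A minimal client-side sketch of how the two new flags are meant to be read on a completed work item (the helper name is hypothetical; C2Work and C2FrameData are from this header):

#include <memory>
#include <C2Work.h>

// FLAG_CORRECTED is only a hint and the frame remains usable; FLAG_CORRUPT means the output
// (possibly including its metadata) should not be consumed downstream.
static bool isOutputUsable(const std::unique_ptr<C2Work> &work) {
    if (!work || work->worklets.empty()) {
        return false;
    }
    const C2FrameData &output = work->worklets.front()->output;
    if (output.flags & C2FrameData::FLAG_CORRUPT) {
        return false;  // skip this frame; the component will try to recover on later input
    }
    return true;       // FLAG_CORRECTED, if set, can typically be ignored by clients
}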
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 77a63a7..7c4bfb6 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -67,7 +67,8 @@
s.compare(s.size() - suffixLen, suffixLen, suffix) == 0;
}
-void addSupportedProfileLevels(
+// returns true if the component advertised supported profile level(s)
+bool addSupportedProfileLevels(
std::shared_ptr<Codec2Client::Interface> intf,
MediaCodecInfo::CapabilitiesWriter *caps,
const Traits& trait, const std::string &mediaType) {
@@ -87,12 +88,12 @@
c2_status_t err = intf->querySupportedValues(profileQuery, C2_DONT_BLOCK);
ALOGV("query supported profiles -> %s | %s", asString(err), asString(profileQuery[0].status));
if (err != C2_OK || profileQuery[0].status != C2_OK) {
- return;
+ return false;
}
// we only handle enumerated values
if (profileQuery[0].values.type != C2FieldSupportedValues::VALUES) {
- return;
+ return false;
}
// determine if codec supports HDR
@@ -125,6 +126,8 @@
supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
supportsHdr |= (mediaType == MIMETYPE_VIDEO_AV1);
+ bool added = false;
+
for (C2Value::Primitive profile : profileQuery[0].values.values) {
pl.profile = (C2Config::profile_t)profile.ref<uint32_t>();
std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -165,6 +168,7 @@
} else if (!mapper) {
caps->addProfileLevel(pl.profile, pl.level);
}
+ added = true;
// for H.263 also advertise the second highest level if the
// codec supports level 45, as level 45 only covers level 10
@@ -188,6 +192,7 @@
}
}
}
+ return added;
}
void addSupportedColorFormats(
@@ -604,7 +609,15 @@
}
}
- addSupportedProfileLevels(intf, caps.get(), trait, mediaType);
+ if (!addSupportedProfileLevels(intf, caps.get(), trait, mediaType)) {
+ // TODO(b/193279646) This will get fixed in C2InterfaceHelper
+ // Some components may not advertise supported values if they use a const
+ // param for profile/level (they support only one profile). For now cover
+ // only VP8 here until it is fixed.
+ if (mediaType == MIMETYPE_VIDEO_VP8) {
+ caps->addProfileLevel(VP8ProfileMain, VP8Level_Version0);
+ }
+ }
addSupportedColorFormats(intf, caps.get(), trait, mediaType);
}
}
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 00bf84f..4d939fa 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -92,6 +92,7 @@
ALookup<C2Config::bitrate_mode_t, int32_t> sBitrateModes = {
{ C2Config::BITRATE_CONST, BITRATE_MODE_CBR },
+ { C2Config::BITRATE_CONST_SKIP_ALLOWED, BITRATE_MODE_CBR_FD },
{ C2Config::BITRATE_VARIABLE, BITRATE_MODE_VBR },
{ C2Config::BITRATE_IGNORE, BITRATE_MODE_CQ },
};
diff --git a/media/extractors/fuzzers/Android.bp b/media/extractors/fuzzers/Android.bp
index b731292..0e54b58 100644
--- a/media/extractors/fuzzers/Android.bp
+++ b/media/extractors/fuzzers/Android.bp
@@ -39,6 +39,7 @@
static_libs: [
"liblog",
+ "libstagefright_foundation_colorutils_ndk",
"libstagefright_foundation",
"libmediandk_format",
"libmedia_ndkformatpriv",
diff --git a/media/extractors/mkv/Android.bp b/media/extractors/mkv/Android.bp
index 840c9fc..54c5b27 100644
--- a/media/extractors/mkv/Android.bp
+++ b/media/extractors/mkv/Android.bp
@@ -32,6 +32,7 @@
],
static_libs: [
+ "libstagefright_foundation_colorutils_ndk", // for mainline-safe ColorUtils
"libstagefright_foundation",
"libstagefright_metadatautils",
"libwebm",
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index b4e4c5d..fbcd554 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1126,7 +1126,9 @@
void *data;
size_t size;
- if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)) {
+ if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2,
+ &data, &size)
+ && size >= 5) {
const uint8_t *ptr = (const uint8_t *)data;
const uint8_t profile = ptr[2] >> 1;
const uint8_t bl_compatibility_id = (ptr[4]) >> 4;
@@ -1163,8 +1165,12 @@
mLastTrack->next = track_b;
track_b->next = NULL;
- auto id = track_b->meta->mFormat->findEntryByName(AMEDIAFORMAT_KEY_CSD_2);
- track_b->meta->mFormat->removeEntryAt(id);
+ // we want to remove the csd-2 key from the metadata, but
+ // don't have an AMediaFormat_* function to do so. Settle
+ // for replacing this csd-2 with an empty csd-2.
+ uint8_t emptybuffer[8] = {};
+ AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_2,
+ emptybuffer, 0);
if (4 == profile || 7 == profile || 8 == profile ) {
AMediaFormat_setString(track_b->meta,
diff --git a/media/extractors/tests/Android.bp b/media/extractors/tests/Android.bp
index e17893e..5d97d9a 100644
--- a/media/extractors/tests/Android.bp
+++ b/media/extractors/tests/Android.bp
@@ -51,6 +51,7 @@
"libstagefright_esds",
"libstagefright_mpeg2support",
"libstagefright_mpeg2extractor",
+ "libstagefright_foundation_colorutils_ndk",
"libstagefright_foundation",
"libstagefright_metadatautils",
diff --git a/media/libmediaformatshaper/VQApply.cpp b/media/libmediaformatshaper/VQApply.cpp
index 585ec6c..26ff446 100644
--- a/media/libmediaformatshaper/VQApply.cpp
+++ b/media/libmediaformatshaper/VQApply.cpp
@@ -63,13 +63,62 @@
return 0;
}
- if (codec->supportedMinimumQuality() > 0) {
- // allow the codec provided minimum quality behavior to work at it
- ALOGD("minquality: codec claims to implement minquality=%d",
- codec->supportedMinimumQuality());
+ // only proceed if we're in the handheld category.
+ // We embed this information within the codec record when we build up features
+ // and pass them in from MediaCodec; it's the easiest place to store it
+ //
+ // TODO: make a #define for '_vq_eligible.device' here and in MediaCodec.cpp
+ //
+ int32_t isVQEligible = 0;
+ (void) codec->getFeatureValue("_vq_eligible.device", &isVQEligible);
+ if (!isVQEligible) {
+ ALOGD("minquality: not an eligible device class");
return 0;
}
+ // look at resolution to determine if we want any shaping/modification at all.
+ //
+ // we currently only shape (or ask the underlying codec to shape) for
+ // resolution range 320x240 < target <= 1920x1080
+ // NB: the asymmetry of < vs <= is deliberate.
+ //
+
+ int32_t width = 0;
+ (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_WIDTH, &width);
+ int32_t height = 0;
+ (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_HEIGHT, &height);
+ int64_t pixels = ((int64_t)width) * height;
+
+ bool eligibleSize = true;
+ if (pixels <= 320 * 240) {
+ eligibleSize = false;
+ } else if (pixels > 1920 * 1088) {
+ eligibleSize = false;
+ }
+
+ if (!eligibleSize) {
+ // we won't shape, and ask that the codec not shape
+ ALOGD("minquality: %dx%d outside of shaping range", width, height);
+ AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", 0);
+ return 0;
+ }
+
+ if (codec->supportedMinimumQuality() > 0) {
+ // let the codec-provided minimum quality behavior handle it
+ ALOGD("minquality: codec claims to implement minquality=%d",
+ codec->supportedMinimumQuality());
+
+ // tell the underlying codec to do its thing; we won't try to second-guess it.
+ // default to 1, aka S_HANDHELD.
+ int32_t qualityTarget = 1;
+ (void) codec->getFeatureValue("_quality.target", &qualityTarget);
+ AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", qualityTarget);
+ return 0;
+ }
+
+ // let the codec know that we'll be enforcing the minimum quality standards
+ AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", 0);
+
//
// consider any and all tools available
// -- qp
@@ -84,11 +133,8 @@
bitrateConfigured = bitrateConfiguredTmp;
bitrateChosen = bitrateConfigured;
- int32_t width = 0;
- (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_WIDTH, &width);
- int32_t height = 0;
- (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_HEIGHT, &height);
- int64_t pixels = ((int64_t)width) * height;
+ // width, height, and pixels are calculated above
+
double minimumBpp = codec->getBpp(width, height);
int64_t bitrateFloor = pixels * minimumBpp;
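Condensed, the gating added above amounts to the following sketch (the helper name is hypothetical; the thresholds are the ones used in the code):

#include <cstdint>

// Shaping is only considered when the device is in the eligible (handheld) class and the
// target resolution falls in the range 320x240 < pixels <= 1920x1088.
static bool shapingEligible(bool isVQEligibleDevice, int32_t width, int32_t height) {
    if (!isVQEligibleDevice) {
        return false;
    }
    const int64_t pixels = static_cast<int64_t>(width) * height;
    return pixels > 320 * 240 && pixels <= 1920 * 1088;
}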
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 6dc3e3f..2aabd53 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -233,7 +233,7 @@
for (size_t i = 0; i < matchingCodecs.size(); ++i) {
const AString &componentName = matchingCodecs[i];
- sp<ImageDecoder> decoder = new ImageDecoder(componentName, trackMeta, source);
+ sp<MediaImageDecoder> decoder = new MediaImageDecoder(componentName, trackMeta, source);
int64_t frameTimeUs = thumbnail ? -1 : 0;
if (decoder->init(frameTimeUs, 0 /*option*/, colorFormat) == OK) {
sp<IMemory> frame = decoder->extractFrame(rect);
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 0fd4ef2..efd4070 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -744,7 +744,7 @@
////////////////////////////////////////////////////////////////////////
-ImageDecoder::ImageDecoder(
+MediaImageDecoder::MediaImageDecoder(
const AString &componentName,
const sp<MetaData> &trackMeta,
const sp<IMediaSource> &source)
@@ -760,7 +760,7 @@
mTargetTiles(0) {
}
-sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
+sp<AMessage> MediaImageDecoder::onGetFormatAndSeekOptions(
int64_t frameTimeUs, int /*seekMode*/,
MediaSource::ReadOptions *options, sp<Surface> * /*window*/) {
sp<MetaData> overrideMeta;
@@ -836,7 +836,7 @@
return videoFormat;
}
-status_t ImageDecoder::onExtractRect(FrameRect *rect) {
+status_t MediaImageDecoder::onExtractRect(FrameRect *rect) {
// TODO:
// This callback is for verifying whether we can decode the rect,
// and if so, set up the internal variables for decoding.
@@ -875,7 +875,7 @@
return OK;
}
-status_t ImageDecoder::onOutputReceived(
+status_t MediaImageDecoder::onOutputReceived(
const sp<MediaCodecBuffer> &videoFrameBuffer,
const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
if (outputFormat == NULL) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index f2bcebb..c03236a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -31,6 +31,8 @@
#include "include/SoftwareRenderer.h"
#include "PlaybackDurationAccumulator.h"
+#include <android/binder_manager.h>
+#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -40,6 +42,7 @@
#include <android/binder_manager.h>
#include <android/dlext.h>
#include <binder/IMemory.h>
+#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
#include <cutils/properties.h>
#include <gui/BufferQueue.h>
@@ -1697,6 +1700,7 @@
//
static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
+static bool sIsHandheld = true;
static bool connectFormatShaper() {
static std::once_flag sCheckOnce;
@@ -1770,6 +1774,64 @@
ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
(loading_finished - loading_started)/1000);
+
+ // we also want to know whether this is a handheld device
+ // start with the assumption that the device is handheld.
+ sIsHandheld = true;
+ sp<IServiceManager> serviceMgr = defaultServiceManager();
+ sp<content::pm::IPackageManagerNative> packageMgr;
+ if (serviceMgr.get() != nullptr) {
+ sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
+ packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
+ }
+ // if we didn't get serviceMgr, we'll leave packageMgr as default null
+ if (packageMgr != nullptr) {
+
+ // MUST have these
+ static const String16 featuresNeeded[] = {
+ String16("android.hardware.touchscreen")
+ };
+ // these must be present to be a handheld
+ for (::android::String16 required : featuresNeeded) {
+ bool hasFeature = false;
+ binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
+ if (!status.isOk()) {
+ ALOGE("%s: hasSystemFeature failed: %s",
+ __func__, status.exceptionMessage().c_str());
+ continue;
+ }
+ ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
+ if (!hasFeature) {
+ ALOGV("... which means we are not handheld");
+ sIsHandheld = false;
+ break;
+ }
+ }
+
+ // MUST NOT have these
+ static const String16 featuresDisallowed[] = {
+ String16("android.hardware.type.automotive"),
+ String16("android.hardware.type.television"),
+ String16("android.hardware.type.watch")
+ };
+ // if any of these are present, we aren't a handheld
+ for (::android::String16 forbidden : featuresDisallowed) {
+ bool hasFeature = false;
+ binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
+ if (!status.isOk()) {
+ ALOGE("%s: hasSystemFeature failed: %s",
+ __func__, status.exceptionMessage().c_str());
+ continue;
+ }
+ ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
+ if (hasFeature) {
+ ALOGV("... which means we are not handheld");
+ sIsHandheld = false;
+ break;
+ }
+ }
+ }
+
});
return true;
@@ -1848,6 +1910,18 @@
}
}
}
+
+ // We also carry in the codec description whether we are on a handheld device.
+ // This info is eventually used by both the Codec and the C2 machinery to inform
+ // the underlying codec whether to do any shaping.
+ //
+ if (sIsHandheld) {
+ // set if we are indeed a handheld device (or, in the future, 'any eligible device');
+ // left unset on devices that aren't eligible for minimum quality enforcement.
+ (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
+ // strictly speaking, it's a tuning, but tunings are strings while features store ints
+ (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
+ }
}
status_t MediaCodec::setupFormatShaper(AString mediaType) {
@@ -1888,6 +1962,16 @@
// Format Shaping
// Mapping and Manipulation of encoding parameters
//
+// All of these decisions are pushed into the shaper instead of being made here in MediaCodec.
+// This includes decisions based on whether the codec implements minimum quality bars
+// itself or needs to be shaped outside of the codec.
+// This keeps all those decisions in one place.
+// It also means that we push some extra decision information (whether this is a handheld
+// device or one that is otherwise eligible for minimum quality manipulation, which
+// generational quality target is in force, etc.). This allows those values to be cached
+// in the per-codec structures that are built once per process instead of once per
+// codec instantiation.
+//
status_t MediaCodec::shapeMediaFormat(
const sp<AMessage> &format,
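The shaping decision is split between this file (which records device eligibility once per process) and VQApply.cpp (which reads it back per shaped format). A self-contained sketch of that handshake, with a hypothetical FeatureStore standing in for the shaper's per-codec record; the feature names are the ones used in the diff:

#include <cstdint>
#include <map>
#include <string>

struct FeatureStore {  // stand-in for the shaper codec record
    std::map<std::string, int32_t> features;
    void setFeature(const std::string &name, int32_t v) { features[name] = v; }
    bool getFeatureValue(const std::string &name, int32_t *v) const {
        auto it = features.find(name);
        if (it == features.end()) return false;
        *v = it->second;
        return true;
    }
};

// MediaCodec side: record the device class once (cached per process).
static void recordDeviceClass(FeatureStore &codec, bool isHandheld) {
    if (isHandheld) {
        codec.setFeature("_vq_eligible.device", 1);
        codec.setFeature("_quality.target", 1 /* S_HANDHELD */);
    }
}

// VQApply side: bail out of shaping early when the device was never marked eligible.
static bool shouldShape(const FeatureStore &codec) {
    int32_t eligible = 0;
    (void) codec.getFeatureValue("_vq_eligible.device", &eligible);
    return eligible != 0;
}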
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index a5c3ba6..6893324 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -307,8 +307,16 @@
sp<MetaData> meta = mImpl->getMetaData();
+ if (meta == nullptr) {
+ // extractor did not publish file metadata
+ return -EINVAL;
+ }
+
const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
+ if (!meta->findCString(kKeyMIMEType, &mime)) {
+ // a missing mime type is treated as invalid
+ return -EINVAL;
+ }
*format = new AMessage();
(*format)->setString("mime", mime);
@@ -354,6 +362,11 @@
sp<MetaData> meta = mImpl->getMetaData();
+ if (meta == nullptr) {
+ // extractor did not publish file metadata
+ return -EINVAL;
+ }
+
int64_t exifOffset, exifSize;
if (meta->findInt64(kKeyExifOffset, &exifOffset)
&& meta->findInt64(kKeyExifSize, &exifSize)) {
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 4bfc673..dd2c66f 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -85,6 +85,7 @@
"AudioPresentationInfo.cpp",
"ByteUtils.cpp",
"ColorUtils.cpp",
+ "ColorUtils_fill.cpp",
"FoundationUtils.cpp",
"MediaBuffer.cpp",
"MediaBufferBase.cpp",
@@ -148,3 +149,66 @@
"-DNO_IMEMORY",
],
}
+
+// This gets linked into extractors in the media mainline module, so it must target
+// NDK API 29 so that it runs on >= Q.
+cc_library_static {
+ name: "libstagefright_foundation_colorutils_ndk",
+ host_supported: true,
+ vendor_available: true,
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+
+ shared_libs: [
+ "liblog",
+ "libutils", // for sp<>
+ // actually invokes this, but called from folks who already load it
+ // "libmediandk",
+ ],
+
+ header_libs: [
+ // this is only needed for the vendor variant that removes libbinder, but the
+ // vendor target below does not allow adding header_libs.
+ "libbinder_headers",
+ "libstagefright_foundation_headers",
+ "media_ndk_headers",
+ "media_plugin_headers",
+ ],
+
+ local_include_dirs: [
+ "include/media/stagefright/foundation",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ srcs: [
+ "ColorUtils_ndk.cpp",
+ "ColorUtils_fill.cpp",
+ ],
+
+ clang: true,
+
+ sanitize: {
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ },
+
+ min_sdk_version: "29",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ "com.android.media.swcodec",
+ ],
+
+}
+
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 3812afe..fa722b5 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -722,13 +722,6 @@
transfer, asString((ColorTransfer)transfer));
}
-
-// static
-void ColorUtils::setHDRStaticInfoIntoAMediaFormat(
- const HDRStaticInfo &info, AMediaFormat *format) {
- setHDRStaticInfoIntoFormat(info, format->mFormat);
-}
-
// static
void ColorUtils::setHDRStaticInfoIntoFormat(
const HDRStaticInfo &info, sp<AMessage> &format) {
@@ -736,48 +729,7 @@
// Convert the data in infoBuffer to little endian format as defined by CTA-861-3
uint8_t *data = infoBuffer->data();
- // Static_Metadata_Descriptor_ID
- data[0] = info.mID;
-
- // display primary 0
- data[1] = LO_UINT16(info.sType1.mR.x);
- data[2] = HI_UINT16(info.sType1.mR.x);
- data[3] = LO_UINT16(info.sType1.mR.y);
- data[4] = HI_UINT16(info.sType1.mR.y);
-
- // display primary 1
- data[5] = LO_UINT16(info.sType1.mG.x);
- data[6] = HI_UINT16(info.sType1.mG.x);
- data[7] = LO_UINT16(info.sType1.mG.y);
- data[8] = HI_UINT16(info.sType1.mG.y);
-
- // display primary 2
- data[9] = LO_UINT16(info.sType1.mB.x);
- data[10] = HI_UINT16(info.sType1.mB.x);
- data[11] = LO_UINT16(info.sType1.mB.y);
- data[12] = HI_UINT16(info.sType1.mB.y);
-
- // white point
- data[13] = LO_UINT16(info.sType1.mW.x);
- data[14] = HI_UINT16(info.sType1.mW.x);
- data[15] = LO_UINT16(info.sType1.mW.y);
- data[16] = HI_UINT16(info.sType1.mW.y);
-
- // MaxDisplayLuminance
- data[17] = LO_UINT16(info.sType1.mMaxDisplayLuminance);
- data[18] = HI_UINT16(info.sType1.mMaxDisplayLuminance);
-
- // MinDisplayLuminance
- data[19] = LO_UINT16(info.sType1.mMinDisplayLuminance);
- data[20] = HI_UINT16(info.sType1.mMinDisplayLuminance);
-
- // MaxContentLightLevel
- data[21] = LO_UINT16(info.sType1.mMaxContentLightLevel);
- data[22] = HI_UINT16(info.sType1.mMaxContentLightLevel);
-
- // MaxFrameAverageLightLevel
- data[23] = LO_UINT16(info.sType1.mMaxFrameAverageLightLevel);
- data[24] = HI_UINT16(info.sType1.mMaxFrameAverageLightLevel);
+ fillHdrStaticInfoBuffer(info, data);
format->setBuffer("hdr-static-info", infoBuffer);
}
diff --git a/media/libstagefright/foundation/ColorUtils_fill.cpp b/media/libstagefright/foundation/ColorUtils_fill.cpp
new file mode 100644
index 0000000..f07493e
--- /dev/null
+++ b/media/libstagefright/foundation/ColorUtils_fill.cpp
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ColorUtils"
+
+#include <inttypes.h>
+#include <arpa/inet.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+
+namespace android {
+
+// shortcut names for brevity in the following tables
+typedef ColorAspects CA;
+typedef ColorUtils CU;
+
+#define HI_UINT16(a) (((a) >> 8) & 0xFF)
+#define LO_UINT16(a) ((a) & 0xFF)
+
+//
+// static
+void ColorUtils::fillHdrStaticInfoBuffer(const HDRStaticInfo &info, uint8_t *data) {
+ // Static_Metadata_Descriptor_ID
+ data[0] = info.mID;
+
+ // display primary 0
+ data[1] = LO_UINT16(info.sType1.mR.x);
+ data[2] = HI_UINT16(info.sType1.mR.x);
+ data[3] = LO_UINT16(info.sType1.mR.y);
+ data[4] = HI_UINT16(info.sType1.mR.y);
+
+ // display primary 1
+ data[5] = LO_UINT16(info.sType1.mG.x);
+ data[6] = HI_UINT16(info.sType1.mG.x);
+ data[7] = LO_UINT16(info.sType1.mG.y);
+ data[8] = HI_UINT16(info.sType1.mG.y);
+
+ // display primary 2
+ data[9] = LO_UINT16(info.sType1.mB.x);
+ data[10] = HI_UINT16(info.sType1.mB.x);
+ data[11] = LO_UINT16(info.sType1.mB.y);
+ data[12] = HI_UINT16(info.sType1.mB.y);
+
+ // white point
+ data[13] = LO_UINT16(info.sType1.mW.x);
+ data[14] = HI_UINT16(info.sType1.mW.x);
+ data[15] = LO_UINT16(info.sType1.mW.y);
+ data[16] = HI_UINT16(info.sType1.mW.y);
+
+ // MaxDisplayLuminance
+ data[17] = LO_UINT16(info.sType1.mMaxDisplayLuminance);
+ data[18] = HI_UINT16(info.sType1.mMaxDisplayLuminance);
+
+ // MinDisplayLuminance
+ data[19] = LO_UINT16(info.sType1.mMinDisplayLuminance);
+ data[20] = HI_UINT16(info.sType1.mMinDisplayLuminance);
+
+ // MaxContentLightLevel
+ data[21] = LO_UINT16(info.sType1.mMaxContentLightLevel);
+ data[22] = HI_UINT16(info.sType1.mMaxContentLightLevel);
+
+ // MaxFrameAverageLightLevel
+ data[23] = LO_UINT16(info.sType1.mMaxFrameAverageLightLevel);
+ data[24] = HI_UINT16(info.sType1.mMaxFrameAverageLightLevel);
+}
+
+
+} // namespace android
+
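Each field above is a 16-bit value written in little-endian order per CTA-861-3 into the 25-byte buffer. A hypothetical helper for reading a field back out (illustration only, not part of the patch):

#include <stddef.h>
#include <stdint.h>

// inverse of the LO_UINT16/HI_UINT16 packing used above
static inline uint16_t readU16LE(const uint8_t *data, size_t offset) {
    return (uint16_t)(data[offset] | (data[offset + 1] << 8));
}
// e.g. readU16LE(data, 17) recovers mMaxDisplayLuminance from bytes 17 and 18.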
diff --git a/media/libstagefright/foundation/ColorUtils_ndk.cpp b/media/libstagefright/foundation/ColorUtils_ndk.cpp
new file mode 100644
index 0000000..3ed2425
--- /dev/null
+++ b/media/libstagefright/foundation/ColorUtils_ndk.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ColorUtils"
+
+#include <inttypes.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Log.h>
+
+namespace android {
+
+// static
+void ColorUtils::setHDRStaticInfoIntoAMediaFormat(
+ const HDRStaticInfo &info, AMediaFormat *format) {
+ uint8_t *data = (uint8_t *) malloc(25);
+ if (data != NULL) {
+ fillHdrStaticInfoBuffer(info, data);
+ AMediaFormat_setBuffer(format, "hdr-static-info", data, 25);
+ free(data);
+ }
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index 9e3f718..a2b6c4f 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -191,6 +191,8 @@
static void setHDRStaticInfoIntoFormat(const HDRStaticInfo &info, sp<AMessage> &format);
// writes |info| into format.
static void setHDRStaticInfoIntoAMediaFormat(const HDRStaticInfo &info, AMediaFormat *format);
+ // (internal) used by the setHDRStaticInfoInto* routines
+ static void fillHdrStaticInfoBuffer(const HDRStaticInfo &info, uint8_t *data);
};
inline static const char *asString(android::ColorUtils::ColorStandard i, const char *def = "??") {
diff --git a/media/libstagefright/foundation/tests/colorutils/Android.bp b/media/libstagefright/foundation/tests/colorutils/Android.bp
index 5a17e4b..0fea0d5 100644
--- a/media/libstagefright/foundation/tests/colorutils/Android.bp
+++ b/media/libstagefright/foundation/tests/colorutils/Android.bp
@@ -40,6 +40,7 @@
],
static_libs: [
+ "libstagefright_foundation_colorutils_ndk",
"libstagefright_foundation",
],
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index bca7f01..d59e4f5 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -146,8 +146,8 @@
status_t captureSurface();
};
-struct ImageDecoder : public FrameDecoder {
- ImageDecoder(
+struct MediaImageDecoder : public FrameDecoder {
+ MediaImageDecoder(
const AString &componentName,
const sp<MetaData> &trackMeta,
const sp<IMediaSource> &source);
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 6371769..4237e8c 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -557,12 +557,14 @@
}
constexpr int32_t BITRATE_MODE_CBR = 2;
+constexpr int32_t BITRATE_MODE_CBR_FD = 3;
constexpr int32_t BITRATE_MODE_CQ = 0;
constexpr int32_t BITRATE_MODE_VBR = 1;
inline static const char *asString_BitrateMode(int32_t i, const char *def = "??") {
switch (i) {
case BITRATE_MODE_CBR: return "CBR";
+ case BITRATE_MODE_CBR_FD: return "CBR_FD";
case BITRATE_MODE_CQ: return "CQ";
case BITRATE_MODE_VBR: return "VBR";
default: return def;
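A minimal sketch of requesting the new mode through the NDK format API (the helper name is hypothetical; AMEDIAFORMAT_KEY_BITRATE_MODE corresponds to the "bitrate-mode" key, and whether the mode is honored depends on the encoder advertising it):

#include <media/NdkMediaFormat.h>

// BITRATE_MODE_CBR_FD == 3; Codec2Mapper translates it to C2Config::BITRATE_CONST_SKIP_ALLOWED.
static void requestCbrWithFrameDrops(AMediaFormat *format) {
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BITRATE_MODE, 3 /* BITRATE_MODE_CBR_FD */);
}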
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
index a628c70..c251479 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
@@ -42,7 +42,7 @@
if (isVideoDecoder) {
decoder = new VideoFrameDecoder(componentName, trackMeta, source);
} else {
- decoder = new ImageDecoder(componentName, trackMeta, source);
+ decoder = new MediaImageDecoder(componentName, trackMeta, source);
}
while (fdp.remaining_bytes()) {
@@ -80,4 +80,3 @@
}
} // namespace android
-
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 88aa7cb..e8552c4 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -21,42 +21,6 @@
#error This header file should only be included from AudioFlinger.h
#endif
-// Checks and monitors app ops for audio record
-class OpRecordAudioMonitor : public RefBase {
-public:
- ~OpRecordAudioMonitor() override;
- bool hasOp() const;
- int32_t getOp() const { return mAppOp; }
-
- static sp<OpRecordAudioMonitor> createIfNeeded(const AttributionSourceState& attributionSource,
- const audio_attributes_t& attr);
-
-private:
- OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp);
-
- void onFirstRef() override;
-
- AppOpsManager mAppOpsManager;
-
- class RecordAudioOpCallback : public BnAppOpsCallback {
- public:
- explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
- void opChanged(int32_t op, const String16& packageName) override;
-
- private:
- const wp<OpRecordAudioMonitor> mMonitor;
- };
-
- sp<RecordAudioOpCallback> mOpCallback;
- // called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor is updated
- // in AppOp callback and in onFirstRef()
- void checkOp();
-
- std::atomic_bool mHasOp;
- const AttributionSourceState mAttributionSource;
- const int32_t mAppOp;
-};
-
// record track
class RecordTrack : public TrackBase {
public:
@@ -107,7 +71,7 @@
{ return (mFlags & AUDIO_INPUT_FLAG_DIRECT) != 0; }
void setSilenced(bool silenced) { if (!isPatchTrack()) mSilenced = silenced; }
- bool isSilenced() const;
+ bool isSilenced() const { return mSilenced; }
status_t getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones);
@@ -154,8 +118,6 @@
bool mSilenced;
- // used to enforce the audio record app op corresponding to this track's audio source
- sp<OpRecordAudioMonitor> mOpRecordAudioMonitor;
std::string mSharedAudioPackageName = {};
int32_t mStartFrames = -1;
};
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 8d98afe..a6e3c06 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -2247,109 +2247,6 @@
// ----------------------------------------------------------------------------
-// ----------------------------------------------------------------------------
-// AppOp for audio recording
-// -------------------------------
-
-#undef LOG_TAG
-#define LOG_TAG "AF::OpRecordAudioMonitor"
-
-// static
-sp<AudioFlinger::RecordThread::OpRecordAudioMonitor>
-AudioFlinger::RecordThread::OpRecordAudioMonitor::createIfNeeded(
- const AttributionSourceState& attributionSource, const audio_attributes_t& attr)
-{
- if (isServiceUid(attributionSource.uid)) {
- ALOGV("not silencing record for service %s",
- attributionSource.toString().c_str());
- return nullptr;
- }
-
- // Capturing from FM TUNER output is not controlled by an app op
- // because it does not affect users privacy as does capturing from an actual microphone.
- if (attr.source == AUDIO_SOURCE_FM_TUNER) {
- ALOGV("not muting FM TUNER capture for uid %d", attributionSource.uid);
- return nullptr;
- }
-
- AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
- attributionSource);
- if (!checkedAttributionSource.packageName.has_value()
- || checkedAttributionSource.packageName.value().size() == 0) {
- return nullptr;
- }
- return new OpRecordAudioMonitor(checkedAttributionSource, getOpForSource(attr.source));
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::OpRecordAudioMonitor(
- const AttributionSourceState& attributionSource, int32_t appOp)
- : mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp)
-{
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::~OpRecordAudioMonitor()
-{
- if (mOpCallback != 0) {
- mAppOpsManager.stopWatchingMode(mOpCallback);
- }
- mOpCallback.clear();
-}
-
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::onFirstRef()
-{
- checkOp();
- mOpCallback = new RecordAudioOpCallback(this);
- ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
- // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
- // since it controls the mic permission for legacy apps.
- mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
- mAttributionSource.packageName.value_or(""))),
- mOpCallback);
-}
-
-bool AudioFlinger::RecordThread::OpRecordAudioMonitor::hasOp() const {
- return mHasOp.load();
-}
-
-// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
-// is updated in AppOp callback and in onFirstRef()
-// Note this method is never called (and never to be) for audio server / root track
-// due to the UID in createIfNeeded(). As a result for those record track, it's:
-// - not called from constructor,
-// - not called from RecordAudioOpCallback because the callback is not installed in this case
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::checkOp()
-{
- // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
- // since it controls the mic permission for legacy apps.
- const int32_t mode = mAppOpsManager.checkOp(mAppOp,
- mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
- mAttributionSource.packageName.value_or(""))));
- const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
- // verbose logging only log when appOp changed
- ALOGI_IF(hasIt != mHasOp.load(),
- "App op %d missing, %ssilencing record %s",
- mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
- mHasOp.store(hasIt);
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
- const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
-{ }
-
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
- const String16& packageName) {
- UNUSED(packageName);
- sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
- if (monitor != NULL) {
- if (op != monitor->getOp()) {
- return;
- }
- monitor->checkOp();
- }
-}
-
-
-
#undef LOG_TAG
#define LOG_TAG "AF::RecordHandle"
@@ -2450,7 +2347,6 @@
mRecordBufferConverter(NULL),
mFlags(flags),
mSilenced(false),
- mOpRecordAudioMonitor(OpRecordAudioMonitor::createIfNeeded(attributionSource, attr)),
mStartFrames(startFrames)
{
if (mCblk == NULL) {
@@ -2709,14 +2605,6 @@
mServerLatencyMs.store(latencyMs);
}
-bool AudioFlinger::RecordThread::RecordTrack::isSilenced() const {
- if (mSilenced) {
- return true;
- }
- // The monitor is only created for record tracks that can be silenced.
- return mOpRecordAudioMonitor ? !mOpRecordAudioMonitor->hasOp() : false;
-}
-
status_t AudioFlinger::RecordThread::RecordTrack::getActiveMicrophones(
std::vector<media::MicrophoneInfo>* activeMicrophones)
{
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 2e866ff..0c4608a 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -5847,11 +5847,12 @@
// With low-latency playing on speaker, music on WFD, when the first low-latency
// output is stopped, getNewOutputDevices checks for a product strategy
// from the list, as STRATEGY_SONIFICATION comes prior to STRATEGY_MEDIA.
- // If an ALARM or ENFORCED_AUDIBLE stream is supported by the product strategy,
+ // If an ALARM, RING or ENFORCED_AUDIBLE stream is supported by the product strategy,
// devices are returned for STRATEGY_SONIFICATION without checking whether the
// stream is associated to the output descriptor.
if (doGetOutputDevicesForVoice() || outputDesc->isStrategyActive(productStrategy) ||
((hasStreamActive(AUDIO_STREAM_ALARM) ||
+ hasStreamActive(AUDIO_STREAM_RING) ||
hasStreamActive(AUDIO_STREAM_ENFORCED_AUDIBLE)) &&
mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc))) {
// Retrieval of devices for voice DL is done on primary output profile, cannot
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 0273d29..454c020 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -38,6 +38,7 @@
"libmedia_helper",
"libmediametrics",
"libmediautils",
+ "libpermission",
"libsensorprivacy",
"libutils",
"audioclient-types-aidl-cpp",
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 3298f6b..b4b6ddf 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -674,7 +674,8 @@
sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
selectedDeviceId, adjAttributionSource,
- canCaptureOutput, canCaptureHotword);
+ canCaptureOutput, canCaptureHotword,
+ mAudioCommandThread);
mAudioRecordClients.add(portId, client);
}
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 201273e..4d0e1f1 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -730,7 +730,10 @@
&& !(isTopOrLatestSensitive || current->canCaptureOutput))
&& canCaptureIfInCallOrCommunication(current);
- if (isVirtualSource(source)) {
+ if (!current->hasOp()) {
+ // Never allow capture if app op is denied
+ allowCapture = false;
+ } else if (isVirtualSource(source)) {
// Allow capture for virtual (remote submix, call audio TX or RX...) sources
allowCapture = true;
} else if (mUidPolicy->isAssistantUid(currentUid)) {
@@ -830,6 +833,19 @@
return false;
}
+/* static */
+bool AudioPolicyService::isAppOpSource(audio_source_t source)
+{
+ switch (source) {
+ case AUDIO_SOURCE_FM_TUNER:
+ case AUDIO_SOURCE_ECHO_REFERENCE:
+ return false;
+ default:
+ break;
+ }
+ return true;
+}
+
void AudioPolicyService::setAppState_l(sp<AudioRecordClient> client, app_state_t state)
{
AutoCallerClear acc;
@@ -1418,6 +1434,109 @@
return binder::Status::ok();
}
+// ----------- AudioPolicyService::OpRecordAudioMonitor implementation ----------
+
+// static
+sp<AudioPolicyService::OpRecordAudioMonitor>
+AudioPolicyService::OpRecordAudioMonitor::createIfNeeded(
+ const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
+ wp<AudioCommandThread> commandThread)
+{
+ if (isAudioServerOrRootUid(attributionSource.uid)) {
+ ALOGV("not silencing record for audio or root source %s",
+ attributionSource.toString().c_str());
+ return nullptr;
+ }
+
+ if (!AudioPolicyService::isAppOpSource(attr.source)) {
+ ALOGD("not monitoring app op for uid %d and source %d",
+ attributionSource.uid, attr.source);
+ return nullptr;
+ }
+
+ if (!attributionSource.packageName.has_value()
+ || attributionSource.packageName.value().size() == 0) {
+ return nullptr;
+ }
+ return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
+}
+
+AudioPolicyService::OpRecordAudioMonitor::OpRecordAudioMonitor(
+ const AttributionSourceState& attributionSource, int32_t appOp,
+ wp<AudioCommandThread> commandThread) :
+ mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
+ mCommandThread(commandThread)
+{
+}
+
+AudioPolicyService::OpRecordAudioMonitor::~OpRecordAudioMonitor()
+{
+ if (mOpCallback != 0) {
+ mAppOpsManager.stopWatchingMode(mOpCallback);
+ }
+ mOpCallback.clear();
+}
+
+void AudioPolicyService::OpRecordAudioMonitor::onFirstRef()
+{
+ checkOp();
+ mOpCallback = new RecordAudioOpCallback(this);
+ ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
+ // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
+ // since it controls the mic permission for legacy apps.
+ mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+ mAttributionSource.packageName.value_or(""))),
+ mOpCallback);
+}
+
+bool AudioPolicyService::OpRecordAudioMonitor::hasOp() const {
+ return mHasOp.load();
+}
+
+// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
+// is updated in the AppOp callback and in onFirstRef().
+// Note this method is never called (and never will be) for audio server / root tracks
+// due to the UID check in createIfNeeded(). As a result, for those record tracks, it is:
+// - not called from the constructor,
+// - not called from RecordAudioOpCallback, because the callback is not installed in that case
+void AudioPolicyService::OpRecordAudioMonitor::checkOp(bool updateUidStates)
+{
+ // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
+ // since it controls the mic permission for legacy apps.
+ const int32_t mode = mAppOpsManager.checkOp(mAppOp,
+ mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+ mAttributionSource.packageName.value_or(""))));
+ const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+ // to avoid verbose logging, only log when the appOp changed
+ ALOGI_IF(hasIt != mHasOp.load(),
+ "App op %d missing, %ssilencing record %s",
+ mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
+ mHasOp.store(hasIt);
+
+ if (updateUidStates) {
+ sp<AudioCommandThread> commandThread = mCommandThread.promote();
+ if (commandThread != nullptr) {
+ commandThread->updateUidStatesCommand();
+ }
+ }
+}
+
+AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
+ const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
+{ }
+
+void AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
+ const String16& packageName __unused) {
+ sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
+ if (monitor != NULL) {
+ if (op != monitor->getOp()) {
+ return;
+ }
+ monitor->checkOp(true);
+ }
+}
+
+
// ----------- AudioPolicyService::AudioCommandThread implementation ----------
AudioPolicyService::AudioCommandThread::AudioCommandThread(String8 name,
@@ -1634,6 +1753,17 @@
mLock.lock();
} break;
+ case UPDATE_UID_STATES: {
+ ALOGV("AudioCommandThread() processing updateUID states");
+ svc = mService.promote();
+ if (svc == 0) {
+ break;
+ }
+ mLock.unlock();
+ svc->updateUidStates();
+ mLock.lock();
+ } break;
+
default:
ALOGW("AudioCommandThread() unknown command %d", command->mCommand);
}
@@ -1847,6 +1977,14 @@
sendCommand(command);
}
+void AudioPolicyService::AudioCommandThread::updateUidStatesCommand()
+{
+ sp<AudioCommand> command = new AudioCommand();
+ command->mCommand = UPDATE_UID_STATES;
+ ALOGV("AudioCommandThread() adding update UID states");
+ sendCommand(command);
+}
+
void AudioPolicyService::AudioCommandThread::updateAudioPatchListCommand()
{
sp<AudioCommand>command = new AudioCommand();
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index ac9c20f..3b77ed8 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -27,6 +27,7 @@
#include <utils/Vector.h>
#include <utils/SortedVector.h>
#include <binder/ActivityManager.h>
+#include <binder/AppOpsManager.h>
#include <binder/BinderService.h>
#include <binder/IUidObserver.h>
#include <system/audio.h>
@@ -358,6 +359,13 @@
static bool isVirtualSource(audio_source_t source);
+ /** Returns true if the audio source must be silenced when the corresponding app op is denied.
+ * Returns false if the audio source does not actually capture from the microphone while
+ * still being mapped to app op OP_RECORD_AUDIO rather than a specialized op tracked separately.
+ * See getOpForSource().
+ */
+ static bool isAppOpSource(audio_source_t source);
+
// If recording we need to make sure the UID is allowed to do that. If the UID is idle
// then it cannot record and gets buffers with zeros - silence. As soon as the UID
// transitions to an active state we will start reporting buffers with data. This approach
@@ -467,6 +475,7 @@
SET_EFFECT_SUSPENDED,
AUDIO_MODULES_UPDATE,
ROUTING_UPDATED,
+ UPDATE_UID_STATES
};
AudioCommandThread (String8 name, const wp<AudioPolicyService>& service);
@@ -514,6 +523,7 @@
bool suspended);
void audioModulesUpdateCommand();
void routingChangedCommand();
+ void updateUidStatesCommand();
void insertCommand_l(AudioCommand *command, int delayMs = 0);
private:
class AudioCommandData;
@@ -814,6 +824,47 @@
bool active; // Playback/Capture is active or inactive
};
+ // Checks and monitors app ops for AudioRecordClient
+ class OpRecordAudioMonitor : public RefBase {
+ public:
+ ~OpRecordAudioMonitor() override;
+ bool hasOp() const;
+ int32_t getOp() const { return mAppOp; }
+
+ static sp<OpRecordAudioMonitor> createIfNeeded(
+ const AttributionSourceState& attributionSource,
+ const audio_attributes_t& attr, wp<AudioCommandThread> commandThread);
+
+ private:
+ OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
+ wp<AudioCommandThread> commandThread);
+
+ void onFirstRef() override;
+
+ AppOpsManager mAppOpsManager;
+
+ class RecordAudioOpCallback : public BnAppOpsCallback {
+ public:
+ explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
+ void opChanged(int32_t op, const String16& packageName) override;
+
+ private:
+ const wp<OpRecordAudioMonitor> mMonitor;
+ };
+
+ sp<RecordAudioOpCallback> mOpCallback;
+ // called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor is updated
+ // in the AppOp callback and in onFirstRef()
+ // updateUidStates is true when the silenced state of active AudioRecordClients must be
+ // re-evaluated
+ void checkOp(bool updateUidStates = false);
+
+ std::atomic_bool mHasOp;
+ const AttributionSourceState mAttributionSource;
+ const int32_t mAppOp;
+ wp<AudioCommandThread> mCommandThread;
+ };
+
// --- AudioRecordClient ---
// Information about each registered AudioRecord client
// (between calls to getInputForAttr() and releaseInput())
@@ -824,20 +875,32 @@
const audio_session_t session, audio_port_handle_t portId,
const audio_port_handle_t deviceId,
const AttributionSourceState& attributionSource,
- bool canCaptureOutput, bool canCaptureHotword) :
+ bool canCaptureOutput, bool canCaptureHotword,
+ wp<AudioCommandThread> commandThread) :
AudioClient(attributes, io, attributionSource,
session, portId, deviceId), attributionSource(attributionSource),
startTimeNs(0), canCaptureOutput(canCaptureOutput),
- canCaptureHotword(canCaptureHotword), silenced(false) {}
+ canCaptureHotword(canCaptureHotword), silenced(false),
+ mOpRecordAudioMonitor(
+ OpRecordAudioMonitor::createIfNeeded(attributionSource,
+ attributes, commandThread)) {}
~AudioRecordClient() override = default;
+ bool hasOp() const {
+ return mOpRecordAudioMonitor ? mOpRecordAudioMonitor->hasOp() : true;
+ }
+
const AttributionSourceState attributionSource; // attribution source of client
nsecs_t startTimeNs;
const bool canCaptureOutput;
const bool canCaptureHotword;
bool silenced;
+
+ private:
+ sp<OpRecordAudioMonitor> mOpRecordAudioMonitor;
};
+
// --- AudioPlaybackClient ---
// Information about each registered AudioTrack client
// (between calls to getOutputForAttr() and releaseOutput())
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index bd2e7dc..80508e4 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -29,6 +29,7 @@
#include "Parameters.h"
#include "system/camera.h"
+#include <android-base/properties.h>
#include <android/hardware/ICamera.h>
#include <media/MediaProfiles.h>
#include <media/mediarecorder.h>
@@ -1247,6 +1248,7 @@
}
}
fastInfo.maxZslSize = maxPrivInputSize;
+ fastInfo.usedZslSize = maxPrivInputSize;
} else {
fastInfo.maxZslSize = {0, 0};
}
@@ -2047,12 +2049,33 @@
slowJpegMode = false;
Size pictureSize = { pictureWidth, pictureHeight };
- int64_t minFrameDurationNs = getJpegStreamMinFrameDurationNs(pictureSize);
- if (previewFpsRange[1] > 1e9/minFrameDurationNs + FPS_MARGIN) {
+ bool zslFrameRateSupported = false;
+ int64_t jpegMinFrameDurationNs = getJpegStreamMinFrameDurationNs(pictureSize);
+ if (previewFpsRange[1] > 1e9/jpegMinFrameDurationNs + FPS_MARGIN) {
slowJpegMode = true;
}
- if (isDeviceZslSupported || slowJpegMode ||
- property_get_bool("camera.disable_zsl_mode", false)) {
+ if (isZslReprocessPresent) {
+ unsigned int firstApiLevel =
+ android::base::GetUintProperty<unsigned int>("ro.product.first_api_level", 0);
+ Size chosenSize;
+ if ((firstApiLevel >= __ANDROID_API_S__) &&
+ !android::base::GetBoolProperty("ro.camera.enableCamera1MaxZsl", false)) {
+ chosenSize = pictureSize;
+ } else {
+ // follow old behavior of keeping max zsl size as the input / output
+ // zsl stream size
+ chosenSize = fastInfo.maxZslSize;
+ }
+ int64_t zslMinFrameDurationNs = getZslStreamMinFrameDurationNs(chosenSize);
+ if (zslMinFrameDurationNs > 0 &&
+ previewFpsRange[1] <= (1e9/zslMinFrameDurationNs + FPS_MARGIN)) {
+ zslFrameRateSupported = true;
+ fastInfo.usedZslSize = chosenSize;
+ }
+ }
+
+ if (isDeviceZslSupported || slowJpegMode || !zslFrameRateSupported ||
+ android::base::GetBoolProperty("camera.disable_zsl_mode", false)) {
allowZslMode = false;
} else {
allowZslMode = isZslReprocessPresent;
@@ -3056,6 +3079,10 @@
return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_BLOB);
}
+int64_t Parameters::getZslStreamMinFrameDurationNs(Parameters::Size size) {
+ return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+}
+
int64_t Parameters::getMinFrameDurationNs(Parameters::Size size, int fmt) {
const int STREAM_DURATION_SIZE = 4;
const int STREAM_FORMAT_OFFSET = 0;
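[Editor's note] The gating logic added above reduces to a frame-duration check: ZSL stays enabled only if the ZSL stream of the chosen size can sustain the requested preview frame rate. A self-contained sketch of that arithmetic (parameter names and the margin value are illustrative):

    // Sketch of the frame-rate feasibility test used above.
    // minFrameDurationNs is the stream's minimum frame duration in nanoseconds;
    // e.g. 33333333 ns corresponds to roughly 30 fps.
    bool zslCanSustainFps(int64_t minFrameDurationNs, double requestedMaxFps,
                          double fpsMargin /* e.g. Parameters::FPS_MARGIN */) {
        if (minFrameDurationNs <= 0) {
            return false;  // size not found in the supported size list
        }
        const double maxSupportedFps = 1e9 / minFrameDurationNs;
        return requestedMaxFps <= maxSupportedFps + fpsMargin;
    }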
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 02ac638..e2f8d011 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -248,6 +248,7 @@
bool useFlexibleYuv;
Size maxJpegSize;
Size maxZslSize;
+ Size usedZslSize;
bool supportsPreferredConfigs;
} fastInfo;
@@ -426,6 +427,11 @@
// return -1 if input jpeg size cannot be found in supported size list
int64_t getJpegStreamMinFrameDurationNs(Parameters::Size size);
+ // Helper function to get minimum frame duration for an
+ // IMPLEMENTATION_DEFINED stream of size 'size'
+ // return -1 if input size cannot be found in supported size list
+ int64_t getZslStreamMinFrameDurationNs(Parameters::Size size);
+
// Helper function to get minimum frame duration for a size/format combination
// return -1 if input size/format combination cannot be found.
int64_t getMinFrameDurationNs(Parameters::Size size, int format);
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 8e598f1..1321e6b 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -235,8 +235,8 @@
}
if (mInputStreamId == NO_STREAM) {
- res = device->createInputStream(params.fastInfo.maxZslSize.width,
- params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ res = device->createInputStream(params.fastInfo.usedZslSize.width,
+ params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
/*isMultiResolution*/false, &mInputStreamId);
if (res != OK) {
ALOGE("%s: Camera %d: Can't create input stream: "
@@ -258,8 +258,8 @@
mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
sp<Surface> outSurface = new Surface(producer);
- res = device->createStream(outSurface, params.fastInfo.maxZslSize.width,
- params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ res = device->createStream(outSurface, params.fastInfo.usedZslSize.width,
+ params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0, &mZslStreamId,
String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
if (res != OK) {
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 221bebb..225dee9 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -654,7 +654,8 @@
* Then there is circular locking dependency.
*/
sp<Surface> consumer = mConsumer;
- size_t remainingBuffers = camera_stream::max_buffers - mHandoutTotalBufferCount;
+ size_t remainingBuffers = (mState == STATE_PREPARING ? mTotalBufferCount :
+ camera_stream::max_buffers) - mHandoutTotalBufferCount;
mLock.unlock();
std::unique_lock<std::mutex> batchLock(mBatchLock);
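[Editor's note] The Camera3OutputStream change above widens the buffer budget while the stream is still being prepared, so that all of the stream's buffers (not just the HAL's max_buffers) count toward the remaining total. A simplified sketch of the intent, with the surrounding locking omitted and state names reduced to a placeholder enum:

    enum class StreamState { Configured, Preparing };

    // Sketch: during preparation, every buffer of the stream may be handed out,
    // so the remaining budget is computed against the total buffer count rather
    // than the HAL-advertised max_buffers.
    size_t remainingBufferBudget(StreamState state, size_t totalBufferCount,
                                 size_t halMaxBuffers, size_t handedOutCount) {
        const size_t budget =
                (state == StreamState::Preparing) ? totalBufferCount : halMaxBuffers;
        return budget > handedOutCount ? budget - handedOutCount : 0;
    }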
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 065c594..46cbdc8 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -34,6 +34,7 @@
#include "cleaner.h"
#include "MediaMetricsService.h"
+#include "StringUtils.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
#include "iface_statsd.h"
@@ -227,6 +228,7 @@
std::string sessionId;
if (item->getString("android.media.mediacodec.log-session-id", &sessionId)) {
+ sessionId = mediametrics::stringutils::sanitizeLogSessionId(sessionId);
metrics_proto.set_log_session_id(sessionId);
}
AStatsEvent_writeString(event, codec.c_str());
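[Editor's note] The actual behavior of mediametrics::stringutils::sanitizeLogSessionId lives in StringUtils and is not shown in this change. A plausible sketch of what such a sanitizer does, assumed rather than taken from the real implementation, is to keep only alphanumeric characters and cap the length so arbitrary client strings cannot leak into statsd:

    #include <cctype>
    #include <string>

    // Hypothetical sanitizer sketch; the real StringUtils helper may differ.
    std::string sanitizeLogSessionIdSketch(const std::string& in, size_t maxLen = 16) {
        std::string out;
        for (char c : in) {
            if (std::isalnum(static_cast<unsigned char>(c))) {
                out.push_back(c);
            }
            if (out.size() >= maxLen) break;
        }
        return out;
    }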
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index 4ac5621..bcf2e0a 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -32,6 +32,7 @@
#include <statslog.h>
#include "MediaMetricsService.h"
+#include "StringUtils.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
#include "iface_statsd.h"
@@ -85,6 +86,7 @@
std::string log_session_id;
if (item->getString("android.media.mediaextractor.logSessionId", &log_session_id)) {
+ log_session_id = mediametrics::stringutils::sanitizeLogSessionId(log_session_id);
metrics_proto.set_log_session_id(log_session_id);
}
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
index af2946b..921b320 100644
--- a/services/mediametrics/statsd_mediaparser.cpp
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -31,6 +31,7 @@
#include <statslog.h>
#include "MediaMetricsService.h"
+#include "StringUtils.h"
#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
#include "iface_statsd.h"
@@ -80,6 +81,7 @@
std::string logSessionId;
item->getString("android.media.mediaparser.logSessionId", &logSessionId);
+ logSessionId = mediametrics::stringutils::sanitizeLogSessionId(logSessionId);
int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
timestamp_nanos,
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 1b312b5..b29ad73 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -32,6 +32,7 @@
#include <statslog.h>
#include "MediaMetricsService.h"
+#include "StringUtils.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
#include "iface_statsd.h"
@@ -58,6 +59,7 @@
// string kRecorderLogSessionId = "android.media.mediarecorder.log-session-id";
std::string log_session_id;
if (item->getString("android.media.mediarecorder.log-session-id", &log_session_id)) {
+ log_session_id = mediametrics::stringutils::sanitizeLogSessionId(log_session_id);
metrics_proto.set_log_session_id(log_session_id);
}
// string kRecorderAudioMime = "android.media.mediarecorder.audio.mime";
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index e387800..2a20981 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -132,10 +132,10 @@
void MediaTranscodingService::instantiate() {
std::shared_ptr<MediaTranscodingService> service =
::ndk::SharedRefBase::make<MediaTranscodingService>();
- binder_status_t status =
- AServiceManager_addService(service->asBinder().get(), getServiceName());
- if (status != STATUS_OK) {
- return;
+ if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+ // Once the service is started, we want it to stay running even if the client side has perished.
+ AServiceManager_forceLazyServicesPersist(true /*persist*/);
+ (void)AServiceManager_registerLazyService(service->asBinder().get(), getServiceName());
}
}
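[Editor's note] The registration above follows the libbinder_ndk lazy-service pattern: the service is only started when a client first asks for it, and forceLazyServicesPersist(true) keeps it alive afterwards. A hedged, standalone sketch of a main() using the same calls; MyService and the service name are placeholders:

    #include <android/binder_interface_utils.h>
    #include <android/binder_manager.h>
    #include <android/binder_process.h>

    // Sketch of a lazily registered NDK binder service.
    int main() {
        auto service = ::ndk::SharedRefBase::make<MyService>();  // placeholder class
        // Keep the service alive even after all clients have disconnected.
        AServiceManager_forceLazyServicesPersist(true /*persist*/);
        binder_status_t status = AServiceManager_registerLazyService(
                service->asBinder().get(), "my.lazy.service" /*placeholder name*/);
        if (status != STATUS_OK) return 1;
        ABinderProcess_joinThreadPool();  // never returns
        return 0;
    }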
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
index 20e4bfb..0cb2fad 100644
--- a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -481,7 +481,7 @@
// Need thread pool to receive callbacks, otherwise oneway callbacks are
// silently ignored.
ABinderProcess_startThreadPool();
- ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
+ ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.transcoding"));
mService = IMediaTranscodingService::fromBinder(binder);
if (mService == nullptr) {
ALOGE("Failed to connect to the media.trascoding service.");