Merge "Define an extensible audio channel layout description"
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index 8fe48c2..716b550 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -1,6 +1,8 @@
[Hook Scripts]
mainline_hook = ${REPO_ROOT}/frameworks/av/tools/mainline_hook_partial.sh ${REPO_ROOT} ${PREUPLOAD_FILES}
+hidden_api_txt_checksorted_hook = ${REPO_ROOT}/tools/platform-compat/hiddenapi/checksorted_sha.sh ${PREUPLOAD_COMMIT} ${REPO_ROOT}
+
[Builtin Hooks]
clang_format = true
diff --git a/apex/Android.bp b/apex/Android.bp
index a86d2b9..b9abd12 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -98,6 +98,35 @@
name: "com.android.media-bootclasspath-fragment",
contents: ["updatable-media"],
apex_available: ["com.android.media"],
+
+ api: {
+ stub_libs: [
+ // Stubs for the APIs provided by updatable-media. This has to be
+ // specified explicitly because updatable-media is not a
+ // java_sdk_library.
+ "framework-media",
+ ],
+ },
+
+ // The bootclasspath_fragments that provide APIs on which this depends.
+ fragments: [
+ {
+ apex: "com.android.art",
+ module: "art-bootclasspath-fragment",
+ },
+ ],
+
+ // Additional stubs libraries that this fragment's contents use which are
+ // not provided by another bootclasspath_fragment.
+ additional_stubs: [
+ "android-non-updatable",
+ ],
+
+ // Additional hidden API flag files to override the defaults. This must only be
+ // modified by the Soong or platform compat team.
+ hidden_api: {
+ max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+ },
}
// Encapsulate the contributions made by the com.android.media to the systemserverclasspath.
diff --git a/apex/hiddenapi/OWNERS b/apex/hiddenapi/OWNERS
new file mode 100644
index 0000000..ac8a2b6
--- /dev/null
+++ b/apex/hiddenapi/OWNERS
@@ -0,0 +1,5 @@
+# soong-team@ as the hiddenapi files are tightly coupled with Soong
+file:platform/build/soong:/OWNERS
+
+# compat-team@ for changes to hiddenapi files
+file:tools/platform-compat:/OWNERS
diff --git a/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt b/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt
new file mode 100644
index 0000000..32bbb10
--- /dev/null
+++ b/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt
@@ -0,0 +1,6 @@
+Landroid/media/MediaSession2$ControllerInfo;-><init>(Landroid/content/Context;IILjava/lang/String;Landroid/os/IInterface;)V
+Landroid/media/MediaSession2$ControllerInfo;->getPackageName()Ljava/lang/String;
+Landroid/media/MediaSession2$ControllerInfo;->getProvider()Landroid/media/update/MediaSession2Provider$ControllerInfoProvider;
+Landroid/media/MediaSession2$ControllerInfo;->getUid()I
+Landroid/media/MediaSession2$ControllerInfo;->isTrusted()Z
+Landroid/media/MediaSession2$ControllerInfo;->mProvider:Landroid/media/update/MediaSession2Provider$ControllerInfoProvider;
diff --git a/apex/mediatranscoding.rc b/apex/mediatranscoding.rc
index 24306a2..ae9f8ba 100644
--- a/apex/mediatranscoding.rc
+++ b/apex/mediatranscoding.rc
@@ -8,4 +8,5 @@
ioprio rt 4
# Restrict to little cores only with system-background cpuset.
writepid /dev/cpuset/system-background/tasks
+ interface aidl media.transcoding
disabled
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index f948dd4..342d771 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -289,13 +289,14 @@
mOutputDelayRingBufferFilled = 0;
mBuffersInfo.clear();
- // To make the codec behave the same before and after a reset, we need to invalidate the
- // streaminfo struct. This does that:
- mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only
-
+ status_t status = UNKNOWN_ERROR;
+ if (mAACDecoder) {
+ aacDecoder_Close(mAACDecoder);
+ status = initDecoder();
+ }
mSignalledError = false;
- return C2_OK;
+ return status == OK ? C2_OK : C2_CORRUPTED;
}
void C2SoftAacDec::onReset() {
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
index 70a2da5..e92d38d 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
@@ -143,7 +143,7 @@
if (!mIsWide) {
Speech_Decode_Frame_reset(mAmrHandle);
} else {
- pvDecoder_AmrWb_Reset(mAmrHandle, 0 /* reset_all */);
+ pvDecoder_AmrWb_Reset(mAmrHandle, 1 /* reset_all */);
}
mSignalledError = false;
mSignalledOutputEos = false;
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index ff59490..f857e87 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -651,8 +651,14 @@
}
}
- CHECK(buffer->image_format == libgav1::kImageFormatYuv420 ||
- buffer->image_format == libgav1::kImageFormatMonochrome400);
+ if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
+ buffer->image_format == libgav1::kImageFormatMonochrome400)) {
+ ALOGE("image_format %d not supported", buffer->image_format);
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return false;
+ }
const bool isMonochrome =
buffer->image_format == libgav1::kImageFormatMonochrome400;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index b47275f..370d33c 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -78,6 +78,19 @@
.build());
addParameter(
+ DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+ .withDefault(new C2StreamBitrateModeTuning::output(
+ 0u, C2Config::BITRATE_VARIABLE))
+ .withFields({
+ C2F(mBitrateMode, value).oneOf({
+ C2Config::BITRATE_CONST,
+ C2Config::BITRATE_VARIABLE})
+ })
+ .withSetter(
+ Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
.withDefault(new C2StreamBitrateInfo::output(0u, 128000))
.withFields({C2F(mBitrate, value).inRange(500, 512000)})
@@ -100,12 +113,14 @@
uint32_t getSampleRate() const { return mSampleRate->value; }
uint32_t getChannelCount() const { return mChannelCount->value; }
uint32_t getBitrate() const { return mBitrate->value; }
+ uint32_t getBitrateMode() const { return mBitrateMode->value; }
uint32_t getComplexity() const { return mComplexity->value; }
private:
std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
};
@@ -135,6 +150,7 @@
mSampleRate = mIntf->getSampleRate();
mChannelCount = mIntf->getChannelCount();
uint32_t bitrate = mIntf->getBitrate();
+ uint32_t bitrateMode = mIntf->getBitrateMode();
int complexity = mIntf->getComplexity();
mNumSamplesPerFrame = mSampleRate / (1000 / mFrameDurationMs);
mNumPcmBytesPerInputFrame =
@@ -189,14 +205,24 @@
return C2_BAD_VALUE;
}
- // Constrained VBR
- if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(1) != OPUS_OK)) {
- ALOGE("failed to set vbr type");
- return C2_BAD_VALUE;
- }
- if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR_CONSTRAINT(1) !=
- OPUS_OK)) {
- ALOGE("failed to set vbr constraint");
+ if (bitrateMode == C2Config::BITRATE_VARIABLE) {
+ // Constrained VBR
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(1) != OPUS_OK)) {
+ ALOGE("failed to set vbr type");
+ return C2_BAD_VALUE;
+ }
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR_CONSTRAINT(1) !=
+ OPUS_OK)) {
+ ALOGE("failed to set vbr constraint");
+ return C2_BAD_VALUE;
+ }
+ } else if (bitrateMode == C2Config::BITRATE_CONST) {
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(0) != OPUS_OK)) {
+ ALOGE("failed to set cbr type");
+ return C2_BAD_VALUE;
+ }
+ } else {
+ ALOGE("unknown bitrate mode");
return C2_BAD_VALUE;
}
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index f5d6529..2cc7ab7 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -2392,22 +2392,24 @@
C2StreamTunnelStartRender;
constexpr char C2_PARAMKEY_TUNNEL_START_RENDER[] = "output.tunnel-start-render";
-C2ENUM(C2PlatformConfig::encoding_quality_level_t, uint32_t,
- NONE,
- S_HANDHELD,
- S_HANDHELD_PC
-);
-
-namespace android {
-
/**
* Encoding quality level signaling.
+ *
+ * Signal the 'minimum encoding quality' introduced in Android 12/S. It indicates
+ * whether the underlying codec is expected to take extra steps to ensure quality meets the
+ * appropriate minimum. A value of NONE indicates that the codec is not to apply
+ * any minimum quality bar requirements. Other values indicate that the codec is to apply
+ * a minimum quality bar, with the exact quality bar being decided by the parameter value.
*/
typedef C2GlobalParam<C2Setting,
C2SimpleValueStruct<C2EasyEnum<C2PlatformConfig::encoding_quality_level_t>>,
kParamIndexEncodingQualityLevel> C2EncodingQualityLevel;
+constexpr char C2_PARAMKEY_ENCODING_QUALITY_LEVEL[] = "algo.encoding-quality-level";
-}
+C2ENUM(C2PlatformConfig::encoding_quality_level_t, uint32_t,
+ NONE = 0,
+ S_HANDHELD = 1 // corresponds to VMAF=70
+);
/// @}
diff --git a/media/codec2/core/include/C2Work.h b/media/codec2/core/include/C2Work.h
index 67084cc..794402f 100644
--- a/media/codec2/core/include/C2Work.h
+++ b/media/codec2/core/include/C2Work.h
@@ -145,10 +145,35 @@
*/
FLAG_INCOMPLETE = (1 << 3),
/**
+ * This frame has been corrected due to a bitstream error. This is a hint, and in most cases
+ * can be ignored. This flag can be set by components on their output to signal the clients
+ * that errors may be present but the frame should be used nonetheless. It can also be set
+ * by clients to signal that the input frame has been corrected, but nonetheless should be
+ * processed.
+ */
+ FLAG_CORRECTED = (1 << 4),
+ /**
+ * This frame is corrupt due to a bitstream error. This is similar to FLAG_CORRECTED,
+ * with the exception that this is a hint that downstream components should not process this
+ * frame.
+ * <p>
+ * If set on the input by the client, the input is likely non-processable and should be
+ * handled similarly to an uncorrectable bitstream error being detected. For components that operate
+ * on whole access units, this flag can be propagated to the output. Other components should
+ * aim to detect access unit boundaries to determine if any part of the input frame can be
+ * processed.
+ * <p>
+ * If set by the component, this signals to the client that the output is non-usable -
+ * including possibly the metadata that may also be non-usable; -- however, the component
+ * will try to recover on successive input frames.
+ */
+ FLAG_CORRUPT = (1 << 5),
+
+ /**
* This frame contains only codec-specific configuration data, and no actual access unit.
*
- * \deprecated pass codec configuration with using the \todo codec-specific configuration
- * info together with the access unit.
+ * \deprecated pass codec configuration using the C2InitData info parameter together
+ * with the access unit.
*/
FLAG_CODEC_CONFIG = (1u << 31),
};
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index 6a00edd..a6507e7 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -334,6 +334,12 @@
int bytesCount = nWidth * nHeight * 3 >> 1;
int32_t timestampIncr = ENCODER_TIMESTAMP_INCREMENT;
c2_status_t err = C2_OK;
+
+ // Query component's memory usage flags
+ std::vector<std::unique_ptr<C2Param>> params;
+ C2StreamUsageTuning::input compUsage(0u, 0u);
+ component->query({&compUsage}, {}, C2_DONT_BLOCK, ¶ms);
+
while (1) {
if (nFrames == 0) break;
uint32_t flags = 0;
@@ -384,7 +390,8 @@
}
std::shared_ptr<C2GraphicBlock> block;
err = graphicPool->fetchGraphicBlock(nWidth, nHeight, HAL_PIXEL_FORMAT_YV12,
- {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE},
+ {C2MemoryUsage::CPU_READ | compUsage.value,
+ C2MemoryUsage::CPU_WRITE | compUsage.value},
&block);
if (err != C2_OK) {
fprintf(stderr, "fetchGraphicBlock failed : %d\n", err);
diff --git a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
index 7de3503..b942be7 100644
--- a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
+++ b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
@@ -626,6 +626,14 @@
}
LOG(VERBOSE) << "work #" << workCount << ": flags=" << work->input.flags
<< " timestamp=" << work->input.ordinal.timestamp.peek();;
+
+ std::vector<C2Param *> configUpdate;
+ for (const std::unique_ptr<C2Param> ¶m : work->input.configUpdate) {
+ configUpdate.push_back(param.get());
+ }
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ mIntf->config_vb(configUpdate, C2_MAY_BLOCK, &failures);
+
std::shared_ptr<C2StreamHdrStaticInfo::input> hdrStaticInfo =
mIntf->getHdrStaticMetadata();
uint32_t dataspace = mIntf->getDataSpace();
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 2df0ba2..c275187 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -909,6 +909,8 @@
}
}));
+ add(ConfigMapper("android._encoding-quality-level", C2_PARAMKEY_ENCODING_QUALITY_LEVEL, "value")
+ .limitTo(D::ENCODER & (D::CONFIG | D::PARAM)));
add(ConfigMapper(KEY_QUALITY, C2_PARAMKEY_QUALITY, "value")
.limitTo(D::ENCODER & (D::CONFIG | D::PARAM)));
add(ConfigMapper(KEY_FLAC_COMPRESSION_LEVEL, C2_PARAMKEY_COMPLEXITY, "value")
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 00bf84f..4d939fa 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -92,6 +92,7 @@
ALookup<C2Config::bitrate_mode_t, int32_t> sBitrateModes = {
{ C2Config::BITRATE_CONST, BITRATE_MODE_CBR },
+ { C2Config::BITRATE_CONST_SKIP_ALLOWED, BITRATE_MODE_CBR_FD },
{ C2Config::BITRATE_VARIABLE, BITRATE_MODE_VBR },
{ C2Config::BITRATE_IGNORE, BITRATE_MODE_CQ },
};
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 6ca5fc8..4b08295 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -82,7 +82,9 @@
/**
* This format uses 24-bit samples packed into 3 bytes.
- * The bytes are in the native endian order.
+ * The bytes are in little-endian order, so the least significant byte
+ * comes first in the byte array.
+ *
* The maximum range of the data is -8388608 (0x800000)
* to 8388607 (0x7FFFFF).
*
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index b6bd0e1..0f24771 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -91,10 +91,15 @@
AudioTrack::Buffer *audioBuffer = static_cast<AudioTrack::Buffer *>(info);
if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
ALOGW("processCallbackCommon() data, stream disconnected");
+ // This will kill the stream and prevent it from being restarted.
+ // That is OK because the stream is disconnected.
audioBuffer->size = SIZE_STOP_CALLBACKS;
} else if (!mCallbackEnabled.load()) {
- ALOGW("processCallbackCommon() no data because callback disabled");
- audioBuffer->size = SIZE_STOP_CALLBACKS;
+ ALOGW("processCallbackCommon() no data because callback disabled, set size=0");
+ // Do NOT use SIZE_STOP_CALLBACKS here because that will kill the stream and
+ // prevent it from being restarted. This can occur because of a race condition
+ // caused by Legacy callbacks running after the track is "stopped".
+ audioBuffer->size = 0;
} else {
if (audioBuffer->frameCount == 0) {
ALOGW("processCallbackCommon() data, frameCount is zero");
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
index e420d07..c19d2c2 100644
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ b/media/libaudiohal/FactoryHalHidl.cpp
@@ -94,7 +94,7 @@
} // namespace
void* createPreferredImpl(const std::string& package, const std::string& interface) {
- for (auto version = detail::sAudioHALVersions; version != nullptr; ++version) {
+ for (auto version = detail::sAudioHALVersions; *version != nullptr; ++version) {
void* rawInterface = nullptr;
if (hasHalService(package, *version, interface)
&& createHalService(*version, interface, &rawInterface)) {
diff --git a/media/libmediaformatshaper/VQApply.cpp b/media/libmediaformatshaper/VQApply.cpp
index 585ec6c..26ff446 100644
--- a/media/libmediaformatshaper/VQApply.cpp
+++ b/media/libmediaformatshaper/VQApply.cpp
@@ -63,13 +63,62 @@
return 0;
}
- if (codec->supportedMinimumQuality() > 0) {
- // allow the codec provided minimum quality behavior to work at it
- ALOGD("minquality: codec claims to implement minquality=%d",
- codec->supportedMinimumQuality());
+ // only proceed if we're in the handheld category.
+ // We embed this information within the codec record when we build up features
+ // and pass them in from MediaCodec; it's the easiest place to store it
+ //
+ // TODO: make a #define for '_vq_eligible.device' here and in MediaCodec.cpp
+ //
+ int32_t isVQEligible = 0;
+ (void) codec->getFeatureValue("_vq_eligible.device", &isVQEligible);
+ if (!isVQEligible) {
+ ALOGD("minquality: not an eligible device class");
return 0;
}
+ // look at resolution to determine if we want any shaping/modification at all.
+ //
+ // we currently only shape (or ask the underlying codec to shape) for
+ // resolution range 320x240 < target <= 1920x1080
+ // NB: the < vs <=, that is deliberate.
+ //
+
+ int32_t width = 0;
+ (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_WIDTH, &width);
+ int32_t height = 0;
+ (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_HEIGHT, &height);
+ int64_t pixels = ((int64_t)width) * height;
+
+ bool eligibleSize = true;
+ if (pixels <= 320 * 240) {
+ eligibleSize = false;
+ } else if (pixels > 1920 * 1088) {
+ eligibleSize = false;
+ }
+
+ if (!eligibleSize) {
+ // we won't shape, and ask that the codec not shape
+ ALOGD("minquality: %dx%d outside of shaping range", width, height);
+ AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", 0);
+ return 0;
+ }
+
+ if (codec->supportedMinimumQuality() > 0) {
+ // let the codec-provided minimum quality behavior handle it
+ ALOGD("minquality: codec claims to implement minquality=%d",
+ codec->supportedMinimumQuality());
+
+ // tell the underlying codec to do its thing; we won't try to second guess.
+ // default to 1, aka S_HANDHELD;
+ int32_t qualityTarget = 1;
+ (void) codec->getFeatureValue("_quality.target", &qualityTarget);
+ AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", qualityTarget);
+ return 0;
+ }
+
+ // let the codec know that we'll be enforcing the minimum quality standards
+ AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", 0);
+
//
// consider any and all tools available
// -- qp
@@ -84,11 +133,8 @@
bitrateConfigured = bitrateConfiguredTmp;
bitrateChosen = bitrateConfigured;
- int32_t width = 0;
- (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_WIDTH, &width);
- int32_t height = 0;
- (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_HEIGHT, &height);
- int64_t pixels = ((int64_t)width) * height;
+ // width, height, and pixels are calculated above
+
double minimumBpp = codec->getBpp(width, height);
int64_t bitrateFloor = pixels * minimumBpp;
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 6dc3e3f..2aabd53 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -233,7 +233,7 @@
for (size_t i = 0; i < matchingCodecs.size(); ++i) {
const AString &componentName = matchingCodecs[i];
- sp<ImageDecoder> decoder = new ImageDecoder(componentName, trackMeta, source);
+ sp<MediaImageDecoder> decoder = new MediaImageDecoder(componentName, trackMeta, source);
int64_t frameTimeUs = thumbnail ? -1 : 0;
if (decoder->init(frameTimeUs, 0 /*option*/, colorFormat) == OK) {
sp<IMemory> frame = decoder->extractFrame(rect);
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index a15a9f1..94a0424 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -741,7 +741,7 @@
////////////////////////////////////////////////////////////////////////
-ImageDecoder::ImageDecoder(
+MediaImageDecoder::MediaImageDecoder(
const AString &componentName,
const sp<MetaData> &trackMeta,
const sp<IMediaSource> &source)
@@ -757,7 +757,7 @@
mTargetTiles(0) {
}
-sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
+sp<AMessage> MediaImageDecoder::onGetFormatAndSeekOptions(
int64_t frameTimeUs, int /*seekMode*/,
MediaSource::ReadOptions *options, sp<Surface> * /*window*/) {
sp<MetaData> overrideMeta;
@@ -833,7 +833,7 @@
return videoFormat;
}
-status_t ImageDecoder::onExtractRect(FrameRect *rect) {
+status_t MediaImageDecoder::onExtractRect(FrameRect *rect) {
// TODO:
// This callback is for verifying whether we can decode the rect,
// and if so, set up the internal variables for decoding.
@@ -872,7 +872,7 @@
return OK;
}
-status_t ImageDecoder::onOutputReceived(
+status_t MediaImageDecoder::onOutputReceived(
const sp<MediaCodecBuffer> &videoFrameBuffer,
const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
if (outputFormat == NULL) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 13e7279..a366506 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -31,6 +31,8 @@
#include "include/SoftwareRenderer.h"
#include "PlaybackDurationAccumulator.h"
+#include <android/binder_manager.h>
+#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -40,6 +42,7 @@
#include <android/binder_manager.h>
#include <android/dlext.h>
#include <binder/IMemory.h>
+#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
#include <cutils/properties.h>
#include <gui/BufferQueue.h>
@@ -1697,6 +1700,7 @@
//
static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
+static bool sIsHandheld = true;
static bool connectFormatShaper() {
static std::once_flag sCheckOnce;
@@ -1770,6 +1774,64 @@
ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
(loading_finished - loading_started)/1000);
+
+ // we also want to know whether this is a handheld device
+ // start with assumption that the device is handheld.
+ sIsHandheld = true;
+ sp<IServiceManager> serviceMgr = defaultServiceManager();
+ sp<content::pm::IPackageManagerNative> packageMgr;
+ if (serviceMgr.get() != nullptr) {
+ sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
+ packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
+ }
+ // if we didn't get serviceMgr, we'll leave packageMgr as default null
+ if (packageMgr != nullptr) {
+
+ // MUST have these
+ static const String16 featuresNeeded[] = {
+ String16("android.hardware.touchscreen")
+ };
+ // these must be present to be a handheld
+ for (::android::String16 required : featuresNeeded) {
+ bool hasFeature = false;
+ binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
+ if (!status.isOk()) {
+ ALOGE("%s: hasSystemFeature failed: %s",
+ __func__, status.exceptionMessage().c_str());
+ continue;
+ }
+ ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
+ if (!hasFeature) {
+ ALOGV("... which means we are not handheld");
+ sIsHandheld = false;
+ break;
+ }
+ }
+
+ // MUST NOT have these
+ static const String16 featuresDisallowed[] = {
+ String16("android.hardware.type.automotive"),
+ String16("android.hardware.type.television"),
+ String16("android.hardware.type.watch")
+ };
+ // any of these present -- we aren't a handheld
+ for (::android::String16 forbidden : featuresDisallowed) {
+ bool hasFeature = false;
+ binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
+ if (!status.isOk()) {
+ ALOGE("%s: hasSystemFeature failed: %s",
+ __func__, status.exceptionMessage().c_str());
+ continue;
+ }
+ ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
+ if (hasFeature) {
+ ALOGV("... which means we are not handheld");
+ sIsHandheld = false;
+ break;
+ }
+ }
+ }
+
});
return true;
@@ -1848,6 +1910,18 @@
}
}
}
+
+ // we also carry in the codec description whether we are on a handheld device.
+ // this info is eventually used by both the Codec and the C2 machinery to inform
+ // the underlying codec whether to do any shaping.
+ //
+ if (sIsHandheld) {
+ // set if we are indeed a handheld device (or, in the future, any eligible device);
+ // left unset on devices that aren't eligible for minimum quality enforcement.
+ (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
+ // strictly speaking, it's a tuning, but those are strings and feature stores int
+ (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
+ }
}
status_t MediaCodec::setupFormatShaper(AString mediaType) {
@@ -1888,6 +1962,16 @@
// Format Shaping
// Mapping and Manipulation of encoding parameters
//
+// All of these decisions are pushed into the shaper instead of here within MediaCodec.
+// this includes decisions based on whether the codec implements minimum quality bars
+// itself or needs to be shaped outside of the codec.
+// This keeps all those decisions in one place.
+// It also means that we push some extra decision information (is this a handheld device
+// or one that is otherwise eligible for minimum quality manipulation, which generational
+// quality target is in force, etc). This allows those values to be cached in the
+// per-codec structures that are done 1 time within a process instead of for each
+// codec instantiation.
+//
status_t MediaCodec::shapeMediaFormat(
const sp<AMessage> &format,
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index a5c3ba6..6893324 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -307,8 +307,16 @@
sp<MetaData> meta = mImpl->getMetaData();
+ if (meta == nullptr) {
+ // extractor did not publish file metadata
+ return -EINVAL;
+ }
+
const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
+ if (!meta->findCString(kKeyMIMEType, &mime)) {
+ // no mime type maps to invalid
+ return -EINVAL;
+ }
*format = new AMessage();
(*format)->setString("mime", mime);
@@ -354,6 +362,11 @@
sp<MetaData> meta = mImpl->getMetaData();
+ if (meta == nullptr) {
+ // extractor did not publish file metadata
+ return -EINVAL;
+ }
+
int64_t exifOffset, exifSize;
if (meta->findInt64(kKeyExifOffset, &exifOffset)
&& meta->findInt64(kKeyExifSize, &exifSize)) {
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index a32bc26..a4e3425 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -237,7 +237,7 @@
<Limit name="sample-rate" ranges="8000,12000,16000,24000,48000" />
<Limit name="bitrate" range="500-512000" />
<Limit name="complexity" range="0-10" default="5" />
- <Feature name="bitrate-modes" value="CBR" />
+ <Feature name="bitrate-modes" value="CBR,VBR" />
</MediaCodec>
<MediaCodec name="c2.android.h263.encoder" type="video/3gpp">
<Alias name="OMX.google.h263.encoder" />
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 6bb7b37..c2114b3 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -54,13 +54,11 @@
AMessage::AMessage(void)
: mWhat(0),
- mTarget(0),
- mNumItems(0) {
+ mTarget(0) {
}
AMessage::AMessage(uint32_t what, const sp<const AHandler> &handler)
- : mWhat(what),
- mNumItems(0) {
+ : mWhat(what) {
setTarget(handler);
}
@@ -89,13 +87,13 @@
}
void AMessage::clear() {
- for (size_t i = 0; i < mNumItems; ++i) {
- Item *item = &mItems[i];
- delete[] item->mName;
- item->mName = NULL;
- freeItemValue(item);
+ // Item needs to be handled delicately
+ for (Item &item : mItems) {
+ delete[] item.mName;
+ item.mName = NULL;
+ freeItemValue(&item);
}
- mNumItems = 0;
+ mItems.clear();
}
void AMessage::freeItemValue(Item *item) {
@@ -157,7 +155,7 @@
size_t memchecks = 0;
#endif
size_t i = 0;
- for (; i < mNumItems; i++) {
+ for (; i < mItems.size(); i++) {
if (len != mItems[i].mNameLength) {
continue;
}
@@ -172,7 +170,7 @@
{
Mutex::Autolock _l(gLock);
++gFindItemCalls;
- gAverageNumItems += mNumItems;
+ gAverageNumItems += mItems.size();
gAverageNumMemChecks += memchecks;
gAverageNumChecks += i;
reportStats();
@@ -188,20 +186,26 @@
memcpy((void*)mName, name, len + 1);
}
+AMessage::Item::Item(const char *name, size_t len)
+ : mType(kTypeInt32) {
+ // mName and mNameLength are initialized by setName
+ setName(name, len);
+}
+
AMessage::Item *AMessage::allocateItem(const char *name) {
size_t len = strlen(name);
size_t i = findItemIndex(name, len);
Item *item;
- if (i < mNumItems) {
+ if (i < mItems.size()) {
item = &mItems[i];
freeItemValue(item);
} else {
- CHECK(mNumItems < kMaxNumItems);
- i = mNumItems++;
+ CHECK(mItems.size() < kMaxNumItems);
+ i = mItems.size();
+ // place a 'blank' item at the end - this is of type kTypeInt32
+ mItems.emplace_back(name, len);
item = &mItems[i];
- item->mType = kTypeInt32;
- item->setName(name, len);
}
return item;
@@ -210,7 +214,7 @@
const AMessage::Item *AMessage::findItem(
const char *name, Type type) const {
size_t i = findItemIndex(name, strlen(name));
- if (i < mNumItems) {
+ if (i < mItems.size()) {
const Item *item = &mItems[i];
return item->mType == type ? item : NULL;
@@ -220,7 +224,7 @@
bool AMessage::findAsFloat(const char *name, float *value) const {
size_t i = findItemIndex(name, strlen(name));
- if (i < mNumItems) {
+ if (i < mItems.size()) {
const Item *item = &mItems[i];
switch (item->mType) {
case kTypeFloat:
@@ -247,7 +251,7 @@
bool AMessage::findAsInt64(const char *name, int64_t *value) const {
size_t i = findItemIndex(name, strlen(name));
- if (i < mNumItems) {
+ if (i < mItems.size()) {
const Item *item = &mItems[i];
switch (item->mType) {
case kTypeInt64:
@@ -265,15 +269,16 @@
bool AMessage::contains(const char *name) const {
size_t i = findItemIndex(name, strlen(name));
- return i < mNumItems;
+ return i < mItems.size();
}
#define BASIC_TYPE(NAME,FIELDNAME,TYPENAME) \
void AMessage::set##NAME(const char *name, TYPENAME value) { \
Item *item = allocateItem(name); \
- \
- item->mType = kType##NAME; \
- item->u.FIELDNAME = value; \
+ if (item) { \
+ item->mType = kType##NAME; \
+ item->u.FIELDNAME = value; \
+ } \
} \
\
/* NOLINT added to avoid incorrect warning/fix from clang.tidy */ \
@@ -298,8 +303,10 @@
void AMessage::setString(
const char *name, const char *s, ssize_t len) {
Item *item = allocateItem(name);
- item->mType = kTypeString;
- item->u.stringValue = new AString(s, len < 0 ? strlen(s) : len);
+ if (item) {
+ item->mType = kTypeString;
+ item->u.stringValue = new AString(s, len < 0 ? strlen(s) : len);
+ }
}
void AMessage::setString(
@@ -310,10 +317,12 @@
void AMessage::setObjectInternal(
const char *name, const sp<RefBase> &obj, Type type) {
Item *item = allocateItem(name);
- item->mType = type;
+ if (item) {
+ item->mType = type;
- if (obj != NULL) { obj->incStrong(this); }
- item->u.refValue = obj.get();
+ if (obj != NULL) { obj->incStrong(this); }
+ item->u.refValue = obj.get();
+ }
}
void AMessage::setObject(const char *name, const sp<RefBase> &obj) {
@@ -326,22 +335,26 @@
void AMessage::setMessage(const char *name, const sp<AMessage> &obj) {
Item *item = allocateItem(name);
- item->mType = kTypeMessage;
+ if (item) {
+ item->mType = kTypeMessage;
- if (obj != NULL) { obj->incStrong(this); }
- item->u.refValue = obj.get();
+ if (obj != NULL) { obj->incStrong(this); }
+ item->u.refValue = obj.get();
+ }
}
void AMessage::setRect(
const char *name,
int32_t left, int32_t top, int32_t right, int32_t bottom) {
Item *item = allocateItem(name);
- item->mType = kTypeRect;
+ if (item) {
+ item->mType = kTypeRect;
- item->u.rectValue.mLeft = left;
- item->u.rectValue.mTop = top;
- item->u.rectValue.mRight = right;
- item->u.rectValue.mBottom = bottom;
+ item->u.rectValue.mLeft = left;
+ item->u.rectValue.mTop = top;
+ item->u.rectValue.mRight = right;
+ item->u.rectValue.mBottom = bottom;
+ }
}
bool AMessage::findString(const char *name, AString *value) const {
@@ -466,18 +479,18 @@
sp<AMessage> AMessage::dup() const {
sp<AMessage> msg = new AMessage(mWhat, mHandler.promote());
- msg->mNumItems = mNumItems;
+ msg->mItems = mItems;
#ifdef DUMP_STATS
{
Mutex::Autolock _l(gLock);
++gDupCalls;
- gAverageDupItems += mNumItems;
+ gAverageDupItems += mItems.size();
reportStats();
}
#endif
- for (size_t i = 0; i < mNumItems; ++i) {
+ for (size_t i = 0; i < mItems.size(); ++i) {
const Item *from = &mItems[i];
Item *to = &msg->mItems[i];
@@ -560,7 +573,7 @@
}
s.append(") = {\n");
- for (size_t i = 0; i < mNumItems; ++i) {
+ for (size_t i = 0; i < mItems.size(); ++i) {
const Item &item = mItems[i];
switch (item.mType) {
@@ -653,19 +666,20 @@
sp<AMessage> msg = new AMessage();
msg->setWhat(what);
- msg->mNumItems = static_cast<size_t>(parcel.readInt32());
- if (msg->mNumItems > kMaxNumItems) {
+ size_t numItems = static_cast<size_t>(parcel.readInt32());
+ if (numItems > kMaxNumItems) {
ALOGE("Too large number of items clipped.");
- msg->mNumItems = kMaxNumItems;
+ numItems = kMaxNumItems;
}
+ msg->mItems.resize(numItems);
- for (size_t i = 0; i < msg->mNumItems; ++i) {
+ for (size_t i = 0; i < msg->mItems.size(); ++i) {
Item *item = &msg->mItems[i];
const char *name = parcel.readCString();
if (name == NULL) {
ALOGE("Failed reading name for an item. Parsing aborted.");
- msg->mNumItems = i;
+ msg->mItems.resize(i);
break;
}
@@ -709,7 +723,7 @@
if (stringValue == NULL) {
ALOGE("Failed reading string value from a parcel. "
"Parsing aborted.");
- msg->mNumItems = i;
+ msg->mItems.resize(i);
continue;
// The loop will terminate subsequently.
} else {
@@ -754,11 +768,9 @@
void AMessage::writeToParcel(Parcel *parcel) const {
parcel->writeInt32(static_cast<int32_t>(mWhat));
- parcel->writeInt32(static_cast<int32_t>(mNumItems));
+ parcel->writeInt32(static_cast<int32_t>(mItems.size()));
- for (size_t i = 0; i < mNumItems; ++i) {
- const Item &item = mItems[i];
-
+ for (const Item &item : mItems) {
parcel->writeCString(item.mName);
parcel->writeInt32(static_cast<int32_t>(item.mType));
@@ -828,8 +840,7 @@
diff->setTarget(mHandler.promote());
}
- for (size_t i = 0; i < mNumItems; ++i) {
- const Item &item = mItems[i];
+ for (const Item &item : mItems) {
const Item *oitem = other->findItem(item.mName, item.mType);
switch (item.mType) {
case kTypeInt32:
@@ -936,11 +947,11 @@
}
size_t AMessage::countEntries() const {
- return mNumItems;
+ return mItems.size();
}
const char *AMessage::getEntryNameAt(size_t index, Type *type) const {
- if (index >= mNumItems) {
+ if (index >= mItems.size()) {
*type = kTypeInt32;
return NULL;
@@ -953,7 +964,7 @@
AMessage::ItemData AMessage::getEntryAt(size_t index) const {
ItemData it;
- if (index < mNumItems) {
+ if (index < mItems.size()) {
switch (mItems[index].mType) {
case kTypeInt32: it.set(mItems[index].u.int32Value); break;
case kTypeInt64: it.set(mItems[index].u.int64Value); break;
@@ -986,7 +997,7 @@
}
status_t AMessage::setEntryNameAt(size_t index, const char *name) {
- if (index >= mNumItems) {
+ if (index >= mItems.size()) {
return BAD_INDEX;
}
if (name == nullptr) {
@@ -996,7 +1007,7 @@
return OK; // name has not changed
}
size_t len = strlen(name);
- if (findItemIndex(name, len) < mNumItems) {
+ if (findItemIndex(name, len) < mItems.size()) {
return ALREADY_EXISTS;
}
delete[] mItems[index].mName;
@@ -1011,7 +1022,7 @@
sp<AMessage> msgValue;
sp<ABuffer> bufValue;
- if (index >= mNumItems) {
+ if (index >= mItems.size()) {
return BAD_INDEX;
}
if (!item.used()) {
@@ -1060,21 +1071,22 @@
}
status_t AMessage::removeEntryAt(size_t index) {
- if (index >= mNumItems) {
+ if (index >= mItems.size()) {
return BAD_INDEX;
}
// delete entry data and objects
- --mNumItems;
delete[] mItems[index].mName;
mItems[index].mName = nullptr;
freeItemValue(&mItems[index]);
// swap entry with last entry and clear last entry's data
- if (index < mNumItems) {
- mItems[index] = mItems[mNumItems];
- mItems[mNumItems].mName = nullptr;
- mItems[mNumItems].mType = kTypeInt32;
+ size_t lastIndex = mItems.size() - 1;
+ if (index < lastIndex) {
+ mItems[index] = mItems[lastIndex];
+ mItems[lastIndex].mName = nullptr;
+ mItems[lastIndex].mType = kTypeInt32;
}
+ mItems.pop_back();
return OK;
}
@@ -1083,7 +1095,7 @@
return BAD_VALUE;
}
size_t index = findEntryByName(name);
- if (index >= mNumItems) {
+ if (index >= mItems.size()) {
return BAD_INDEX;
}
return removeEntryAt(index);
@@ -1093,7 +1105,7 @@
if (item.used()) {
Item *it = allocateItem(name);
if (it != nullptr) {
- setEntryAt(it - mItems, item);
+ setEntryAt(it - &mItems[0], item);
}
}
}
@@ -1108,11 +1120,11 @@
return;
}
- for (size_t ix = 0; ix < other->mNumItems; ++ix) {
+ for (size_t ix = 0; ix < other->mItems.size(); ++ix) {
Item *it = allocateItem(other->mItems[ix].mName);
if (it != nullptr) {
ItemData data = other->getEntryAt(ix);
- setEntryAt(it - mItems, data);
+ setEntryAt(it - &mItems[0], data);
}
}
}
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
index 98d6147..960212a 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
@@ -24,6 +24,8 @@
#include <utils/KeyedVector.h>
#include <utils/RefBase.h>
+#include <vector>
+
namespace android {
struct ABuffer;
@@ -95,6 +97,7 @@
void setTarget(const sp<const AHandler> &handler);
+ // removes all items
void clear();
void setInt32(const char *name, int32_t value);
@@ -302,16 +305,39 @@
size_t mNameLength;
Type mType;
void setName(const char *name, size_t len);
+ Item() : mName(nullptr), mNameLength(0), mType(kTypeInt32) { }
+ Item(const char *name, size_t length);
};
enum {
- kMaxNumItems = 64
+ kMaxNumItems = 256
};
- Item mItems[kMaxNumItems];
- size_t mNumItems;
+ std::vector<Item> mItems;
+ /**
+ * Allocates an item with the given key |name|. If the key already exists, the corresponding
+ * item value is freed. Otherwise a new item is added.
+ *
+ * This method currently asserts if the number of elements would exceed the max number of
+ * elements allowed (kMaxNumItems). This is a security precaution to avoid arbitrarily large
+ * AMessage structures.
+ *
+ * @todo(b/192153245) Either revisit this security precaution, or change the behavior to
+ * silently ignore keys added after the max number of elements is reached.
+ *
+ * @note All previously returned Item* pointers are deemed invalid after this call. (E.g. from
+ * allocateItem or findItem)
+ *
+ * @param name the key for the requested item.
+ *
+ * @return Item* a pointer to the item.
+ */
Item *allocateItem(const char *name);
+
+ /** Frees the value for the item. */
void freeItemValue(Item *item);
+
+ /** Finds an item with given key |name| and |type|. Returns nullptr if item is not found. */
const Item *findItem(const char *name, Type type) const;
void setObjectInternal(
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index bca7f01..d59e4f5 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -146,8 +146,8 @@
status_t captureSurface();
};
-struct ImageDecoder : public FrameDecoder {
- ImageDecoder(
+struct MediaImageDecoder : public FrameDecoder {
+ MediaImageDecoder(
const AString &componentName,
const sp<MetaData> &trackMeta,
const sp<IMediaSource> &source);
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 6371769..4237e8c 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -557,12 +557,14 @@
}
constexpr int32_t BITRATE_MODE_CBR = 2;
+constexpr int32_t BITRATE_MODE_CBR_FD = 3;
constexpr int32_t BITRATE_MODE_CQ = 0;
constexpr int32_t BITRATE_MODE_VBR = 1;
inline static const char *asString_BitrateMode(int32_t i, const char *def = "??") {
switch (i) {
case BITRATE_MODE_CBR: return "CBR";
+ case BITRATE_MODE_CBR_FD: return "CBR_FD";
case BITRATE_MODE_CQ: return "CQ";
case BITRATE_MODE_VBR: return "VBR";
default: return def;
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
index a628c70..c251479 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
@@ -42,7 +42,7 @@
if (isVideoDecoder) {
decoder = new VideoFrameDecoder(componentName, trackMeta, source);
} else {
- decoder = new ImageDecoder(componentName, trackMeta, source);
+ decoder = new MediaImageDecoder(componentName, trackMeta, source);
}
while (fdp.remaining_bytes()) {
@@ -80,4 +80,3 @@
}
} // namespace android
-
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 3e4e4932..e25658f 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -35,7 +35,6 @@
"android.hardware.media.omx@1.0",
"libandroidicu",
"libfmq",
- "libbase",
"libbinder",
"libhidlbase",
"liblog",
@@ -53,9 +52,12 @@
"frameworks/av/services/mediaresourcemanager",
],
- // mediaserver has only been verified on 32-bit, see b/126502613
- // use "prefer32" to *only* enable 64-bit builds on 64-bit-only lunch
- // targets, which allows them to reach 'boot_complete'.
+ // By default mediaserver runs in 32-bit to save memory, except
+ // on 64-bit-only lunch targets.
+ // ****************************************************************
+ // TO ENABLE 64-BIT MEDIASERVER ON MIXED 32/64-BIT DEVICES, COMMENT
+ // OUT THE FOLLOWING LINE:
+ // ****************************************************************
compile_multilib: "prefer32",
init_rc: ["mediaserver.rc"],
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index dc1b9b8..58e2d2a 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -18,7 +18,6 @@
#define LOG_TAG "mediaserver"
//#define LOG_NDEBUG 0
-#include <android-base/properties.h>
#include <binder/IPCThreadState.h>
#include <binder/ProcessState.h>
#include <binder/IServiceManager.h>
@@ -43,12 +42,6 @@
ResourceManagerService::instantiate();
registerExtensions();
::android::hardware::configureRpcThreadpool(16, false);
-
- if (!android::base::GetBoolProperty("ro.config.low_ram", false)) {
- // Start the media.transcoding service if the device is not low ram
- // device.
- android::base::SetProperty("ctl.start", "media.transcoding");
- }
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
::android::hardware::joinRpcThreadpool();
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 88aa7cb..e8552c4 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -21,42 +21,6 @@
#error This header file should only be included from AudioFlinger.h
#endif
-// Checks and monitors app ops for audio record
-class OpRecordAudioMonitor : public RefBase {
-public:
- ~OpRecordAudioMonitor() override;
- bool hasOp() const;
- int32_t getOp() const { return mAppOp; }
-
- static sp<OpRecordAudioMonitor> createIfNeeded(const AttributionSourceState& attributionSource,
- const audio_attributes_t& attr);
-
-private:
- OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp);
-
- void onFirstRef() override;
-
- AppOpsManager mAppOpsManager;
-
- class RecordAudioOpCallback : public BnAppOpsCallback {
- public:
- explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
- void opChanged(int32_t op, const String16& packageName) override;
-
- private:
- const wp<OpRecordAudioMonitor> mMonitor;
- };
-
- sp<RecordAudioOpCallback> mOpCallback;
- // called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor is updated
- // in AppOp callback and in onFirstRef()
- void checkOp();
-
- std::atomic_bool mHasOp;
- const AttributionSourceState mAttributionSource;
- const int32_t mAppOp;
-};
-
// record track
class RecordTrack : public TrackBase {
public:
@@ -107,7 +71,7 @@
{ return (mFlags & AUDIO_INPUT_FLAG_DIRECT) != 0; }
void setSilenced(bool silenced) { if (!isPatchTrack()) mSilenced = silenced; }
- bool isSilenced() const;
+ bool isSilenced() const { return mSilenced; }
status_t getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones);
@@ -154,8 +118,6 @@
bool mSilenced;
- // used to enforce the audio record app op corresponding to this track's audio source
- sp<OpRecordAudioMonitor> mOpRecordAudioMonitor;
std::string mSharedAudioPackageName = {};
int32_t mStartFrames = -1;
};
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 8d98afe..a6e3c06 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -2247,109 +2247,6 @@
// ----------------------------------------------------------------------------
-// ----------------------------------------------------------------------------
-// AppOp for audio recording
-// -------------------------------
-
-#undef LOG_TAG
-#define LOG_TAG "AF::OpRecordAudioMonitor"
-
-// static
-sp<AudioFlinger::RecordThread::OpRecordAudioMonitor>
-AudioFlinger::RecordThread::OpRecordAudioMonitor::createIfNeeded(
- const AttributionSourceState& attributionSource, const audio_attributes_t& attr)
-{
- if (isServiceUid(attributionSource.uid)) {
- ALOGV("not silencing record for service %s",
- attributionSource.toString().c_str());
- return nullptr;
- }
-
- // Capturing from FM TUNER output is not controlled by an app op
- // because it does not affect users privacy as does capturing from an actual microphone.
- if (attr.source == AUDIO_SOURCE_FM_TUNER) {
- ALOGV("not muting FM TUNER capture for uid %d", attributionSource.uid);
- return nullptr;
- }
-
- AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
- attributionSource);
- if (!checkedAttributionSource.packageName.has_value()
- || checkedAttributionSource.packageName.value().size() == 0) {
- return nullptr;
- }
- return new OpRecordAudioMonitor(checkedAttributionSource, getOpForSource(attr.source));
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::OpRecordAudioMonitor(
- const AttributionSourceState& attributionSource, int32_t appOp)
- : mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp)
-{
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::~OpRecordAudioMonitor()
-{
- if (mOpCallback != 0) {
- mAppOpsManager.stopWatchingMode(mOpCallback);
- }
- mOpCallback.clear();
-}
-
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::onFirstRef()
-{
- checkOp();
- mOpCallback = new RecordAudioOpCallback(this);
- ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
- // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
- // since it controls the mic permission for legacy apps.
- mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
- mAttributionSource.packageName.value_or(""))),
- mOpCallback);
-}
-
-bool AudioFlinger::RecordThread::OpRecordAudioMonitor::hasOp() const {
- return mHasOp.load();
-}
-
-// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
-// is updated in AppOp callback and in onFirstRef()
-// Note this method is never called (and never to be) for audio server / root track
-// due to the UID in createIfNeeded(). As a result for those record track, it's:
-// - not called from constructor,
-// - not called from RecordAudioOpCallback because the callback is not installed in this case
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::checkOp()
-{
- // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
- // since it controls the mic permission for legacy apps.
- const int32_t mode = mAppOpsManager.checkOp(mAppOp,
- mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
- mAttributionSource.packageName.value_or(""))));
- const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
- // verbose logging only log when appOp changed
- ALOGI_IF(hasIt != mHasOp.load(),
- "App op %d missing, %ssilencing record %s",
- mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
- mHasOp.store(hasIt);
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
- const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
-{ }
-
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
- const String16& packageName) {
- UNUSED(packageName);
- sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
- if (monitor != NULL) {
- if (op != monitor->getOp()) {
- return;
- }
- monitor->checkOp();
- }
-}
-
-
-
#undef LOG_TAG
#define LOG_TAG "AF::RecordHandle"
@@ -2450,7 +2347,6 @@
mRecordBufferConverter(NULL),
mFlags(flags),
mSilenced(false),
- mOpRecordAudioMonitor(OpRecordAudioMonitor::createIfNeeded(attributionSource, attr)),
mStartFrames(startFrames)
{
if (mCblk == NULL) {
@@ -2709,14 +2605,6 @@
mServerLatencyMs.store(latencyMs);
}
-bool AudioFlinger::RecordThread::RecordTrack::isSilenced() const {
- if (mSilenced) {
- return true;
- }
- // The monitor is only created for record tracks that can be silenced.
- return mOpRecordAudioMonitor ? !mOpRecordAudioMonitor->hasOp() : false;
-}
-
status_t AudioFlinger::RecordThread::RecordTrack::getActiveMicrophones(
std::vector<media::MicrophoneInfo>* activeMicrophones)
{
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index ccb82f2..49db0d1 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -5848,11 +5848,12 @@
// With low-latency playing on speaker, music on WFD, when the first low-latency
// output is stopped, getNewOutputDevices checks for a product strategy
// from the list, as STRATEGY_SONIFICATION comes prior to STRATEGY_MEDIA.
- // If an ALARM or ENFORCED_AUDIBLE stream is supported by the product strategy,
+ // If an ALARM, RING or ENFORCED_AUDIBLE stream is supported by the product strategy,
// devices are returned for STRATEGY_SONIFICATION without checking whether the
// stream is associated to the output descriptor.
if (doGetOutputDevicesForVoice() || outputDesc->isStrategyActive(productStrategy) ||
((hasStreamActive(AUDIO_STREAM_ALARM) ||
+ hasStreamActive(AUDIO_STREAM_RING) ||
hasStreamActive(AUDIO_STREAM_ENFORCED_AUDIBLE)) &&
mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc))) {
// Retrieval of devices for voice DL is done on primary output profile, cannot
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 0273d29..454c020 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -38,6 +38,7 @@
"libmedia_helper",
"libmediametrics",
"libmediautils",
+ "libpermission",
"libsensorprivacy",
"libutils",
"audioclient-types-aidl-cpp",
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 1ebf76b..90332af 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -674,7 +674,8 @@
sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
selectedDeviceId, adjAttributionSource,
- canCaptureOutput, canCaptureHotword);
+ canCaptureOutput, canCaptureHotword,
+ mAudioCommandThread);
mAudioRecordClients.add(portId, client);
}
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 201273e..4d0e1f1 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -730,7 +730,10 @@
&& !(isTopOrLatestSensitive || current->canCaptureOutput))
&& canCaptureIfInCallOrCommunication(current);
- if (isVirtualSource(source)) {
+ if (!current->hasOp()) {
+ // Never allow capture if app op is denied
+ allowCapture = false;
+ } else if (isVirtualSource(source)) {
// Allow capture for virtual (remote submix, call audio TX or RX...) sources
allowCapture = true;
} else if (mUidPolicy->isAssistantUid(currentUid)) {
@@ -830,6 +833,19 @@
return false;
}
+/* static */
+bool AudioPolicyService::isAppOpSource(audio_source_t source)
+{
+ switch (source) {
+ case AUDIO_SOURCE_FM_TUNER:
+ case AUDIO_SOURCE_ECHO_REFERENCE:
+ return false;
+ default:
+ break;
+ }
+ return true;
+}
+
void AudioPolicyService::setAppState_l(sp<AudioRecordClient> client, app_state_t state)
{
AutoCallerClear acc;
@@ -1418,6 +1434,109 @@
return binder::Status::ok();
}
+// ----------- AudioPolicyService::OpRecordAudioMonitor implementation ----------
+
+// static
+sp<AudioPolicyService::OpRecordAudioMonitor>
+AudioPolicyService::OpRecordAudioMonitor::createIfNeeded(
+ const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
+ wp<AudioCommandThread> commandThread)
+{
+ if (isAudioServerOrRootUid(attributionSource.uid)) {
+ ALOGV("not silencing record for audio or root source %s",
+ attributionSource.toString().c_str());
+ return nullptr;
+ }
+
+ if (!AudioPolicyService::isAppOpSource(attr.source)) {
+ ALOGD("not monitoring app op for uid %d and source %d",
+ attributionSource.uid, attr.source);
+ return nullptr;
+ }
+
+ if (!attributionSource.packageName.has_value()
+ || attributionSource.packageName.value().size() == 0) {
+ return nullptr;
+ }
+ return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
+}
+
+AudioPolicyService::OpRecordAudioMonitor::OpRecordAudioMonitor(
+ const AttributionSourceState& attributionSource, int32_t appOp,
+ wp<AudioCommandThread> commandThread) :
+ mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
+ mCommandThread(commandThread)
+{
+}
+
+AudioPolicyService::OpRecordAudioMonitor::~OpRecordAudioMonitor()
+{
+ if (mOpCallback != 0) {
+ mAppOpsManager.stopWatchingMode(mOpCallback);
+ }
+ mOpCallback.clear();
+}
+
+void AudioPolicyService::OpRecordAudioMonitor::onFirstRef()
+{
+ checkOp();
+ mOpCallback = new RecordAudioOpCallback(this);
+ ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
+ // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
+ // since it controls the mic permission for legacy apps.
+ mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+ mAttributionSource.packageName.value_or(""))),
+ mOpCallback);
+}
+
+bool AudioPolicyService::OpRecordAudioMonitor::hasOp() const {
+ return mHasOp.load();
+}
+
+// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
+// is updated in AppOp callback and in onFirstRef()
+// Note this method is never called (and must never be) for audio server / root tracks
+// due to the UID check in createIfNeeded(). As a result, for those record tracks it is:
+// - not called from the constructor,
+// - not called from RecordAudioOpCallback because the callback is not installed in this case
+void AudioPolicyService::OpRecordAudioMonitor::checkOp(bool updateUidStates)
+{
+ // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
+ // since it controls the mic permission for legacy apps.
+ const int32_t mode = mAppOpsManager.checkOp(mAppOp,
+ mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+ mAttributionSource.packageName.value_or(""))));
+ const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+ // verbose logging only log when appOp changed
+ ALOGI_IF(hasIt != mHasOp.load(),
+ "App op %d missing, %ssilencing record %s",
+ mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
+ mHasOp.store(hasIt);
+
+ if (updateUidStates) {
+ sp<AudioCommandThread> commandThread = mCommandThread.promote();
+ if (commandThread != nullptr) {
+ commandThread->updateUidStatesCommand();
+ }
+ }
+}
+
+AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
+ const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
+{ }
+
+void AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
+ const String16& packageName __unused) {
+ sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
+ if (monitor != NULL) {
+ if (op != monitor->getOp()) {
+ return;
+ }
+ monitor->checkOp(true);
+ }
+}
+
+
// ----------- AudioPolicyService::AudioCommandThread implementation ----------
AudioPolicyService::AudioCommandThread::AudioCommandThread(String8 name,
@@ -1634,6 +1753,17 @@
mLock.lock();
} break;
+ case UPDATE_UID_STATES: {
+ ALOGV("AudioCommandThread() processing updateUID states");
+ svc = mService.promote();
+ if (svc == 0) {
+ break;
+ }
+ mLock.unlock();
+ svc->updateUidStates();
+ mLock.lock();
+ } break;
+
default:
ALOGW("AudioCommandThread() unknown command %d", command->mCommand);
}
@@ -1847,6 +1977,14 @@
sendCommand(command);
}
+void AudioPolicyService::AudioCommandThread::updateUidStatesCommand()
+{
+ sp<AudioCommand> command = new AudioCommand();
+ command->mCommand = UPDATE_UID_STATES;
+ ALOGV("AudioCommandThread() adding update UID states");
+ sendCommand(command);
+}
+
void AudioPolicyService::AudioCommandThread::updateAudioPatchListCommand()
{
sp<AudioCommand>command = new AudioCommand();
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index b583484..ed9f44e 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -27,6 +27,7 @@
#include <utils/Vector.h>
#include <utils/SortedVector.h>
#include <binder/ActivityManager.h>
+#include <binder/AppOpsManager.h>
#include <binder/BinderService.h>
#include <binder/IUidObserver.h>
#include <system/audio.h>
@@ -358,6 +359,13 @@
static bool isVirtualSource(audio_source_t source);
+ /** Returns true if the audio source must be silenced when the corresponding app op is denied.
+ * Returns false if the audio source does not actually capture from the microphone while
+ * still being mapped to app op OP_RECORD_AUDIO and not to a specialized op tracked
+ * separately. See getOpForSource().
+ */
+ */
+ static bool isAppOpSource(audio_source_t source);
+
// If recording we need to make sure the UID is allowed to do that. If the UID is idle
// then it cannot record and gets buffers with zeros - silence. As soon as the UID
// transitions to an active state we will start reporting buffers with data. This approach
@@ -467,6 +475,7 @@
SET_EFFECT_SUSPENDED,
AUDIO_MODULES_UPDATE,
ROUTING_UPDATED,
+ UPDATE_UID_STATES
};
AudioCommandThread (String8 name, const wp<AudioPolicyService>& service);
@@ -514,6 +523,7 @@
bool suspended);
void audioModulesUpdateCommand();
void routingChangedCommand();
+ void updateUidStatesCommand();
void insertCommand_l(AudioCommand *command, int delayMs = 0);
private:
class AudioCommandData;
@@ -814,6 +824,47 @@
bool active; // Playback/Capture is active or inactive
};
+ // Checks and monitors app ops for AudioRecordClient
+ class OpRecordAudioMonitor : public RefBase {
+ public:
+ ~OpRecordAudioMonitor() override;
+ bool hasOp() const;
+ int32_t getOp() const { return mAppOp; }
+
+ static sp<OpRecordAudioMonitor> createIfNeeded(
+ const AttributionSourceState& attributionSource,
+ const audio_attributes_t& attr, wp<AudioCommandThread> commandThread);
+
+ private:
+ OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
+ wp<AudioCommandThread> commandThread);
+
+ void onFirstRef() override;
+
+ AppOpsManager mAppOpsManager;
+
+ class RecordAudioOpCallback : public BnAppOpsCallback {
+ public:
+ explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
+ void opChanged(int32_t op, const String16& packageName) override;
+
+ private:
+ const wp<OpRecordAudioMonitor> mMonitor;
+ };
+
+ sp<RecordAudioOpCallback> mOpCallback;
+ // called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor is updated
+ // in AppOp callback and in onFirstRef()
+ // updateUidStates is true when the silenced state of active AudioRecordClients must be
+ // re-evaluated
+ void checkOp(bool updateUidStates = false);
+
+ std::atomic_bool mHasOp;
+ const AttributionSourceState mAttributionSource;
+ const int32_t mAppOp;
+ wp<AudioCommandThread> mCommandThread;
+ };
+
// --- AudioRecordClient ---
// Information about each registered AudioRecord client
// (between calls to getInputForAttr() and releaseInput())
@@ -824,20 +875,32 @@
const audio_session_t session, audio_port_handle_t portId,
const audio_port_handle_t deviceId,
const AttributionSourceState& attributionSource,
- bool canCaptureOutput, bool canCaptureHotword) :
+ bool canCaptureOutput, bool canCaptureHotword,
+ wp<AudioCommandThread> commandThread) :
AudioClient(attributes, io, attributionSource,
session, portId, deviceId), attributionSource(attributionSource),
startTimeNs(0), canCaptureOutput(canCaptureOutput),
- canCaptureHotword(canCaptureHotword), silenced(false) {}
+ canCaptureHotword(canCaptureHotword), silenced(false),
+ mOpRecordAudioMonitor(
+ OpRecordAudioMonitor::createIfNeeded(attributionSource,
+ attributes, commandThread)) {}
~AudioRecordClient() override = default;
+ bool hasOp() const {
+ return mOpRecordAudioMonitor ? mOpRecordAudioMonitor->hasOp() : true;
+ }
+
const AttributionSourceState attributionSource; // attribution source of client
nsecs_t startTimeNs;
const bool canCaptureOutput;
const bool canCaptureHotword;
bool silenced;
+
+ private:
+ sp<OpRecordAudioMonitor> mOpRecordAudioMonitor;
};
+
// --- AudioPlaybackClient ---
// Information about each registered AudioTrack client
// (between calls to getOutputForAttr() and releaseOutput())
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index bd2e7dc..80508e4 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -29,6 +29,7 @@
#include "Parameters.h"
#include "system/camera.h"
+#include <android-base/properties.h>
#include <android/hardware/ICamera.h>
#include <media/MediaProfiles.h>
#include <media/mediarecorder.h>
@@ -1247,6 +1248,7 @@
}
}
fastInfo.maxZslSize = maxPrivInputSize;
+ fastInfo.usedZslSize = maxPrivInputSize;
} else {
fastInfo.maxZslSize = {0, 0};
}
@@ -2047,12 +2049,33 @@
slowJpegMode = false;
Size pictureSize = { pictureWidth, pictureHeight };
- int64_t minFrameDurationNs = getJpegStreamMinFrameDurationNs(pictureSize);
- if (previewFpsRange[1] > 1e9/minFrameDurationNs + FPS_MARGIN) {
+ bool zslFrameRateSupported = false;
+ int64_t jpegMinFrameDurationNs = getJpegStreamMinFrameDurationNs(pictureSize);
+ if (previewFpsRange[1] > 1e9/jpegMinFrameDurationNs + FPS_MARGIN) {
slowJpegMode = true;
}
- if (isDeviceZslSupported || slowJpegMode ||
- property_get_bool("camera.disable_zsl_mode", false)) {
+ if (isZslReprocessPresent) {
+ unsigned int firstApiLevel =
+ android::base::GetUintProperty<unsigned int>("ro.product.first_api_level", 0);
+ Size chosenSize;
+ if ((firstApiLevel >= __ANDROID_API_S__) &&
+ !android::base::GetBoolProperty("ro.camera.enableCamera1MaxZsl", false)) {
+ chosenSize = pictureSize;
+ } else {
+ // follow old behavior of keeping max zsl size as the input / output
+ // zsl stream size
+ chosenSize = fastInfo.maxZslSize;
+ }
+ int64_t zslMinFrameDurationNs = getZslStreamMinFrameDurationNs(chosenSize);
+ if (zslMinFrameDurationNs > 0 &&
+ previewFpsRange[1] <= (1e9/zslMinFrameDurationNs + FPS_MARGIN)) {
+ zslFrameRateSupported = true;
+ fastInfo.usedZslSize = chosenSize;
+ }
+ }
+
+ if (isDeviceZslSupported || slowJpegMode || !zslFrameRateSupported ||
+ android::base::GetBoolProperty("camera.disable_zsl_mode", false)) {
allowZslMode = false;
} else {
allowZslMode = isZslReprocessPresent;
@@ -3056,6 +3079,10 @@
return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_BLOB);
}
+int64_t Parameters::getZslStreamMinFrameDurationNs(Parameters::Size size) {
+ return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+}
+
int64_t Parameters::getMinFrameDurationNs(Parameters::Size size, int fmt) {
const int STREAM_DURATION_SIZE = 4;
const int STREAM_FORMAT_OFFSET = 0;
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 02ac638..e2f8d011 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -248,6 +248,7 @@
bool useFlexibleYuv;
Size maxJpegSize;
Size maxZslSize;
+ Size usedZslSize;
bool supportsPreferredConfigs;
} fastInfo;
@@ -426,6 +427,11 @@
// return -1 if input jpeg size cannot be found in supported size list
int64_t getJpegStreamMinFrameDurationNs(Parameters::Size size);
+ // Helper function to get minimum frame duration for an
+ // IMPLEMENTATION_DEFINED stream of size 'size'
+ // return -1 if input size cannot be found in supported size list
+ int64_t getZslStreamMinFrameDurationNs(Parameters::Size size);
+
// Helper function to get minimum frame duration for a size/format combination
// return -1 if input size/format combination cannot be found.
int64_t getMinFrameDurationNs(Parameters::Size size, int format);
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 8e598f1..1321e6b 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -235,8 +235,8 @@
}
if (mInputStreamId == NO_STREAM) {
- res = device->createInputStream(params.fastInfo.maxZslSize.width,
- params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ res = device->createInputStream(params.fastInfo.usedZslSize.width,
+ params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
/*isMultiResolution*/false, &mInputStreamId);
if (res != OK) {
ALOGE("%s: Camera %d: Can't create input stream: "
@@ -258,8 +258,8 @@
mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
sp<Surface> outSurface = new Surface(producer);
- res = device->createStream(outSurface, params.fastInfo.maxZslSize.width,
- params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ res = device->createStream(outSurface, params.fastInfo.usedZslSize.width,
+ params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0, &mZslStreamId,
String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
if (res != OK) {
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 582001d..a73ffb9 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -16,6 +16,7 @@
#define LOG_TAG "Camera3-HeicCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
+#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
//#define LOG_NDEBUG 0
#include <linux/memfd.h>
@@ -1380,7 +1381,9 @@
mOutputWidth = width;
mOutputHeight = height;
mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
- mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;
+ mMaxHeicBufferSize =
+ ALIGN(mOutputWidth, HeicEncoderInfoManager::kGridWidth) *
+ ALIGN(mOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2 + mAppSegmentMaxSize;
return OK;
}
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
index 58edba2..a65be9c 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
@@ -38,6 +38,7 @@
bool isSizeSupported(int32_t width, int32_t height,
bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) const;
+ // kGridWidth and kGridHeight should be 2^n
static const auto kGridWidth = 512;
static const auto kGridHeight = 512;
private:
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 578a32f..aefc75e 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -779,16 +779,21 @@
}
lines = String8(" In-flight requests:\n");
- if (mInFlightMap.size() == 0) {
- lines.append(" None\n");
- } else {
- for (size_t i = 0; i < mInFlightMap.size(); i++) {
- InFlightRequest r = mInFlightMap.valueAt(i);
- lines.appendFormat(" Frame %d | Timestamp: %" PRId64 ", metadata"
- " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i),
- r.shutterTimestamp, r.haveResultMetadata ? "true" : "false",
- r.numBuffersLeft);
+ if (mInFlightLock.try_lock()) {
+ if (mInFlightMap.size() == 0) {
+ lines.append(" None\n");
+ } else {
+ for (size_t i = 0; i < mInFlightMap.size(); i++) {
+ InFlightRequest r = mInFlightMap.valueAt(i);
+ lines.appendFormat(" Frame %d | Timestamp: %" PRId64 ", metadata"
+ " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i),
+ r.shutterTimestamp, r.haveResultMetadata ? "true" : "false",
+ r.numBuffersLeft);
+ }
}
+ mInFlightLock.unlock();
+ } else {
+ lines.append(" Failed to acquire In-flight lock!\n");
}
write(fd, lines.string(), lines.size());
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index e387800..2a20981 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -132,10 +132,10 @@
void MediaTranscodingService::instantiate() {
std::shared_ptr<MediaTranscodingService> service =
::ndk::SharedRefBase::make<MediaTranscodingService>();
- binder_status_t status =
- AServiceManager_addService(service->asBinder().get(), getServiceName());
- if (status != STATUS_OK) {
- return;
+ if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+ // Once the service is started, we want it to stay alive even if the client side perished.
+ AServiceManager_forceLazyServicesPersist(true /*persist*/);
+ (void)AServiceManager_registerLazyService(service->asBinder().get(), getServiceName());
}
}
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
index 20e4bfb..0cb2fad 100644
--- a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -481,7 +481,7 @@
// Need thread pool to receive callbacks, otherwise oneway callbacks are
// silently ignored.
ABinderProcess_startThreadPool();
- ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
+ ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.transcoding"));
mService = IMediaTranscodingService::fromBinder(binder);
if (mService == nullptr) {
ALOGE("Failed to connect to the media.trascoding service.");