Merge "AudioFlinger: Use EffectClientAsyncProxy for local oneway calls." into main
diff --git a/media/Android.mk b/media/Android.mk
deleted file mode 100644
index 220a358..0000000
--- a/media/Android.mk
+++ /dev/null
@@ -1,5 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.conf))
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.xml))
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libstagefright,))
diff --git a/media/aconfig/Android.bp b/media/aconfig/Android.bp
index e0d1fa9..16beb28 100644
--- a/media/aconfig/Android.bp
+++ b/media/aconfig/Android.bp
@@ -43,6 +43,7 @@
name: "android.media.codec-aconfig-cc",
min_sdk_version: "30",
vendor_available: true,
+ double_loadable: true,
apex_available: [
"//apex_available:platform",
"com.android.media.swcodec",
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 4d0df77..cdbadc2 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -13,6 +13,13 @@
}
flag {
+ name: "as_device_connection_failure"
+ namespace: "media_audio"
+ description: "AudioService handles device connection failures."
+ bug: "326597760"
+}
+
+flag {
name: "bluetooth_mac_address_anonymization"
namespace: "media_audio"
description:
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 525dceb..cfdf1ab 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -71,6 +71,13 @@
}
flag {
+ name: "mute_background_audio"
+ namespace: "media_audio"
+ description: "mute audio playing in background"
+ bug: "296232417"
+}
+
+flag {
name: "sco_managed_by_audio"
namespace: "media_audio"
description: "\
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index c7a1bfd..55847f4 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -169,6 +169,11 @@
"%s: AudioSystem already has an AudioFlinger instance!", __func__);
const auto aps = sp<AudioPolicyService>::make();
ALOGD("%s: AudioPolicy created", __func__);
+ ALOGW_IF(AudioSystem::setLocalAudioPolicyService(aps) != OK,
+ "%s: AudioSystem already has an AudioPolicyService instance!", __func__);
+
+ // Start initialization of internally managed audio objects such as Device Effects.
+ aps->onAudioSystemReady();
// Add AudioFlinger and AudioPolicy to ServiceManager.
sp<IServiceManager> sm = defaultServiceManager();
diff --git a/media/codec2/hal/aidl/Component.cpp b/media/codec2/hal/aidl/Component.cpp
index ba5f8d4..eb64a4a 100644
--- a/media/codec2/hal/aidl/Component.cpp
+++ b/media/codec2/hal/aidl/Component.cpp
@@ -205,30 +205,7 @@
mDeathContext(nullptr) {
// Retrieve supported parameters from store
// TODO: We could cache this per component/interface type
- if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
- c2_status_t err = C2_OK;
- C2ComponentDomainSetting domain;
- std::vector<std::unique_ptr<C2Param>> heapParams;
- err = component->intf()->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
- if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
- std::vector<std::shared_ptr<C2ParamDescriptor>> params;
- bool isComponentSupportsLargeAudioFrame = false;
-            component->intf()->querySupportedParams_nb(&params);
-            for (const auto &paramDesc : params) {
- if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
- isComponentSupportsLargeAudioFrame = true;
- LOG(VERBOSE) << "Underlying component supports large frame audio";
- break;
- }
- }
- if (!isComponentSupportsLargeAudioFrame) {
- mMultiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
- component->intf(),
- std::static_pointer_cast<C2ReflectorHelper>(
- ::android::GetCodec2PlatformComponentStore()->getParamReflector()));
- }
- }
- }
+ mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
mInterface = SharedRefBase::make<ComponentInterface>(
component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
mInit = mInterface->status();
diff --git a/media/codec2/hal/aidl/ComponentInterface.cpp b/media/codec2/hal/aidl/ComponentInterface.cpp
index 1f0534d..8ae9fa8 100644
--- a/media/codec2/hal/aidl/ComponentInterface.cpp
+++ b/media/codec2/hal/aidl/ComponentInterface.cpp
@@ -131,9 +131,35 @@
virtual c2_status_t querySupportedValues(
std::vector<C2FieldSupportedValuesQuery>& fields,
c2_blocking_t mayBlock) const override {
- c2_status_t err = mIntf->querySupportedValues_vb(fields, mayBlock);
- if (mMultiAccessUnitIntf != nullptr) {
- err = mMultiAccessUnitIntf->querySupportedValues(fields, mayBlock);
+ if (mMultiAccessUnitIntf == nullptr) {
+ return mIntf->querySupportedValues_vb(fields, mayBlock);
+ }
+ std::vector<C2FieldSupportedValuesQuery> dup = fields;
+ std::vector<C2FieldSupportedValuesQuery> queryArray[2];
+ std::map<C2ParamField, std::pair<uint32_t, size_t>> queryMap;
+ c2_status_t err = C2_OK;
+ for (int i = 0 ; i < fields.size(); i++) {
+ const C2ParamField &field = fields[i].field();
+ uint32_t queryArrayIdx = 1;
+ if (mMultiAccessUnitIntf->isValidField(fields[i].field())) {
+ queryArrayIdx = 0;
+ }
+ queryMap[field] = std::make_pair(
+ queryArrayIdx, queryArray[queryArrayIdx].size());
+ queryArray[queryArrayIdx].push_back(fields[i]);
+ }
+ if (queryArray[0].size() > 0) {
+ err = mMultiAccessUnitIntf->querySupportedValues(queryArray[0], mayBlock);
+ }
+ if (queryArray[1].size() > 0) {
+ err = mIntf->querySupportedValues_vb(queryArray[1], mayBlock);
+ }
+ for (int i = 0 ; i < dup.size(); i++) {
+ auto it = queryMap.find(dup[i].field());
+ if (it != queryMap.end()) {
+ std::pair<uint32_t, size_t> queryid = it->second;
+ fields[i] = queryArray[queryid.first][queryid.second];
+ }
}
return err;
}
diff --git a/media/codec2/hal/aidl/ComponentStore.cpp b/media/codec2/hal/aidl/ComponentStore.cpp
index ef49308..b95c09e 100644
--- a/media/codec2/hal/aidl/ComponentStore.cpp
+++ b/media/codec2/hal/aidl/ComponentStore.cpp
@@ -199,6 +199,36 @@
}
#endif
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface) {
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+ if (c2interface == nullptr) {
+ return nullptr;
+ }
+ if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+ c2_status_t err = C2_OK;
+ C2ComponentDomainSetting domain;
+ std::vector<std::unique_ptr<C2Param>> heapParams;
+ err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+ if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+ std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+ bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+ if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+ isComponentSupportsLargeAudioFrame = true;
+ break;
+ }
+ }
+ if (!isComponentSupportsLargeAudioFrame) {
+ multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+ c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+ }
+ }
+ }
+ return multiAccessUnitIntf;
+}
+
// Methods from ::aidl::android::hardware::media::c2::IComponentStore
ScopedAStatus ComponentStore::createComponent(
const std::string& name,
@@ -258,7 +288,10 @@
c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
#endif
onInterfaceLoaded(c2interface);
- *intf = SharedRefBase::make<ComponentInterface>(c2interface, mParameterCache);
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+ tryCreateMultiAccessUnitInterface(c2interface);
+ *intf = SharedRefBase::make<ComponentInterface>(
+ c2interface, multiAccessUnitIntf, mParameterCache);
return ScopedAStatus::ok();
}
return ScopedAStatus::fromServiceSpecificError(res);
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
index 0698b0f..746e1bf 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
@@ -75,6 +75,9 @@
static std::shared_ptr<::android::FilterWrapper> GetFilterWrapper();
+ std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface);
+
// Methods from ::aidl::android::hardware::media::c2::IComponentStore.
virtual ::ndk::ScopedAStatus createComponent(
const std::string& name,
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 9ed9458..b3ae514 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -649,7 +649,7 @@
return C2_CORRUPTED;
}
size_t i = 0;
- size_t numUpdatedStackParams = 0;
+ size_t numQueried = 0;
for (auto it = paramPointers.begin(); it != paramPointers.end(); ) {
C2Param* paramPointer = *it;
if (numStackIndices > 0) {
@@ -678,7 +678,7 @@
continue;
}
if (stackParams[i++]->updateFrom(*paramPointer)) {
- ++numUpdatedStackParams;
+ ++numQueried;
} else {
LOG(WARNING) << "query -- param update failed: "
"index = "
@@ -695,14 +695,11 @@
"unexpected extra stack param.";
} else {
heapParams->emplace_back(C2Param::Copy(*paramPointer));
+ ++numQueried;
}
}
++it;
}
- size_t numQueried = numUpdatedStackParams;
- if (heapParams) {
- numQueried += heapParams->size();
- }
if (status == C2_OK && indices.size() != numQueried) {
status = C2_BAD_INDEX;
}
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index 9221a24..03a76e9 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -73,12 +73,17 @@
for (std::shared_ptr<C2ParamDescriptor> &desc : supportedParams) {
mSupportedParamIndexSet.insert(desc->index());
}
+ mParamFields.emplace_back(mLargeFrameParams.get(), &(mLargeFrameParams.get()->maxSize));
+ mParamFields.emplace_back(mLargeFrameParams.get(), &(mLargeFrameParams.get()->thresholdSize));
if (mC2ComponentIntf) {
c2_status_t err = mC2ComponentIntf->query_vb({&mKind}, {}, C2_MAY_BLOCK, nullptr);
}
}
+bool MultiAccessUnitInterface::isValidField(const C2ParamField &field) const {
+ return (std::find(mParamFields.begin(), mParamFields.end(), field) != mParamFields.end());
+}
bool MultiAccessUnitInterface::isParamSupported(C2Param::Index index) {
return (mSupportedParamIndexSet.count(index) != 0);
}
@@ -91,18 +96,23 @@
return (C2Component::kind_t)(mKind.value);
}
-void MultiAccessUnitInterface::getDecoderSampleRateAndChannelCount(
- uint32_t &sampleRate_, uint32_t &channelCount_) const {
+bool MultiAccessUnitInterface::getDecoderSampleRateAndChannelCount(
+ uint32_t * const sampleRate_, uint32_t * const channelCount_) const {
+    if (sampleRate_ == nullptr || channelCount_ == nullptr) {
+ return false;
+ }
if (mC2ComponentIntf) {
C2StreamSampleRateInfo::output sampleRate;
C2StreamChannelCountInfo::output channelCount;
c2_status_t res = mC2ComponentIntf->query_vb(
{&sampleRate, &channelCount}, {}, C2_MAY_BLOCK, nullptr);
- if (res == C2_OK) {
- sampleRate_ = sampleRate.value;
- channelCount_ = channelCount.value;
+ if (res == C2_OK && sampleRate.value > 0 && channelCount.value > 0) {
+ *sampleRate_ = sampleRate.value;
+ *channelCount_ = channelCount.value;
+ return true;
}
}
+ return false;
}
//C2MultiAccessUnitBuffer
@@ -315,26 +325,10 @@
}
}
if (!processedWork->empty()) {
- {
- C2LargeFrame::output multiAccessParams = mInterface->getLargeFrameParam();
- if (mInterface->kind() == C2Component::KIND_DECODER) {
- uint32_t sampleRate = 0;
- uint32_t channelCount = 0;
- uint32_t frameSize = 0;
- mInterface->getDecoderSampleRateAndChannelCount(
- sampleRate, channelCount);
- if (sampleRate > 0 && channelCount > 0) {
- frameSize = channelCount * 2;
- multiAccessParams.maxSize =
- (multiAccessParams.maxSize / frameSize) * frameSize;
- multiAccessParams.thresholdSize =
- (multiAccessParams.thresholdSize / frameSize) * frameSize;
- }
- }
- frameInfo.mLargeFrameTuning = multiAccessParams;
- std::lock_guard<std::mutex> l(mLock);
- mFrameHolder.push_back(std::move(frameInfo));
- }
+ C2LargeFrame::output multiAccessParams = mInterface->getLargeFrameParam();
+ frameInfo.mLargeFrameTuning = multiAccessParams;
+ std::lock_guard<std::mutex> l(mLock);
+ mFrameHolder.push_back(std::move(frameInfo));
}
}
return C2_OK;
@@ -501,6 +495,20 @@
frame.reset();
return C2_OK;
}
+ int64_t sampleTimeUs = 0;
+ uint32_t frameSize = 0;
+ uint32_t sampleRate = 0;
+ uint32_t channelCount = 0;
+ if (mInterface->getDecoderSampleRateAndChannelCount(&sampleRate, &channelCount)) {
+ sampleTimeUs = (1000000u) / (sampleRate * channelCount * 2);
+ frameSize = channelCount * 2;
+ if (mInterface->kind() == C2Component::KIND_DECODER) {
+ frame.mLargeFrameTuning.maxSize =
+ (frame.mLargeFrameTuning.maxSize / frameSize) * frameSize;
+ frame.mLargeFrameTuning.thresholdSize =
+ (frame.mLargeFrameTuning.thresholdSize / frameSize) * frameSize;
+ }
+ }
c2_status_t c2ret = allocateWork(frame, true);
if (c2ret != C2_OK) {
return c2ret;
@@ -515,15 +523,7 @@
outputFramedata.infoBuffers.insert(outputFramedata.infoBuffers.begin(),
(*worklet)->output.infoBuffers.begin(),
(*worklet)->output.infoBuffers.end());
- int64_t sampleTimeUs = 0;
- uint32_t frameSize = 0;
- uint32_t sampleRate = 0;
- uint32_t channelCount = 0;
- mInterface->getDecoderSampleRateAndChannelCount(sampleRate, channelCount);
- if (sampleRate > 0 && channelCount > 0) {
- sampleTimeUs = (1000000u) / (sampleRate * channelCount * 2);
- frameSize = channelCount * 2;
- }
+
LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
<< " threshold " << frame.mLargeFrameTuning.thresholdSize;
if ((*worklet)->output.buffers.size() > 0) {
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
index ef5cff9..a6d938e 100644
--- a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -41,14 +41,16 @@
bool isParamSupported(C2Param::Index index);
C2LargeFrame::output getLargeFrameParam() const;
C2Component::kind_t kind() const;
+ bool isValidField(const C2ParamField &field) const;
protected:
- void getDecoderSampleRateAndChannelCount(
- uint32_t &sampleRate_, uint32_t &channelCount_) const;
+ bool getDecoderSampleRateAndChannelCount(
+ uint32_t * const sampleRate_, uint32_t * const channelCount_) const;
const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
C2ComponentKindSetting mKind;
std::set<C2Param::Index> mSupportedParamIndexSet;
+ std::vector<C2ParamField> mParamFields;
friend struct MultiAccessUnitHelper;
};
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index ebbaafc..e32e6ae 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -259,30 +259,7 @@
mBufferPoolSender{clientPoolManager} {
// Retrieve supported parameters from store
// TODO: We could cache this per component/interface type
- if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
- c2_status_t err = C2_OK;
- C2ComponentDomainSetting domain;
- std::vector<std::unique_ptr<C2Param>> heapParams;
- err = component->intf()->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
- if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
- std::vector<std::shared_ptr<C2ParamDescriptor>> params;
- bool isComponentSupportsLargeAudioFrame = false;
-            component->intf()->querySupportedParams_nb(&params);
-            for (const auto &paramDesc : params) {
- if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
- isComponentSupportsLargeAudioFrame = true;
- LOG(VERBOSE) << "Underlying component supports large frame audio";
- break;
- }
- }
- if (!isComponentSupportsLargeAudioFrame) {
- mMultiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
- component->intf(),
- std::static_pointer_cast<C2ReflectorHelper>(
- GetCodec2PlatformComponentStore()->getParamReflector()));
- }
- }
- }
+ mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
mInterface = new ComponentInterface(
component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
mInit = mInterface->status();
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
index 5a5e780..41a8904 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
@@ -130,9 +130,35 @@
virtual c2_status_t querySupportedValues(
std::vector<C2FieldSupportedValuesQuery>& fields,
c2_blocking_t mayBlock) const override {
- c2_status_t err = mIntf->querySupportedValues_vb(fields, mayBlock);
- if (mMultiAccessUnitIntf != nullptr) {
- err = mMultiAccessUnitIntf->querySupportedValues(fields, mayBlock);
+ if (mMultiAccessUnitIntf == nullptr) {
+ return mIntf->querySupportedValues_vb(fields, mayBlock);
+ }
+ std::vector<C2FieldSupportedValuesQuery> dup = fields;
+ std::vector<C2FieldSupportedValuesQuery> queryArray[2];
+ std::map<C2ParamField, std::pair<uint32_t, size_t>> queryMap;
+ c2_status_t err = C2_OK;
+ for (int i = 0 ; i < fields.size(); i++) {
+ const C2ParamField &field = fields[i].field();
+ uint32_t queryArrayIdx = 1;
+ if (mMultiAccessUnitIntf->isValidField(field)) {
+ queryArrayIdx = 0;
+ }
+ queryMap[field] = std::make_pair(
+ queryArrayIdx, queryArray[queryArrayIdx].size());
+ queryArray[queryArrayIdx].push_back(fields[i]);
+ }
+ if (queryArray[0].size() > 0) {
+ err = mMultiAccessUnitIntf->querySupportedValues(queryArray[0], mayBlock);
+ }
+ if (queryArray[1].size() > 0) {
+ err = mIntf->querySupportedValues_vb(queryArray[1], mayBlock);
+ }
+ for (int i = 0 ; i < dup.size(); i++) {
+ auto it = queryMap.find(dup[i].field());
+ if (it != queryMap.end()) {
+ std::pair<uint32_t, size_t> queryid = it->second;
+ fields[i] = queryArray[queryid.first][queryid.second];
+ }
}
return err;
}
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
index 1c0d5b0..988ab6f 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
@@ -194,6 +194,36 @@
}
#endif
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface) {
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+ if (c2interface == nullptr) {
+ return nullptr;
+ }
+ if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+ c2_status_t err = C2_OK;
+ C2ComponentDomainSetting domain;
+ std::vector<std::unique_ptr<C2Param>> heapParams;
+ err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+ if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+ std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+ bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+ if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+ isComponentSupportsLargeAudioFrame = true;
+ break;
+ }
+ }
+ if (!isComponentSupportsLargeAudioFrame) {
+ multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+ c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+ }
+ }
+ }
+ return multiAccessUnitIntf;
+}
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore
Return<void> ComponentStore::createComponent(
const hidl_string& name,
@@ -242,7 +272,9 @@
c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
#endif
onInterfaceLoaded(c2interface);
- interface = new ComponentInterface(c2interface, mParameterCache);
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+ tryCreateMultiAccessUnitInterface(c2interface);
+ interface = new ComponentInterface(c2interface, multiAccessUnitIntf, mParameterCache);
}
_hidl_cb(static_cast<Status>(res), interface);
return Void();
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
index 27e2a05..b5d85da 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
@@ -78,6 +78,9 @@
static std::shared_ptr<FilterWrapper> GetFilterWrapper();
+ std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface);
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
virtual Return<void> createComponent(
const hidl_string& name,
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index 275a721..ab47b7c 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "codec2_hidl_hal_component_test"
#include <android-base/logging.h>
+#include <android/binder_process.h>
#include <gtest/gtest.h>
#include <hidl/GtestPrinter.h>
@@ -382,5 +383,6 @@
}
::testing::InitGoogleTest(&argc, argv);
+ ABinderProcess_startThreadPool();
return RUN_ALL_TESTS();
}
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index 5073983..09e5709 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -263,30 +263,7 @@
mBufferPoolSender{clientPoolManager} {
// Retrieve supported parameters from store
// TODO: We could cache this per component/interface type
- if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
- c2_status_t err = C2_OK;
- C2ComponentDomainSetting domain;
- std::vector<std::unique_ptr<C2Param>> heapParams;
- err = component->intf()->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
- if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
- std::vector<std::shared_ptr<C2ParamDescriptor>> params;
- bool isComponentSupportsLargeAudioFrame = false;
-            component->intf()->querySupportedParams_nb(&params);
-            for (const auto &paramDesc : params) {
- if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
- isComponentSupportsLargeAudioFrame = true;
- LOG(VERBOSE) << "Underlying component supports large frame audio";
- break;
- }
- }
- if (!isComponentSupportsLargeAudioFrame) {
- mMultiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
- component->intf(),
- std::static_pointer_cast<C2ReflectorHelper>(
- GetCodec2PlatformComponentStore()->getParamReflector()));
- }
- }
- }
+ mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
mInterface = new ComponentInterface(
component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
mInit = mInterface->status();
diff --git a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
index d47abdd..46af809 100644
--- a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
@@ -194,6 +194,37 @@
}
#endif
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface) {
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+ if (c2interface == nullptr) {
+ return nullptr;
+ }
+ if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+ c2_status_t err = C2_OK;
+ C2ComponentDomainSetting domain;
+ std::vector<std::unique_ptr<C2Param>> heapParams;
+ err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+ if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+ std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+ bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+ if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+ isComponentSupportsLargeAudioFrame = true;
+ break;
+ }
+ }
+
+ if (!isComponentSupportsLargeAudioFrame) {
+ multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+ c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+ }
+ }
+ }
+ return multiAccessUnitIntf;
+}
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore
Return<void> ComponentStore::createComponent(
const hidl_string& name,
@@ -241,7 +272,10 @@
c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
#endif
onInterfaceLoaded(c2interface);
- interface = new ComponentInterface(c2interface, mParameterCache);
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+ tryCreateMultiAccessUnitInterface(c2interface);
+ interface = new ComponentInterface(
+ c2interface, multiAccessUnitIntf, mParameterCache);
}
_hidl_cb(static_cast<Status>(res), interface);
return Void();
diff --git a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
index f6daee7..85862a9 100644
--- a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
@@ -79,6 +79,9 @@
static std::shared_ptr<FilterWrapper> GetFilterWrapper();
+ std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface);
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
virtual Return<void> createComponent(
const hidl_string& name,
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index bbdbef5..0fe16e3 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -261,30 +261,7 @@
mBufferPoolSender{clientPoolManager} {
// Retrieve supported parameters from store
// TODO: We could cache this per component/interface type
- if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
- c2_status_t err = C2_OK;
- C2ComponentDomainSetting domain;
- std::vector<std::unique_ptr<C2Param>> heapParams;
- err = component->intf()->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
- if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
- std::vector<std::shared_ptr<C2ParamDescriptor>> params;
- bool isComponentSupportsLargeAudioFrame = false;
-            component->intf()->querySupportedParams_nb(&params);
-            for (const auto &paramDesc : params) {
- if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
- isComponentSupportsLargeAudioFrame = true;
- LOG(VERBOSE) << "Underlying component supports large frame audio";
- break;
- }
- }
- if (!isComponentSupportsLargeAudioFrame) {
- mMultiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
- component->intf(),
- std::static_pointer_cast<C2ReflectorHelper>(
- GetCodec2PlatformComponentStore()->getParamReflector()));
- }
- }
- }
+ mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
mInterface = new ComponentInterface(
component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
mInit = mInterface->status();
diff --git a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
index 9fac5d5..f89c835 100644
--- a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
@@ -194,6 +194,36 @@
}
#endif
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface) {
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+ if (c2interface == nullptr) {
+ return nullptr;
+ }
+ if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+ c2_status_t err = C2_OK;
+ C2ComponentDomainSetting domain;
+ std::vector<std::unique_ptr<C2Param>> heapParams;
+ err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+ if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+ std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+ bool isComponentSupportsLargeAudioFrame = false;
+            c2interface->querySupportedParams_nb(&params);
+            for (const auto &paramDesc : params) {
+ if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+ isComponentSupportsLargeAudioFrame = true;
+ break;
+ }
+ }
+ if (!isComponentSupportsLargeAudioFrame) {
+ multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+ c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+ }
+ }
+ }
+ return multiAccessUnitIntf;
+}
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore
Return<void> ComponentStore::createComponent(
const hidl_string& name,
@@ -241,7 +271,9 @@
c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
#endif
onInterfaceLoaded(c2interface);
- interface = new ComponentInterface(c2interface, mParameterCache);
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+ tryCreateMultiAccessUnitInterface(c2interface);
+ interface = new ComponentInterface(c2interface, multiAccessUnitIntf, mParameterCache);
}
_hidl_cb(static_cast<Status>(res), interface);
return Void();
diff --git a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
index e95a651..c08fce4 100644
--- a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
@@ -79,6 +79,9 @@
static std::shared_ptr<FilterWrapper> GetFilterWrapper();
+ std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface);
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
virtual Return<void> createComponent(
const hidl_string& name,
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index d867eb1..18c2468 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -45,6 +45,7 @@
static_libs: [
"libSurfaceFlingerProperties",
+ "android.media.codec-aconfig-cc",
],
shared_libs: [
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 453a0d2..8dce789 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -20,6 +20,8 @@
#include <strings.h>
+#include <android_media_codec.h>
+
#include <C2Component.h>
#include <C2Config.h>
#include <C2Debug.h>
@@ -752,6 +754,24 @@
}
addSupportedColorFormats(
intf, caps.get(), trait, mediaType, it->second);
+
+ if (android::media::codec::provider_->large_audio_frame_finish()) {
+ // Adding feature-multiple-frames when C2LargeFrame param is present
+ if (trait.domain == C2Component::DOMAIN_AUDIO) {
+ std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+                c2_status_t err = intf->querySupportedParams(&params);
+                if (err == C2_OK) {
+                    for (const auto &paramDesc : params) {
+ if (C2LargeFrame::output::PARAM_TYPE == paramDesc->index()) {
+ std::string featureMultipleFrames =
+ std::string(KEY_FEATURE_) + FEATURE_MultipleFrames;
+ caps->addDetail(featureMultipleFrames.c_str(), 0);
+ break;
+ }
+ }
+ }
+ }
+ }
}
}
}
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index d723493..90910a1 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -68,7 +68,6 @@
"libaudioclient_aidl_conversion",
"libaudioutils",
"libbinder",
- "libbinder_ndk",
"libcutils",
"liblog",
"libutils",
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 4bc5c8a..d1b1849 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -66,113 +66,183 @@
using media::audio::common::Int;
std::mutex AudioSystem::gMutex;
-sp<IAudioFlinger> AudioSystem::gAudioFlinger;
-sp<IBinder> AudioSystem::gAudioFlingerBinder;
-sp<IAudioFlinger> AudioSystem::gLocalAudioFlinger;
-sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
dynamic_policy_callback AudioSystem::gDynPolicyCallback = NULL;
record_config_callback AudioSystem::gRecordConfigCallback = NULL;
routing_callback AudioSystem::gRoutingCallback = NULL;
vol_range_init_req_callback AudioSystem::gVolRangeInitReqCallback = NULL;
+std::mutex AudioSystem::gApsCallbackMutex;
std::mutex AudioSystem::gErrorCallbacksMutex;
std::set<audio_error_callback> AudioSystem::gAudioErrorCallbacks;
std::mutex AudioSystem::gSoundTriggerMutex;
sp<CaptureStateListenerImpl> AudioSystem::gSoundTriggerCaptureStateListener;
-std::mutex AudioSystem::gAPSMutex;
-sp<IAudioPolicyService> AudioSystem::gAudioPolicyService;
-sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::gAudioPolicyServiceClient;
-
// Sets the Binder for the AudioFlinger service, passed to this client process
// from the system server.
// This allows specific isolated processes to access the audio system. Currently used only for the
// HotwordDetectionService.
-void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
- if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
- ALOGE("setAudioFlingerBinder: received a binder of type %s",
- String8(audioFlinger->getInterfaceDescriptor()).c_str());
- return;
- }
- std::lock_guard _l(gMutex);
- if (gAudioFlinger != nullptr) {
- ALOGW("setAudioFlingerBinder: ignoring; AudioFlinger connection already established.");
- return;
- }
- gAudioFlingerBinder = audioFlinger;
-}
+template <typename ServiceInterface, typename Client, typename AidlInterface,
+ typename ServiceTraits>
+class ServiceHandler {
+public:
+ sp<ServiceInterface> getService(bool canStartThreadPool = true)
+ EXCLUDES(mMutex) NO_THREAD_SAFETY_ANALYSIS { // std::unique_ptr
+ sp<ServiceInterface> service;
+ sp<Client> client;
-status_t AudioSystem::setLocalAudioFlinger(const sp<IAudioFlinger>& af) {
- std::lock_guard _l(gMutex);
- if (gAudioFlinger != nullptr) return INVALID_OPERATION;
- gLocalAudioFlinger = af;
- return OK;
-}
-
-// establish binder interface to AudioFlinger service
-const sp<IAudioFlinger> AudioSystem::getAudioFlingerImpl(bool canStartThreadPool = true) {
- sp<IAudioFlinger> af;
- sp<AudioFlingerClient> afc;
- bool reportNoError = false;
- {
- std::lock_guard _l(gMutex);
- if (gAudioFlinger != nullptr) {
- return gAudioFlinger;
+ bool reportNoError = false;
+ {
+ std::lock_guard _l(mMutex);
+ if (mService != nullptr) {
+ return mService;
+ }
}
- if (gAudioFlingerClient == nullptr) {
- gAudioFlingerClient = sp<AudioFlingerClient>::make();
+ std::unique_lock ul_only1thread(mSingleGetter);
+ std::unique_lock ul(mMutex);
+ if (mService != nullptr) {
+ return mService;
+ }
+ if (mClient == nullptr) {
+ mClient = sp<Client>::make();
} else {
reportNoError = true;
}
+ while (true) {
+ mService = mLocalService;
+ if (mService != nullptr) break;
- if (gLocalAudioFlinger != nullptr) {
- gAudioFlinger = gLocalAudioFlinger;
- } else {
- sp<IBinder> binder;
- if (gAudioFlingerBinder != nullptr) {
- binder = gAudioFlingerBinder;
- } else {
- sp<IServiceManager> sm = defaultServiceManager();
- binder = sm->waitForService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+ sp<IBinder> binder = mBinder;
+ if (binder == nullptr) {
+ sp <IServiceManager> sm = defaultServiceManager();
+ binder = sm->checkService(String16(ServiceTraits::SERVICE_NAME));
if (binder == nullptr) {
- return nullptr;
+ ALOGD("%s: waiting for %s", __func__, ServiceTraits::SERVICE_NAME);
+
+ // if the condition variable is present, setLocalService() and
+ // setBinder() are allowed to use it to notify us.
+ if (mCvGetter == nullptr) {
+ mCvGetter = std::make_shared<std::condition_variable>();
+ }
+ mCvGetter->wait_for(ul, std::chrono::seconds(1));
+ continue;
}
}
- binder->linkToDeath(gAudioFlingerClient);
- const auto afs = interface_cast<media::IAudioFlingerService>(binder);
- LOG_ALWAYS_FATAL_IF(afs == nullptr);
- gAudioFlinger = sp<AudioFlingerClientAdapter>::make(afs);
+ binder->linkToDeath(mClient);
+ auto aidlInterface = interface_cast<AidlInterface>(binder);
+ LOG_ALWAYS_FATAL_IF(aidlInterface == nullptr);
+ if constexpr (std::is_same_v<ServiceInterface, AidlInterface>) {
+ mService = std::move(aidlInterface);
+ } else /* constexpr */ {
+ mService = ServiceTraits::createServiceAdapter(aidlInterface);
+ }
+ break;
}
- afc = gAudioFlingerClient;
- af = gAudioFlinger;
- // Make sure callbacks can be received by gAudioFlingerClient
- if(canStartThreadPool) {
+ if (mCvGetter) mCvGetter.reset(); // remove condition variable.
+ client = mClient;
+ service = mService;
+ // Make sure callbacks can be received by the client
+ if (canStartThreadPool) {
ProcessState::self()->startThreadPool();
}
+ ul.unlock();
+ ul_only1thread.unlock();
+ ServiceTraits::onServiceCreate(service, client);
+ if (reportNoError) AudioSystem::reportError(NO_ERROR);
+ return service;
}
- const int64_t token = IPCThreadState::self()->clearCallingIdentity();
- af->registerClient(afc);
- IPCThreadState::self()->restoreCallingIdentity(token);
- if (reportNoError) reportError(NO_ERROR);
- return af;
-}
+
+ status_t setLocalService(const sp<ServiceInterface>& service) EXCLUDES(mMutex) {
+ std::lock_guard _l(mMutex);
+ // We allow clearing an already-set service, but reject replacing
+ // one non-null service with another (double non-null set).
+ if (mService != nullptr && service != nullptr) return INVALID_OPERATION;
+ mLocalService = service;
+ if (mCvGetter) mCvGetter->notify_one();
+ return OK;
+ }
+
+ sp<Client> getClient() EXCLUDES(mMutex) {
+ const auto service = getService();
+ if (service == nullptr) return nullptr;
+ std::lock_guard _l(mMutex);
+ return mClient;
+ }
+
+ void setBinder(const sp<IBinder>& binder) EXCLUDES(mMutex) {
+ std::lock_guard _l(mMutex);
+ if (mService != nullptr) {
+ ALOGW("%s: ignoring; %s connection already established.",
+ __func__, ServiceTraits::SERVICE_NAME);
+ return;
+ }
+ mBinder = binder;
+ if (mCvGetter) mCvGetter->notify_one();
+ }
+
+ void clearService() EXCLUDES(mMutex) {
+ std::lock_guard _l(mMutex);
+ mService.clear();
+ if (mClient) ServiceTraits::onClearService(mClient);
+ }
+
+private:
+ std::mutex mSingleGetter;
+ std::mutex mMutex;
+ std::shared_ptr<std::condition_variable> mCvGetter GUARDED_BY(mMutex);
+ sp<IBinder> mBinder GUARDED_BY(mMutex);
+ sp<ServiceInterface> mLocalService GUARDED_BY(mMutex);
+ sp<ServiceInterface> mService GUARDED_BY(mMutex);
+ sp<Client> mClient GUARDED_BY(mMutex);
+};
+
+struct AudioFlingerTraits {
+ static void onServiceCreate(
+ const sp<IAudioFlinger>& af, const sp<AudioSystem::AudioFlingerClient>& afc) {
+ const int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ af->registerClient(afc);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ }
+
+ static sp<IAudioFlinger> createServiceAdapter(
+ const sp<media::IAudioFlingerService>& aidlInterface) {
+ return sp<AudioFlingerClientAdapter>::make(aidlInterface);
+ }
+
+ static void onClearService(const sp<AudioSystem::AudioFlingerClient>& afc) {
+ afc->clearIoCache();
+ }
+
+ static constexpr const char* SERVICE_NAME = IAudioFlinger::DEFAULT_SERVICE_NAME;
+};
+
+[[clang::no_destroy]] static constinit ServiceHandler<IAudioFlinger,
+ AudioSystem::AudioFlingerClient, media::IAudioFlingerService,
+ AudioFlingerTraits> gAudioFlingerServiceHandler;
sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
- return getAudioFlingerImpl();
+ return gAudioFlingerServiceHandler.getService();
}
sp<IAudioFlinger> AudioSystem::get_audio_flinger_for_fuzzer() {
- return getAudioFlingerImpl(false);
+ return gAudioFlingerServiceHandler.getService(false /* canStartThreadPool */);
}
-const sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
- // calling get_audio_flinger() will initialize gAudioFlingerClient if needed
- const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return 0;
- std::lock_guard _l(gMutex);
- return gAudioFlingerClient;
+sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
+ return gAudioFlingerServiceHandler.getClient();
+}
+
+void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
+ if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
+ ALOGE("%s: received a binder of type %s",
+ __func__, String8(audioFlinger->getInterfaceDescriptor()).c_str());
+ return;
+ }
+ gAudioFlingerServiceHandler.setBinder(audioFlinger);
+}
+
+status_t AudioSystem::setLocalAudioFlinger(const sp<IAudioFlinger>& af) {
+ return gAudioFlingerServiceHandler.setLocalService(af);
}
sp<AudioIoDescriptor> AudioSystem::getIoDescriptor(audio_io_handle_t ioHandle) {
@@ -557,14 +627,7 @@
}
void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused) {
- {
- std::lock_guard _l(AudioSystem::gMutex);
- AudioSystem::gAudioFlinger.clear();
- }
-
- // clear output handles and stream to output map caches
- clearIoCache();
-
+ gAudioFlingerServiceHandler.clearService();
reportError(DEAD_OBJECT);
ALOGW("AudioFlinger server died!");
@@ -863,44 +926,35 @@
gVolRangeInitReqCallback = cb;
}
-// establish binder interface to AudioPolicy service
-sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() {
- sp<IAudioPolicyService> ap;
- sp<AudioPolicyServiceClient> apc;
- {
- std::lock_guard _l(gAPSMutex);
- if (gAudioPolicyService == 0) {
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder = sm->waitForService(String16("media.audio_policy"));
- if (binder == nullptr) {
- return nullptr;
- }
- if (gAudioPolicyServiceClient == NULL) {
- gAudioPolicyServiceClient = new AudioPolicyServiceClient();
- }
- binder->linkToDeath(gAudioPolicyServiceClient);
- gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
- LOG_ALWAYS_FATAL_IF(gAudioPolicyService == 0);
- apc = gAudioPolicyServiceClient;
- // Make sure callbacks can be received by gAudioPolicyServiceClient
- ProcessState::self()->startThreadPool();
- }
- ap = gAudioPolicyService;
- }
- if (apc != 0) {
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
+struct AudioPolicyTraits {
+ static void onServiceCreate(const sp<IAudioPolicyService>& ap,
+ const sp<AudioSystem::AudioPolicyServiceClient>& apc) {
+ const int64_t token = IPCThreadState::self()->clearCallingIdentity();
ap->registerClient(apc);
ap->setAudioPortCallbacksEnabled(apc->isAudioPortCbEnabled());
ap->setAudioVolumeGroupCallbacksEnabled(apc->isAudioVolumeGroupCbEnabled());
IPCThreadState::self()->restoreCallingIdentity(token);
}
- return ap;
+ static void onClearService(const sp<AudioSystem::AudioPolicyServiceClient>&) {}
+
+ static constexpr const char *SERVICE_NAME = "media.audio_policy";
+};
+
+[[clang::no_destroy]] static constinit ServiceHandler<IAudioPolicyService,
+ AudioSystem::AudioPolicyServiceClient, IAudioPolicyService,
+ AudioPolicyTraits> gAudioPolicyServiceHandler;
+
+status_t AudioSystem::setLocalAudioPolicyService(const sp<IAudioPolicyService>& aps) {
+ return gAudioPolicyServiceHandler.setLocalService(aps);
+}
+
+sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() {
+ return gAudioPolicyServiceHandler.getService();
}
void AudioSystem::clearAudioPolicyService() {
- std::lock_guard _l(gAPSMutex);
- gAudioPolicyService.clear();
+ gAudioPolicyServiceHandler.clearService();
}
// ---------------------------------------------------------------------------
@@ -1501,13 +1555,7 @@
void AudioSystem::clearAudioConfigCache() {
// called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances
ALOGV("clearAudioConfigCache()");
- {
- std::lock_guard _l(gMutex);
- if (gAudioFlingerClient != 0) {
- gAudioFlingerClient->clearIoCache();
- }
- gAudioFlinger.clear();
- }
+ gAudioFlingerServiceHandler.clearService();
clearAudioPolicyService();
}
@@ -1670,12 +1718,11 @@
status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
+ const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (apc == nullptr) return NO_INIT;
- std::lock_guard _l(gAPSMutex);
- if (gAudioPolicyServiceClient == 0) {
- return NO_INIT;
- }
- int ret = gAudioPolicyServiceClient->addAudioPortCallback(callback);
+ std::lock_guard _l(gApsCallbackMutex);
+ const int ret = apc->addAudioPortCallback(callback);
if (ret == 1) {
aps->setAudioPortCallbacksEnabled(true);
}
@@ -1686,12 +1733,11 @@
status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
+ const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (apc == nullptr) return NO_INIT;
- std::lock_guard _l(gAPSMutex);
- if (gAudioPolicyServiceClient == 0) {
- return NO_INIT;
- }
- int ret = gAudioPolicyServiceClient->removeAudioPortCallback(callback);
+ std::lock_guard _l(gApsCallbackMutex);
+ const int ret = apc->removeAudioPortCallback(callback);
if (ret == 0) {
aps->setAudioPortCallbacksEnabled(false);
}
@@ -1701,12 +1747,11 @@
status_t AudioSystem::addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
+ const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (apc == nullptr) return NO_INIT;
- std::lock_guard _l(gAPSMutex);
- if (gAudioPolicyServiceClient == 0) {
- return NO_INIT;
- }
- int ret = gAudioPolicyServiceClient->addAudioVolumeGroupCallback(callback);
+ std::lock_guard _l(gApsCallbackMutex);
+ const int ret = apc->addAudioVolumeGroupCallback(callback);
if (ret == 1) {
aps->setAudioVolumeGroupCallbacksEnabled(true);
}
@@ -1716,12 +1761,11 @@
status_t AudioSystem::removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
+ const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (apc == nullptr) return NO_INIT;
- std::lock_guard _l(gAPSMutex);
- if (gAudioPolicyServiceClient == 0) {
- return NO_INIT;
- }
- int ret = gAudioPolicyServiceClient->removeAudioVolumeGroupCallback(callback);
+ std::lock_guard _l(gApsCallbackMutex);
+ const int ret = apc->removeAudioVolumeGroupCallback(callback);
if (ret == 0) {
aps->setAudioVolumeGroupCallbacksEnabled(false);
}
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 60b08fa..a71bb18 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -242,6 +242,7 @@
legacy.mCbFlags = VALUE_OR_RETURN(aidl2legacy_AudioMixCallbackFlag_uint32_t_mask(aidl.cbFlags));
legacy.mAllowPrivilegedMediaPlaybackCapture = aidl.allowPrivilegedMediaPlaybackCapture;
legacy.mVoiceCommunicationCaptureAllowed = aidl.voiceCommunicationCaptureAllowed;
+ legacy.mToken = aidl.mToken;
return legacy;
}
@@ -265,6 +266,7 @@
aidl.cbFlags = VALUE_OR_RETURN(legacy2aidl_uint32_t_AudioMixCallbackFlag_mask(legacy.mCbFlags));
aidl.allowPrivilegedMediaPlaybackCapture = legacy.mAllowPrivilegedMediaPlaybackCapture;
aidl.voiceCommunicationCaptureAllowed = legacy.mVoiceCommunicationCaptureAllowed;
+ aidl.mToken = legacy.mToken;
return aidl;
}
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index 9c4ccb8..e213f08 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -872,6 +872,18 @@
{ .duration = 0 , .waveFreq = { 0 }, 0, 0}},
.repeatCnt = 3,
.repeatSegment = 0 }, // TONE_NZ_CALL_WAITING
+ { .segments = { { .duration = 500, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 250, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_MY_CONGESTION
+ { .segments = { { .duration = 400, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 400, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0},
+ { .duration = 0, .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 } // TONE_MY_RINGTONE
};
// Used by ToneGenerator::getToneForRegion() to convert user specified supervisory tone type
@@ -976,6 +988,16 @@
TONE_SUP_ERROR, // TONE_SUP_ERROR
TONE_NZ_CALL_WAITING, // TONE_SUP_CALL_WAITING
TONE_GB_RINGTONE // TONE_SUP_RINGTONE
+ },
+ { // MALAYSIA
+ TONE_SUP_DIAL, // TONE_SUP_DIAL
+ TONE_SUP_BUSY, // TONE_SUP_BUSY
+ TONE_MY_CONGESTION, // TONE_SUP_CONGESTION
+ TONE_SUP_RADIO_ACK, // TONE_SUP_RADIO_ACK
+ TONE_SUP_RADIO_NOTAVAIL, // TONE_SUP_RADIO_NOTAVAIL
+ TONE_SUP_ERROR, // TONE_SUP_ERROR
+ TONE_SUP_CALL_WAITING, // TONE_SUP_CALL_WAITING
+ TONE_MY_RINGTONE // TONE_SUP_RINGTONE
}
};
@@ -1055,6 +1077,8 @@
mRegion = TAIWAN;
} else if (strstr(value, "nz") != NULL) {
mRegion = NZ;
+ } else if (strstr(value, "my") != NULL) {
+ mRegion = MY;
} else {
mRegion = CEPT;
}
diff --git a/media/libaudioclient/aidl/android/media/AudioMix.aidl b/media/libaudioclient/aidl/android/media/AudioMix.aidl
index 88b0450..f0c561c 100644
--- a/media/libaudioclient/aidl/android/media/AudioMix.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioMix.aidl
@@ -39,4 +39,6 @@
boolean allowPrivilegedMediaPlaybackCapture;
/** Indicates if the caller can capture voice communication output */
boolean voiceCommunicationCaptureAllowed;
+ /** Identifies the owner of the AudioPolicy that this AudioMix belongs to */
+ IBinder mToken;
}
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index ec35e93..9e4ae54 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -18,6 +18,7 @@
#ifndef ANDROID_AUDIO_POLICY_H
#define ANDROID_AUDIO_POLICY_H
+#include <binder/IBinder.h>
#include <binder/Parcel.h>
#include <media/AudioDeviceTypeAddr.h>
#include <system/audio.h>
@@ -127,6 +128,7 @@
audio_devices_t mDeviceType;
String8 mDeviceAddress;
uint32_t mCbFlags; // flags indicating which callbacks to use, see kCbFlag*
+ sp<IBinder> mToken;
/** Ignore the AUDIO_FLAG_NO_MEDIA_PROJECTION */
bool mAllowPrivilegedMediaPlaybackCapture = false;
/** Indicates if the caller can capture voice communication output */
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 3061633..338534d 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -100,6 +100,10 @@
friend class AudioFlingerClient;
friend class AudioPolicyServiceClient;
friend class CaptureStateListenerImpl;
+ template <typename ServiceInterface, typename Client, typename AidlInterface,
+ typename ServiceTraits>
+ friend class ServiceHandler;
+
public:
// FIXME Declare in binder opcode order, similarly to IAudioFlinger.h and IAudioFlinger.cpp
@@ -407,6 +411,11 @@
// and output configuration cache (gOutputs)
static void clearAudioConfigCache();
+ // Sets a local AudioPolicyService interface to be used by AudioSystem.
+ // This is used by audioserver main() to allow client object initialization
+ // before exposing any interfaces to ServiceManager.
+ static status_t setLocalAudioPolicyService(const sp<media::IAudioPolicyService>& aps);
+
static sp<media::IAudioPolicyService> get_audio_policy_service();
static void clearAudioPolicyService();
@@ -781,8 +790,6 @@
static int32_t getAAudioHardwareBurstMinUsec();
-private:
-
class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
{
public:
@@ -892,24 +899,22 @@
std::set<sp<AudioVolumeGroupCallback>> mAudioVolumeGroupCallbacks GUARDED_BY(mMutex);
};
+ private:
+
static audio_io_handle_t getOutput(audio_stream_type_t stream);
- static const sp<AudioFlingerClient> getAudioFlingerClient();
+ static sp<AudioFlingerClient> getAudioFlingerClient();
static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
- static const sp<IAudioFlinger> getAudioFlingerImpl(bool canStartThreadPool);
// Invokes all registered error callbacks with the given error code.
static void reportError(status_t err);
[[clang::no_destroy]] static std::mutex gMutex;
- [[clang::no_destroy]] static sp<IAudioFlinger> gAudioFlinger GUARDED_BY(gMutex);
- [[clang::no_destroy]] static sp<IBinder> gAudioFlingerBinder GUARDED_BY(gMutex);
- [[clang::no_destroy]] static sp<IAudioFlinger> gLocalAudioFlinger GUARDED_BY(gMutex);
- [[clang::no_destroy]] static sp<AudioFlingerClient> gAudioFlingerClient GUARDED_BY(gMutex);
static dynamic_policy_callback gDynPolicyCallback GUARDED_BY(gMutex);
static record_config_callback gRecordConfigCallback GUARDED_BY(gMutex);
static routing_callback gRoutingCallback GUARDED_BY(gMutex);
static vol_range_init_req_callback gVolRangeInitReqCallback GUARDED_BY(gMutex);
+ [[clang::no_destroy]] static std::mutex gApsCallbackMutex;
[[clang::no_destroy]] static std::mutex gErrorCallbacksMutex;
[[clang::no_destroy]] static std::set<audio_error_callback> gAudioErrorCallbacks
GUARDED_BY(gErrorCallbacksMutex);
@@ -917,12 +922,6 @@
[[clang::no_destroy]] static std::mutex gSoundTriggerMutex;
[[clang::no_destroy]] static sp<CaptureStateListenerImpl> gSoundTriggerCaptureStateListener
GUARDED_BY(gSoundTriggerMutex);
-
- [[clang::no_destroy]] static std::mutex gAPSMutex;
- [[clang::no_destroy]] static sp<media::IAudioPolicyService> gAudioPolicyService
- GUARDED_BY(gAPSMutex);
- [[clang::no_destroy]] static sp<AudioPolicyServiceClient> gAudioPolicyServiceClient
- GUARDED_BY(gAPSMutex);
};
} // namespace android
diff --git a/media/libaudioclient/include/media/ToneGenerator.h b/media/libaudioclient/include/media/ToneGenerator.h
index 46e9501..3e515fc 100644
--- a/media/libaudioclient/include/media/ToneGenerator.h
+++ b/media/libaudioclient/include/media/ToneGenerator.h
@@ -225,11 +225,14 @@
TONE_INDIA_CONGESTION, // Congestion tone: 400 Hz, 250ms ON, 250ms OFF...
TONE_INDIA_CALL_WAITING, // Call waiting tone: 400 Hz, tone repeated in a 0.2s on, 0.1s off, 0.2s on, 7.5s off pattern.
TONE_INDIA_RINGTONE, // Ring tone: 400 Hz tone modulated with 25Hz, 0.4 on 0.2 off 0.4 on 2..0 off
- // TAIWAN supervisory tones
+ // TAIWAN supervisory tones
TONE_TW_RINGTONE, // Ring Tone: 440 Hz + 480 Hz repeated with pattern 1s on, 3s off.
- // NEW ZEALAND supervisory tones
+ // NEW ZEALAND supervisory tones
TONE_NZ_CALL_WAITING, // Call waiting tone: 400 Hz, 0.2s ON, 3s OFF,
// 0.2s ON, 3s OFF, 0.2s ON, 3s OFF, 0.2s ON
+ // MALAYSIA supervisory tones
+ TONE_MY_CONGESTION, // Congestion tone: 425 Hz, 500ms ON, 250ms OFF...
+ TONE_MY_RINGTONE, // Ring tone: 425 Hz, 400ms ON, 200ms OFF, 400ms ON, 2s OFF...
NUM_ALTERNATE_TONES
};
@@ -244,6 +247,7 @@
INDIA,
TAIWAN,
NZ,
+ MY,
CEPT,
NUM_REGIONS
};
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index 8f76f9b..3b2285e 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -17,6 +17,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "AudioRoutingTest"
+#include <string.h>
+
+#include <binder/Binder.h>
#include <binder/ProcessState.h>
#include <cutils/properties.h>
#include <gtest/gtest.h>
@@ -149,6 +152,7 @@
config.sample_rate = 48000;
AudioMix mix(criteria, mixType, config, mixFlag, String8{mAddress.c_str()}, 0);
mix.mDeviceType = deviceType;
+ mix.mToken = sp<BBinder>::make();
mMixes.push(mix);
if (OK == AudioSystem::registerPolicyMixes(mMixes, true)) {
mPolicyMixRegistered = true;
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.cpp b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
index a701852..33fe3ed 100644
--- a/media/libaudiohal/impl/EffectBufferHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
@@ -58,25 +58,14 @@
}
EffectBufferHalAidl::~EffectBufferHalAidl() {
+ if (mAudioBuffer.raw) free(mAudioBuffer.raw);
}
status_t EffectBufferHalAidl::init() {
- int fd = ashmem_create_region("audioEffectAidl", mBufferSize);
- if (fd < 0) {
- ALOGE("%s create ashmem failed %d", __func__, fd);
- return fd;
+ if (0 != posix_memalign(&mAudioBuffer.raw, 32, mBufferSize)) {
+ return NO_MEMORY;
}
- ScopedFileDescriptor tempFd(fd);
- mAudioBuffer.raw = mmap(nullptr /* address */, mBufferSize /* length */, PROT_READ | PROT_WRITE,
- MAP_SHARED, fd, 0 /* offset */);
- if (mAudioBuffer.raw == MAP_FAILED) {
- ALOGE("mmap failed for fd %d", fd);
- mAudioBuffer.raw = nullptr;
- return INVALID_OPERATION;
- }
-
- mMemory = {std::move(tempFd), static_cast<int64_t>(mBufferSize)};
return OK;
}
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.h b/media/libaudiohal/impl/EffectBufferHalAidl.h
index 035314b..cf6031f 100644
--- a/media/libaudiohal/impl/EffectBufferHalAidl.h
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.h
@@ -50,7 +50,6 @@
const size_t mBufferSize;
bool mFrameCountChanged;
void* mExternalData;
- aidl::android::hardware::common::Ashmem mMemory;
audio_buffer_t mAudioBuffer;
// Can not be constructed directly by clients.
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index f4c40e1..24ac2e8 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -697,6 +697,7 @@
inline constexpr char FEATURE_AdaptivePlayback[] = "adaptive-playback";
inline constexpr char FEATURE_EncodingStatistics[] = "encoding-statistics";
inline constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
+inline constexpr char FEATURE_MultipleFrames[] = "multiple-frames";
inline constexpr char FEATURE_PartialFrame[] = "partial-frame";
inline constexpr char FEATURE_QpBounds[] = "qp-bounds";
inline constexpr char FEATURE_SecurePlayback[] = "secure-playback";
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 19f9549..2341af1 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -1,4 +1,3 @@
-
package {
default_applicable_licenses: ["frameworks_av_media_mediaserver_license"],
}
@@ -86,7 +85,16 @@
"-Wall",
],
- vintf_fragments: ["manifest_media_c2_software.xml"],
+ // AIDL is only used when release_aidl_use_unfrozen is true
+ // because the swcodec mainline module is a prebuilt from an
+ // Android U branch in that case.
+ // TODO(b/327508501)
+ vintf_fragments: ["manifest_media_c2_software_hidl.xml"],
+ product_variables: {
+ release_aidl_use_unfrozen: {
+ vintf_fragments: ["manifest_media_c2_software_aidl.xml"],
+ },
+ },
soong_config_variables: {
TARGET_DYNAMIC_64_32_MEDIASERVER: {
diff --git a/media/mediaserver/manifest_media_c2_software_aidl.xml b/media/mediaserver/manifest_media_c2_software_aidl.xml
new file mode 100644
index 0000000..e6bcafa
--- /dev/null
+++ b/media/mediaserver/manifest_media_c2_software_aidl.xml
@@ -0,0 +1,7 @@
+<manifest version="1.0" type="framework">
+ <hal format="aidl">
+ <name>android.hardware.media.c2</name>
+ <version>1</version>
+ <fqname>IComponentStore/software</fqname>
+ </hal>
+</manifest>
diff --git a/media/mediaserver/manifest_media_c2_software.xml b/media/mediaserver/manifest_media_c2_software_hidl.xml
similarity index 68%
rename from media/mediaserver/manifest_media_c2_software.xml
rename to media/mediaserver/manifest_media_c2_software_hidl.xml
index 31dfafb..69a27be 100644
--- a/media/mediaserver/manifest_media_c2_software.xml
+++ b/media/mediaserver/manifest_media_c2_software_hidl.xml
@@ -8,9 +8,4 @@
<instance>software</instance>
</interface>
</hal>
- <hal format="aidl">
- <name>android.hardware.media.c2</name>
- <version>1</version>
- <fqname>IComponentStore/software</fqname>
- </hal>
</manifest>
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 77abaf6..4d07821 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1233,7 +1233,7 @@
&& (state == IDLE || state == STOPPED || state == FLUSHED)) {
mFrameMap.reset();
- if (!isFastTrack() && (isDirect() || isOffloaded())) {
+ if (!isFastTrack()) {
// Start point of track -> sink frame map. If the HAL returns a
// frame position smaller than the first written frame in
// updateTrackFrameInfo, the timestamp can be interpolated
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 598d52d..e8b04ce 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -36,6 +36,7 @@
"src/TypeConverter.cpp",
],
shared_libs: [
+ "android.media.audiopolicy-aconfig-cc",
"audioclient-types-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"libaudioclient_aidl_conversion",
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 6f71ac5..44f84b9 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -235,7 +235,8 @@
&deviceType,
String8(mDevice->address().c_str()),
source,
- flags);
+ static_cast<audio_input_flags_t>(
+ flags & mProfile->getFlags()));
LOG_ALWAYS_FATAL_IF(mDevice->type() != deviceType,
"%s openInput returned device %08x when given device %08x",
__FUNCTION__, mDevice->type(), deviceType);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index dc0f466..d1819fd 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -15,7 +15,7 @@
*/
#define LOG_TAG "APM_AudioPolicyMix"
-// #define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
#include <algorithm>
#include <iterator>
@@ -28,6 +28,9 @@
#include "PolicyAudioPort.h"
#include "IOProfile.h"
#include <AudioOutputDescriptor.h>
+#include <android_media_audiopolicy.h>
+
+namespace audio_flags = android::media::audiopolicy;
namespace android {
namespace {
@@ -190,6 +193,12 @@
mix.mDeviceType, mix.mDeviceAddress.c_str());
return BAD_VALUE;
}
+ if (audio_flags::audio_mix_ownership()) {
+ if (mix.mToken == registeredMix->mToken) {
+ ALOGE("registerMix(): same mix already registered - skipping");
+ return BAD_VALUE;
+ }
+ }
}
if (!areMixCriteriaConsistent(mix.mCriteria)) {
ALOGE("registerMix(): Mix contains inconsistent criteria "
@@ -212,12 +221,21 @@
{
for (size_t i = 0; i < size(); i++) {
const sp<AudioPolicyMix>& registeredMix = itemAt(i);
- if (mix.mDeviceType == registeredMix->mDeviceType
+ if (audio_flags::audio_mix_ownership()) {
+ if (mix.mToken == registeredMix->mToken) {
+ ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
+ mix.mDeviceType, mix.mDeviceAddress.c_str());
+ removeAt(i);
+ return NO_ERROR;
+ }
+ } else {
+ if (mix.mDeviceType == registeredMix->mDeviceType
&& mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0) {
- ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
- mix.mDeviceType, mix.mDeviceAddress.c_str());
- removeAt(i);
- return NO_ERROR;
+ ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
+ mix.mDeviceType, mix.mDeviceAddress.c_str());
+ removeAt(i);
+ return NO_ERROR;
+ }
}
}
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 2761480..3bebb11 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -3678,6 +3678,7 @@
status_t res = NO_ERROR;
bool checkOutputs = false;
sp<HwModule> rSubmixModule;
+ Vector<AudioMix> registeredMixes;
// examine each mix's route type
for (size_t i = 0; i < mixes.size(); i++) {
AudioMix mix = mixes[i];
@@ -3801,11 +3802,19 @@
break;
} else {
checkOutputs = true;
+ registeredMixes.add(mix);
}
}
}
if (res != NO_ERROR) {
- unregisterPolicyMixes(mixes);
+ if (audio_flags::audio_mix_ownership()) {
+ // Only unregister mixes that were actually registered to not accidentally unregister
+ // mixes that already existed previously.
+ unregisterPolicyMixes(registeredMixes);
+ registeredMixes.clear();
+ } else {
+ unregisterPolicyMixes(mixes);
+ }
} else if (checkOutputs) {
checkForDeviceAndOutputChanges();
updateCallAndOutputRouting();
@@ -3816,6 +3825,7 @@
status_t AudioPolicyManager::unregisterPolicyMixes(Vector<AudioMix> mixes)
{
ALOGV("unregisterPolicyMixes() num mixes %zu", mixes.size());
+ status_t endResult = NO_ERROR;
status_t res = NO_ERROR;
bool checkOutputs = false;
sp<HwModule> rSubmixModule;
@@ -3828,6 +3838,7 @@
AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX);
if (rSubmixModule == 0) {
res = INVALID_OPERATION;
+ endResult = INVALID_OPERATION;
continue;
}
}
@@ -3836,6 +3847,7 @@
if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
res = INVALID_OPERATION;
+ endResult = INVALID_OPERATION;
continue;
}
@@ -3848,6 +3860,7 @@
if (res != OK) {
ALOGE("Error making RemoteSubmix device unavailable for mix "
"with type %d, address %s", device, address.c_str());
+ endResult = INVALID_OPERATION;
}
}
}
@@ -3857,15 +3870,24 @@
} else if ((mix.mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
res = INVALID_OPERATION;
+ endResult = INVALID_OPERATION;
continue;
} else {
checkOutputs = true;
}
}
}
- if (res == NO_ERROR && checkOutputs) {
- checkForDeviceAndOutputChanges();
- updateCallAndOutputRouting();
+ if (audio_flags::audio_mix_ownership()) {
+ res = endResult;
+ if (res == NO_ERROR && checkOutputs) {
+ checkForDeviceAndOutputChanges();
+ updateCallAndOutputRouting();
+ }
+ } else {
+ if (res == NO_ERROR && checkOutputs) {
+ checkForDeviceAndOutputChanges();
+ updateCallAndOutputRouting();
+ }
}
return res;
}
@@ -3882,6 +3904,7 @@
policyMix->mFormat, policyMix->mRouteFlags, policyMix->mDeviceAddress,
policyMix->mCbFlags);
_aidl_return.back().mDeviceType = policyMix->mDeviceType;
+ _aidl_return.back().mToken = policyMix->mToken;
}
ALOGVV("%s() returning %zu registered mixes", __func__, _aidl_return->size());
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 71edd57..d67ddb6 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -61,11 +61,6 @@
}
}
-void AudioPolicyEffects::setDefaultDeviceEffects() {
- mDefaultDeviceEffectFuture = std::async(
- std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
-}
-
status_t AudioPolicyEffects::addInputEffects(audio_io_handle_t input,
audio_source_t inputSource,
audio_session_t audioSession)
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index a9628c2..259b84a 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -116,10 +116,8 @@
// Remove the default stream effect from wherever it's attached.
status_t removeStreamDefaultEffect(audio_unique_id_t id) EXCLUDES_AudioPolicyEffects_Mutex;
- // Called by AudioPolicyService::onFirstRef() to load device effects
- // on a separate worker thread.
- // TODO(b/319515492) move this initialization after AudioPolicyService::onFirstRef().
- void setDefaultDeviceEffects();
+ // Initializes the Effects (AudioSystem must be ready as this creates audio client objects).
+ void initDefaultDeviceEffects() EXCLUDES(mDeviceEffectsMutex) EXCLUDES_EffectHandle_Mutex;
private:
@@ -201,11 +199,6 @@
};
- // Called on an async thread because it creates AudioEffects
- // which register with AudioFlinger and AudioPolicy.
- // We must therefore exclude the EffectHandle_Mutex.
- void initDefaultDeviceEffects() EXCLUDES(mDeviceEffectsMutex) EXCLUDES_EffectHandle_Mutex;
-
status_t loadAudioEffectConfig_ll(const sp<EffectsFactoryHalInterface>& effectsFactoryHal)
REQUIRES(mMutex, mDeviceEffectsMutex);
@@ -281,18 +274,6 @@
std::mutex mDeviceEffectsMutex;
std::map<std::string, std::unique_ptr<DeviceEffects>> mDeviceEffects
GUARDED_BY(mDeviceEffectsMutex);
-
- /**
- * Device Effect initialization must be asynchronous: the audio_policy service parses and init
- * effect on first reference. AudioFlinger will handle effect creation and register these
- * effect on audio_policy service.
- *
- * The future is associated with the std::async launched thread - no need to lock as
- * it is only set once on init. Due to the async nature, it is conceivable that
- * some device effects are not available immediately after AudioPolicyService::onFirstRef()
- * while the effects are being created.
- */
- std::future<void> mDefaultDeviceEffectFuture;
};
} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index bc6498a..e95147e 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -313,9 +313,16 @@
}
}
AudioSystem::audioPolicyReady();
- // AudioFlinger will handle effect creation and register these effects on audio_policy
- // service. Hence, audio_policy service must be ready.
- audioPolicyEffects->setDefaultDeviceEffects();
+}
+
+void AudioPolicyService::onAudioSystemReady() {
+ sp<AudioPolicyEffects> audioPolicyEffects;
+ {
+ audio_utils::lock_guard _l(mMutex);
+
+ audioPolicyEffects = mAudioPolicyEffects;
+ }
+ audioPolicyEffects->initDefaultDeviceEffects();
}
void AudioPolicyService::unloadAudioPolicyManager()
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index bd56366..7aa80cf 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -322,6 +322,9 @@
// RefBase
virtual void onFirstRef();
+ // Commence initialization when AudioSystem is ready.
+ void onAudioSystemReady();
+
//
// Helpers for the struct audio_policy_service_ops implementation.
// This is used by the audio policy manager for certain operations that
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index dbc48ae..16f3a9a 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -50,12 +50,12 @@
using aidl_utils::statusTFromBinderStatus;
using android::content::AttributionSourceState;
using binder::Status;
+using internal::ToString;
using media::HeadTrackingMode;
using media::Pose3f;
using media::SensorPoseProvider;
using media::audio::common::HeadTracking;
using media::audio::common::Spatialization;
-using ::android::internal::ToString;
using namespace std::chrono_literals;
@@ -349,7 +349,8 @@
bool activeLevelFound = false;
for (const auto spatializationLevel : spatializationLevels) {
if (!aidl_utils::isValidEnum(spatializationLevel)) {
- ALOGW("%s: ignoring spatializationLevel:%d", __func__, (int)spatializationLevel);
+ ALOGW("%s: ignoring spatializationLevel:%s", __func__,
+ ToString(spatializationLevel).c_str());
continue;
}
if (spatializationLevel == Spatialization::Level::NONE) {
@@ -376,7 +377,8 @@
for (const auto spatializationMode : spatializationModes) {
if (!aidl_utils::isValidEnum(spatializationMode)) {
- ALOGW("%s: ignoring spatializationMode:%d", __func__, (int)spatializationMode);
+ ALOGW("%s: ignoring spatializationMode:%s", __func__,
+ ToString(spatializationMode).c_str());
continue;
}
// we don't detect duplicates.
@@ -413,27 +415,26 @@
return BAD_VALUE;
}
- //TODO b/273373363: use AIDL enum when available
if (com::android::media::audio::dsa_over_bt_le_audio()
&& mSupportsHeadTracking) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
- std::vector<uint8_t> headtrackingConnectionModes;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
+ std::vector<HeadTracking::ConnectionMode> headtrackingConnectionModes;
status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION,
&headtrackingConnectionModes);
if (status == NO_ERROR) {
for (const auto htConnectionMode : headtrackingConnectionModes) {
- if (htConnectionMode < HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED ||
- htConnectionMode > HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL) {
- ALOGW("%s: ignoring HT connection mode:%d", __func__, (int)htConnectionMode);
+ if (htConnectionMode < HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED ||
+ htConnectionMode > HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL) {
+ ALOGW("%s: ignoring HT connection mode:%s", __func__,
+ ToString(htConnectionMode).c_str());
continue;
}
- mSupportedHeadtrackingConnectionModes.insert(
- static_cast<headtracking_connection_t> (htConnectionMode));
+ mSupportedHeadtrackingConnectionModes.insert(htConnectionMode);
}
ALOGW_IF(mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED)
- == mSupportedHeadtrackingConnectionModes.end(),
- "%s: HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED not reported", __func__);
+ HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED) ==
+ mSupportedHeadtrackingConnectionModes.end(),
+ "%s: Headtracking FRAMEWORK_PROCESSED not reported", __func__);
}
}
@@ -560,12 +561,12 @@
}
audio_utils::lock_guard lock(mMutex);
*level = mLevel;
- ALOGV("%s level %d", __func__, (int)*level);
+ ALOGV("%s level %s", __func__, ToString(*level).c_str());
return Status::ok();
}
Status Spatializer::isHeadTrackingSupported(bool *supports) {
- ALOGV("%s mSupportsHeadTracking %d", __func__, mSupportsHeadTracking);
+ ALOGV("%s mSupportsHeadTracking %s", __func__, ToString(mSupportsHeadTracking).c_str());
if (supports == nullptr) {
return binderStatusFromStatusT(BAD_VALUE);
}
@@ -860,7 +861,7 @@
}
void Spatializer::onActualModeChange(HeadTrackingMode mode) {
- std::string modeStr = media::toString(mode);
+ std::string modeStr = ToString(mode);
ALOGV("%s(%s)", __func__, modeStr.c_str());
sp<AMessage> msg = new AMessage(EngineCallbackHandler::kWhatOnActualModeChange, mHandler);
msg->setInt32(EngineCallbackHandler::kModeKey, static_cast<int>(mode));
@@ -868,7 +869,7 @@
}
void Spatializer::onActualModeChangeMsg(HeadTrackingMode mode) {
- ALOGV("%s(%d)", __func__, (int) mode);
+ ALOGV("%s(%s)", __func__, ToString(mode).c_str());
sp<media::ISpatializerHeadTrackingCallback> callback;
HeadTracking::Mode spatializerMode;
{
@@ -887,7 +888,7 @@
spatializerMode = HeadTracking::Mode::RELATIVE_SCREEN;
break;
default:
- LOG_ALWAYS_FATAL("Unknown mode: %d", static_cast<int>(mode));
+ LOG_ALWAYS_FATAL("Unknown mode: %s", ToString(mode).c_str());
}
}
mActualHeadTrackingMode = spatializerMode;
@@ -901,7 +902,7 @@
}
}
callback = mHeadTrackingCallback;
- mLocalLog.log("%s: updating mode to %s", __func__, media::toString(mode).c_str());
+ mLocalLog.log("%s: updating mode to %s", __func__, ToString(mode).c_str());
}
if (callback != nullptr) {
callback->onHeadTrackingModeChanged(spatializerMode);
@@ -1059,24 +1060,23 @@
}
}
-//TODO b/273373363: use AIDL enum when available
audio_latency_mode_t Spatializer::selectHeadtrackingConnectionMode_l() {
if (!com::android::media::audio::dsa_over_bt_le_audio()) {
return AUDIO_LATENCY_MODE_LOW;
}
// mSupportedLatencyModes is ordered according to system preferences loaded in
// mOrderedLowLatencyModes
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
audio_latency_mode_t requestedLatencyMode = mSupportedLatencyModes[0];
if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
if (mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL)
+ HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL)
!= mSupportedHeadtrackingConnectionModes.end()) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL;
} else if (mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW)
+ HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW)
!= mSupportedHeadtrackingConnectionModes.end()) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW;
} else {
// if the engine does not support direct reading of IMU data, do not allow
// DYNAMIC_SPATIAL_AUDIO_HARDWARE mode and fallback to next mode
@@ -1220,7 +1220,7 @@
base::StringAppendF(&ss, " %s", ToString(mode).c_str());
}
base::StringAppendF(&ss, "], Desired: %s, Actual %s\n",
- media::toString(mDesiredHeadTrackingMode).c_str(),
+ ToString(mDesiredHeadTrackingMode).c_str(),
ToString(mActualHeadTrackingMode).c_str());
base::StringAppendF(&ss, "%smSpatializationModes: [", prefixSpace.c_str());
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 24788dc..355df18 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -486,11 +486,13 @@
bool mSupportsHeadTracking;
/** List of supported headtracking connection modes reported by the spatializer.
* If the list is empty, the spatializer does not support any optional connection
- * mode and mode HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED is assumed.
+ * mode and mode HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED is assumed.
*/
- std::unordered_set<headtracking_connection_t> mSupportedHeadtrackingConnectionModes;
+ std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+ mSupportedHeadtrackingConnectionModes;
/** Selected HT connection mode when several modes are supported by the spatializer */
- headtracking_connection_t mHeadtrackingConnectionMode;
+ media::audio::common::HeadTracking::ConnectionMode mHeadtrackingConnectionMode =
+ media::audio::common::HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
// Looper thread for mEngine callbacks
class EngineCallbackHandler;
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index e883e10..e02c93a 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -1330,6 +1330,10 @@
std::string mixAddress, const audio_config_t& audioConfig,
const std::vector<AudioMixMatchCriterion>& matchCriteria);
+ status_t addPolicyMix(const AudioMix& mix);
+
+ status_t removePolicyMixes(const Vector<AudioMix>& mixes);
+
std::vector<AudioMix> getRegisteredPolicyMixes();
void clearPolicyMix();
void addPolicyMixAndStartInputForLoopback(
@@ -1367,7 +1371,11 @@
myAudioMix.mDeviceType = deviceType;
// Clear mAudioMix before add new one to make sure we don't add already exist mixes.
mAudioMixes.clear();
- mAudioMixes.add(myAudioMix);
+ return addPolicyMix(myAudioMix);
+}
+
+status_t AudioPolicyManagerTestDynamicPolicy::addPolicyMix(const AudioMix& mix) {
+ mAudioMixes.add(mix);
// As the policy mixes registration may fail at some case,
// caller need to check the returned status.
@@ -1375,6 +1383,11 @@
return ret;
}
+status_t AudioPolicyManagerTestDynamicPolicy::removePolicyMixes(const Vector<AudioMix>& mixes) {
+ status_t ret = mManager->unregisterPolicyMixes(mixes);
+ return ret;
+}
+
std::vector<AudioMix> AudioPolicyManagerTestDynamicPolicy::getRegisteredPolicyMixes() {
std::vector<AudioMix> audioMixes;
if (mManager != nullptr) {
@@ -1539,6 +1552,98 @@
TEST_F_WITH_FLAGS(
AudioPolicyManagerTestDynamicPolicy,
+ RegisterInvalidMixesDoesNotImpactPriorMixes,
+ REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api),
+ ACONFIG_FLAG(android::media::audiopolicy, audio_mix_ownership))
+) {
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+
+ std::vector<AudioMixMatchCriterion> validMixMatchCriteria = {
+ createUidCriterion(/*uid=*/42),
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+ AudioMix validAudioMix(validMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+ MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+ validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+ mAudioMixes.clear();
+ mAudioMixes.add(validAudioMix);
+ status_t ret = mManager->registerPolicyMixes(mAudioMixes);
+
+ ASSERT_EQ(NO_ERROR, ret);
+
+ std::vector<AudioMix> registeredMixes = getRegisteredPolicyMixes();
+ ASSERT_EQ(1, registeredMixes.size());
+
+ std::vector<AudioMixMatchCriterion> invalidMixMatchCriteria = {
+ createUidCriterion(/*uid=*/42),
+ createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+
+ AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+ MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+ validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+ mAudioMixes.add(invalidAudioMix);
+ ret = mManager->registerPolicyMixes(mAudioMixes);
+
+ ASSERT_EQ(INVALID_OPERATION, ret);
+
+ std::vector<AudioMix> remainingMixes = getRegisteredPolicyMixes();
+ ASSERT_EQ(registeredMixes.size(), remainingMixes.size());
+}
+
+TEST_F_WITH_FLAGS(
+ AudioPolicyManagerTestDynamicPolicy,
+ UnregisterInvalidMixesReturnsError,
+ REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api),
+ ACONFIG_FLAG(android::media::audiopolicy, audio_mix_ownership))
+) {
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+
+ std::vector<AudioMixMatchCriterion> validMixMatchCriteria = {
+ createUidCriterion(/*uid=*/42),
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+ AudioMix validAudioMix(validMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+ MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+ validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+ mAudioMixes.clear();
+ mAudioMixes.add(validAudioMix);
+ status_t ret = mManager->registerPolicyMixes(mAudioMixes);
+
+ ASSERT_EQ(NO_ERROR, ret);
+
+ std::vector<AudioMix> registeredMixes = getRegisteredPolicyMixes();
+ ASSERT_EQ(1, registeredMixes.size());
+
+ std::vector<AudioMixMatchCriterion> invalidMixMatchCriteria = {
+ createUidCriterion(/*uid=*/42),
+ createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+
+ AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+ MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+ validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+ Vector<AudioMix> mixes;
+ mixes.add(invalidAudioMix);
+ mixes.add(validAudioMix);
+ ret = removePolicyMixes(mixes);
+
+ ASSERT_EQ(INVALID_OPERATION, ret);
+
+ std::vector<AudioMix> remainingMixes = getRegisteredPolicyMixes();
+ EXPECT_THAT(remainingMixes, IsEmpty());
+}
+
+TEST_F_WITH_FLAGS(
+ AudioPolicyManagerTestDynamicPolicy,
GetRegisteredPolicyMixes,
REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api))
) {
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index ebe771e..d0aaa8c 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -2353,12 +2353,8 @@
return true;
} else if (mSensorPrivacyPolicy->getCameraPrivacyState() == SensorPrivacyManager::DISABLED) {
return false;
- } else if ((mSensorPrivacyPolicy->getCameraPrivacyState()
- == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_HELPFUL_APPS) ||
- (mSensorPrivacyPolicy->getCameraPrivacyState()
- == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_REQUIRED_APPS) ||
- (mSensorPrivacyPolicy->getCameraPrivacyState() ==
- SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_APPS)) {
+ } else if (mSensorPrivacyPolicy->getCameraPrivacyState()
+ == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
if (hasPermissionsForCameraPrivacyAllowlist(callingPid, callingUid)) {
return false;
} else {
@@ -4903,9 +4899,7 @@
// if sensor privacy is enabled then block all clients from accessing the camera
if (state == SensorPrivacyManager::ENABLED) {
service->blockAllClients();
- } else if ((state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_APPS)
- || (state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_HELPFUL_APPS)
- || (state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_REQUIRED_APPS)) {
+ } else if (state == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
service->blockPrivacyEnabledClients();
}
return binder::Status::ok();
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index ed1e92c..7636cbd 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -81,8 +81,6 @@
constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};
-constexpr float kAspectRatioEpsilon = 0.05;
-
const std::array<Resolution, 5> kStandardJpegThumbnailSizes{
Resolution(176, 144), Resolution(240, 144), Resolution(256, 144),
Resolution(240, 160), Resolution(240, 180)};
@@ -91,14 +89,15 @@
PixelFormat::IMPLEMENTATION_DEFINED, PixelFormat::YCBCR_420_888,
PixelFormat::BLOB};
-bool isApproximatellySameAspectRatio(const Resolution r1, const Resolution r2) {
- float aspectRatio1 =
- static_cast<float>(r1.width) / static_cast<float>(r1.height);
- float aspectRatio2 =
- static_cast<float>(r2.width) / static_cast<float>(r2.height);
-
- return abs(aspectRatio1 - aspectRatio2) < kAspectRatioEpsilon;
-}
+// The resolutions below will be used to extend the set of supported output formats.
+// All resolutions with lower pixel count and same aspect ratio as some supported
+// input resolution will be added to the set of supported output resolutions.
+const std::array<Resolution, 10> kOutputResolutions{
+ Resolution(320, 240), Resolution(640, 360), Resolution(640, 480),
+ Resolution(720, 480), Resolution(720, 576), Resolution(800, 600),
+ Resolution(1024, 576), Resolution(1280, 720), Resolution(1280, 960),
+ Resolution(1280, 1080),
+};
std::vector<Resolution> getSupportedJpegThumbnailSizes(
const std::vector<SupportedStreamConfiguration>& configs) {
@@ -180,6 +179,36 @@
}
}
+ std::map<Resolution, int> additionalResolutionToMaxFpsMap;
+ // Add additional resolutions we can support by downscaling input streams with
+ // same aspect ratio.
+ for (const Resolution& outputResolution : kOutputResolutions) {
+ for (const auto& [resolution, maxFps] : resolutionToMaxFpsMap) {
+ if (resolutionToMaxFpsMap.find(outputResolution) !=
+ resolutionToMaxFpsMap.end()) {
+ // Resolution is already in the map, skip it.
+ continue;
+ }
+
+ if (outputResolution < resolution &&
+ isApproximatellySameAspectRatio(outputResolution, resolution)) {
+ // Lower resolution with same aspect ratio, we can achieve this by
+ // downscaling, let's add it to the map.
+ ALOGD(
+ "Extending set of output resolutions with %dx%d which has same "
+ "aspect ratio as supported input %dx%d.",
+ outputResolution.width, outputResolution.height, resolution.width,
+ resolution.height);
+ additionalResolutionToMaxFpsMap[outputResolution] = maxFps;
+ break;
+ }
+ }
+ }
+
+ // Add all resolutions we can achieve by downscaling to the map.
+ resolutionToMaxFpsMap.insert(additionalResolutionToMaxFpsMap.begin(),
+ additionalResolutionToMaxFpsMap.end());
+
return resolutionToMaxFpsMap;
}
@@ -250,7 +279,8 @@
.setRequestPartialResultCount(1)
.setPipelineMaxDepth(kPipelineMaxDepth)
.setSyncMaxLatency(ANDROID_SYNC_MAX_LATENCY_UNKNOWN)
- .setAvailableRequestKeys({ANDROID_CONTROL_CAPTURE_INTENT,
+ .setAvailableRequestKeys({ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ ANDROID_CONTROL_CAPTURE_INTENT,
ANDROID_CONTROL_AE_MODE,
ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
@@ -265,19 +295,20 @@
ANDROID_CONTROL_SCENE_MODE,
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
ANDROID_CONTROL_ZOOM_RATIO,
- ANDROID_STATISTICS_FACE_DETECT_MODE,
ANDROID_FLASH_MODE,
ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
ANDROID_JPEG_QUALITY,
- ANDROID_JPEG_THUMBNAIL_QUALITY})
+ ANDROID_JPEG_THUMBNAIL_QUALITY,
+ ANDROID_NOISE_REDUCTION_MODE,
+ ANDROID_STATISTICS_FACE_DETECT_MODE})
.setAvailableResultKeys(
- {ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
- ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AWB_MODE,
- ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
- ANDROID_FLASH_MODE, ANDROID_FLASH_STATE,
+ {ANDROID_COLOR_CORRECTION_ABERRATION_MODE, ANDROID_CONTROL_AE_MODE,
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
+ ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_EFFECT_MODE,
+ ANDROID_CONTROL_MODE, ANDROID_FLASH_MODE, ANDROID_FLASH_STATE,
ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, ANDROID_JPEG_QUALITY,
- ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_SENSOR_TIMESTAMP,
- ANDROID_LENS_FOCAL_LENGTH})
+ ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_LENS_FOCAL_LENGTH,
+ ANDROID_SENSOR_TIMESTAMP, ANDROID_NOISE_REDUCTION_MODE})
.setAvailableCapabilities(
{ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE});
@@ -399,6 +430,22 @@
return false;
}
+ const std::vector<Stream>& streams = streamConfiguration.streams;
+
+ Resolution firstStreamResolution(streams[0].width, streams[0].height);
+ auto isSameAspectRatioAsFirst = [firstStreamResolution](const Stream& stream) {
+ return isApproximatellySameAspectRatio(
+ firstStreamResolution, Resolution(stream.width, stream.height));
+ };
+ if (!std::all_of(streams.begin(), streams.end(), isSameAspectRatioAsFirst)) {
+ ALOGW(
+ "%s: Requested streams do not have same aspect ratio. Different aspect "
+ "ratios are currently "
+ "not supported by virtual camera. Stream configuration: %s",
+ __func__, streamConfiguration.toString().c_str());
+ return false;
+ }
+
int numberOfProcessedStreams = 0;
int numberOfStallStreams = 0;
for (const Stream& stream : streamConfiguration.streams) {
@@ -421,9 +468,13 @@
numberOfProcessedStreams++;
}
+ Resolution requestedResolution(stream.width, stream.height);
auto matchesSupportedInputConfig =
- [&stream](const SupportedStreamConfiguration& config) {
- return stream.width == config.width && stream.height == config.height;
+ [requestedResolution](const SupportedStreamConfiguration& config) {
+ Resolution supportedInputResolution(config.width, config.height);
+ return requestedResolution <= supportedInputResolution &&
+ isApproximatellySameAspectRatio(requestedResolution,
+ supportedInputResolution);
};
if (std::none_of(mSupportedInputConfigurations.begin(),
mSupportedInputConfigurations.end(),
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 615e449..9b0fc07 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include "system/camera_metadata.h"
#define LOG_TAG "VirtualCameraRenderThread"
#include "VirtualCameraRenderThread.h"
@@ -93,6 +94,8 @@
const Resolution reportedSensorSize) {
std::unique_ptr<CameraMetadata> metadata =
MetadataBuilder()
+ .setAberrationCorrectionMode(
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
.setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
.setControlAePrecaptureTrigger(
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
@@ -109,6 +112,7 @@
.setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
requestSettings.thumbnailResolution.height)
.setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
+ .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
.setPipelineDepth(kPipelineDepth)
.setSensorTimestamp(timestamp)
.build();
@@ -577,37 +581,36 @@
std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
- std::optional<size_t> compressedSize;
- if (gBuffer != nullptr) {
- if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
- // This should never happen since we're allocating the temporary buffer
- // with YUV420 layout above.
- ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
- gBuffer->getPixelFormat());
- return cameraStatus(Status::INTERNAL_ERROR);
- }
-
- YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
- if (yCbCrLock.getStatus() != OK) {
- return cameraStatus(Status::INTERNAL_ERROR);
- }
-
- std::vector<uint8_t> app1ExifData =
- createExif(Resolution(stream->width, stream->height),
- createThumbnail(requestSettings.thumbnailResolution,
- requestSettings.thumbnailJpegQuality));
- compressedSize = compressJpeg(
- gBuffer->getWidth(), gBuffer->getHeight(), requestSettings.jpegQuality,
- *yCbCrLock, app1ExifData, stream->bufferSize - sizeof(CameraBlob),
- (*planesLock).planes[0].data);
- } else {
- std::vector<uint8_t> app1ExifData =
- createExif(Resolution(stream->width, stream->height));
- compressedSize = compressBlackJpeg(
- stream->width, stream->height, requestSettings.jpegQuality, app1ExifData,
- stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);
+ if (gBuffer == nullptr) {
+ ALOGE(
+ "%s: Encountered invalid temporary buffer while rendering JPEG "
+ "into BLOB stream",
+ __func__);
+ return cameraStatus(Status::INTERNAL_ERROR);
}
+ if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ // This should never happen since we're allocating the temporary buffer
+ // with YUV420 layout above.
+ ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
+ gBuffer->getPixelFormat());
+ return cameraStatus(Status::INTERNAL_ERROR);
+ }
+
+ YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
+ if (yCbCrLock.getStatus() != OK) {
+ return cameraStatus(Status::INTERNAL_ERROR);
+ }
+
+ std::vector<uint8_t> app1ExifData =
+ createExif(Resolution(stream->width, stream->height),
+ createThumbnail(requestSettings.thumbnailResolution,
+ requestSettings.thumbnailJpegQuality));
+ std::optional<size_t> compressedSize = compressJpeg(
+ gBuffer->getWidth(), gBuffer->getHeight(), requestSettings.jpegQuality,
+ *yCbCrLock, app1ExifData, stream->bufferSize - sizeof(CameraBlob),
+ (*planesLock).planes[0].data);
+
if (!compressedSize.has_value()) {
ALOGE("%s: Failed to compress JPEG image", __func__);
return cameraStatus(Status::INTERNAL_ERROR);
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index dfa71f3..2a691c1 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -21,6 +21,7 @@
#include <algorithm>
#include <atomic>
#include <chrono>
+#include <cmath>
#include <cstddef>
#include <cstdint>
#include <cstring>
@@ -39,6 +40,7 @@
#include "VirtualCameraDevice.h"
#include "VirtualCameraRenderThread.h"
#include "VirtualCameraStream.h"
+#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferCache.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
@@ -48,6 +50,7 @@
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/RequestTemplate.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
+#include "aidl/android/hardware/camera/device/Stream.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "aidl/android/hardware/camera/device/StreamConfiguration.h"
#include "aidl/android/hardware/camera/device/StreamRotation.h"
@@ -106,9 +109,6 @@
// Maximum number of buffers to use per single stream.
constexpr size_t kMaxStreamBuffers = 2;
-constexpr int32_t kDefaultJpegQuality = 80;
-constexpr int32_t kDefaultJpegThumbnailQuality = 70;
-
// Thumbnail size (0,0) correspods to disabling thumbnail.
const Resolution kDefaultJpegThumbnailSize(0, 0);
@@ -142,6 +142,8 @@
int maxFps = getMaxFps(inputConfigs);
auto metadata =
MetadataBuilder()
+ .setAberrationCorrectionMode(
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
.setControlCaptureIntent(requestTemplateToIntent(type))
.setControlMode(ANDROID_CONTROL_MODE_AUTO)
.setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
@@ -160,6 +162,7 @@
.setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
.setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
.setJpegThumbnailSize(0, 0)
+ .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
.build();
if (metadata == nullptr) {
ALOGE("%s: Failed to construct metadata for default request type %s",
@@ -201,6 +204,55 @@
}));
}
+Resolution resolutionFromStream(const Stream& stream) {
+ return Resolution(stream.width, stream.height);
+}
+
+Resolution resolutionFromInputConfig(
+ const SupportedStreamConfiguration& inputConfig) {
+ return Resolution(inputConfig.width, inputConfig.height);
+}
+
+std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
+ const std::vector<Stream>& requestedStreams,
+ const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
+ Stream maxResolutionStream = getHighestResolutionStream(requestedStreams);
+ Resolution maxResolution = resolutionFromStream(maxResolutionStream);
+
+ // Find best fitting stream to satisfy all requested streams:
+ // Best fitting => same or higher resolution as input with lowest pixel count
+ // difference and same aspect ratio.
+ auto isBetterInputConfig = [maxResolution](
+ const SupportedStreamConfiguration& configA,
+ const SupportedStreamConfiguration& configB) {
+ int maxResPixelCount = maxResolution.width * maxResolution.height;
+ int pixelCountDiffA =
+ std::abs((configA.width * configA.height) - maxResPixelCount);
+ int pixelCountDiffB =
+ std::abs((configB.width * configB.height) - maxResPixelCount);
+
+ return pixelCountDiffA < pixelCountDiffB;
+ };
+
+ std::optional<SupportedStreamConfiguration> bestConfig;
+ for (const SupportedStreamConfiguration& inputConfig : supportedInputConfigs) {
+ Resolution inputConfigResolution = resolutionFromInputConfig(inputConfig);
+ if (inputConfigResolution < maxResolution ||
+ !isApproximatellySameAspectRatio(inputConfigResolution, maxResolution)) {
+ // We don't want to upscale from lower resolution, or use different aspect
+ // ratio, skip.
+ continue;
+ }
+
+ if (!bestConfig.has_value() ||
+ isBetterInputConfig(inputConfig, bestConfig.value())) {
+ bestConfig = inputConfig;
+ }
+ }
+
+ return bestConfig;
+}
+
RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
return RequestSettings{
.jpegQuality = getJpegQuality(metadata).value_or(
@@ -276,15 +328,13 @@
halStreams.clear();
halStreams.resize(in_requestedConfiguration.streams.size());
- sp<Surface> inputSurface = nullptr;
- int inputWidth;
- int inputHeight;
-
if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
ALOGE("%s: Requested stream configuration is not supported", __func__);
return cameraStatus(Status::ILLEGAL_ARGUMENT);
}
+ sp<Surface> inputSurface = nullptr;
+ std::optional<SupportedStreamConfiguration> inputConfig;
{
std::lock_guard<std::mutex> lock(mLock);
for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
@@ -294,14 +344,20 @@
}
}
- Stream maxResStream = getHighestResolutionStream(streams);
- inputWidth = maxResStream.width;
- inputHeight = maxResStream.height;
+ inputConfig = pickInputConfigurationForStreams(
+ streams, virtualCamera->getInputConfigs());
+ if (!inputConfig.has_value()) {
+ ALOGE(
+ "%s: Failed to pick any input configuration for stream configuration "
+ "request: %s",
+ __func__, in_requestedConfiguration.toString().c_str());
+ return cameraStatus(Status::ILLEGAL_ARGUMENT);
+ }
if (mRenderThread == nullptr) {
// If there's no client callback, start camera in test mode.
const bool testMode = mVirtualCameraClientCallback == nullptr;
mRenderThread = std::make_unique<VirtualCameraRenderThread>(
- mSessionContext, Resolution(inputWidth, inputHeight),
+ mSessionContext, resolutionFromInputConfig(*inputConfig),
virtualCamera->getMaxInputResolution(), mCameraDeviceCallback,
testMode);
mRenderThread->start();
@@ -315,7 +371,7 @@
// create single texture.
mVirtualCameraClientCallback->onStreamConfigured(
/*streamId=*/0, aidl::android::view::Surface(inputSurface.get()),
- inputWidth, inputHeight, Format::YUV_420_888);
+ inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
}
return ndk::ScopedAStatus::ok();
diff --git a/services/camera/virtualcamera/flags/Android.bp b/services/camera/virtualcamera/flags/Android.bp
index 5fa8852..5fa53d8 100644
--- a/services/camera/virtualcamera/flags/Android.bp
+++ b/services/camera/virtualcamera/flags/Android.bp
@@ -35,3 +35,27 @@
export_include_dirs: ["."],
defaults: ["virtual_device_build_flags_defaults"],
}
+
+soong_config_module_type {
+ name: "virtual_device_build_flags_java_library",
+ module_type: "java_library",
+ config_namespace: "vdm",
+ bool_variables: [
+ "virtual_camera_service_enabled",
+ ],
+ properties: [
+ "srcs",
+ ],
+}
+
+virtual_device_build_flags_java_library {
+ name: "virtual_device_build_flag_java",
+ soong_config_variables: {
+ virtual_camera_service_enabled: {
+ srcs: ["java/enabled/**/*.java"],
+ conditions_default: {
+ srcs: ["java/disabled/**/*.java"],
+ },
+ },
+ },
+}
diff --git a/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java b/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
new file mode 100644
index 0000000..128d93c
--- /dev/null
+++ b/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.companion.virtualdevice.flags;
+
+/** This file is included only if RELEASE_PACKAGE_VIRTUAL_CAMERA build flag isn't set.*/
+public class VirtualCameraServiceBuildFlag {
+
+ public static boolean isVirtualCameraServiceBuildFlagEnabled() {
+ return false;
+ }
+}
diff --git a/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java b/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
new file mode 100644
index 0000000..02816fb
--- /dev/null
+++ b/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.companion.virtualdevice.flags;
+
+/** This file is included only if RELEASE_PACKAGE_VIRTUAL_CAMERA build flag is set.*/
+public class VirtualCameraServiceBuildFlag {
+
+ public static boolean isVirtualCameraServiceBuildFlagEnabled() {
+ return true;
+ }
+}
diff --git a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
index 9146d8a..ad9d83b 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
@@ -55,6 +55,10 @@
camera_metadata_enum_android_scaler_available_stream_configurations_t;
constexpr int kCameraId = 42;
+constexpr int kQvgaWidth = 320;
+constexpr int kQvgaHeight = 240;
+constexpr int k360pWidth = 640;
+constexpr int k360pHeight = 360;
constexpr int kVgaWidth = 640;
constexpr int kVgaHeight = 480;
constexpr int kHdWidth = 1280;
@@ -79,7 +83,8 @@
const int width;
const int height;
const int pixelFormat;
- const metadata_stream_t streamConfiguration;
+ const metadata_stream_t streamConfiguration =
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
};
bool operator==(const AvailableStreamConfiguration& a,
@@ -173,24 +178,33 @@
.lensFacing = LensFacing::FRONT},
.expectedAvailableStreamConfigs =
{AvailableStreamConfiguration{
- .width = kVgaWidth,
- .height = kVgaHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+ AvailableStreamConfiguration{
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+ AvailableStreamConfiguration{
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
AvailableStreamConfiguration{
.width = kVgaWidth,
.height = kVgaHeight,
.pixelFormat =
- ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
AvailableStreamConfiguration{
.width = kVgaWidth,
.height = kVgaHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}}},
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+ AvailableStreamConfiguration{
+ .width = kVgaWidth,
+ .height = kVgaHeight,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB}}},
VirtualCameraConfigTestParam{
.inputConfig =
VirtualCameraConfiguration{
@@ -210,43 +224,70 @@
.lensFacing = LensFacing::BACK},
.expectedAvailableStreamConfigs = {
AvailableStreamConfiguration{
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+ AvailableStreamConfiguration{
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+ AvailableStreamConfiguration{
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+ AvailableStreamConfiguration{
+ .width = 640,
+ .height = 360,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+ AvailableStreamConfiguration{
+ .width = 640,
+ .height = 360,
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+ AvailableStreamConfiguration{
+ .width = 640,
+ .height = 360,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+ AvailableStreamConfiguration{
.width = kVgaWidth,
.height = kVgaHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
AvailableStreamConfiguration{
.width = kVgaWidth,
.height = kVgaHeight,
.pixelFormat =
- ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
AvailableStreamConfiguration{
.width = kVgaWidth,
.height = kVgaHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+ AvailableStreamConfiguration{
+ .width = 1024,
+ .height = 576,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+ AvailableStreamConfiguration{
+ .width = 1024,
+ .height = 576,
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+ AvailableStreamConfiguration{
+ .width = 1024,
+ .height = 576,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
AvailableStreamConfiguration{
.width = kHdWidth,
.height = kHdHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
AvailableStreamConfiguration{
.width = kHdWidth,
.height = kHdHeight,
.pixelFormat =
- ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
AvailableStreamConfiguration{
.width = kHdWidth,
.height = kHdHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}}}));
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB}}}));
class VirtualCameraDeviceTest : public ::testing::Test {
public:
diff --git a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
index 1af8b80..5f313a0 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
@@ -37,6 +37,8 @@
namespace virtualcamera {
namespace {
+constexpr int kQvgaWidth = 320;
+constexpr int kQvgaHeight = 240;
constexpr int kVgaWidth = 640;
constexpr int kVgaHeight = 480;
constexpr int kSvgaWidth = 800;
@@ -104,32 +106,13 @@
MOCK_METHOD(ndk::ScopedAStatus, onStreamClosed, (int), (override));
};
-class VirtualCameraSessionTest : public ::testing::Test {
+class VirtualCameraSessionTestBase : public ::testing::Test {
public:
- void SetUp() override {
+ virtual void SetUp() override {
mMockCameraDeviceCallback =
ndk::SharedRefBase::make<MockCameraDeviceCallback>();
mMockVirtualCameraClientCallback =
ndk::SharedRefBase::make<MockVirtualCameraCallback>();
- mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
- kCameraId,
- VirtualCameraConfiguration{
- .supportedStreamConfigs = {SupportedStreamConfiguration{
- .width = kVgaWidth,
- .height = kVgaHeight,
- .pixelFormat = Format::YUV_420_888,
- .maxFps = kMaxFps},
- SupportedStreamConfiguration{
- .width = kSvgaWidth,
- .height = kSvgaHeight,
- .pixelFormat = Format::YUV_420_888,
- .maxFps = kMaxFps}},
- .virtualCameraCallback = nullptr,
- .sensorOrientation = SensorOrientation::ORIENTATION_0,
- .lensFacing = LensFacing::FRONT});
- mVirtualCameraSession = ndk::SharedRefBase::make<VirtualCameraSession>(
- mVirtualCameraDevice, mMockCameraDeviceCallback,
- mMockVirtualCameraClientCallback);
// Explicitly defining default actions below to prevent gmock from
// default-constructing ndk::ScopedAStatus, because default-constructed
@@ -155,6 +138,35 @@
protected:
std::shared_ptr<MockCameraDeviceCallback> mMockCameraDeviceCallback;
std::shared_ptr<MockVirtualCameraCallback> mMockVirtualCameraClientCallback;
+};
+
+class VirtualCameraSessionTest : public VirtualCameraSessionTestBase {
+ public:
+ void SetUp() override {
+ VirtualCameraSessionTestBase::SetUp();
+
+ mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
+ kCameraId,
+ VirtualCameraConfiguration{
+ .supportedStreamConfigs = {SupportedStreamConfiguration{
+ .width = kVgaWidth,
+ .height = kVgaHeight,
+ .pixelFormat = Format::YUV_420_888,
+ .maxFps = kMaxFps},
+ SupportedStreamConfiguration{
+ .width = kSvgaWidth,
+ .height = kSvgaHeight,
+ .pixelFormat = Format::YUV_420_888,
+ .maxFps = kMaxFps}},
+ .virtualCameraCallback = mMockVirtualCameraClientCallback,
+ .sensorOrientation = SensorOrientation::ORIENTATION_0,
+ .lensFacing = LensFacing::FRONT});
+ mVirtualCameraSession = ndk::SharedRefBase::make<VirtualCameraSession>(
+ mVirtualCameraDevice, mMockCameraDeviceCallback,
+ mMockVirtualCameraClientCallback);
+ }
+
+ protected:
std::shared_ptr<VirtualCameraDevice> mVirtualCameraDevice;
std::shared_ptr<VirtualCameraSession> mVirtualCameraSession;
};
@@ -272,6 +284,97 @@
Eq(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT)));
}
+TEST_F(VirtualCameraSessionTest, ConfigureWithDifferentAspectRatioFails) {
+ StreamConfiguration streamConfiguration;
+ streamConfiguration.streams = {
+ createStream(kStreamId, kVgaWidth, kVgaHeight, PixelFormat::YCBCR_420_888),
+ createStream(kSecondStreamId, kVgaHeight, kVgaWidth,
+ PixelFormat::YCBCR_420_888)};
+
+ std::vector<HalStream> halStreams;
+
+  // Expect configuration attempt returns ILLEGAL_ARGUMENT service specific code.
+ EXPECT_THAT(
+ mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+ .getServiceSpecificError(),
+ Eq(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT)));
+}
+
+class VirtualCameraSessionInputChoiceTest : public VirtualCameraSessionTestBase {
+ public:
+ std::shared_ptr<VirtualCameraSession> createSession(
+ const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
+ mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
+ kCameraId, VirtualCameraConfiguration{
+ .supportedStreamConfigs = supportedInputConfigs,
+ .virtualCameraCallback = mMockVirtualCameraClientCallback,
+ .sensorOrientation = SensorOrientation::ORIENTATION_0,
+ .lensFacing = LensFacing::FRONT});
+ return ndk::SharedRefBase::make<VirtualCameraSession>(
+ mVirtualCameraDevice, mMockCameraDeviceCallback,
+ mMockVirtualCameraClientCallback);
+ }
+
+ protected:
+ std::shared_ptr<VirtualCameraDevice> mVirtualCameraDevice;
+};
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+ configureChoosesCorrectInputStreamForDownsampledOutput) {
+ // Create camera configured to support SVGA YUV input and RGB QVGA input.
+ auto virtualCameraSession = createSession(
+ {SupportedStreamConfiguration{.width = kSvgaWidth,
+ .height = kSvgaHeight,
+ .pixelFormat = Format::YUV_420_888,
+ .maxFps = kMaxFps},
+ SupportedStreamConfiguration{.width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat = Format::RGBA_8888,
+ .maxFps = kMaxFps}});
+
+ // Configure VGA stream. Expect SVGA input to be chosen to downscale from.
+ StreamConfiguration streamConfiguration;
+ streamConfiguration.streams = {createStream(
+ kStreamId, kVgaWidth, kVgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+ std::vector<HalStream> halStreams;
+
+  // Expect the SVGA input to be chosen and the stream configuration to succeed.
+ EXPECT_CALL(*mMockVirtualCameraClientCallback,
+ onStreamConfigured(kStreamId, _, kSvgaWidth, kSvgaHeight,
+ Format::YUV_420_888));
+ EXPECT_TRUE(
+ virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+ .isOk());
+}
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+ configureChoosesCorrectInputStreamForMatchingResolution) {
+ // Create camera configured to support SVGA YUV input and RGB QVGA input.
+ auto virtualCameraSession = createSession(
+ {SupportedStreamConfiguration{.width = kSvgaWidth,
+ .height = kSvgaHeight,
+ .pixelFormat = Format::YUV_420_888,
+ .maxFps = kMaxFps},
+ SupportedStreamConfiguration{.width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat = Format::RGBA_8888,
+ .maxFps = kMaxFps}});
+
+  // Configure QVGA stream. Expect the exactly-matching QVGA input to be chosen.
+ StreamConfiguration streamConfiguration;
+ streamConfiguration.streams = {createStream(
+ kStreamId, kQvgaWidth, kQvgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+ std::vector<HalStream> halStreams;
+
+  // Expect the QVGA input to be chosen and the stream configuration to succeed.
+ EXPECT_CALL(*mMockVirtualCameraClientCallback,
+ onStreamConfigured(kStreamId, _, kQvgaWidth, kQvgaHeight,
+ Format::RGBA_8888));
+ EXPECT_TRUE(
+ virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+ .isOk());
+}
+
} // namespace
} // namespace virtualcamera
} // namespace companion
diff --git a/services/camera/virtualcamera/util/JpegUtil.cc b/services/camera/virtualcamera/util/JpegUtil.cc
index 98f2448..8569eff 100644
--- a/services/camera/virtualcamera/util/JpegUtil.cc
+++ b/services/camera/virtualcamera/util/JpegUtil.cc
@@ -153,18 +153,6 @@
return compress(yLines, cbLines, crLines);
}
- std::optional<size_t> compressBlackImage() {
- // We only really need to prepare one scanline for Y and one shared scanline
- // for Cb & Cr.
- std::vector<uint8_t> yLine(mWidth, 0);
- std::vector<uint8_t> chromaLine(mWidth / 2, 0xff / 2);
-
- std::vector<JSAMPROW> yLines(mHeight, yLine.data());
- std::vector<JSAMPROW> cLines(mHeight / 2, chromaLine.data());
-
- return compress(yLines, cLines, cLines);
- }
-
private:
void setSuccess(const boolean success) {
mSuccess = success;
@@ -279,17 +267,6 @@
return context.compress(ycbcr);
}
-std::optional<size_t> compressBlackJpeg(const int width, const int height,
- const int quality,
- const std::vector<uint8_t>& app1ExifData,
- size_t outBufferSize, void* outBuffer) {
- LibJpegContext context(width, height, quality, outBufferSize, outBuffer);
- if (!app1ExifData.empty()) {
- context.setApp1Data(app1ExifData.data(), app1ExifData.size());
- }
- return context.compressBlackImage();
-}
-
} // namespace virtualcamera
} // namespace companion
} // namespace android
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index e64fb4f..83ed74b 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -17,10 +17,8 @@
#ifndef ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
#define ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
-#include <memory>
#include <optional>
-#include "android/hardware_buffer.h"
#include "system/graphics.h"
namespace android {
@@ -43,20 +41,6 @@
const std::vector<uint8_t>& app1ExifData,
size_t outBufferSize, void* outBuffer);
-// Jpeg-compress all-black image into the output buffer.
-// * width - width of the image
-// * heigh - height of the image
-// * quality - 0-100, higher number corresponds to higher quality.
-// * app1ExifData - vector containing data to be included in APP1
-// segment. Can be empty.
-// * outBufferSize - capacity of the output buffer.
-// * outBuffer - output buffer to write compressed data into.
-// Returns size of compressed data if the compression was successful,
-// empty optional otherwise.
-std::optional<size_t> compressBlackJpeg(int width, int height, int quality,
- const std::vector<uint8_t>& app1ExifData,
- size_t outBufferSize, void* outBuffer);
-
} // namespace virtualcamera
} // namespace companion
} // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataUtil.cc b/services/camera/virtualcamera/util/MetadataUtil.cc
index 8996ac7..e3d9e28 100644
--- a/services/camera/virtualcamera/util/MetadataUtil.cc
+++ b/services/camera/virtualcamera/util/MetadataUtil.cc
@@ -489,6 +489,14 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setAberrationCorrectionMode(
+ const camera_metadata_enum_android_color_correction_aberration_mode
+ aberrationCorrectionMode) {
+ mEntryMap[ANDROID_COLOR_CORRECTION_ABERRATION_MODE] =
+ asVectorOf<uint8_t>(aberrationCorrectionMode);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setAvailableNoiseReductionModes(
const std::vector<camera_metadata_enum_android_noise_reduction_mode>&
noiseReductionModes) {
@@ -497,6 +505,13 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setNoiseReductionMode(
+ camera_metadata_enum_android_noise_reduction_mode noiseReductionMode) {
+ mEntryMap[ANDROID_NOISE_REDUCTION_MODE] =
+ asVectorOf<uint8_t>(noiseReductionMode);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setRequestPartialResultCount(
const int partialResultCount) {
mEntryMap[ANDROID_REQUEST_PARTIAL_RESULT_COUNT] =
diff --git a/services/camera/virtualcamera/util/MetadataUtil.h b/services/camera/virtualcamera/util/MetadataUtil.h
index 13c4797..b4d60cb 100644
--- a/services/camera/virtualcamera/util/MetadataUtil.h
+++ b/services/camera/virtualcamera/util/MetadataUtil.h
@@ -155,11 +155,20 @@
camera_metadata_enum_android_color_correction_aberration_mode>&
aberrationCorectionModes);
+ // See COLOR_CORRECTION_ABERRATION_MODE in CaptureRequest.java.
+ MetadataBuilder& setAberrationCorrectionMode(
+ camera_metadata_enum_android_color_correction_aberration_mode
+ aberrationCorrectionMode);
+
// See NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES in CameraCharacteristics.java.
MetadataBuilder& setAvailableNoiseReductionModes(
const std::vector<camera_metadata_enum_android_noise_reduction_mode>&
noiseReductionModes);
+ // See NOISE_REDUCTION_MODE in CaptureRequest.java.
+ MetadataBuilder& setNoiseReductionMode(
+ camera_metadata_enum_android_noise_reduction_mode noiseReductionMode);
+
// See REQUEST_PARTIAL_RESULT_COUNT in CameraCharacteristics.java.
MetadataBuilder& setRequestPartialResultCount(int partialResultCount);
diff --git a/services/camera/virtualcamera/util/Util.h b/services/camera/virtualcamera/util/Util.h
index d5b0b1f..faae010 100644
--- a/services/camera/virtualcamera/util/Util.h
+++ b/services/camera/virtualcamera/util/Util.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_COMPANION_VIRTUALCAMERA_UTIL_H
#define ANDROID_COMPANION_VIRTUALCAMERA_UTIL_H
+#include <cmath>
#include <cstdint>
#include <memory>
@@ -129,6 +130,10 @@
: pixCount < otherPixCount;
}
+ bool operator<=(const Resolution& other) const {
+ return *this == other || *this < other;
+ }
+
bool operator==(const Resolution& other) const {
return width == other.width && height == other.height;
}
@@ -137,6 +142,17 @@
int height = 0;
};
+inline bool isApproximatellySameAspectRatio(const Resolution r1,
+ const Resolution r2) {
+ static constexpr float kAspectRatioEpsilon = 0.05;
+ float aspectRatio1 =
+ static_cast<float>(r1.width) / static_cast<float>(r1.height);
+ float aspectRatio2 =
+ static_cast<float>(r2.width) / static_cast<float>(r2.height);
+
+ return std::abs(aspectRatio1 - aspectRatio2) < kAspectRatioEpsilon;
+}
+
std::ostream& operator<<(std::ostream& os, const Resolution& resolution);
} // namespace virtualcamera
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index d24a3c9..cd00937 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -447,6 +447,42 @@
#endif
}
+inline void pushReclaimStats(int32_t callingPid,
+ int32_t requesterUid,
+ int requesterPriority,
+ const std::string& clientName,
+ int32_t noOfConcurrentCodecs,
+ int32_t reclaimStatus,
+ int32_t noOfCodecsReclaimed = 0,
+ int32_t targetIndex = -1,
+ int32_t targetClientPid = -1,
+ int32_t targetClientUid = -1,
+ int32_t targetPriority = -1) {
+ // Post the pushed atom
+ int result = stats_write(
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
+ requesterUid,
+ requesterPriority,
+ clientName.c_str(),
+ noOfConcurrentCodecs,
+ reclaimStatus,
+ noOfCodecsReclaimed,
+ targetIndex,
+ targetClientUid,
+ targetPriority);
+ ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
+ "Requester[pid(%d): uid(%d): priority(%d)] "
+ "Codec: [%s] "
+ "No of concurrent codecs: %d "
+ "Reclaim Status: %d "
+ "No of codecs reclaimed: %d "
+ "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
+ __func__, callingPid, requesterUid, requesterPriority,
+ clientName.c_str(), noOfConcurrentCodecs,
+ reclaimStatus, noOfCodecsReclaimed,
+ targetIndex, targetClientPid, targetClientUid, targetPriority, result);
+}
+
void ResourceManagerMetrics::pushReclaimAtom(const ClientInfoParcel& clientInfo,
const std::vector<int>& priorities,
const std::vector<ClientInfo>& targetClients,
@@ -485,33 +521,34 @@
MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
}
}
+
+ if (targetClients.empty()) {
+ // Push the reclaim atom to stats.
+ pushReclaimStats(callingPid,
+ requesterUid,
+ requesterPriority,
+ clientName,
+ noOfConcurrentCodecs,
+ reclaimStatus);
+ return;
+ }
+
int32_t noOfCodecsReclaimed = targetClients.size();
int32_t targetIndex = 1;
for (const ClientInfo& targetClient : targetClients) {
int targetPriority = priorities[targetIndex];
- // Post the pushed atom
- int result = stats_write(
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
- requesterUid,
- requesterPriority,
- clientName.c_str(),
- noOfConcurrentCodecs,
- reclaimStatus,
- noOfCodecsReclaimed,
- targetIndex,
- targetClient.mUid,
- targetPriority);
- ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
- "Requester[pid(%d): uid(%d): priority(%d)] "
- "Codec: [%s] "
- "No of concurrent codecs: %d "
- "Reclaim Status: %d "
- "No of codecs reclaimed: %d "
- "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
- __func__, callingPid, requesterUid, requesterPriority,
- clientName.c_str(), noOfConcurrentCodecs,
- reclaimStatus, noOfCodecsReclaimed,
- targetIndex, targetClient.mPid, targetClient.mUid, targetPriority, result);
+ // Push the reclaim atom to stats.
+ pushReclaimStats(callingPid,
+ requesterUid,
+ requesterPriority,
+ clientName,
+ noOfConcurrentCodecs,
+ reclaimStatus,
+ noOfCodecsReclaimed,
+ targetIndex,
+ targetClient.mPid,
+ targetClient.mUid,
+ targetPriority);
targetIndex++;
}
}
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index a9bc34b..7a5a89f 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -96,7 +96,32 @@
};
//
-// ResourceManagerMetrics class that maintaines concurrent codec count based:
+// Resource Manager Metrics is designed to answer some of the questions like:
+// - What apps are causing reclaim and what apps are targeted (reclaimed from) in the process?
+// - which apps use the most codecs and the most codec memory?
+// - What is the % of total successful reclaims?
+//
+// Though, it's not in the context of this class, metrics should also answer:
+// - what % of codec errors are due to codec being reclaimed?
+// - What % of successful codec creation(start) requires codec reclaims?
+// - How often codec start fails even after successful reclaim?
+//
+// The metrics are collected to analyze and understand the codec resource usage
+// and use that information to help with:
+// - minimize the no of reclaims
+// - reduce the codec start delays by minimizing no of times we try to reclaim
+// - minimize the reclaim errors in codec records
+//
+// Success metrics for Resource Manager Service could be defined as:
+// - increase in successful codec creation for the foreground apps
+// - reduce the number of reclaims for codecs
+// - reduce the time to create codec
+//
+// We would like to use this data to come up with a better resource management that would:
+// - increase the successful codec creation (for all kind of apps)
+// - decrease the codec errors due to resources
+//
+// This class that maintains concurrent codec counts based on:
//
// 1. # of concurrent active codecs (initialized, but aren't released yet) of given
// implementation (by codec name) across the system.
@@ -111,7 +136,7 @@
// This should help with understanding the (video) memory usage per
// application.
//
-//
+
class ResourceManagerMetrics {
public:
ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo);
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 305f6fe..d37d893 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -586,18 +586,21 @@
// Check if there are any resources to be reclaimed before processing.
if (resources.empty()) {
+ // Invalid reclaim request. So no need to log.
return Status::ok();
}
std::vector<ClientInfo> targetClients;
- if (!getTargetClients(clientInfo, resources, targetClients)) {
- // Nothing to reclaim from.
+ if (getTargetClients(clientInfo, resources, targetClients)) {
+ // Reclaim all the target clients.
+ *_aidl_return = reclaimUnconditionallyFrom(targetClients);
+ } else {
+ // No clients to reclaim from.
ALOGI("%s: There aren't any clients to reclaim from", __func__);
- return Status::ok();
+ // We need to log this failed reclaim as "no clients to reclaim from".
+ targetClients.clear();
}
- *_aidl_return = reclaimUnconditionallyFrom(targetClients);
-
// Log Reclaim Pushed Atom to statsd
pushReclaimAtom(clientInfo, targetClients, *_aidl_return);