Merge Android U (ab/10368041)
Bug: 291102124
Merged-In: Ied8e295ae059db07463ba06d3e6d747659b2757f
Change-Id: Ib79234b765308e957b682871b2178b66769f5660
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 85746f3..17e6d15 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -143,6 +143,7 @@
defaults: [
"latest_android_media_audio_common_types_cpp_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
"audioflinger_flags_defaults",
],
@@ -150,6 +151,8 @@
"AudioFlinger.cpp",
"DeviceEffectManager.cpp",
"Effects.cpp",
+ "MelReporter.cpp",
+ "PatchCommandThread.cpp",
"PatchPanel.cpp",
"Threads.cpp",
"Tracks.cpp",
@@ -166,6 +169,7 @@
"av-types-aidl-cpp",
"effect-aidl-cpp",
"libaudioclient_aidl_conversion",
+ "libactivitymanager_aidl",
"libaudioflinger_datapath",
"libaudioflinger_fastpath",
"libaudioflinger_timing",
@@ -179,7 +183,9 @@
"libutils",
"liblog",
"libbinder",
+ "libbinder_ndk",
"libaudioclient",
+ "libaudiomanager",
"libmedialogservice",
"libmediametrics",
"libmediautils",
@@ -187,10 +193,10 @@
"libnblog",
"libpermission",
"libpowermanager",
- "libmediautils",
"libmemunreachable",
"libmedia_helper",
"libshmemcompat",
+ "libsounddose",
"libvibrator",
"packagemanager_aidl-cpp",
],
@@ -204,11 +210,13 @@
"libaaudio_headers",
"libaudioclient_headers",
"libaudiohal_headers",
+ "libaudioutils_headers",
"libmedia_headers",
],
export_shared_lib_headers: [
"libpermission",
+ "android.hardware.audio.core.sounddose-V1-ndk",
],
cflags: [
@@ -221,3 +229,8 @@
},
}
+
+cc_library_headers {
+ name: "libaudioflinger_headers",
+ export_include_dirs: ["."],
+}
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 18627bd..18ab3a7 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -55,9 +55,10 @@
#include <cutils/properties.h>
#include <system/audio.h>
-#include <audiomanager/AudioManager.h>
+#include <audiomanager/IAudioManager.h>
#include "AudioFlinger.h"
+#include "EffectConfiguration.h"
#include <afutils/PropertyUtils.h>
#include <media/AudioResamplerPublic.h>
@@ -116,11 +117,12 @@
static const AudioHalVersionInfo kMaxAAudioPropertyDeviceHalVersion =
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1);
-static const char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
-static const char kHardwareLockedString[] = "Hardware lock is taken\n";
-static const char kClientLockedString[] = "Client lock is taken\n";
-static const char kNoEffectsFactory[] = "Effects Factory is absent\n";
+static constexpr char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
+static constexpr char kHardwareLockedString[] = "Hardware lock is taken\n";
+static constexpr char kClientLockedString[] = "Client lock is taken\n";
+static constexpr char kNoEffectsFactory[] = "Effects Factory is absent\n";
+static constexpr char kAudioServiceName[] = "audio";
nsecs_t AudioFlinger::mStandbyTimeInNsecs = kDefaultStandbyTimeInNsecs;
@@ -192,7 +194,6 @@
BINDER_METHOD_ENTRY(restoreOutput) \
BINDER_METHOD_ENTRY(openInput) \
BINDER_METHOD_ENTRY(closeInput) \
-BINDER_METHOD_ENTRY(invalidateStream) \
BINDER_METHOD_ENTRY(setVoiceVolume) \
BINDER_METHOD_ENTRY(getRenderPosition) \
BINDER_METHOD_ENTRY(getInputFramesLost) \
@@ -234,6 +235,7 @@
BINDER_METHOD_ENTRY(setBluetoothVariableLatencyEnabled) \
BINDER_METHOD_ENTRY(isBluetoothVariableLatencyEnabled) \
BINDER_METHOD_ENTRY(supportsBluetoothVariableLatency) \
+BINDER_METHOD_ENTRY(getSoundDoseInterface) \
BINDER_METHOD_ENTRY(getAudioPolicyConfig) \
// singleton for Binder Method Statistics for IAudioFlinger
@@ -327,7 +329,9 @@
mClientSharedHeapSize(kMinimumClientSharedHeapSizeBytes),
mGlobalEffectEnableTime(0),
mPatchPanel(this),
- mDeviceEffectManager(this),
+ mPatchCommandThread(sp<PatchCommandThread>::make()),
+ mDeviceEffectManager(sp<DeviceEffectManager>::make(*this)),
+ mMelReporter(sp<MelReporter>::make(*this)),
mSystemReady(false),
mBluetoothLatencyModesEnabled(true)
{
@@ -367,7 +371,7 @@
BatteryNotifier::getInstance().noteResetAudio();
mDevicesFactoryHal = DevicesFactoryHalInterface::create();
- mEffectsFactoryHal = EffectsFactoryHalInterface::create();
+ mEffectsFactoryHal = audioflinger::EffectConfiguration::getEffectsFactoryHal();
mMediaLogNotifier->run("MediaLogNotifier");
std::vector<pid_t> halPids;
@@ -644,13 +648,20 @@
fullConfig.format = config->format;
std::vector<audio_io_handle_t> secondaryOutputs;
bool isSpatialized;
+ bool isBitPerfect;
ret = AudioSystem::getOutputForAttr(&localAttr, &io,
actualSessionId,
&streamType, adjAttributionSource,
&fullConfig,
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
AUDIO_OUTPUT_FLAG_DIRECT),
- deviceId, &portId, &secondaryOutputs, &isSpatialized);
+ deviceId, &portId, &secondaryOutputs, &isSpatialized,
+ &isBitPerfect);
+ if (ret != NO_ERROR) {
+ config->sample_rate = fullConfig.sample_rate;
+ config->channel_mask = fullConfig.channel_mask;
+ config->format = fullConfig.format;
+ }
ALOGW_IF(!secondaryOutputs.empty(),
"%s does not support secondary outputs, ignoring them", __func__);
} else {
@@ -691,27 +702,29 @@
}
/* static */
-int AudioFlinger::onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration) {
+os::HapticScale AudioFlinger::onExternalVibrationStart(
+ const sp<os::ExternalVibration>& externalVibration) {
sp<os::IExternalVibratorService> evs = getExternalVibratorService();
if (evs != nullptr) {
int32_t ret;
binder::Status status = evs->onExternalVibrationStart(*externalVibration, &ret);
if (status.isOk()) {
ALOGD("%s, start external vibration with intensity as %d", __func__, ret);
- return ret;
+ return os::ExternalVibration::externalVibrationScaleToHapticScale(ret);
}
}
ALOGD("%s, start external vibration with intensity as MUTE due to %s",
__func__,
evs == nullptr ? "external vibration service not found"
: "error when querying intensity");
- return static_cast<int>(os::HapticScale::MUTE);
+ return os::HapticScale::MUTE;
}
/* static */
void AudioFlinger::onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration) {
sp<os::IExternalVibratorService> evs = getExternalVibratorService();
if (evs != 0) {
+ ALOGD("%s, stopping external vibration", __func__);
evs->onExternalVibrationStop(*externalVibration);
}
}
@@ -779,15 +792,14 @@
{
String8 result;
- result.append("Clients:\n");
- result.append(" pid heap_size\n");
+ result.append("Client Allocators:\n");
for (size_t i = 0; i < mClients.size(); ++i) {
sp<Client> client = mClients.valueAt(i).promote();
if (client != 0) {
- result.appendFormat("%6d %12zu\n", client->pid(),
- client->heap()->getMemoryHeap()->getSize());
+ result.appendFormat("Client: %d\n", client->pid());
+ result.append(client->allocator().dump().c_str());
}
- }
+ }
result.append("Notification Clients:\n");
result.append(" pid uid name\n");
@@ -823,6 +835,13 @@
(uint32_t)(mStandbyTimeInNsecs / 1000000));
result.append(buffer);
write(fd, result.c_str(), result.size());
+
+ dprintf(fd, "Vibrator infos(size=%zu):\n", mAudioVibratorInfos.size());
+ for (const auto& vibratorInfo : mAudioVibratorInfos) {
+ dprintf(fd, " - %s\n", vibratorInfo.toString().c_str());
+ }
+ dprintf(fd, "Bluetooth latency modes are %senabled\n",
+ mBluetoothLatencyModesEnabled ? "" : "not ");
}
void AudioFlinger::dumpPermissionDenial(int fd, const Vector<String16>& args __unused)
@@ -917,7 +936,10 @@
mPatchPanel.dump(fd);
- mDeviceEffectManager.dump(fd);
+ mDeviceEffectManager->dump(fd);
+
+ std::string melOutput = mMelReporter->dump();
+ write(fd, melOutput.c_str(), melOutput.size());
// dump external setParameters
auto dumpLogger = [fd](SimpleLog& logger, const char* name) {
@@ -1092,7 +1114,8 @@
audio_stream_type_t streamType;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
std::vector<audio_io_handle_t> secondaryOutputs;
- bool isSpatialized = false;;
+ bool isSpatialized = false;
+ bool isBitPerfect = false;
// TODO b/182392553: refactor or make clearer
pid_t clientPid =
@@ -1139,7 +1162,7 @@
lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
adjAttributionSource, &input.config, input.flags,
&output.selectedDeviceId, &portId, &secondaryOutputs,
- &isSpatialized);
+ &isSpatialized, &isBitPerfect);
if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
@@ -1205,31 +1228,34 @@
input.notificationsPerBuffer, input.speed,
input.sharedBuffer, sessionId, &output.flags,
callingPid, adjAttributionSource, input.clientInfo.clientTid,
- &lStatus, portId, input.audioTrackCallback, isSpatialized);
+ &lStatus, portId, input.audioTrackCallback, isSpatialized,
+ isBitPerfect);
LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
// we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
output.afFrameCount = thread->frameCount();
output.afSampleRate = thread->sampleRate();
+ output.afChannelMask = static_cast<audio_channel_mask_t>(thread->channelMask() |
+ thread->hapticChannelMask());
+ output.afFormat = thread->format();
output.afLatencyMs = thread->latency();
output.portId = portId;
if (lStatus == NO_ERROR) {
+ // no risk of deadlock because AudioFlinger::mLock is held
+ Mutex::Autolock _dl(thread->mLock);
// Connect secondary outputs. Failure on a secondary output must not imped the primary
// Any secondary output setup failure will lead to a desync between the AP and AF until
// the track is destroyed.
updateSecondaryOutputsForTrack_l(track.get(), thread, secondaryOutputs);
- }
-
- // move effect chain to this output thread if an effect on same session was waiting
- // for a track to be created
- if (lStatus == NO_ERROR && effectThread != NULL) {
- // no risk of deadlock because AudioFlinger::mLock is held
- Mutex::Autolock _dl(thread->mLock);
- Mutex::Autolock _sl(effectThread->mLock);
- if (moveEffectChain_l(sessionId, effectThread, thread) == NO_ERROR) {
- effectThreadId = thread->id();
- effectIds = thread->getEffectIds_l(sessionId);
+ // move effect chain to this output thread if an effect on same session was waiting
+ // for a track to be created
+ if (effectThread != nullptr) {
+ Mutex::Autolock _sl(effectThread->mLock);
+ if (moveEffectChain_l(sessionId, effectThread, thread) == NO_ERROR) {
+ effectThreadId = thread->id();
+ effectIds = thread->getEffectIds_l(sessionId);
+ }
}
}
@@ -1709,6 +1735,16 @@
return NO_ERROR;
}
+status_t AudioFlinger::getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+ sp<media::ISoundDose>* soundDose) {
+ if (soundDose == nullptr) {
+ return BAD_VALUE;
+ }
+
+ *soundDose = mMelReporter->getSoundDoseInterface(callback);
+ return NO_ERROR;
+}
+
status_t AudioFlinger::setStreamMute(audio_stream_type_t stream, bool muted)
{
// check calling permissions
@@ -2278,12 +2314,8 @@
AudioFlinger::Client::Client(const sp<AudioFlinger>& audioFlinger, pid_t pid)
: RefBase(),
mAudioFlinger(audioFlinger),
- mPid(pid)
-{
- mMemoryDealer = new MemoryDealer(
- audioFlinger->getClientSharedHeapSize(),
- (std::string("AudioFlinger::Client(") + std::to_string(pid) + ")").c_str());
-}
+ mPid(pid),
+ mClientAllocator(AllocatorFactory::getClientAllocator()) {}
// Client destructor must be called with AudioFlinger::mClientLock held
AudioFlinger::Client::~Client()
@@ -2291,9 +2323,9 @@
mAudioFlinger->removeClient_l(mPid);
}
-sp<MemoryDealer> AudioFlinger::Client::heap() const
+AllocatorFactory::ClientAllocator& AudioFlinger::Client::allocator()
{
- return mMemoryDealer;
+ return mClientAllocator;
}
// ----------------------------------------------------------------------------
@@ -2496,6 +2528,12 @@
};
}
+ output.halConfig = {
+ thread->sampleRate(),
+ thread->channelMask(),
+ thread->format()
+ };
+
// Check if one effect chain was awaiting for an AudioRecord to be created on this
// session and move it to this thread.
sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
@@ -2611,6 +2649,9 @@
ALOGE("loadHwModule() error %d loading module %s", rc, name);
return nullptr;
}
+ if (!mMelReporter->activateHalSoundDoseComputation(name, dev)) {
+ ALOGW("loadHwModule() sound dose reporting is not available");
+ }
mHardwareStatus = AUDIO_HW_INIT;
rc = dev->initCheck();
@@ -2869,9 +2910,28 @@
ThreadBase *thread = (ThreadBase *)mMmapThreads.valueAt(i).get();
thread->systemReady();
}
+
+ // Java services are ready, so we can create a reference to AudioService
+ getOrCreateAudioManager();
+
return NO_ERROR;
}
+sp<IAudioManager> AudioFlinger::getOrCreateAudioManager()
+{
+ if (mAudioManager.load() == nullptr) {
+ // use checkService() to avoid blocking
+ sp<IBinder> binder =
+ defaultServiceManager()->checkService(String16(kAudioServiceName));
+ if (binder != nullptr) {
+ mAudioManager = interface_cast<IAudioManager>(binder);
+ } else {
+ ALOGE("%s(): binding to audio service failed.", __func__);
+ }
+ }
+ return mAudioManager.load();
+}
+
status_t AudioFlinger::getMicrophones(std::vector<media::MicrophoneInfoFw> *microphones)
{
AutoMutex lock(mHardwareLock);
@@ -2984,7 +3044,11 @@
return thread;
} else {
sp<PlaybackThread> thread;
- if (flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
+ if (flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+ thread = sp<BitPerfectThread>::make(this, outputStream, *output, mSystemReady);
+ ALOGV("%s() created bit-perfect output: ID %d thread %p",
+ __func__, *output, thread.get());
+ } else if (flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
thread = new SpatializerThread(this, outputStream, *output,
mSystemReady, mixerConfig);
ALOGV("openOutput_l() created spatializer output: ID %d thread %p",
@@ -3458,17 +3522,23 @@
closeInputFinish(thread);
}
-status_t AudioFlinger::invalidateStream(audio_stream_type_t stream)
-{
+status_t AudioFlinger::invalidateTracks(const std::vector<audio_port_handle_t> &portIds) {
Mutex::Autolock _l(mLock);
- ALOGV("invalidateStream() stream %d", stream);
+ ALOGV("%s", __func__);
+ std::set<audio_port_handle_t> portIdSet(portIds.begin(), portIds.end());
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
PlaybackThread *thread = mPlaybackThreads.valueAt(i).get();
- thread->invalidateTracks(stream);
+ thread->invalidateTracks(portIdSet);
+ if (portIdSet.empty()) {
+ return NO_ERROR;
+ }
}
for (size_t i = 0; i < mMmapThreads.size(); i++) {
- mMmapThreads[i]->invalidateTracks(stream);
+ mMmapThreads[i]->invalidateTracks(portIdSet);
+ if (portIdSet.empty()) {
+ return NO_ERROR;
+ }
}
return NO_ERROR;
}
@@ -3677,6 +3747,20 @@
return thread;
}
+// checkOutputThread_l() must be called with AudioFlinger::mLock held
+sp<AudioFlinger::ThreadBase> AudioFlinger::checkOutputThread_l(audio_io_handle_t ioHandle) const
+{
+ if (audio_unique_id_get_use(ioHandle) != AUDIO_UNIQUE_ID_USE_OUTPUT) {
+ return nullptr;
+ }
+
+ sp<AudioFlinger::ThreadBase> thread = mPlaybackThreads.valueFor(ioHandle);
+ if (thread == nullptr) {
+ thread = mMmapThreads.valueFor(ioHandle);
+ }
+ return thread;
+}
+
// checkPlaybackThread_l() must be called with AudioFlinger::mLock held
AudioFlinger::PlaybackThread *AudioFlinger::checkPlaybackThread_l(audio_io_handle_t output) const
{
@@ -3907,7 +3991,7 @@
patchTrack->setPeerProxy(patchRecord, true /* holdReference */);
patchRecord->setPeerProxy(patchTrack, false /* holdReference */);
}
- track->setTeePatches(std::move(teePatches));
+ track->setTeePatchesToUpdate(std::move(teePatches));
}
sp<audioflinger::SyncEvent> AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type,
@@ -4196,7 +4280,7 @@
if (sessionId == AUDIO_SESSION_DEVICE) {
sp<Client> client = registerPid(currentPid);
ALOGV("%s device type %#x address %s", __func__, device.mType, device.getAddress());
- handle = mDeviceEffectManager.createEffect_l(
+ handle = mDeviceEffectManager->createEffect_l(
&descOut, device, client, effectClient, mPatchPanel.patches_l(),
&enabledOut, &lStatus, probe, request.notifyFramesProcessed);
if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
@@ -4668,7 +4752,6 @@
case TransactionCode::RESTORE_OUTPUT:
case TransactionCode::OPEN_INPUT:
case TransactionCode::CLOSE_INPUT:
- case TransactionCode::INVALIDATE_STREAM:
case TransactionCode::SET_VOICE_VOLUME:
case TransactionCode::MOVE_EFFECTS:
case TransactionCode::SET_EFFECT_SUSPENDED:
@@ -4683,6 +4766,7 @@
case TransactionCode::SET_DEVICE_CONNECTED_STATE:
case TransactionCode::SET_REQUESTED_LATENCY_MODE:
case TransactionCode::GET_SUPPORTED_LATENCY_MODES:
+ case TransactionCode::INVALIDATE_TRACKS:
case TransactionCode::GET_AUDIO_POLICY_CONFIG:
ALOGW("%s: transaction %d received from PID %d",
__func__, code, IPCThreadState::self()->getCallingPid());
@@ -4705,6 +4789,7 @@
case TransactionCode::SET_MASTER_VOLUME:
case TransactionCode::SET_MASTER_MUTE:
case TransactionCode::MASTER_MUTE:
+ case TransactionCode::GET_SOUND_DOSE_INTERFACE:
case TransactionCode::SET_MODE:
case TransactionCode::SET_MIC_MUTE:
case TransactionCode::SET_LOW_RAM_DEVICE:
@@ -4719,7 +4804,7 @@
ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
__func__, code, IPCThreadState::self()->getCallingPid(),
IPCThreadState::self()->getCallingUid());
- // return status only for non void methods
+ // return status only for non-void methods
switch (code) {
case TransactionCode::SYSTEM_READY:
break;
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 8fb8ea8..8b1d70b 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -75,18 +75,23 @@
#include <media/ExtendedAudioBufferProvider.h>
#include <media/VolumeShaper.h>
#include <mediautils/ServiceUtilities.h>
+#include <mediautils/SharedMemoryAllocator.h>
#include <mediautils/Synchronization.h>
#include <mediautils/ThreadSnapshot.h>
+#include <afutils/AllocatorFactory.h>
#include <afutils/AudioWatchdog.h>
#include <afutils/NBAIO_Tee.h>
#include <audio_utils/clock.h>
#include <audio_utils/FdToString.h>
#include <audio_utils/LinearMap.h>
+#include <audio_utils/MelAggregator.h>
+#include <audio_utils/MelProcessor.h>
#include <audio_utils/SimpleLog.h>
#include <audio_utils/TimestampVerifier.h>
+#include <sounddose/SoundDoseManager.h>
#include <timing/MonotonicFrameCounter.h>
#include <timing/SyncEvent.h>
#include <timing/SynchronizedRecordState.h>
@@ -100,7 +105,6 @@
#include <fastpath/FastMixer.h>
#include <media/nbaio/NBAIO.h>
-
#include <android/os/IPowerManager.h>
#include <media/nblog/NBLog.h>
@@ -123,6 +127,7 @@
class DevicesFactoryHalInterface;
class EffectsFactoryHalInterface;
class FastMixer;
+class IAudioManager;
class PassthruBufferProvider;
class RecordBufferConverter;
class ServerProxy;
@@ -209,8 +214,6 @@
virtual status_t closeInput(audio_io_handle_t input);
- virtual status_t invalidateStream(audio_stream_type_t stream);
-
virtual status_t setVoiceVolume(float volume);
virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames,
@@ -315,6 +318,11 @@
virtual status_t supportsBluetoothVariableLatency(bool* support);
+ virtual status_t getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
+ sp<media::ISoundDose>* soundDose);
+
+ status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
+
virtual status_t getAudioPolicyConfig(media::AudioPolicyConfig* config);
status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
@@ -336,7 +344,8 @@
sp<MmapStreamInterface>& interface,
audio_port_handle_t *handle);
- static int onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration);
+ static os::HapticScale onExternalVibrationStart(
+ const sp<os::ExternalVibration>& externalVibration);
static void onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration);
status_t addEffectToHal(
@@ -386,6 +395,8 @@
bool btNrecIsOff() const { return mBtNrecIsOff.load(); }
+ void lock() ACQUIRE(mLock) { mLock.lock(); }
+ void unlock() RELEASE(mLock) { mLock.unlock(); }
private:
@@ -479,19 +490,19 @@
// --- Client ---
class Client : public RefBase {
- public:
- Client(const sp<AudioFlinger>& audioFlinger, pid_t pid);
+ public:
+ Client(const sp<AudioFlinger>& audioFlinger, pid_t pid);
virtual ~Client();
- sp<MemoryDealer> heap() const;
+ AllocatorFactory::ClientAllocator& allocator();
pid_t pid() const { return mPid; }
sp<AudioFlinger> audioFlinger() const { return mAudioFlinger; }
private:
DISALLOW_COPY_AND_ASSIGN(Client);
- const sp<AudioFlinger> mAudioFlinger;
- sp<MemoryDealer> mMemoryDealer;
+ const sp<AudioFlinger> mAudioFlinger;
const pid_t mPid;
+ AllocatorFactory::ClientAllocator mClientAllocator;
};
// --- Notification Client ---
@@ -561,6 +572,7 @@
class OffloadThread;
class DuplicatingThread;
class AsyncCallbackThread;
+ class BitPerfectThread;
class Track;
class RecordTrack;
class EffectBase;
@@ -603,10 +615,14 @@
#include "PatchPanel.h"
+#include "PatchCommandThread.h"
+
#include "Effects.h"
#include "DeviceEffectManager.h"
+#include "MelReporter.h"
+
// Find io handle by session id.
// Preference is given to an io handle with a matching effect chain to session id.
// If none found, AUDIO_IO_HANDLE_NONE is returned.
@@ -707,12 +723,15 @@
audio_port_handle_t *handle);
virtual status_t stop(audio_port_handle_t handle);
virtual status_t standby();
+ status_t reportData(const void* buffer, size_t frameCount) override;
private:
const sp<MmapThread> mThread;
};
ThreadBase *checkThread_l(audio_io_handle_t ioHandle) const;
+ sp<AudioFlinger::ThreadBase> checkOutputThread_l(audio_io_handle_t ioHandle) const
+ REQUIRES(mLock);
PlaybackThread *checkPlaybackThread_l(audio_io_handle_t output) const;
MixerThread *checkMixerThread_l(audio_io_handle_t output) const;
RecordThread *checkRecordThread_l(audio_io_handle_t input) const;
@@ -966,6 +985,8 @@
size_t rejectedKVPSize, const String8& rejectedKVPs,
uid_t callingUid);
+ sp<IAudioManager> getOrCreateAudioManager();
+
public:
// These methods read variables atomically without mLock,
// though the variables are updated with mLock.
@@ -985,7 +1006,9 @@
PatchPanel mPatchPanel;
sp<EffectsFactoryHalInterface> mEffectsFactoryHal;
- DeviceEffectManager mDeviceEffectManager;
+ const sp<PatchCommandThread> mPatchCommandThread;
+ sp<DeviceEffectManager> mDeviceEffectManager;
+ sp<MelReporter> mMelReporter;
bool mSystemReady;
std::atomic_bool mAudioPolicyReady{};
@@ -1008,6 +1031,9 @@
int32_t mAAudioBurstsPerBuffer = 0;
int32_t mAAudioHwBurstMinMicros = 0;
+ /** Interface for interacting with the AudioService. */
+ mediautils::atomic_sp<IAudioManager> mAudioManager;
+
// Bluetooth Variable latency control logic is enabled or disabled
std::atomic_bool mBluetoothLatencyModesEnabled;
};
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index cc9e367..f996157 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -23,6 +23,7 @@
#include <audio_utils/primitives.h>
#include "AudioFlinger.h"
+#include "EffectConfiguration.h"
#include <media/audiohal/EffectsFactoryHalInterface.h>
// ----------------------------------------------------------------------------
@@ -33,16 +34,6 @@
using detail::AudioHalVersionInfo;
using media::IEffectClient;
-void AudioFlinger::DeviceEffectManager::createAudioPatch(audio_patch_handle_t handle,
- const PatchPanel::Patch& patch) {
- ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
- __func__, handle, patch.mHalHandle,
- patch.mAudioPatch.num_sinks,
- patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
-
- mCommandThread->createAudioPatchCommand(handle, patch);
-}
-
void AudioFlinger::DeviceEffectManager::onCreateAudioPatch(audio_patch_handle_t handle,
const PatchPanel::Patch& patch) {
ALOGV("%s handle %d mHalHandle %d device sink %08x",
@@ -56,11 +47,6 @@
}
}
-void AudioFlinger::DeviceEffectManager::releaseAudioPatch(audio_patch_handle_t handle) {
- ALOGV("%s", __func__);
- mCommandThread->releaseAudioPatchCommand(handle);
-}
-
void AudioFlinger::DeviceEffectManager::onReleaseAudioPatch(audio_patch_handle_t handle) {
ALOGV("%s", __func__);
Mutex::Autolock _l(mLock);
@@ -117,7 +103,7 @@
}
}
}
- if (enabled != NULL) {
+ if (enabled != nullptr) {
*enabled = (int)effect->isEnabled();
}
*status = lStatus;
@@ -126,14 +112,16 @@
status_t AudioFlinger::DeviceEffectManager::checkEffectCompatibility(
const effect_descriptor_t *desc) {
- sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+ const sp<EffectsFactoryHalInterface> effectsFactory =
+ audioflinger::EffectConfiguration::getEffectsFactoryHal();
if (effectsFactory == nullptr) {
return BAD_VALUE;
}
- static AudioHalVersionInfo sMinDeviceEffectHalVersion =
+ static const AudioHalVersionInfo sMinDeviceEffectHalVersion =
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0);
- AudioHalVersionInfo halVersion = effectsFactory->getHalVersion();
+ static const AudioHalVersionInfo halVersion =
+ audioflinger::EffectConfiguration::getAudioHalVersionInfo();
// We can trust AIDL generated AudioHalVersionInfo comparison operator (based on std::tie) as
// long as the type, major and minor sequence doesn't change in the definition.
@@ -152,7 +140,8 @@
const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
sp<EffectHalInterface> *effect) {
status_t status = NO_INIT;
- sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+ const sp<EffectsFactoryHalInterface> effectsFactory =
+ audioflinger::EffectConfiguration::getEffectsFactoryHal();
if (effectsFactory != 0) {
status = effectsFactory->createEffect(
pEffectUuid, sessionId, AUDIO_IO_HANDLE_NONE, deviceId, effect);
@@ -214,91 +203,4 @@
return true;
}
-// ----------- DeviceEffectManager::CommandThread implementation ----------
-
-
-AudioFlinger::DeviceEffectManager::CommandThread::~CommandThread()
-{
- Mutex::Autolock _l(mLock);
- mCommands.clear();
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::onFirstRef()
-{
- run("DeviceEffectManage_CommandThread", ANDROID_PRIORITY_AUDIO);
-}
-
-bool AudioFlinger::DeviceEffectManager::CommandThread::threadLoop()
-{
- mLock.lock();
- while (!exitPending())
- {
- while (!mCommands.empty() && !exitPending()) {
- sp<Command> command = mCommands.front();
- mCommands.pop_front();
- mLock.unlock();
-
- switch (command->mCommand) {
- case CREATE_AUDIO_PATCH: {
- CreateAudioPatchData *data = (CreateAudioPatchData *)command->mData.get();
- ALOGV("CommandThread() processing create audio patch handle %d", data->mHandle);
- mManager.onCreateAudioPatch(data->mHandle, data->mPatch);
- } break;
- case RELEASE_AUDIO_PATCH: {
- ReleaseAudioPatchData *data = (ReleaseAudioPatchData *)command->mData.get();
- ALOGV("CommandThread() processing release audio patch handle %d", data->mHandle);
- mManager.onReleaseAudioPatch(data->mHandle);
- } break;
- default:
- ALOGW("CommandThread() unknown command %d", command->mCommand);
- }
- mLock.lock();
- }
-
- // At this stage we have either an empty command queue or the first command in the queue
- // has a finite delay. So unless we are exiting it is safe to wait.
- if (!exitPending()) {
- ALOGV("CommandThread() going to sleep");
- mWaitWorkCV.wait(mLock);
- }
- }
- mLock.unlock();
- return false;
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::sendCommand(const sp<Command>& command) {
- Mutex::Autolock _l(mLock);
- mCommands.push_back(command);
- mWaitWorkCV.signal();
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::createAudioPatchCommand(
- audio_patch_handle_t handle, const PatchPanel::Patch& patch)
-{
- sp<Command> command = new Command(CREATE_AUDIO_PATCH, new CreateAudioPatchData(handle, patch));
- ALOGV("CommandThread() adding create patch handle %d mHalHandle %d.", handle, patch.mHalHandle);
- sendCommand(command);
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::releaseAudioPatchCommand(
- audio_patch_handle_t handle)
-{
- sp<Command> command = new Command(RELEASE_AUDIO_PATCH, new ReleaseAudioPatchData(handle));
- ALOGV("CommandThread() adding release patch");
- sendCommand(command);
-}
-
-void AudioFlinger::DeviceEffectManager::CommandThread::exit()
-{
- ALOGV("CommandThread::exit");
- {
- AutoMutex _l(mLock);
- requestExit();
- mWaitWorkCV.signal();
- }
- // Note that we can call it from the thread loop if all other references have been released
- // but it will safely return WOULD_BLOCK in this case
- requestExitAndWait();
-}
-
} // namespace android
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index dc1e2ec..b87f830 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -20,15 +20,15 @@
#endif
// DeviceEffectManager is concealed within AudioFlinger, their lifetimes are the same.
-class DeviceEffectManager {
+class DeviceEffectManager : public PatchCommandThread::PatchCommandListener {
public:
- explicit DeviceEffectManager(AudioFlinger* audioFlinger)
- : mCommandThread(new CommandThread(*this)), mAudioFlinger(*audioFlinger),
- mMyCallback(new DeviceEffectManagerCallback(this)) {}
+ explicit DeviceEffectManager(AudioFlinger& audioFlinger)
+ : mAudioFlinger(audioFlinger),
+ mMyCallback(new DeviceEffectManagerCallback(*this)) {}
- ~DeviceEffectManager() {
- mCommandThread->exit();
- }
+ void onFirstRef() override {
+ mAudioFlinger.mPatchCommandThread->addListener(this);
+ }
sp<EffectHandle> createEffect_l(effect_descriptor_t *descriptor,
const AudioDeviceTypeAddr& device,
@@ -39,8 +39,6 @@
status_t *status,
bool probe,
bool notifyFramesProcessed);
- void createAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
- void releaseAudioPatch(audio_patch_handle_t handle);
size_t removeEffect(const sp<DeviceEffectProxy>& effect);
status_t createEffectHal(const effect_uuid_t *pEffectUuid,
@@ -59,94 +57,25 @@
void dump(int fd);
+ // PatchCommandThread::PatchCommandListener implementation
+
+ void onCreateAudioPatch(audio_patch_handle_t handle,
+ const PatchPanel::Patch& patch) override;
+ void onReleaseAudioPatch(audio_patch_handle_t handle) override;
+
private:
-
- // Thread to execute create and release patch commands asynchronously. This is needed because
- // PatchPanel::createAudioPatch and releaseAudioPatch are executed from audio policy service
- // with mutex locked and effect management requires to call back into audio policy service
- class Command;
- class CommandThread : public Thread {
- public:
-
- enum {
- CREATE_AUDIO_PATCH,
- RELEASE_AUDIO_PATCH,
- };
-
- explicit CommandThread(DeviceEffectManager& manager)
- : Thread(false), mManager(manager) {}
- ~CommandThread() override;
-
- // Thread virtuals
- void onFirstRef() override;
- bool threadLoop() override;
-
- void exit();
-
- void createAudioPatchCommand(audio_patch_handle_t handle,
- const PatchPanel::Patch& patch);
- void releaseAudioPatchCommand(audio_patch_handle_t handle);
-
- private:
- class CommandData;
-
- // descriptor for requested tone playback event
- class Command: public RefBase {
- public:
- Command() = default;
- Command(int command, const sp<CommandData>& data)
- : mCommand(command), mData(data) {}
-
- int mCommand = -1;
- sp<CommandData> mData;
- };
-
- class CommandData: public RefBase {
- public:
- virtual ~CommandData() = default;
- };
-
- class CreateAudioPatchData : public CommandData {
- public:
- CreateAudioPatchData(audio_patch_handle_t handle, const PatchPanel::Patch& patch)
- : mHandle(handle), mPatch(patch) {}
-
- audio_patch_handle_t mHandle;
- const PatchPanel::Patch mPatch;
- };
-
- class ReleaseAudioPatchData : public CommandData {
- public:
- explicit ReleaseAudioPatchData(audio_patch_handle_t handle)
- : mHandle(handle) {}
-
- audio_patch_handle_t mHandle;
- };
-
- void sendCommand(const sp<Command>& command);
-
- Mutex mLock;
- Condition mWaitWorkCV;
- std::deque <sp<Command>> mCommands; // list of pending commands
- DeviceEffectManager& mManager;
- };
-
- void onCreateAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
- void onReleaseAudioPatch(audio_patch_handle_t handle);
-
status_t checkEffectCompatibility(const effect_descriptor_t *desc);
Mutex mLock;
- sp<CommandThread> mCommandThread;
AudioFlinger &mAudioFlinger;
const sp<DeviceEffectManagerCallback> mMyCallback;
std::map<AudioDeviceTypeAddr, sp<DeviceEffectProxy>> mDeviceEffects;
};
-class DeviceEffectManagerCallback : public EffectCallbackInterface {
+class DeviceEffectManagerCallback : public EffectCallbackInterface {
public:
- explicit DeviceEffectManagerCallback(DeviceEffectManager *manager)
- : mManager(*manager) {}
+ explicit DeviceEffectManagerCallback(DeviceEffectManager& manager)
+ : mManager(manager) {}
status_t createEffectHal(const effect_uuid_t *pEffectUuid,
int32_t sessionId, int32_t deviceId,
diff --git a/services/audioflinger/EffectConfiguration.h b/services/audioflinger/EffectConfiguration.h
new file mode 100644
index 0000000..2f07fa2
--- /dev/null
+++ b/services/audioflinger/EffectConfiguration.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+
+namespace android::audioflinger {
+
+/**
+ * Effect Configuration abstraction and helper class.
+ */
+class EffectConfiguration {
+public:
+ static bool isHidl() {
+ static const bool isHidl = getAudioHalVersionInfo().isHidl();
+ return isHidl;
+ }
+
+ static const sp<EffectsFactoryHalInterface>& getEffectsFactoryHal() {
+ static const auto effectsFactoryHal = EffectsFactoryHalInterface::create();
+ return effectsFactoryHal;
+ }
+
+ static const detail::AudioHalVersionInfo& getAudioHalVersionInfo() {
+ static const auto audioHalVersionInfo = getEffectsFactoryHal() ?
+ getEffectsFactoryHal()->getHalVersion() : detail::AudioHalVersionInfo{
+ detail::AudioHalVersionInfo::Type::HIDL, 0 /* major */, 0 /* minor */ };
+ return audioHalVersionInfo;
+ }
+};
+
+} // namespace android::audioflinger
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 42c4986..d1bd276 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -44,6 +44,7 @@
#include <mediautils/TimeCheck.h>
#include "AudioFlinger.h"
+#include "EffectConfiguration.h"
// ----------------------------------------------------------------------------
@@ -65,6 +66,7 @@
namespace android {
using aidl_utils::statusTFromBinderStatus;
+using audioflinger::EffectConfiguration;
using binder::Status;
namespace {
@@ -278,8 +280,8 @@
if (!doRegister && !(registered && doEnable)) {
return NO_ERROR;
}
- mPolicyLock.lock();
}
+ mPolicyLock.lock();
ALOGV("%s name %s id %d session %d doRegister %d registered %d doEnable %d enabled %d",
__func__, mDescriptor.name, mId, mSessionId, doRegister, registered, doEnable, enabled);
if (doRegister) {
@@ -570,7 +572,6 @@
mMaxDisableWaitCnt(1), // set by configure(), should be >= 1
mDisableWaitCnt(0), // set by process() and updateState()
mOffloaded(false),
- mAddedToHal(false),
mIsOutput(false)
, mSupportsFloat(false)
{
@@ -917,6 +918,7 @@
}
if (status != NO_ERROR &&
+ EffectConfiguration::isHidl() && // only HIDL effects support channel conversion
mIsOutput &&
(mConfig.inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO
|| mConfig.outputCfg.channels != AUDIO_CHANNEL_OUT_STEREO)) {
@@ -945,7 +947,8 @@
mSupportsFloat = true;
}
- if (status != NO_ERROR) {
+ // only HIDL effects support integer conversion.
+ if (status != NO_ERROR && EffectConfiguration::isHidl()) {
ALOGV("EFFECT_CMD_SET_CONFIG failed with float format, retry with int16_t.");
mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
@@ -1035,12 +1038,12 @@
{
if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
(mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
- if (mAddedToHal) {
+ if (mCurrentHalStream == getCallback()->io()) {
return;
}
(void)getCallback()->addEffectToHal(mEffectInterface);
- mAddedToHal = true;
+ mCurrentHalStream = getCallback()->io();
}
}
@@ -1136,12 +1139,11 @@
{
if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
(mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
- if (!mAddedToHal) {
- return NO_ERROR;
+ if (mCurrentHalStream != getCallback()->io()) {
+ return (mCurrentHalStream == AUDIO_IO_HANDLE_NONE) ? NO_ERROR : INVALID_OPERATION;
}
-
getCallback()->removeEffectFromHal(mEffectInterface);
- mAddedToHal = false;
+ mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
}
return NO_ERROR;
}
@@ -1525,7 +1527,7 @@
return isHapticGenerator(&mDescriptor.type);
}
-status_t AudioFlinger::EffectModule::setHapticIntensity(int id, int intensity)
+status_t AudioFlinger::EffectModule::setHapticIntensity(int id, os::HapticScale intensity)
{
if (mStatus != NO_ERROR) {
return mStatus;
@@ -1541,7 +1543,7 @@
param->vsize = sizeof(int32_t) * 2;
*(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
*((int32_t*)param->data + 1) = id;
- *((int32_t*)param->data + 2) = intensity;
+ *((int32_t*)param->data + 2) = static_cast<int32_t>(intensity);
std::vector<uint8_t> response;
status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
if (status == NO_ERROR) {
@@ -1688,7 +1690,14 @@
return;
}
int bufOffset = ((sizeof(effect_param_cblk_t) - 1) / sizeof(int) + 1) * sizeof(int);
- mCblkMemory = client->heap()->allocate(EFFECT_PARAM_BUFFER_SIZE + bufOffset);
+ mCblkMemory = client->allocator().allocate(mediautils::NamedAllocRequest{
+ {static_cast<size_t>(EFFECT_PARAM_BUFFER_SIZE + bufOffset)},
+ std::string("Effect ID: ")
+ .append(std::to_string(effect->id()))
+ .append(" Session ID: ")
+ .append(std::to_string(static_cast<int>(effect->sessionId())))
+ .append(" \n")
+ });
if (mCblkMemory == 0 ||
(mCblk = static_cast<effect_param_cblk_t *>(mCblkMemory->unsecurePointer())) == NULL) {
ALOGE("not enough memory for Effect size=%zu", EFFECT_PARAM_BUFFER_SIZE +
@@ -2594,7 +2603,7 @@
return false;
}
-void AudioFlinger::EffectChain::setHapticIntensity_l(int id, int intensity)
+void AudioFlinger::EffectChain::setHapticIntensity_l(int id, os::HapticScale intensity)
{
Mutex::Autolock _l(mLock);
for (size_t i = 0; i < mEffects.size(); ++i) {
@@ -2879,6 +2888,9 @@
if ((*flags & AUDIO_OUTPUT_FLAG_FAST) != 0 && !isFastCompatible()) {
*flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_FAST);
}
+ if ((*flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != 0 && !isBitPerfectCompatible()) {
+ *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+ }
}
void AudioFlinger::EffectChain::checkInputFlagCompatibility(audio_input_flags_t *flags) const
@@ -2916,6 +2928,18 @@
return true;
}
+bool AudioFlinger::EffectChain::isBitPerfectCompatible() const {
+ Mutex::Autolock _l(mLock);
+ for (const auto &effect : mEffects) {
+ if (effect->isProcessImplemented()
+ && effect->isImplementationSoftware()) {
+ return false;
+ }
+ }
+ // Allow effects without processing or hw accelerated effects.
+ return true;
+}
+
// isCompatibleWithThread_l() must be called with thread->mLock held
bool AudioFlinger::EffectChain::isCompatibleWithThread_l(const sp<ThreadBase>& thread) const
{
@@ -2933,7 +2957,8 @@
const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
sp<EffectHalInterface> *effect) {
status_t status = NO_INIT;
- sp<EffectsFactoryHalInterface> effectsFactory = mAudioFlinger.getEffectsFactory();
+ const sp<EffectsFactoryHalInterface> effectsFactory =
+ EffectConfiguration::getEffectsFactoryHal();
if (effectsFactory != 0) {
status = effectsFactory->createEffect(pEffectUuid, sessionId, io(), deviceId, effect);
}
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index e1a76fc..57acc67 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -280,7 +280,7 @@
static bool isHapticGenerator(const effect_uuid_t* type);
bool isHapticGenerator() const;
- status_t setHapticIntensity(int id, int intensity);
+ status_t setHapticIntensity(int id, os::HapticScale intensity);
status_t setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo);
status_t getConfigs(audio_config_base_t* inputCfg,
@@ -319,7 +319,8 @@
// sending disable command.
uint32_t mDisableWaitCnt; // current process() calls count during disable period.
bool mOffloaded; // effect is currently offloaded to the audio DSP
- bool mAddedToHal; // effect has been added to the audio HAL
+ // effect has been added to this HAL input stream
+ audio_io_handle_t mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
bool mIsOutput; // direction of the AF thread
bool mSupportsFloat; // effect supports float processing
@@ -543,12 +544,15 @@
// Is this EffectChain compatible with the FAST audio flag.
bool isFastCompatible() const;
+ // Is this EffectChain compatible with the bit-perfect audio flag.
+ bool isBitPerfectCompatible() const;
+
// isCompatibleWithThread_l() must be called with thread->mLock held
bool isCompatibleWithThread_l(const sp<ThreadBase>& thread) const;
bool containsHapticGeneratingEffect_l();
- void setHapticIntensity_l(int id, int intensity);
+ void setHapticIntensity_l(int id, os::HapticScale intensity);
sp<EffectCallbackInterface> effectCallback() const { return mEffectCallback; }
wp<ThreadBase> thread() const { return mEffectCallback->thread(); }
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
new file mode 100644
index 0000000..39f772b
--- /dev/null
+++ b/services/audioflinger/MelReporter.cpp
@@ -0,0 +1,296 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "AudioFlinger::MelReporter"
+
+#include "AudioFlinger.h"
+
+#include <android/media/ISoundDoseCallback.h>
+#include <audio_utils/power.h>
+#include <utils/Log.h>
+
+using aidl::android::hardware::audio::core::sounddose::ISoundDose;
+
+namespace android {
+
+bool AudioFlinger::MelReporter::activateHalSoundDoseComputation(const std::string& module,
+ const sp<DeviceHalInterface>& device) {
+ if (mSoundDoseManager->forceUseFrameworkMel()) {
+ ALOGD("%s: Forcing use of internal MEL computation.", __func__);
+ activateInternalSoundDoseComputation();
+ return false;
+ }
+
+ ndk::SpAIBinder soundDoseBinder;
+ if (device->getSoundDoseInterface(module, &soundDoseBinder) != OK) {
+ ALOGW("%s: HAL cannot provide sound dose interface for module %s, use internal MEL",
+ __func__, module.c_str());
+ activateInternalSoundDoseComputation();
+ return false;
+ }
+
+ if (soundDoseBinder == nullptr) {
+ ALOGW("%s: HAL doesn't implement a sound dose interface for module %s, use internal MEL",
+ __func__, module.c_str());
+ activateInternalSoundDoseComputation();
+ return false;
+ }
+
+ std::shared_ptr<ISoundDose> soundDoseInterface = ISoundDose::fromBinder(soundDoseBinder);
+
+ if (!mSoundDoseManager->setHalSoundDoseInterface(soundDoseInterface)) {
+ ALOGW("%s: cannot activate HAL MEL reporting for module %s", __func__, module.c_str());
+ activateInternalSoundDoseComputation();
+ return false;
+ }
+
+ stopInternalMelComputation();
+ return true;
+}
+
+void AudioFlinger::MelReporter::activateInternalSoundDoseComputation() {
+ {
+ std::lock_guard _l(mLock);
+ if (!mUseHalSoundDoseInterface) {
+ // no need to start internal MEL on active patches
+ return;
+ }
+ mUseHalSoundDoseInterface = false;
+ }
+
+ mSoundDoseManager->setHalSoundDoseInterface(nullptr);
+}
+
+void AudioFlinger::MelReporter::onFirstRef() {
+ mAudioFlinger.mPatchCommandThread->addListener(this);
+}
+
+bool AudioFlinger::MelReporter::shouldComputeMelForDeviceType(audio_devices_t device) {
+ if (!mSoundDoseManager->isCsdEnabled()) {
+ ALOGV("%s csd is disabled", __func__);
+ return false;
+ }
+ if (mSoundDoseManager->forceComputeCsdOnAllDevices()) {
+ return true;
+ }
+
+ switch (device) {
+ case AUDIO_DEVICE_OUT_WIRED_HEADSET:
+ case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
+ // TODO(b/278265907): enable A2DP when we can distinguish A2DP headsets
+ // case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
+ case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
+ case AUDIO_DEVICE_OUT_USB_HEADSET:
+ case AUDIO_DEVICE_OUT_BLE_HEADSET:
+ case AUDIO_DEVICE_OUT_BLE_BROADCAST:
+ return true;
+ default:
+ return false;
+ }
+}
+
+void AudioFlinger::MelReporter::updateMetadataForCsd(audio_io_handle_t streamHandle,
+ const std::vector<playback_track_metadata_v7_t>& metadataVec) {
+ if (!mSoundDoseManager->isCsdEnabled()) {
+ ALOGV("%s csd is disabled", __func__);
+ return;
+ }
+
+ std::lock_guard _laf(mAudioFlinger.mLock);
+ std::lock_guard _l(mLock);
+ auto activeMelPatchId = activePatchStreamHandle_l(streamHandle);
+ if (!activeMelPatchId) {
+ ALOGV("%s stream handle %d does not have an active patch", __func__, streamHandle);
+ return;
+ }
+
+ bool shouldActivateCsd = false;
+ for (const auto& metadata : metadataVec) {
+ if (metadata.base.usage == AUDIO_USAGE_GAME || metadata.base.usage == AUDIO_USAGE_MEDIA) {
+ shouldActivateCsd = true;
+ }
+ }
+
+ auto activeMelPatchIt = mActiveMelPatches.find(activeMelPatchId.value());
+ if (activeMelPatchIt != mActiveMelPatches.end()
+ && shouldActivateCsd != activeMelPatchIt->second.csdActive) {
+ if (activeMelPatchIt->second.csdActive) {
+ ALOGV("%s should not compute CSD for stream handle %d", __func__, streamHandle);
+ stopMelComputationForPatch_l(activeMelPatchIt->second);
+ } else {
+ ALOGV("%s should compute CSD for stream handle %d", __func__, streamHandle);
+ startMelComputationForActivePatch_l(activeMelPatchIt->second);
+ }
+ activeMelPatchIt->second.csdActive = shouldActivateCsd;
+ }
+}
+
+void AudioFlinger::MelReporter::onCreateAudioPatch(audio_patch_handle_t handle,
+ const PatchPanel::Patch& patch) {
+ if (!mSoundDoseManager->isCsdEnabled()) {
+ ALOGV("%s csd is disabled", __func__);
+ return;
+ }
+
+ ALOGV("%s: handle %d mHalHandle %d device sink %08x",
+ __func__, handle, patch.mHalHandle,
+ patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+ if (patch.mAudioPatch.num_sources == 0
+ || patch.mAudioPatch.sources[0].type != AUDIO_PORT_TYPE_MIX) {
+ ALOGV("%s: patch does not contain any mix sources", __func__);
+ return;
+ }
+
+ audio_io_handle_t streamHandle = patch.mAudioPatch.sources[0].ext.mix.handle;
+ ActiveMelPatch newPatch;
+ newPatch.streamHandle = streamHandle;
+ for (size_t i = 0; i < patch.mAudioPatch.num_sinks; ++i) {
+ if (patch.mAudioPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE
+ && shouldComputeMelForDeviceType(patch.mAudioPatch.sinks[i].ext.device.type)) {
+ audio_port_handle_t deviceId = patch.mAudioPatch.sinks[i].id;
+ newPatch.deviceHandles.push_back(deviceId);
+ AudioDeviceTypeAddr adt{patch.mAudioPatch.sinks[i].ext.device.type,
+ patch.mAudioPatch.sinks[i].ext.device.address};
+ mSoundDoseManager->mapAddressToDeviceId(adt, deviceId);
+ }
+ }
+
+ if (!newPatch.deviceHandles.empty()) {
+ std::lock_guard _afl(mAudioFlinger.mLock);
+ std::lock_guard _l(mLock);
+ ALOGV("%s add patch handle %d to active devices", __func__, handle);
+ startMelComputationForActivePatch_l(newPatch);
+ newPatch.csdActive = true;
+ mActiveMelPatches[handle] = newPatch;
+ }
+}
+
+void AudioFlinger::MelReporter::startMelComputationForActivePatch_l(const ActiveMelPatch& patch)
+NO_THREAD_SAFETY_ANALYSIS // access of AudioFlinger::checkOutputThread_l
+{
+ auto outputThread = mAudioFlinger.checkOutputThread_l(patch.streamHandle);
+ if (outputThread == nullptr) {
+ ALOGE("%s cannot find thread for stream handle %d", __func__, patch.streamHandle);
+ return;
+ }
+
+ for (const auto& deviceHandle : patch.deviceHandles) {
+ ++mActiveDevices[deviceHandle];
+ ALOGI("%s add stream %d that uses device %d for CSD, nr of streams: %d", __func__,
+ patch.streamHandle, deviceHandle, mActiveDevices[deviceHandle]);
+
+ if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
+ outputThread->startMelComputation_l(mSoundDoseManager->getOrCreateProcessorForDevice(
+ deviceHandle,
+ patch.streamHandle,
+ outputThread->mSampleRate,
+ outputThread->mChannelCount,
+ outputThread->mFormat));
+ }
+ }
+}
+
+void AudioFlinger::MelReporter::onReleaseAudioPatch(audio_patch_handle_t handle) {
+ if (!mSoundDoseManager->isCsdEnabled()) {
+ ALOGV("%s csd is disabled", __func__);
+ return;
+ }
+
+ ActiveMelPatch melPatch;
+ {
+ std::lock_guard _l(mLock);
+
+ auto patchIt = mActiveMelPatches.find(handle);
+ if (patchIt == mActiveMelPatches.end()) {
+ ALOGV("%s patch handle %d does not contain any mix sources with active MEL calculation",
+ __func__, handle);
+ return;
+ }
+
+ melPatch = patchIt->second;
+ mActiveMelPatches.erase(patchIt);
+ }
+
+ std::lock_guard _afl(mAudioFlinger.mLock);
+ std::lock_guard _l(mLock);
+ stopMelComputationForPatch_l(melPatch);
+}
+
+sp<media::ISoundDose> AudioFlinger::MelReporter::getSoundDoseInterface(
+ const sp<media::ISoundDoseCallback>& callback) {
+ // no need to lock since getSoundDoseInterface is synchronized
+ return mSoundDoseManager->getSoundDoseInterface(callback);
+}
+
+void AudioFlinger::MelReporter::stopInternalMelComputation() {
+ ALOGV("%s", __func__);
+ std::lock_guard _l(mLock);
+ mActiveMelPatches.clear();
+ mUseHalSoundDoseInterface = true;
+}
+
+void AudioFlinger::MelReporter::stopMelComputationForPatch_l(const ActiveMelPatch& patch)
+NO_THREAD_SAFETY_ANALYSIS // access of AudioFlinger::checkOutputThread_l
+{
+ if (!patch.csdActive) {
+ // no need to stop CSD inactive patches
+ return;
+ }
+
+ auto outputThread = mAudioFlinger.checkOutputThread_l(patch.streamHandle);
+
+ ALOGV("%s: stop MEL for stream id: %d", __func__, patch.streamHandle);
+ for (const auto& deviceId : patch.deviceHandles) {
+ if (mActiveDevices[deviceId] > 0) {
+ --mActiveDevices[deviceId];
+ if (mActiveDevices[deviceId] == 0) {
+ // no stream is using deviceId anymore
+ ALOGI("%s removing device %d from active CSD devices", __func__, deviceId);
+ mSoundDoseManager->clearMapDeviceIdEntries(deviceId);
+ }
+ }
+ }
+
+ if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
+ outputThread->stopMelComputation_l();
+ }
+}
+
+
+std::optional<audio_patch_handle_t> AudioFlinger::MelReporter::activePatchStreamHandle_l(
+ audio_io_handle_t streamHandle) {
+    for (const auto& patchIt : mActiveMelPatches) {
+ if (patchIt.second.streamHandle == streamHandle) {
+ return patchIt.first;
+ }
+ }
+ return std::nullopt;
+}
+
+bool AudioFlinger::MelReporter::useHalSoundDoseInterface_l() {
+    return !mSoundDoseManager->forceUseFrameworkMel() && mUseHalSoundDoseInterface;
+}
+
+std::string AudioFlinger::MelReporter::dump() {
+ std::lock_guard _l(mLock);
+ std::string output("\nSound Dose:\n");
+ output.append(mSoundDoseManager->dump());
+ return output;
+}
+
+} // namespace android
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
new file mode 100644
index 0000000..2bc33f2
--- /dev/null
+++ b/services/audioflinger/MelReporter.h
@@ -0,0 +1,117 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef INCLUDING_FROM_AUDIOFLINGER_H
+ #error This header file should only be included from AudioFlinger.h
+#endif
+
+#include <mutex>
+#include <sounddose/SoundDoseManager.h>
+#include <unordered_map>
+
+constexpr static int kMaxTimestampDeltaInSec = 120;
+
+/**
+ * Class for listening to new patches and starting the MEL computation. MelReporter is
+ * concealed within AudioFlinger, their lifetimes are the same.
+ */
+class MelReporter : public PatchCommandThread::PatchCommandListener {
+public:
+ explicit MelReporter(AudioFlinger& audioFlinger)
+ : mAudioFlinger(audioFlinger),
+ mSoundDoseManager(sp<SoundDoseManager>::make()) {}
+
+ void onFirstRef() override;
+
+ /**
+ * Activates the MEL reporting from the HAL sound dose interface. If the HAL
+ * does not support the sound dose interface for this module, the internal MEL
+     * calculation will be used.
+ *
+ * <p>If the device is using the audio AIDL HAL then this method will try to get the sound
+ * dose interface from IModule#getSoundDose(). Otherwise, if the legacy audio HIDL HAL is used
+ * this method will be looking for the standalone sound dose implementation. It falls back to
+ * the internal MEL computation if no valid sound dose interface can be retrieved.
+ *
+ * @return true if the MEL reporting will be done from any sound dose HAL interface
+ * implementation, false otherwise.
+ */
+ bool activateHalSoundDoseComputation(const std::string& module,
+ const sp<DeviceHalInterface>& device);
+
+ /**
+ * Activates the MEL reporting from internal framework values. These are used
+ * as a fallback when there is no sound dose interface implementation from HAL.
+ * Note: the internal CSD computation does not guarantee a certification with
+ * IEC62368-1 3rd edition or EN50332-3
+ */
+ void activateInternalSoundDoseComputation();
+
+ sp<media::ISoundDose> getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback);
+
+ std::string dump();
+
+ // PatchCommandListener methods
+ void onCreateAudioPatch(audio_patch_handle_t handle,
+ const PatchPanel::Patch& patch) override;
+ void onReleaseAudioPatch(audio_patch_handle_t handle) override;
+
+ /**
+ * The new metadata can determine whether we should compute MEL for the given thread.
+ * This is the case only if one of the tracks in the thread mix is using MEDIA or GAME.
+ * Otherwise, this method will disable CSD.
+ **/
+ void updateMetadataForCsd(audio_io_handle_t streamHandle,
+ const std::vector<playback_track_metadata_v7_t>& metadataVec);
+private:
+ struct ActiveMelPatch {
+ audio_io_handle_t streamHandle{AUDIO_IO_HANDLE_NONE};
+ std::vector<audio_port_handle_t> deviceHandles;
+ bool csdActive;
+ };
+
+ /** Returns true if we should compute MEL for the given device. */
+ bool shouldComputeMelForDeviceType(audio_devices_t device);
+
+ void stopInternalMelComputation();
+
+ /** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
+ void stopMelComputationForPatch_l(const ActiveMelPatch& patch) REQUIRES(mLock);
+
+ /** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
+ void startMelComputationForActivePatch_l(const ActiveMelPatch& patch) REQUIRES(mLock);
+
+ std::optional<audio_patch_handle_t>
+ activePatchStreamHandle_l(audio_io_handle_t streamHandle) REQUIRES(mLock);
+
+ bool useHalSoundDoseInterface_l() REQUIRES(mLock);
+
+ AudioFlinger& mAudioFlinger; // does not own the object
+
+ sp<SoundDoseManager> mSoundDoseManager;
+
+ /**
+ * Lock for protecting the active mel patches. Do not mix with the AudioFlinger lock.
+ * Locking order AudioFlinger::mLock -> PatchCommandThread::mLock -> MelReporter::mLock.
+ */
+ std::mutex mLock;
+ std::unordered_map<audio_patch_handle_t, ActiveMelPatch>
+ mActiveMelPatches GUARDED_BY(AudioFlinger::MelReporter::mLock);
+ std::unordered_map<audio_port_handle_t, int>
+ mActiveDevices GUARDED_BY(AudioFlinger::MelReporter::mLock);
+ bool mUseHalSoundDoseInterface GUARDED_BY(AudioFlinger::MelReporter::mLock) = false;
+};
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index eb640bb..cb46c52 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -54,6 +54,14 @@
bool getAndSetSilencedNotified_l() { bool silencedNotified = mSilencedNotified;
mSilencedNotified = true;
return silencedNotified; }
+
+ /**
+ * Updates the mute state and notifies the audio service. Call this only when holding player
+ * thread lock.
+ */
+ void processMuteEvent_l(const sp<IAudioManager>& audioManager,
+ mute_state_t muteState)
+ REQUIRES(AudioFlinger::MmapPlaybackThread::mLock);
private:
friend class MmapThread;
@@ -71,5 +79,12 @@
pid_t mPid;
bool mSilenced; // protected by MMapThread::mLock
bool mSilencedNotified; // protected by MMapThread::mLock
+
+ // TODO: replace PersistableBundle with own struct
+ // access these two variables only when holding player thread lock.
+ std::unique_ptr<os::PersistableBundle> mMuteEventExtras
+ GUARDED_BY(AudioFlinger::MmapPlaybackThread::mLock);
+ mute_state_t mMuteState
+ GUARDED_BY(AudioFlinger::MmapPlaybackThread::mLock);
}; // end of Track
diff --git a/services/audioflinger/PatchCommandThread.cpp b/services/audioflinger/PatchCommandThread.cpp
new file mode 100644
index 0000000..f4aab1f
--- /dev/null
+++ b/services/audioflinger/PatchCommandThread.cpp
@@ -0,0 +1,158 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "AudioFlinger::PatchCommandThread"
+//#define LOG_NDEBUG 0
+
+#include "AudioFlinger.h"
+
+namespace android {
+
+constexpr char kPatchCommandThreadName[] = "AudioFlinger_PatchCommandThread";
+
+AudioFlinger::PatchCommandThread::~PatchCommandThread() {
+ exit();
+
+ std::lock_guard _l(mLock);
+ mCommands.clear();
+}
+
+void AudioFlinger::PatchCommandThread::onFirstRef() {
+ run(kPatchCommandThreadName, ANDROID_PRIORITY_AUDIO);
+}
+
+void AudioFlinger::PatchCommandThread::addListener(const sp<PatchCommandListener>& listener) {
+ ALOGV("%s add listener %p", __func__, static_cast<void*>(listener.get()));
+ std::lock_guard _l(mListenerLock);
+ mListeners.emplace_back(listener);
+}
+
+void AudioFlinger::PatchCommandThread::createAudioPatch(audio_patch_handle_t handle,
+ const PatchPanel::Patch& patch) {
+ ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
+ __func__, handle, patch.mHalHandle,
+ patch.mAudioPatch.num_sinks,
+ patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+
+ createAudioPatchCommand(handle, patch);
+}
+
+void AudioFlinger::PatchCommandThread::releaseAudioPatch(audio_patch_handle_t handle) {
+ ALOGV("%s", __func__);
+ releaseAudioPatchCommand(handle);
+}
+
+bool AudioFlinger::PatchCommandThread::threadLoop()
+NO_THREAD_SAFETY_ANALYSIS // bug in clang compiler.
+{
+ std::unique_lock _l(mLock);
+
+ while (!exitPending()) {
+ while (!mCommands.empty() && !exitPending()) {
+ const sp<Command> command = mCommands.front();
+ mCommands.pop_front();
+ _l.unlock();
+
+ std::vector<wp<PatchCommandListener>> listenersCopy;
+ {
+ std::lock_guard _ll(mListenerLock);
+ listenersCopy = mListeners;
+ }
+
+ switch (command->mCommand) {
+ case CREATE_AUDIO_PATCH: {
+ const auto data = (CreateAudioPatchData*) command->mData.get();
+ ALOGV("%s processing create audio patch handle %d",
+ __func__,
+ data->mHandle);
+
+ for (const auto& listener : listenersCopy) {
+ auto spListener = listener.promote();
+ if (spListener) {
+ spListener->onCreateAudioPatch(data->mHandle, data->mPatch);
+ }
+ }
+ }
+ break;
+ case RELEASE_AUDIO_PATCH: {
+ const auto data = (ReleaseAudioPatchData*) command->mData.get();
+ ALOGV("%s processing release audio patch handle %d",
+ __func__,
+ data->mHandle);
+
+ for (const auto& listener : listenersCopy) {
+ auto spListener = listener.promote();
+ if (spListener) {
+ spListener->onReleaseAudioPatch(data->mHandle);
+ }
+ }
+ }
+ break;
+ default:
+ ALOGW("%s unknown command %d", __func__, command->mCommand);
+ break;
+ }
+ _l.lock();
+ }
+
+ // At this stage we have either an empty command queue or the first command in the queue
+ // has a finite delay. So unless we are exiting it is safe to wait.
+ if (!exitPending()) {
+ ALOGV("%s going to sleep", __func__);
+ mWaitWorkCV.wait(_l);
+ }
+ }
+ return false;
+}
+
+void AudioFlinger::PatchCommandThread::sendCommand(const sp<Command>& command) {
+ std::lock_guard _l(mLock);
+ mCommands.emplace_back(command);
+ mWaitWorkCV.notify_one();
+}
+
+void AudioFlinger::PatchCommandThread::createAudioPatchCommand(
+ audio_patch_handle_t handle, const PatchPanel::Patch& patch) {
+ auto command = sp<Command>::make(CREATE_AUDIO_PATCH,
+ new CreateAudioPatchData(handle, patch));
+ ALOGV("%s adding create patch handle %d mHalHandle %d.",
+ __func__,
+ handle,
+ patch.mHalHandle);
+ sendCommand(command);
+}
+
+void AudioFlinger::PatchCommandThread::releaseAudioPatchCommand(audio_patch_handle_t handle) {
+ sp<Command> command =
+ sp<Command>::make(RELEASE_AUDIO_PATCH, new ReleaseAudioPatchData(handle));
+ ALOGV("%s adding release patch", __func__);
+ sendCommand(command);
+}
+
+void AudioFlinger::PatchCommandThread::exit() {
+ ALOGV("%s", __func__);
+ {
+ std::lock_guard _l(mLock);
+ requestExit();
+ mWaitWorkCV.notify_one();
+ }
+ // Note that we can call it from the thread loop if all other references have been released
+ // but it will safely return WOULD_BLOCK in this case
+ requestExitAndWait();
+}
+
+} // namespace android
diff --git a/services/audioflinger/PatchCommandThread.h b/services/audioflinger/PatchCommandThread.h
new file mode 100644
index 0000000..b52e0a9
--- /dev/null
+++ b/services/audioflinger/PatchCommandThread.h
@@ -0,0 +1,102 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef INCLUDING_FROM_AUDIOFLINGER_H
+ #error This header file should only be included from AudioFlinger.h
+#endif
+
+class Command;
+
+// Thread to execute create and release patch commands asynchronously. This is needed because
+// PatchPanel::createAudioPatch and releaseAudioPatch are executed from audio policy service
+// with mutex locked and effect management requires to call back into audio policy service
+class PatchCommandThread : public Thread {
+public:
+
+ enum {
+ CREATE_AUDIO_PATCH,
+ RELEASE_AUDIO_PATCH,
+ };
+
+ class PatchCommandListener : public virtual RefBase {
+ public:
+ virtual void onCreateAudioPatch(audio_patch_handle_t handle,
+ const PatchPanel::Patch& patch) = 0;
+ virtual void onReleaseAudioPatch(audio_patch_handle_t handle) = 0;
+ };
+
+ PatchCommandThread() : Thread(false /* canCallJava */) {}
+ ~PatchCommandThread() override;
+
+ void addListener(const sp<PatchCommandListener>& listener);
+
+ void createAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
+ void releaseAudioPatch(audio_patch_handle_t handle);
+
+ // Thread virtuals
+ void onFirstRef() override;
+ bool threadLoop() override;
+
+ void exit();
+
+ void createAudioPatchCommand(audio_patch_handle_t handle,
+ const PatchPanel::Patch& patch);
+ void releaseAudioPatchCommand(audio_patch_handle_t handle);
+
+private:
+ class CommandData;
+
+ // Command type received from the PatchPanel
+ class Command: public RefBase {
+ public:
+ Command() = default;
+ Command(int command, const sp<CommandData>& data)
+ : mCommand(command), mData(data) {}
+
+ const int mCommand = -1;
+ const sp<CommandData> mData;
+ };
+
+ class CommandData: public RefBase {};
+
+ class CreateAudioPatchData : public CommandData {
+ public:
+ CreateAudioPatchData(audio_patch_handle_t handle, const PatchPanel::Patch& patch)
+ : mHandle(handle), mPatch(patch) {}
+
+ const audio_patch_handle_t mHandle;
+ const PatchPanel::Patch mPatch;
+ };
+
+ class ReleaseAudioPatchData : public CommandData {
+ public:
+ explicit ReleaseAudioPatchData(audio_patch_handle_t handle)
+ : mHandle(handle) {}
+
+ audio_patch_handle_t mHandle;
+ };
+
+ void sendCommand(const sp<Command>& command);
+
+ std::string mThreadName;
+ std::mutex mLock;
+ std::condition_variable mWaitWorkCV;
+ std::deque<sp<Command>> mCommands GUARDED_BY(mLock); // list of pending commands
+
+ std::mutex mListenerLock;
+ std::vector<wp<PatchCommandListener>> mListeners GUARDED_BY(mListenerLock);
+};
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 972c870..567d12a 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -135,6 +135,10 @@
status_t AudioFlinger::PatchPanel::createAudioPatch(const struct audio_patch *patch,
audio_patch_handle_t *handle,
bool endpointPatch)
+ //unlocks AudioFlinger::mLock when calling ThreadBase::sendCreateAudioPatchConfigEvent
+ //to avoid deadlocks if the thread loop needs to acquire AudioFlinger::mLock
+ //before processing the create patch request.
+ NO_THREAD_SAFETY_ANALYSIS
{
if (handle == NULL || patch == NULL) {
return BAD_VALUE;
@@ -245,7 +249,6 @@
status = INVALID_OPERATION;
goto exit;
}
-
sp<ThreadBase> thread =
mAudioFlinger.checkPlaybackThread_l(patch->sources[1].ext.mix.handle);
if (thread == 0) {
@@ -356,11 +359,12 @@
goto exit;
}
}
+ mAudioFlinger.unlock();
status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
+ mAudioFlinger.lock();
if (status == NO_ERROR) {
newPatch.setThread(thread);
}
-
// remove stale audio patch with same input as sink if any
for (auto& iter : mPatches) {
if (iter.second.mAudioPatch.sinks[0].ext.mix.handle == thread->id()) {
@@ -422,7 +426,9 @@
mAudioFlinger.updateOutDevicesForRecordThreads_l(devices);
}
+ mAudioFlinger.unlock();
status = thread->sendCreateAudioPatchConfigEvent(patch, &halHandle);
+ mAudioFlinger.lock();
if (status == NO_ERROR) {
newPatch.setThread(thread);
}
@@ -449,7 +455,7 @@
if (status == NO_ERROR) {
*handle = (audio_patch_handle_t) mAudioFlinger.nextUniqueId(AUDIO_UNIQUE_ID_USE_PATCH);
newPatch.mHalHandle = halHandle;
- mAudioFlinger.mDeviceEffectManager.createAudioPatch(*handle, newPatch);
+ mAudioFlinger.mPatchCommandThread->createAudioPatch(*handle, newPatch);
if (insertedModule != AUDIO_MODULE_HANDLE_NONE) {
addSoftwarePatchToInsertedModules(insertedModule, *handle, &newPatch.mAudioPatch);
}
@@ -729,7 +735,11 @@
/* Disconnect a patch */
status_t AudioFlinger::PatchPanel::releaseAudioPatch(audio_patch_handle_t handle)
-{
+ //unlocks AudioFlinger::mLock when calling ThreadBase::sendReleaseAudioPatchConfigEvent
+ //to avoid deadlocks if the thread loop needs to acquire AudioFlinger::mLock
+ //before processing the release patch request.
+ NO_THREAD_SAFETY_ANALYSIS
+ {
ALOGV("%s handle %d", __func__, handle);
status_t status = NO_ERROR;
@@ -766,7 +776,9 @@
break;
}
}
+ mAudioFlinger.unlock();
status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
+ mAudioFlinger.lock();
} else {
status = hwDevice->releaseAudioPatch(removedPatch.mHalHandle);
}
@@ -787,7 +799,9 @@
break;
}
}
+ mAudioFlinger.unlock();
status = thread->sendReleaseAudioPatchConfigEvent(removedPatch.mHalHandle);
+ mAudioFlinger.lock();
} break;
default:
status = BAD_VALUE;
@@ -800,7 +814,7 @@
void AudioFlinger::PatchPanel::erasePatch(audio_patch_handle_t handle) {
mPatches.erase(handle);
removeSoftwarePatchFromInsertedModules(handle);
- mAudioFlinger.mDeviceEffectManager.releaseAudioPatch(handle);
+ mAudioFlinger.mPatchCommandThread->releaseAudioPatch(handle);
}
/* List connected audio ports and they attributes */
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 181829f..464940e 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -90,7 +90,8 @@
* ready as possible (aka. Buffer is full). */
size_t frameCountToBeReady = SIZE_MAX,
float speed = 1.0f,
- bool isSpatialized = false);
+ bool isSpatialized = false,
+ bool isBitPerfect = false);
virtual ~Track();
virtual status_t initCheck() const;
@@ -154,8 +155,12 @@
sp<media::VolumeHandler> getVolumeHandler() { return mVolumeHandler; }
/** Set the computed normalized final volume of the track.
* !masterMute * masterVolume * streamVolume * averageLRVolume */
- void setFinalVolume(float volume);
+ void setFinalVolume(float volumeLeft, float volumeRight);
float getFinalVolume() const { return mFinalVolume; }
+ void getFinalVolume(float* left, float* right) const {
+ *left = mFinalVolumeLeft;
+ *right = mFinalVolumeRight;
+ }
using SourceMetadatas = std::vector<playback_track_metadata_v7_t>;
using MetadataInserter = std::back_insert_iterator<SourceMetadatas>;
@@ -188,7 +193,9 @@
}
sp<os::ExternalVibration> getExternalVibration() const { return mExternalVibration; }
- void setTeePatches(TeePatches teePatches);
+ // This function should be called with holding thread lock.
+ void updateTeePatches();
+ void setTeePatchesToUpdate(TeePatches teePatchesToUpdate);
void tallyUnderrunFrames(size_t frames) override {
if (isOut()) { // we expect this from output tracks only
@@ -210,6 +217,13 @@
audio_output_flags_t getOutputFlags() const { return mFlags; }
float getSpeed() const { return mSpeed; }
bool isSpatialized() const override { return mIsSpatialized; }
+ bool isBitPerfect() const override { return mIsBitPerfect; }
+
+ /**
+ * Updates the mute state and notifies the audio service. Call this only when holding player
+ * thread lock.
+ */
+ void processMuteEvent_l(const sp<IAudioManager>& audioManager, mute_state_t muteState);
protected:
// for numerous
@@ -354,14 +368,25 @@
// 'volatile' means accessed without lock or
// barrier, but is read/written atomically
float mFinalVolume; // combine master volume, stream type volume and track volume
+ float mFinalVolumeLeft; // combine master volume, stream type volume and track
+ // volume
+ float mFinalVolumeRight; // combine master volume, stream type volume and track
+ // volume
sp<AudioTrackServerProxy> mAudioTrackServerProxy;
bool mResumeToStopping; // track was paused in stopping state.
bool mFlushHwPending; // track requests for thread flush
bool mPauseHwPending = false; // direct/offload track request for thread pause
audio_output_flags_t mFlags;
TeePatches mTeePatches;
+ std::optional<TeePatches> mTeePatchesToUpdate;
const float mSpeed;
const bool mIsSpatialized;
+ const bool mIsBitPerfect;
+
+ // TODO: replace PersistableBundle with own struct
+ // access these two variables only when holding player thread lock.
+ std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
+ mute_state_t mMuteState;
}; // end of Track
@@ -411,6 +436,7 @@
private:
status_t obtainBuffer(AudioBufferProvider::Buffer* buffer,
uint32_t waitTimeMs);
+ void queueBuffer(Buffer& inBuffer);
void clearBufferQueue();
void restartIfDisabled();
diff --git a/services/audioflinger/TEST_MAPPING b/services/audioflinger/TEST_MAPPING
index 3de5a9f..5d3fb0a 100644
--- a/services/audioflinger/TEST_MAPPING
+++ b/services/audioflinger/TEST_MAPPING
@@ -4,7 +4,16 @@
"name": "CtsNativeMediaAAudioTestCases",
"options" : [
{
- "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+ },
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+ },
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+ },
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
}
]
}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 6aafa07..1f01117 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -17,7 +17,7 @@
#define LOG_TAG "AudioFlinger"
-//#define LOG_NDEBUG 0
+// #define LOG_NDEBUG 0
#define ATRACE_TAG ATRACE_TAG_AUDIO
#include "Configuration.h"
@@ -31,6 +31,7 @@
#include <sys/syscall.h>
#include <cutils/bitops.h>
#include <cutils/properties.h>
+#include <binder/PersistableBundle.h>
#include <media/AudioContainers.h>
#include <media/AudioDeviceTypeAddr.h>
#include <media/AudioParameter.h>
@@ -43,6 +44,7 @@
#include <private/media/AudioTrackShared.h>
#include <private/android_filesystem_config.h>
#include <audio_utils/Balance.h>
+#include <audio_utils/MelProcessor.h>
#include <audio_utils/Metadata.h>
#include <audio_utils/channels.h>
#include <audio_utils/mono_blend.h>
@@ -532,6 +534,8 @@
return "MMAP_CAPTURE";
case SPATIALIZER:
return "SPATIALIZER";
+ case BIT_PERFECT:
+ return "BIT_PERFECT";
default:
return "unknown";
}
@@ -806,6 +810,7 @@
(CreateAudioPatchConfigEventData *)event->mData.get();
event->mStatus = createAudioPatch_l(&data->mPatch, &data->mHandle);
const DeviceTypeSet newDevices = getDeviceTypes();
+ configChanged = oldDevices != newDevices;
mLocalLog.log("CFG_EVENT_CREATE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
dumpDeviceTypes(newDevices).c_str(), toString(newDevices).c_str());
@@ -816,6 +821,7 @@
(ReleaseAudioPatchConfigEventData *)event->mData.get();
event->mStatus = releaseAudioPatch_l(data->mHandle);
const DeviceTypeSet newDevices = getDeviceTypes();
+ configChanged = oldDevices != newDevices;
mLocalLog.log("CFG_EVENT_RELEASE_AUDIO_PATCH: old device %s (%s) new device %s (%s)",
dumpDeviceTypes(oldDevices).c_str(), toString(oldDevices).c_str(),
dumpDeviceTypes(newDevices).c_str(), toString(newDevices).c_str());
@@ -1499,6 +1505,26 @@
}
}
break;
+ case BIT_PERFECT:
+ if ((desc->flags & EFFECT_FLAG_HW_ACC_TUNNEL) != 0) {
+ // Allow HW accelerated effects of tunnel type
+ break;
+ }
+ // As bit-perfect tracks will not be allowed to apply audio effect that will touch the audio
+ // data, effects will not be allowed on 1) global effects (AUDIO_SESSION_OUTPUT_MIX),
+ // 2) post-processing effects (AUDIO_SESSION_OUTPUT_STAGE or AUDIO_SESSION_DEVICE) and
+ // 3) there is any bit-perfect track with the given session id.
+ if (sessionId == AUDIO_SESSION_OUTPUT_MIX || sessionId == AUDIO_SESSION_OUTPUT_STAGE ||
+ sessionId == AUDIO_SESSION_DEVICE) {
+ ALOGW("%s: effect %s not supported on bit-perfect thread %s",
+ __func__, desc->name, mThreadName);
+ return BAD_VALUE;
+ } else if ((hasAudioSession_l(sessionId) & ThreadBase::BIT_PERFECT_SESSION) != 0) {
+ ALOGW("%s: effect %s not supported as there is a bit-perfect track with session as %d",
+ __func__, desc->name, sessionId);
+ return BAD_VALUE;
+ }
+ break;
default:
LOG_ALWAYS_FATAL("checkEffectCompatibility_l(): wrong thread type %d", mType);
}
@@ -2005,6 +2031,21 @@
return AudioSystem::getStrategyForStream(stream);
}
+// startMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::ThreadBase::startMelComputation_l(
+ const sp<audio_utils::MelProcessor>& /*processor*/)
+{
+ // Do nothing
+ ALOGW("%s: ThreadBase does not support CSD", __func__);
+}
+
+// stopMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::ThreadBase::stopMelComputation_l()
+{
+ // Do nothing
+ ALOGW("%s: ThreadBase does not support CSD", __func__);
+}
+
// ----------------------------------------------------------------------------
// Playback
// ----------------------------------------------------------------------------
@@ -2049,8 +2090,7 @@
mHwSupportsPause(false), mHwPaused(false), mFlushPending(false),
mLeftVolFloat(-1.0), mRightVolFloat(-1.0),
mDownStreamPatch{},
- mIsTimestampAdvancing(kMinimumTimeBetweenTimestampChecksNs),
- mBluetoothLatencyModesEnabled(true)
+ mIsTimestampAdvancing(kMinimumTimeBetweenTimestampChecksNs)
{
snprintf(mThreadName, kThreadNameLength, "AudioOut_%X", id);
mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mThreadName);
@@ -2276,7 +2316,8 @@
status_t *status,
audio_port_handle_t portId,
const sp<media::IAudioTrackCallback>& callback,
- bool isSpatialized)
+ bool isSpatialized,
+ bool isBitPerfect)
{
size_t frameCount = *pFrameCount;
size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2308,6 +2349,25 @@
*flags = (audio_output_flags_t)(*flags & outputFlags);
}
+ if (isBitPerfect) {
+ sp<EffectChain> chain = getEffectChain_l(sessionId);
+ if (chain.get() != nullptr) {
+ // Bit-perfect is required according to the configuration and preferred mixer
+ // attributes, but it is not in the output flag from the client's request. Explicitly
+ // adding bit-perfect flag to check the compatibility
+ audio_output_flags_t flagsToCheck =
+ (audio_output_flags_t)(*flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+ chain->checkOutputFlagCompatibility(&flagsToCheck);
+ if ((flagsToCheck & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_NONE) {
+ ALOGE("%s cannot create track as there is data-processing effect attached to "
+ "given session id(%d)", __func__, sessionId);
+ lStatus = BAD_VALUE;
+ goto Exit;
+ }
+ *flags = flagsToCheck;
+ }
+ }
+
// client expresses a preference for FAST, but we get the final say
if (*flags & AUDIO_OUTPUT_FLAG_FAST) {
if (
@@ -2488,6 +2548,18 @@
*pNotificationFrameCount = notificationFrameCount;
switch (mType) {
+ case BIT_PERFECT:
+ if (isBitPerfect) {
+ if (sampleRate != mSampleRate || format != mFormat || channelMask != mChannelMask) {
+ ALOGE("%s, bad parameter when request streaming bit-perfect, sampleRate=%u, "
+ "format=%#x, channelMask=%#x, mSampleRate=%u, mFormat=%#x, mChannelMask=%#x",
+ __func__, sampleRate, format, channelMask, mSampleRate, mFormat,
+ mChannelMask);
+ lStatus = BAD_VALUE;
+ goto Exit;
+ }
+ }
+ break;
case DIRECT:
if (audio_is_linear_pcm(format)) { // TODO maybe use audio_has_proportional_frames()?
@@ -2569,7 +2641,7 @@
nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
sessionId, creatorPid, attributionSource, trackFlags,
TrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/,
- speed, isSpatialized);
+ speed, isSpatialized, isBitPerfect);
lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
if (lStatus != NO_ERROR) {
@@ -2726,7 +2798,11 @@
}
// abort if start is rejected by audio policy manager
if (status != NO_ERROR) {
- return PERMISSION_DENIED;
+ // Do not replace the error if it is DEAD_OBJECT. When this happens, it indicates
+ // current playback thread is reopened, which may happen when clients set preferred
+ // mixer configuration. Returning DEAD_OBJECT will make the client restore track
+ // immediately.
+ return status == DEAD_OBJECT ? status : PERMISSION_DENIED;
}
#ifdef ADD_BATTERY_DATA
// to track the speaker usage
@@ -2756,7 +2832,7 @@
// Unlock due to VibratorService will lock for this call and will
// call Tracks.mute/unmute which also require thread's lock.
mLock.unlock();
- const int intensity = AudioFlinger::onExternalVibrationStart(
+ const os::HapticScale intensity = AudioFlinger::onExternalVibrationStart(
track->getExternalVibration());
std::optional<media::AudioVibratorInfo> vibratorInfo;
{
@@ -2766,7 +2842,7 @@
vibratorInfo = std::move(mAudioFlinger->getDefaultVibratorInfo_l());
}
mLock.lock();
- track->setHapticIntensity(static_cast<os::HapticScale>(intensity));
+ track->setHapticIntensity(intensity);
if (vibratorInfo) {
track->setHapticMaxAmplitude(vibratorInfo->maxAmplitude);
}
@@ -3161,10 +3237,10 @@
item.record();
}
-void AudioFlinger::PlaybackThread::updateMetadata_l()
+AudioFlinger::ThreadBase::MetadataUpdate AudioFlinger::PlaybackThread::updateMetadata_l()
{
if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
- return; // nothing to do
+ return {}; // nothing to do
}
StreamOutHalInterface::SourceMetadata metadata;
auto backInserter = std::back_inserter(metadata.tracks);
@@ -3173,6 +3249,9 @@
track->copyMetadataTo(backInserter);
}
sendMetadataToBackend_l(metadata);
+ MetadataUpdate change;
+ change.playbackMetadataUpdate = metadata.tracks;
+ return change;
}
void AudioFlinger::PlaybackThread::sendMetadataToBackend_l(
@@ -3349,8 +3428,10 @@
}
ssize_t framesWritten = mNormalSink->write((char *)mSinkBuffer + offset, count);
ATRACE_END();
+
if (framesWritten > 0) {
bytesWritten = framesWritten * mFrameSize;
+
#ifdef TEE_SINK
mTee.write((char *)mSinkBuffer + offset, framesWritten);
#endif
@@ -3393,6 +3474,25 @@
return bytesWritten;
}
+// startMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::PlaybackThread::startMelComputation_l(
+ const sp<audio_utils::MelProcessor>& processor)
+{
+ auto outputSink = static_cast<AudioStreamOutSink*>(mOutputSink.get());
+ if (outputSink != nullptr) {
+ outputSink->startMelComputation(processor);
+ }
+}
+
+// stopMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::PlaybackThread::stopMelComputation_l()
+{
+ auto outputSink = static_cast<AudioStreamOutSink*>(mOutputSink.get());
+ if (outputSink != nullptr) {
+ outputSink->stopMelComputation();
+ }
+}
+
void AudioFlinger::PlaybackThread::threadLoop_drain()
{
bool supportsDrain = false;
@@ -3451,9 +3551,10 @@
mActiveSleepTimeUs = activeSleepTimeUs();
mIdleSleepTimeUs = idleSleepTimeUs();
+ mStandbyDelayNs = AudioFlinger::mStandbyTimeInNsecs;
+
// make sure standby delay is not too short when connected to an A2DP sink to avoid
// truncating audio when going to standby.
- mStandbyDelayNs = AudioFlinger::mStandbyTimeInNsecs;
if (!Intersection(outDeviceTypes(), getAudioDeviceOutAllA2dpSet()).empty()) {
if (mStandbyDelayNs < kDefaultStandbyTimeInNsecs) {
mStandbyDelayNs = kDefaultStandbyTimeInNsecs;
@@ -3483,6 +3584,28 @@
invalidateTracks_l(streamType);
}
+void AudioFlinger::PlaybackThread::invalidateTracks(std::set<audio_port_handle_t>& portIds) {
+ Mutex::Autolock _l(mLock);
+ invalidateTracks_l(portIds);
+}
+
+bool AudioFlinger::PlaybackThread::invalidateTracks_l(std::set<audio_port_handle_t>& portIds) {
+ bool trackMatch = false;
+ const size_t size = mTracks.size();
+ for (size_t i = 0; i < size; i++) {
+ sp<Track> t = mTracks[i];
+ if (t->isExternalTrack() && portIds.find(t->portId()) != portIds.end()) {
+ t->invalidate();
+ portIds.erase(t->portId());
+ trackMatch = true;
+ }
+ if (portIds.empty()) {
+ break;
+ }
+ }
+ return trackMatch;
+}
+
// getTrackById_l must be called with holding thread lock
AudioFlinger::PlaybackThread::Track* AudioFlinger::PlaybackThread::getTrackById_l(
audio_port_handle_t trackPortId) {
@@ -3810,6 +3933,7 @@
checkOutputStageEffects();
}
+ MetadataUpdate metadataUpdate;
{ // scope for mLock
Mutex::Autolock _l(mLock);
@@ -3869,7 +3993,7 @@
LOG_AUDIO_STATE();
mThreadMetrics.logEndInterval();
mThreadSnapshot.onEnd();
- mStandby = true;
+ setStandby_l();
}
sendStatistics(false /* force */);
}
@@ -3911,7 +4035,7 @@
mActiveTracks.updatePowerState(this);
- updateMetadata_l();
+ metadataUpdate = updateMetadata_l();
// prevent any changes in effect chain list and in each effect chain
// during mixing and effect process as the audio buffers could be deleted
@@ -3949,6 +4073,19 @@
activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
setHalLatencyMode_l();
+
+ for (const auto &track : mActiveTracks ) {
+ track->updateTeePatches();
+ }
+
+ // signal actual start of output stream when the render position reported by the kernel
+ // starts moving.
+ if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
+ && (mKernelPositionOnStandby
+ != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
+ mHalStarted = true;
+ mWaitHalStartCV.broadcast();
+ }
} // mLock scope ends
if (mBytesRemaining == 0) {
@@ -3981,7 +4118,8 @@
// Either threadLoop_mix() or threadLoop_sleepTime() should have set
// mMixerBuffer with data if mMixerBufferValid is true and mSleepTimeUs == 0.
// Merge mMixerBuffer data into mEffectBuffer (if any effects are valid)
- // or mSinkBuffer (if there are no effects).
+ // or mSinkBuffer (if there are no effects and there is no data already copied to
+ // mSinkBuffer).
//
// This is done pre-effects computation; if effects change to
// support higher precision, this needs to move.
@@ -3990,7 +4128,7 @@
// TODO use mSleepTimeUs == 0 as an additional condition.
uint32_t mixerChannelCount = mEffectBufferValid ?
audio_channel_count_from_out_mask(mMixerChannelMask) : mChannelCount;
- if (mMixerBufferValid) {
+ if (mMixerBufferValid && (mEffectBufferValid || !mHasDataCopiedToSinkBuffer)) {
void *buffer = mEffectBufferValid ? mEffectBuffer : mSinkBuffer;
audio_format_t format = mEffectBufferValid ? mEffectBufferFormat : mFormat;
@@ -4081,7 +4219,7 @@
// Effects buffer (buffer valid), we need to
// copy into the sink buffer.
// TODO use mSleepTimeUs == 0 as an additional condition.
- if (mEffectBufferValid) {
+ if (mEffectBufferValid && !mHasDataCopiedToSinkBuffer) {
//ALOGV("writing effect buffer to sink buffer format %#x", mFormat);
void *effectBuffer = (mType == SPATIALIZER) ? mPostSpatializerBuffer : mEffectBuffer;
if (requireMonoBlend()) {
@@ -4140,6 +4278,11 @@
// enable changes in effect chain
unlockEffectChains(effectChains);
+ if (!metadataUpdate.playbackMetadataUpdate.empty()) {
+ mAudioFlinger->mMelReporter->updateMetadataForCsd(id(),
+ metadataUpdate.playbackMetadataUpdate);
+ }
+
if (!waitingAsyncCallback()) {
// mSleepTimeUs == 0 means we must write to audio hardware
if (mSleepTimeUs == 0) {
@@ -4328,7 +4471,7 @@
if (!mStandby) {
threadLoop_standby();
- mStandby = true;
+ setStandby();
}
releaseWakeLock();
@@ -4509,7 +4652,7 @@
// When the track is stop, set the haptic intensity as MUTE
// for the HapticGenerator effect.
if (chain != nullptr) {
- chain->setHapticIntensity_l(track->id(), static_cast<int>(os::HapticScale::MUTE));
+ chain->setHapticIntensity_l(track->id(), os::HapticScale::MUTE);
}
}
}
@@ -4578,6 +4721,8 @@
} else {
status = PlaybackThread::createAudioPatch_l(patch, handle);
}
+
+ updateHalSupportedLatencyModes_l();
return status;
}
@@ -4659,6 +4804,9 @@
if (configChanged) {
sendIoConfigEvent_l(AUDIO_OUTPUT_CONFIG_CHANGED);
}
+ // Force metadata update after a route change
+ mActiveTracks.setHasChanged();
+
return status;
}
@@ -4689,6 +4837,9 @@
} else {
status = mOutput->stream->legacyReleaseAudioPatch();
}
+ // Force meteadata update after a route change
+ mActiveTracks.setHasChanged();
+
return status;
}
@@ -4723,6 +4874,7 @@
: PlaybackThread(audioFlinger, output, id, type, systemReady, mixerConfig),
// mAudioMixer below
// mFastMixer below
+ mBluetoothLatencyModesEnabled(false),
mFastMixerFutex(0),
mMasterMono(false)
// mOutputSink below
@@ -4758,7 +4910,7 @@
// initialize fast mixer depending on configuration
bool initFastMixer;
- if (mType == SPATIALIZER) {
+ if (mType == SPATIALIZER || mType == BIT_PERFECT) {
initFastMixer = false;
} else {
switch (kUseFastMixer) {
@@ -4808,7 +4960,7 @@
const NBAIO_Format offersFast[1] = {format};
size_t numCounterOffersFast = 0;
#if !LOG_NDEBUG
- ssize_t index =
+ index =
#else
(void)
#endif
@@ -4939,6 +5091,21 @@
delete mAudioMixer;
}
+void AudioFlinger::MixerThread::onFirstRef() {
+ PlaybackThread::onFirstRef();
+
+ Mutex::Autolock _l(mLock);
+ if (mOutput != nullptr && mOutput->stream != nullptr) {
+ status_t status = mOutput->stream->setLatencyModeCallback(this);
+ if (status != INVALID_OPERATION) {
+ updateHalSupportedLatencyModes_l();
+ }
+ // Default to enabled if the HAL supports it. This can be changed by Audioflinger after
+ // the thread construction according to AudioFlinger::mBluetoothLatencyModesEnabled
+ mBluetoothLatencyModesEnabled.store(
+ mOutput->audioHwDev->supportsBluetoothVariableLatency());
+ }
+}
uint32_t AudioFlinger::MixerThread::correctLatency_l(uint32_t latency) const
{
@@ -5402,10 +5569,21 @@
volume *= vh;
track->mCachedVolume = volume;
gain_minifloat_packed_t vlr = proxy->getVolumeLR();
- float vlf = volume * float_from_gain(gain_minifloat_unpack_left(vlr));
- float vrf = volume * float_from_gain(gain_minifloat_unpack_right(vlr));
+ float vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
+ float vrf = float_from_gain(gain_minifloat_unpack_right(vlr));
- track->setFinalVolume((vlf + vrf) / 2.f);
+ track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+ /*muteState=*/{masterVolume == 0.f,
+ mStreamTypes[track->streamType()].volume == 0.f,
+ mStreamTypes[track->streamType()].mute,
+ track->isPlaybackRestricted(),
+ vlf == 0.f && vrf == 0.f,
+ vh == 0.f});
+
+ vlf *= volume;
+ vrf *= volume;
+
+ track->setFinalVolume(vlf, vrf);
++fastTracks;
} else {
// was it previously active?
@@ -5576,6 +5754,15 @@
ALOGV("Track right volume out of range: %.3g", vrf);
vrf = GAIN_FLOAT_UNITY;
}
+
+ track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+ /*muteState=*/{masterVolume == 0.f,
+ mStreamTypes[track->streamType()].volume == 0.f,
+ mStreamTypes[track->streamType()].mute,
+ track->isPlaybackRestricted(),
+ vlf == 0.f && vrf == 0.f,
+ vh == 0.f});
+
// now apply the master volume and stream type volume and shaper volume
vlf *= v * vh;
vrf *= v * vh;
@@ -5595,7 +5782,7 @@
vaf = v * sendLevel * (1. / MAX_GAIN_INT);
}
- track->setFinalVolume((vrf + vlf) / 2.f);
+ track->setFinalVolume(vrf, vlf);
// Delegate volume control to effect in track effect chain if needed
if (chain != 0 && chain->setVolume_l(&vl, &vr)) {
@@ -6037,7 +6224,7 @@
mOutput->standby();
mThreadMetrics.logEndInterval();
mThreadSnapshot.onEnd();
- mStandby = true;
+ setStandby_l();
mBytesWritten = 0;
status = mOutput->stream->setParameters(keyValuePair);
}
@@ -6105,6 +6292,12 @@
} else {
dprintf(fd, " No FastMixer\n");
}
+
+ dprintf(fd, "Bluetooth latency modes are %senabled\n",
+ mBluetoothLatencyModesEnabled ? "" : "not ");
+ dprintf(fd, "HAL does %ssupport Bluetooth latency modes\n", mOutput != nullptr &&
+ mOutput->audioHwDev->supportsBluetoothVariableLatency() ? "" : "not ");
+ dprintf(fd, "Supported latency modes: %s\n", toString(mSupportedLatencyModes).c_str());
}
uint32_t AudioFlinger::MixerThread::idleSleepTimeUs() const
@@ -6128,6 +6321,100 @@
maxPeriod = seconds(mNormalFrameCount) / mSampleRate * 15;
}
+void AudioFlinger::MixerThread::onHalLatencyModesChanged_l() {
+ mAudioFlinger->onSupportedLatencyModesChanged(mId, mSupportedLatencyModes);
+}
+
+void AudioFlinger::MixerThread::setHalLatencyMode_l() {
+ // Only handle latency mode if:
+ // - mBluetoothLatencyModesEnabled is true
+ // - the HAL supports latency modes
+ // - the selected device is Bluetooth LE or A2DP
+ if (!mBluetoothLatencyModesEnabled.load() || mSupportedLatencyModes.empty()) {
+ return;
+ }
+ if (mOutDeviceTypeAddrs.size() != 1
+ || !(audio_is_a2dp_out_device(mOutDeviceTypeAddrs[0].mType)
+ || audio_is_ble_out_device(mOutDeviceTypeAddrs[0].mType))) {
+ return;
+ }
+
+ audio_latency_mode_t latencyMode = AUDIO_LATENCY_MODE_FREE;
+ if (mSupportedLatencyModes.size() == 1) {
+ // If the HAL only support one latency mode currently, confirm the choice
+ latencyMode = mSupportedLatencyModes[0];
+ } else if (mSupportedLatencyModes.size() > 1) {
+ // Request low latency if:
+ // - At least one active track is either:
+ // - a fast track with gaming usage or
+ // - a track with acessibility usage
+ for (const auto& track : mActiveTracks) {
+ if ((track->isFastTrack() && track->attributes().usage == AUDIO_USAGE_GAME)
+ || track->attributes().usage == AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY) {
+ latencyMode = AUDIO_LATENCY_MODE_LOW;
+ break;
+ }
+ }
+ }
+
+ if (latencyMode != mSetLatencyMode) {
+ status_t status = mOutput->stream->setLatencyMode(latencyMode);
+ ALOGD("%s: thread(%d) setLatencyMode(%s) returned %d",
+ __func__, mId, toString(latencyMode).c_str(), status);
+ if (status == NO_ERROR) {
+ mSetLatencyMode = latencyMode;
+ }
+ }
+}
+
+void AudioFlinger::MixerThread::updateHalSupportedLatencyModes_l() {
+
+ if (mOutput == nullptr || mOutput->stream == nullptr) {
+ return;
+ }
+ std::vector<audio_latency_mode_t> latencyModes;
+ const status_t status = mOutput->stream->getRecommendedLatencyModes(&latencyModes);
+ if (status != NO_ERROR) {
+ latencyModes.clear();
+ }
+ if (latencyModes != mSupportedLatencyModes) {
+ ALOGD("%s: thread(%d) status %d supported latency modes: %s",
+ __func__, mId, status, toString(latencyModes).c_str());
+ mSupportedLatencyModes.swap(latencyModes);
+ sendHalLatencyModesChangedEvent_l();
+ }
+}
+
+status_t AudioFlinger::MixerThread::getSupportedLatencyModes(
+ std::vector<audio_latency_mode_t>* modes) {
+ if (modes == nullptr) {
+ return BAD_VALUE;
+ }
+ Mutex::Autolock _l(mLock);
+ *modes = mSupportedLatencyModes;
+ return NO_ERROR;
+}
+
+void AudioFlinger::MixerThread::onRecommendedLatencyModeChanged(
+ std::vector<audio_latency_mode_t> modes) {
+ Mutex::Autolock _l(mLock);
+ if (modes != mSupportedLatencyModes) {
+ ALOGD("%s: thread(%d) supported latency modes: %s",
+ __func__, mId, toString(modes).c_str());
+ mSupportedLatencyModes.swap(modes);
+ sendHalLatencyModesChangedEvent_l();
+ }
+}
+
+status_t AudioFlinger::MixerThread::setBluetoothVariableLatencyEnabled(bool enabled) {
+ if (mOutput == nullptr || mOutput->audioHwDev == nullptr
+ || !mOutput->audioHwDev->supportsBluetoothVariableLatency()) {
+ return INVALID_OPERATION;
+ }
+ mBluetoothLatencyModesEnabled.store(enabled);
+ return NO_ERROR;
+}
+
// ----------------------------------------------------------------------------
AudioFlinger::DirectOutputThread::DirectOutputThread(const sp<AudioFlinger>& audioFlinger,
@@ -6180,18 +6467,21 @@
track->getVolumeHandler()->getVolume(volumeShaperFrames);
mVolumeShaperActive = shaperActive;
+ gain_minifloat_packed_t vlr = proxy->getVolumeLR();
+ left = float_from_gain(gain_minifloat_unpack_left(vlr));
+ right = float_from_gain(gain_minifloat_unpack_right(vlr));
+
+ const bool clientVolumeMute = (left == 0.f && right == 0.f);
+
if (mMasterMute || mStreamTypes[track->streamType()].mute || track->isPlaybackRestricted()) {
left = right = 0;
} else {
float typeVolume = mStreamTypes[track->streamType()].volume;
const float v = mMasterVolume * typeVolume * shaperVolume;
- gain_minifloat_packed_t vlr = proxy->getVolumeLR();
- left = float_from_gain(gain_minifloat_unpack_left(vlr));
if (left > GAIN_FLOAT_UNITY) {
left = GAIN_FLOAT_UNITY;
}
- right = float_from_gain(gain_minifloat_unpack_right(vlr));
if (right > GAIN_FLOAT_UNITY) {
right = GAIN_FLOAT_UNITY;
}
@@ -6204,8 +6494,16 @@
}
}
+ track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+ /*muteState=*/{mMasterMute,
+ mStreamTypes[track->streamType()].volume == 0.f,
+ mStreamTypes[track->streamType()].mute,
+ track->isPlaybackRestricted(),
+ clientVolumeMute,
+ shaperVolume == 0.f});
+
if (lastTrack) {
- track->setFinalVolume((left + right) / 2.f);
+ track->setFinalVolume(left, right);
if (left != mLeftVolFloat || right != mRightVolFloat) {
mLeftVolFloat = left;
mRightVolFloat = right;
@@ -6582,7 +6880,7 @@
if (!mStandby) {
mThreadMetrics.logEndInterval();
mThreadSnapshot.onEnd();
- mStandby = true;
+ setStandby_l();
}
mBytesWritten = 0;
status = mOutput->stream->setParameters(keyValuePair);
@@ -7095,6 +7393,13 @@
}
}
+void AudioFlinger::OffloadThread::invalidateTracks(std::set<audio_port_handle_t>& portIds) {
+ Mutex::Autolock _l(mLock);
+ if (PlaybackThread::invalidateTracks_l(portIds)) {
+ mFlushPending = true;
+ }
+}
+
// ----------------------------------------------------------------------------
AudioFlinger::DuplicatingThread::DuplicatingThread(const sp<AudioFlinger>& audioFlinger,
@@ -7344,13 +7649,7 @@
}
void AudioFlinger::SpatializerThread::onFirstRef() {
- PlaybackThread::onFirstRef();
-
- Mutex::Autolock _l(mLock);
- status_t status = mOutput->stream->setLatencyModeCallback(this);
- if (status != INVALID_OPERATION) {
- updateHalSupportedLatencyModes_l();
- }
+ MixerThread::onFirstRef();
const pid_t tid = getTid();
if (tid == -1) {
@@ -7364,32 +7663,6 @@
}
}
-status_t AudioFlinger::SpatializerThread::createAudioPatch_l(const struct audio_patch *patch,
- audio_patch_handle_t *handle)
-{
- status_t status = MixerThread::createAudioPatch_l(patch, handle);
- updateHalSupportedLatencyModes_l();
- return status;
-}
-
-void AudioFlinger::SpatializerThread::updateHalSupportedLatencyModes_l() {
- std::vector<audio_latency_mode_t> latencyModes;
- const status_t status = mOutput->stream->getRecommendedLatencyModes(&latencyModes);
- if (status != NO_ERROR) {
- latencyModes.clear();
- }
- if (latencyModes != mSupportedLatencyModes) {
- ALOGD("%s: thread(%d) status %d supported latency modes: %s",
- __func__, mId, status, toString(latencyModes).c_str());
- mSupportedLatencyModes.swap(latencyModes);
- sendHalLatencyModesChangedEvent_l();
- }
-}
-
-void AudioFlinger::SpatializerThread::onHalLatencyModesChanged_l() {
- mAudioFlinger->onSupportedLatencyModesChanged(mId, mSupportedLatencyModes);
-}
-
void AudioFlinger::SpatializerThread::setHalLatencyMode_l() {
// if mSupportedLatencyModes is empty, the HAL stream does not support
// latency mode control and we can exit.
@@ -7437,25 +7710,6 @@
return NO_ERROR;
}
-status_t AudioFlinger::SpatializerThread::getSupportedLatencyModes(
- std::vector<audio_latency_mode_t>* modes) {
- if (modes == nullptr) {
- return BAD_VALUE;
- }
- Mutex::Autolock _l(mLock);
- *modes = mSupportedLatencyModes;
- return NO_ERROR;
-}
-
-status_t AudioFlinger::PlaybackThread::setBluetoothVariableLatencyEnabled(bool enabled) {
- if (mOutput == nullptr || mOutput->audioHwDev == nullptr
- || !mOutput->audioHwDev->supportsBluetoothVariableLatency()) {
- return INVALID_OPERATION;
- }
- mBluetoothLatencyModesEnabled.store(enabled);
- return NO_ERROR;
-}
-
void AudioFlinger::SpatializerThread::checkOutputStageEffects()
{
bool hasVirtualizer = false;
@@ -7508,17 +7762,6 @@
}
}
-void AudioFlinger::SpatializerThread::onRecommendedLatencyModeChanged(
- std::vector<audio_latency_mode_t> modes) {
- Mutex::Autolock _l(mLock);
- if (modes != mSupportedLatencyModes) {
- ALOGD("%s: thread(%d) supported latency modes: %s",
- __func__, mId, toString(modes).c_str());
- mSupportedLatencyModes.swap(modes);
- sendHalLatencyModesChangedEvent_l();
- }
-}
-
// ----------------------------------------------------------------------------
// Record
// ----------------------------------------------------------------------------
@@ -7622,15 +7865,15 @@
Pipe *pipe = new Pipe(pipeFramesP2, format, pipeBuffer);
const NBAIO_Format offersFast[1] = {format};
size_t numCounterOffersFast = 0;
- [[maybe_unused]] ssize_t index = pipe->negotiate(offersFast, std::size(offersFast),
+ [[maybe_unused]] ssize_t index2 = pipe->negotiate(offersFast, std::size(offersFast),
nullptr /* counterOffers */, numCounterOffersFast);
- ALOG_ASSERT(index == 0);
+ ALOG_ASSERT(index2 == 0);
mPipeSink = pipe;
PipeReader *pipeReader = new PipeReader(*pipe);
numCounterOffersFast = 0;
- index = pipeReader->negotiate(offersFast, std::size(offersFast),
+ index2 = pipeReader->negotiate(offersFast, std::size(offersFast),
nullptr /* counterOffers */, numCounterOffersFast);
- ALOG_ASSERT(index == 0);
+ ALOG_ASSERT(index2 == 0);
mPipeSource = pipeReader;
mPipeFramesP2 = pipeFramesP2;
mPipeMemory = pipeMemory;
@@ -8788,10 +9031,10 @@
mSharedAudioPackageName = "";
}
-void AudioFlinger::RecordThread::updateMetadata_l()
+AudioFlinger::ThreadBase::MetadataUpdate AudioFlinger::RecordThread::updateMetadata_l()
{
if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
- return; // nothing to do
+ return {}; // nothing to do
}
StreamInHalInterface::SinkMetadata metadata;
auto backInserter = std::back_inserter(metadata.tracks);
@@ -8799,6 +9042,9 @@
track->copyMetadataTo(backInserter);
}
mInput->stream->updateSinkMetadata(metadata);
+ MetadataUpdate change;
+ change.recordMetadataUpdate = metadata.tracks;
+ return change;
}
// destroyTrack_l() must be called with ThreadBase::mLock held
@@ -9017,7 +9263,7 @@
if (stepCount == 0) {
return;
}
- ALOG_ASSERT(stepCount <= mRsmpInUnrel);
+ ALOG_ASSERT(stepCount <= (int32_t)mRsmpInUnrel);
mRsmpInUnrel -= stepCount;
mRsmpInFront = audio_utils::safe_add_overflow(mRsmpInFront, stepCount);
buffer->raw = NULL;
@@ -9332,6 +9578,9 @@
track->logEndInterval();
track->logBeginInterval(pathSourcesAsString);
}
+ // Force metadata update after a route change
+ mActiveTracks.setHasChanged();
+
return status;
}
@@ -9348,6 +9597,9 @@
} else {
status = mInput->stream->legacyReleaseAudioPatch();
}
+ // Force metadata update after a route change
+ mActiveTracks.setHasChanged();
+
return status;
}
@@ -9566,6 +9818,10 @@
return mThread->standby();
}
+status_t AudioFlinger::MmapThreadHandle::reportData(const void* buffer, size_t frameCount) {
+ return mThread->reportData(buffer, frameCount);
+}
+
AudioFlinger::MmapThread::MmapThread(
const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id,
@@ -9644,8 +9900,10 @@
return mHalStream->getMmapPosition(position);
}
-status_t AudioFlinger::MmapThread::exitStandby()
+status_t AudioFlinger::MmapThread::exitStandby_l()
{
+ // The HAL must receive track metadata before starting the stream
+ updateMetadata_l();
status_t ret = mHalStream->start();
if (ret != NO_ERROR) {
ALOGE("%s: error mHalStream->start() = %d for first track", __FUNCTION__, ret);
@@ -9671,18 +9929,18 @@
status_t ret;
+ // For the first track, reuse portId and session allocated when the stream was opened.
if (*handle == mPortId) {
- // For the first track, reuse portId and session allocated when the stream was opened.
- ret = exitStandby();
- if (ret == NO_ERROR) {
- acquireWakeLock();
- }
- return ret;
+ acquireWakeLock();
+ return NO_ERROR;
}
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
audio_io_handle_t io = mId;
+ AttributionSourceState adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+ client.attributionSource);
+
if (isOutput()) {
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
config.sample_rate = mSampleRate;
@@ -9694,16 +9952,18 @@
audio_port_handle_t deviceId = mDeviceId;
std::vector<audio_io_handle_t> secondaryOutputs;
bool isSpatialized;
+ bool isBitPerfect;
ret = AudioSystem::getOutputForAttr(&mAttr, &io,
mSessionId,
&stream,
- client.attributionSource,
+ adjAttributionSource,
&config,
flags,
&deviceId,
&portId,
&secondaryOutputs,
- &isSpatialized);
+ &isSpatialized,
+ &isBitPerfect);
ALOGD_IF(!secondaryOutputs.empty(),
"MmapThread::start does not support secondary outputs, ignoring them");
} else {
@@ -9715,7 +9975,7 @@
ret = AudioSystem::getInputForAttr(&mAttr, &io,
RECORD_RIID_INVALID,
mSessionId,
- client.attributionSource,
+ adjAttributionSource,
&config,
AUDIO_INPUT_FLAG_MMAP_NOIRQ,
&deviceId,
@@ -9781,7 +10041,6 @@
}
}
-
mActiveTracks.add(track);
sp<EffectChain> chain = getEffectChain_l(mSessionId);
if (chain != 0) {
@@ -9792,11 +10051,16 @@
track->logBeginInterval(patchSinksToString(&mPatch)); // log to MediaMetrics
*handle = portId;
+
+ if (mActiveTracks.size() == 1) {
+ ret = exitStandby_l();
+ }
+
broadcast_l();
- ALOGV("%s DONE handle %d stream %p", __FUNCTION__, *handle, mHalStream.get());
+ ALOGV("%s DONE status %d handle %d stream %p", __FUNCTION__, ret, *handle, mHalStream.get());
- return NO_ERROR;
+ return ret;
}
status_t AudioFlinger::MmapThread::stop(audio_port_handle_t handle)
@@ -9808,7 +10072,6 @@
}
if (handle == mPortId) {
- mHalStream->stop();
releaseWakeLock();
return NO_ERROR;
}
@@ -9845,6 +10108,10 @@
chain->decTrackCnt();
}
+ if (mActiveTracks.isEmpty()) {
+ mHalStream->stop();
+ }
+
broadcast_l();
return NO_ERROR;
@@ -9870,6 +10137,10 @@
return NO_ERROR;
}
+status_t AudioFlinger::MmapThread::reportData(const void* /*buffer*/, size_t /*frameCount*/) {
+ // This is a stub implementation. The MmapPlaybackThread overrides this function.
+ return INVALID_OPERATION;
+}
void AudioFlinger::MmapThread::readHalParameters_l()
{
@@ -10116,6 +10387,9 @@
mPatch = *patch;
mDeviceId = deviceId;
}
+ // Force metadata update after a route change
+ mActiveTracks.setHasChanged();
+
return status;
}
@@ -10135,6 +10409,9 @@
} else {
status = mHalStream->legacyReleaseAudioPatch();
}
+ // Force metadata update after a route change
+ mActiveTracks.setHasChanged();
+
return status;
}
@@ -10311,24 +10588,14 @@
AudioHwDevice *hwDev, AudioStreamOut *output, bool systemReady)
: MmapThread(audioFlinger, id, hwDev, output->stream, systemReady, true /* isOut */),
mStreamType(AUDIO_STREAM_MUSIC),
+ mStreamVolume(1.0),
+ mStreamMute(false),
mOutput(output)
{
snprintf(mThreadName, kThreadNameLength, "AudioMmapOut_%X", id);
mChannelCount = audio_channel_count_from_out_mask(mChannelMask);
mMasterVolume = audioFlinger->masterVolume_l();
mMasterMute = audioFlinger->masterMute_l();
-
- for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
- const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
- mStreamTypes[stream].volume = 0.0f;
- mStreamTypes[stream].mute = mAudioFlinger->streamMute_l(stream);
- }
- // Audio patch and call assistant volume are always max
- mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
- mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
- mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
- mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
-
if (mAudioHwDev) {
if (mAudioHwDev->canSetMasterVolume()) {
mMasterVolume = 1.0;
@@ -10385,8 +10652,8 @@
void AudioFlinger::MmapPlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
{
Mutex::Autolock _l(mLock);
- mStreamTypes[stream].volume = value;
if (stream == mStreamType) {
+ mStreamVolume = value;
broadcast_l();
}
}
@@ -10394,14 +10661,17 @@
float AudioFlinger::MmapPlaybackThread::streamVolume(audio_stream_type_t stream) const
{
Mutex::Autolock _l(mLock);
- return mStreamTypes[stream].volume;
+ if (stream == mStreamType) {
+ return mStreamVolume;
+ }
+ return 0.0f;
}
void AudioFlinger::MmapPlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted)
{
Mutex::Autolock _l(mLock);
- mStreamTypes[stream].mute = muted;
if (stream == mStreamType) {
+ mStreamMute= muted;
broadcast_l();
}
}
@@ -10417,18 +10687,38 @@
}
}
+void AudioFlinger::MmapPlaybackThread::invalidateTracks(std::set<audio_port_handle_t>& portIds)
+{
+ Mutex::Autolock _l(mLock);
+ bool trackMatch = false;
+ for (const sp<MmapTrack> &track : mActiveTracks) {
+ if (portIds.find(track->portId()) != portIds.end()) {
+ track->invalidate();
+ trackMatch = true;
+ portIds.erase(track->portId());
+ }
+ if (portIds.empty()) {
+ break;
+ }
+ }
+ if (trackMatch) {
+ broadcast_l();
+ }
+}
+
void AudioFlinger::MmapPlaybackThread::processVolume_l()
NO_THREAD_SAFETY_ANALYSIS // access of track->processMuteEvent_l
{
float volume;
- if (mMasterMute || streamMuted_l()) {
+ if (mMasterMute || mStreamMute) {
volume = 0;
} else {
- volume = mMasterVolume * streamVolume_l();
+ volume = mMasterVolume * mStreamVolume;
}
if (volume != mHalVolFloat) {
+
// Convert volumes from float to 8.24
uint32_t vol = (uint32_t)(volume * (1 << 24));
@@ -10446,20 +10736,10 @@
} else {
sp<MmapStreamCallback> callback = mCallback.promote();
if (callback != 0) {
- int channelCount;
- if (isOutput()) {
- channelCount = audio_channel_count_from_out_mask(mChannelMask);
- } else {
- channelCount = audio_channel_count_from_in_mask(mChannelMask);
- }
- Vector<float> values;
- for (int i = 0; i < channelCount; i++) {
- values.add(volume);
- }
mHalVolFloat = volume; // SW volume control worked, so update value.
mNoCallbackWarningCount = 0;
mLock.unlock();
- callback->onVolumeChanged(mChannelMask, values);
+ callback->onVolumeChanged(volume);
mLock.lock();
} else {
if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
@@ -10470,14 +10750,22 @@
}
for (const sp<MmapTrack> &track : mActiveTracks) {
track->setMetadataHasChanged();
+ track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
+ /*muteState=*/{mMasterMute,
+ mStreamVolume == 0.f,
+ mStreamMute,
+ // TODO(b/241533526): adjust logic to include mute from AppOps
+ false /*muteFromPlaybackRestricted*/,
+ false /*muteFromClientVolume*/,
+ false /*muteFromVolumeShaper*/});
}
}
}
-void AudioFlinger::MmapPlaybackThread::updateMetadata_l()
+AudioFlinger::ThreadBase::MetadataUpdate AudioFlinger::MmapPlaybackThread::updateMetadata_l()
{
if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
- return; // nothing to do
+ return {}; // nothing to do
}
StreamOutHalInterface::SourceMetadata metadata;
for (const sp<MmapTrack> &track : mActiveTracks) {
@@ -10493,7 +10781,11 @@
metadata.tracks.push_back(trackMetadata);
}
mOutput->stream->updateSourceMetadata(metadata);
-}
+
+ MetadataUpdate change;
+ change.playbackMetadataUpdate = metadata.tracks;
+ return change;
+};
void AudioFlinger::MmapPlaybackThread::checkSilentMode_l()
{
@@ -10535,12 +10827,46 @@
return status;
}
+status_t AudioFlinger::MmapPlaybackThread::reportData(const void* buffer, size_t frameCount) {
+ // Send to MelProcessor for sound dose measurement.
+ auto processor = mMelProcessor.load();
+ if (processor) {
+ processor->process(buffer, frameCount * mFrameSize);
+ }
+
+ return NO_ERROR;
+}
+
+// startMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::MmapPlaybackThread::startMelComputation_l(
+ const sp<audio_utils::MelProcessor>& processor)
+{
+ ALOGV("%s: starting mel processor for thread %d", __func__, id());
+ mMelProcessor.store(processor);
+ if (processor) {
+ processor->resume();
+ }
+
+ // no need to update output format for MMapPlaybackThread since it is
+ // assigned constant for each thread
+}
+
+// stopMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::MmapPlaybackThread::stopMelComputation_l()
+{
+ ALOGV("%s: pausing mel processor for thread %d", __func__, id());
+ auto melProcessor = mMelProcessor.load();
+ if (melProcessor != nullptr) {
+ melProcessor->pause();
+ }
+}
+
void AudioFlinger::MmapPlaybackThread::dumpInternals_l(int fd, const Vector<String16>& args)
{
MmapThread::dumpInternals_l(fd, args);
dprintf(fd, " Stream type: %d Stream volume: %f HAL volume: %f Stream mute %d\n",
- mStreamType, streamVolume_l(), mHalVolFloat, streamMuted_l());
+ mStreamType, mStreamVolume, mHalVolFloat, mStreamMute);
dprintf(fd, " Master volume: %f Master mute %d\n", mMasterVolume, mMasterMute);
}
@@ -10554,16 +10880,15 @@
mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
}
-status_t AudioFlinger::MmapCaptureThread::exitStandby()
+status_t AudioFlinger::MmapCaptureThread::exitStandby_l()
{
{
// mInput might have been cleared by clearInput()
- Mutex::Autolock _l(mLock);
if (mInput != nullptr && mInput->stream != nullptr) {
mInput->stream->setGain(1.0f);
}
}
- return MmapThread::exitStandby();
+ return MmapThread::exitStandby_l();
}
AudioFlinger::AudioStreamIn* AudioFlinger::MmapCaptureThread::clearInput()
@@ -10602,10 +10927,10 @@
}
}
-void AudioFlinger::MmapCaptureThread::updateMetadata_l()
+AudioFlinger::ThreadBase::MetadataUpdate AudioFlinger::MmapCaptureThread::updateMetadata_l()
{
if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
- return; // nothing to do
+ return {}; // nothing to do
}
StreamInHalInterface::SinkMetadata metadata;
for (const sp<MmapTrack> &track : mActiveTracks) {
@@ -10620,6 +10945,9 @@
metadata.tracks.push_back(trackMetadata);
}
mInput->stream->updateSinkMetadata(metadata);
+ MetadataUpdate change;
+ change.recordMetadataUpdate = metadata.tracks;
+ return change;
}
void AudioFlinger::MmapCaptureThread::setRecordSilenced(audio_port_handle_t portId, bool silenced)
@@ -10652,4 +10980,48 @@
return mInput->getCapturePosition((int64_t*)position, timeNanos);
}
+// ----------------------------------------------------------------------------
+
+AudioFlinger::BitPerfectThread::BitPerfectThread(const sp<AudioFlinger> &audioflinger,
+ AudioStreamOut *output, audio_io_handle_t id, bool systemReady)
+ : MixerThread(audioflinger, output, id, systemReady, BIT_PERFECT) {}
+
+AudioFlinger::PlaybackThread::mixer_state AudioFlinger::BitPerfectThread::prepareTracks_l(
+ Vector<sp<Track>> *tracksToRemove) {
+ mixer_state result = MixerThread::prepareTracks_l(tracksToRemove);
+ // If there is only one active track and it is bit-perfect, enable tee buffer.
+ float volumeLeft = 1.0f;
+ float volumeRight = 1.0f;
+ if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
+ const int trackId = mActiveTracks[0]->id();
+ mAudioMixer->setParameter(
+ trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, (void *)mSinkBuffer);
+ mAudioMixer->setParameter(
+ trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER_FRAME_COUNT,
+ (void *)(uintptr_t)mNormalFrameCount);
+ mActiveTracks[0]->getFinalVolume(&volumeLeft, &volumeRight);
+ mIsBitPerfect = true;
+ } else {
+ mIsBitPerfect = false;
+ // No need to copy bit-perfect data directly to sink buffer given there are multiple tracks
+ // active.
+ for (const auto& track : mActiveTracks) {
+ const int trackId = track->id();
+ mAudioMixer->setParameter(
+ trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, nullptr);
+ }
+ }
+ if (mVolumeLeft != volumeLeft || mVolumeRight != volumeRight) {
+ mVolumeLeft = volumeLeft;
+ mVolumeRight = volumeRight;
+ setVolumeForOutput_l(volumeLeft, volumeRight);
+ }
+ return result;
+}
+
+void AudioFlinger::BitPerfectThread::threadLoop_mix() {
+ MixerThread::threadLoop_mix();
+ mHasDataCopiedToSinkBuffer = mIsBitPerfect;
+}
+
} // namespace android
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 1d00a34..6795a13 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -33,6 +33,7 @@
MMAP_PLAYBACK, // Thread class for MMAP playback stream
MMAP_CAPTURE, // Thread class for MMAP capture stream
SPATIALIZER, //
+ BIT_PERFECT, // Thread class for BitPerfectThread
// If you add any values here, also update ThreadBase::threadTypeToString()
};
@@ -375,8 +376,6 @@
virtual void toAudioPortConfig(struct audio_port_config *config) = 0;
virtual void resizeInputBuffer_l(int32_t maxSharedAudioHistoryMs);
- virtual void onHalLatencyModesChanged_l() {}
-
// see note at declaration of mStandby, mOutDevice and mInDevice
bool standby() const { return mStandby; }
@@ -430,8 +429,10 @@
// track
FAST_SESSION = 0x4, // the audio session corresponds to at least one
// fast track
- SPATIALIZED_SESSION = 0x8 // the audio session corresponds to at least one
- // spatialized track
+ SPATIALIZED_SESSION = 0x8, // the audio session corresponds to at least one
+ // spatialized track
+ BIT_PERFECT_SESSION = 0x10 // the audio session corresponds to at least one
+ // bit-perfect track
};
// get effect chain corresponding to session Id.
@@ -497,6 +498,9 @@
if (track->isSpatialized()) {
result |= SPATIALIZED_SESSION; // caution, only first track.
}
+ if (track->isBitPerfect()) {
+ result |= BIT_PERFECT_SESSION;
+ }
break;
}
}
@@ -572,6 +576,9 @@
virtual bool isStreamInitialized() = 0;
+ virtual void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor);
+ virtual void stopMelComputation_l();
+
protected:
// entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -603,7 +610,11 @@
void checkSuspendOnAddEffectChain_l(const sp<EffectChain>& chain);
// sends the metadata of the active tracks to the HAL
- virtual void updateMetadata_l() = 0;
+ struct MetadataUpdate {
+ std::vector<playback_track_metadata_v7_t> playbackMetadataUpdate;
+ std::vector<record_track_metadata_v7_t> recordMetadataUpdate;
+ };
+ virtual MetadataUpdate updateMetadata_l() = 0;
String16 getWakeLockTag();
@@ -619,12 +630,14 @@
product_strategy_t getStrategyForStream(audio_stream_type_t stream) const;
+ virtual void onHalLatencyModesChanged_l() {}
+
virtual void dumpInternals_l(int fd __unused, const Vector<String16>& args __unused)
{ }
virtual void dumpTracks_l(int fd __unused, const Vector<String16>& args __unused) { }
- friend class AudioFlinger; // for mEffectChains
+ friend class AudioFlinger; // for mEffectChains and mAudioManager
const type_t mType;
@@ -797,6 +810,11 @@
*/
bool readAndClearHasChanged();
+ /** Force updating track metadata to audio HAL stream next time
+ * readAndClearHasChanged() is called.
+ */
+ void setHasChanged() { mHasChanged = true; }
+
private:
void logTrack(const char *funcName, const sp<T> &track) const;
@@ -977,7 +995,8 @@
status_t *status /*non-NULL*/,
audio_port_handle_t portId,
const sp<media::IAudioTrackCallback>& callback,
- bool isSpatialized);
+ bool isSpatialized,
+ bool isBitPerfect);
AudioStreamOut* getOutput() const;
AudioStreamOut* clearOutput();
@@ -1024,7 +1043,11 @@
// called with AudioFlinger lock held
bool invalidateTracks_l(audio_stream_type_t streamType);
+ bool invalidateTracks_l(std::set<audio_port_handle_t>& portIds);
virtual void invalidateTracks(audio_stream_type_t streamType);
+ // Invalidate tracks by a set of port ids. The port id will be removed from
+ // the given set if the corresponding track is found and invalidated.
+ virtual void invalidateTracks(std::set<audio_port_handle_t>& portIds);
virtual size_t frameCount() const { return mNormalFrameCount; }
@@ -1086,8 +1109,39 @@
return INVALID_OPERATION;
}
- virtual status_t setBluetoothVariableLatencyEnabled(bool enabled);
+ virtual status_t setBluetoothVariableLatencyEnabled(bool enabled __unused) {
+ return INVALID_OPERATION;
+ }
+ void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override;
+ void stopMelComputation_l() override;
+
+ void setStandby() {
+ Mutex::Autolock _l(mLock);
+ setStandby_l();
+ }
+
+ void setStandby_l() {
+ mStandby = true;
+ mHalStarted = false;
+ mKernelPositionOnStandby =
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+ }
+
+ bool waitForHalStart() {
+ Mutex::Autolock _l(mLock);
+ static const nsecs_t kWaitHalTimeoutNs = seconds(2);
+ nsecs_t endWaitTimetNs = systemTime() + kWaitHalTimeoutNs;
+ while (!mHalStarted) {
+ nsecs_t timeNs = systemTime();
+ if (timeNs >= endWaitTimetNs) {
+ break;
+ }
+ nsecs_t waitTimeLeftNs = endWaitTimetNs - timeNs;
+ mWaitHalStartCV.waitRelative(mLock, waitTimeLeftNs);
+ }
+ return mHalStarted;
+ }
protected:
// updated by readOutputParameters_l()
size_t mNormalFrameCount; // normal mixer and effects
@@ -1157,6 +1211,9 @@
// for any processing (including output processing).
bool mEffectBufferValid;
+ // Set to "true" to enable when data has already copied to sink
+ bool mHasDataCopiedToSinkBuffer = false;
+
// Frame size aligned buffer used as input and output to all post processing effects
// except the Spatializer in a SPATIALIZER thread. Non spatialized tracks are mixed into
// this buffer so that post processing effects can be applied.
@@ -1251,7 +1308,7 @@
void removeTrack_l(const sp<Track>& track);
void readOutputParameters_l();
- void updateMetadata_l() final;
+ MetadataUpdate updateMetadata_l() final;
virtual void sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata);
void collectTimestamps_l();
@@ -1384,6 +1441,14 @@
// Downstream patch latency, available if mDownstreamLatencyStatMs.getN() > 0.
audio_utils::Statistics<double> mDownstreamLatencyStatMs{0.999};
+ // output stream start detection based on render position returned by the kernel
+ // condition signalled when the output stream has started
+ Condition mWaitHalStartCV;
+ // true when the output stream render position has moved, reset to false in standby
+ bool mHalStarted = false;
+ // last kernel render position saved when entering standby
+ int64_t mKernelPositionOnStandby = 0;
+
public:
virtual bool hasFastMixer() const = 0;
virtual FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex __unused) const
@@ -1438,12 +1503,10 @@
virtual void flushHw_l() {
mIsTimestampAdvancing.clear();
}
-
- // Bluetooth Variable latency control logic is enabled or disabled for this thread
- std::atomic_bool mBluetoothLatencyModesEnabled;
};
-class MixerThread : public PlaybackThread {
+class MixerThread : public PlaybackThread,
+ public StreamOutHalInterfaceLatencyModeCallback {
public:
MixerThread(const sp<AudioFlinger>& audioFlinger,
AudioStreamOut* output,
@@ -1453,6 +1516,13 @@
audio_config_base_t *mixerConfig = nullptr);
virtual ~MixerThread();
+ // RefBase
+ virtual void onFirstRef();
+
+ // StreamOutHalInterfaceLatencyModeCallback
+ void onRecommendedLatencyModeChanged(
+ std::vector<audio_latency_mode_t> modes) override;
+
// Thread virtuals
virtual bool checkForNewParameter_l(const String8& keyValuePair,
@@ -1489,6 +1559,17 @@
virtual status_t releaseAudioPatch_l(const audio_patch_handle_t handle);
AudioMixer* mAudioMixer; // normal mixer
+
+ // Support low latency mode by default as unless explicitly indicated by the audio HAL
+ // we assume the audio path is compatible with the head tracking latency requirements
+ std::vector<audio_latency_mode_t> mSupportedLatencyModes = {AUDIO_LATENCY_MODE_LOW};
+ // default to invalid value to force first update to the audio HAL
+ audio_latency_mode_t mSetLatencyMode =
+ (audio_latency_mode_t)AUDIO_LATENCY_MODE_INVALID;
+
+ // Bluetooth Variable latency control logic is enabled or disabled for this thread
+ std::atomic_bool mBluetoothLatencyModesEnabled;
+
private:
// one-time initialization, no locks required
sp<FastMixer> mFastMixer; // non-0 if there is also a fast mixer
@@ -1522,6 +1603,11 @@
return INVALID_OPERATION;
}
+ status_t getSupportedLatencyModes(
+ std::vector<audio_latency_mode_t>* modes) override;
+
+ status_t setBluetoothVariableLatencyEnabled(bool enabled) override;
+
protected:
virtual void setMasterMono_l(bool mono) {
mMasterMono.store(mono);
@@ -1540,6 +1626,10 @@
mFastMixer->setMasterBalance(balance);
}
}
+
+ void updateHalSupportedLatencyModes_l();
+ void onHalLatencyModesChanged_l() override;
+ void setHalLatencyMode_l() override;
};
class DirectOutputThread : public PlaybackThread {
@@ -1640,6 +1730,7 @@
virtual bool waitingAsyncCallback();
virtual bool waitingAsyncCallback_l();
virtual void invalidateTracks(audio_stream_type_t streamType);
+ void invalidateTracks(std::set<audio_port_handle_t>& portIds) override;
virtual bool keepWakeLock() const { return (mKeepWakeLock || (mDrainSequence & 1)); }
@@ -1742,8 +1833,7 @@
}
};
-class SpatializerThread : public MixerThread,
- public StreamOutHalInterfaceLatencyModeCallback {
+class SpatializerThread : public MixerThread {
public:
SpatializerThread(const sp<AudioFlinger>& audioFlinger,
AudioStreamOut* output,
@@ -1754,32 +1844,16 @@
bool hasFastMixer() const override { return false; }
- status_t createAudioPatch_l(const struct audio_patch *patch,
- audio_patch_handle_t *handle) override;
-
// RefBase
virtual void onFirstRef();
- // StreamOutHalInterfaceLatencyModeCallback
- void onRecommendedLatencyModeChanged(std::vector<audio_latency_mode_t> modes) override;
-
status_t setRequestedLatencyMode(audio_latency_mode_t mode) override;
- status_t getSupportedLatencyModes(std::vector<audio_latency_mode_t>* modes) override;
protected:
void checkOutputStageEffects() override;
- void onHalLatencyModesChanged_l() override;
void setHalLatencyMode_l() override;
private:
- void updateHalSupportedLatencyModes_l();
-
- // Support low latency mode by default as unless explicitly indicated by the audio HAL
- // we assume the audio path is compatible with the head tracking latency requirements
- std::vector<audio_latency_mode_t> mSupportedLatencyModes = {AUDIO_LATENCY_MODE_LOW};
- // default to invalid value to force first update to the audio HAL
- audio_latency_mode_t mSetLatencyMode =
- (audio_latency_mode_t)AUDIO_LATENCY_MODE_INVALID;
// Do not request a specific mode by default
audio_latency_mode_t mRequestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
@@ -1949,7 +2023,7 @@
status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
status_t setPreferredMicrophoneFieldDimension(float zoom);
- void updateMetadata_l() override;
+ MetadataUpdate updateMetadata_l() override;
bool fastTrackAvailable() const { return mFastTrackAvail; }
@@ -2091,6 +2165,7 @@
status_t stop(audio_port_handle_t handle);
status_t standby();
virtual status_t getExternalPosition(uint64_t *position, int64_t *timeNaos) = 0;
+ virtual status_t reportData(const void* buffer, size_t frameCount);
// RefBase
virtual void onFirstRef();
@@ -2101,7 +2176,7 @@
virtual void threadLoop_exit();
virtual void threadLoop_standby();
virtual bool shouldStandby_l() { return false; }
- virtual status_t exitStandby();
+ virtual status_t exitStandby_l() REQUIRES(mLock);
virtual status_t initCheck() const { return (mHalStream == 0) ? NO_INIT : NO_ERROR; }
virtual size_t frameCount() const { return mFrameCount; }
@@ -2137,6 +2212,7 @@
virtual audio_stream_type_t streamType() { return AUDIO_STREAM_DEFAULT; }
virtual void invalidateTracks(audio_stream_type_t streamType __unused) {}
+ virtual void invalidateTracks(std::set<audio_port_handle_t>& portIds __unused) {}
// Sets the UID records silence
virtual void setRecordSilenced(audio_port_handle_t portId __unused,
@@ -2216,12 +2292,13 @@
void setMasterMute_l(bool muted) { mMasterMute = muted; }
virtual void invalidateTracks(audio_stream_type_t streamType);
+ void invalidateTracks(std::set<audio_port_handle_t>& portIds) override;
virtual audio_stream_type_t streamType() { return mStreamType; }
virtual void checkSilentMode_l();
void processVolume_l() override;
- void updateMetadata_l() override;
+ MetadataUpdate updateMetadata_l() override;
virtual void toAudioPortConfig(struct audio_port_config *config);
@@ -2231,20 +2308,22 @@
return !(mOutput == nullptr || mOutput->stream == nullptr);
}
+ status_t reportData(const void* buffer, size_t frameCount) override;
+
+ void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override;
+ void stopMelComputation_l() override;
+
protected:
void dumpInternals_l(int fd, const Vector<String16>& args) override;
- float streamVolume_l() const {
- return mStreamTypes[mStreamType].volume;
- }
- bool streamMuted_l() const {
- return mStreamTypes[mStreamType].mute;
- }
- stream_type_t mStreamTypes[AUDIO_STREAM_CNT];
audio_stream_type_t mStreamType;
float mMasterVolume;
+ float mStreamVolume;
bool mMasterMute;
+ bool mStreamMute;
AudioStreamOut* mOutput;
+
+ mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
};
class MmapCaptureThread : public MmapThread
@@ -2257,9 +2336,9 @@
AudioStreamIn* clearInput();
- status_t exitStandby() override;
+ status_t exitStandby_l() REQUIRES(mLock) override;
- void updateMetadata_l() override;
+ MetadataUpdate updateMetadata_l() override;
void processVolume_l() override;
void setRecordSilenced(audio_port_handle_t portId,
bool silenced) override;
@@ -2276,3 +2355,18 @@
AudioStreamIn* mInput;
};
+
+class BitPerfectThread : public MixerThread {
+public:
+ BitPerfectThread(const sp<AudioFlinger>& audioflinger, AudioStreamOut *output,
+ audio_io_handle_t id, bool systemReady);
+
+protected:
+ mixer_state prepareTracks_l(Vector<sp<Track>> *tracksToRemove) override;
+ void threadLoop_mix() override;
+
+private:
+ bool mIsBitPerfect;
+ float mVolumeLeft = 0.f;
+ float mVolumeRight = 0.f;
+};
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 6c42dc8..d5b6a98 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -109,6 +109,8 @@
virtual bool isSpatialized() const { return false; }
+ virtual bool isBitPerfect() const { return false; }
+
#ifdef TEE_SINK
void dumpTee(int fd, const std::string &reason) const {
mTee.dump(fd, reason);
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 14c45f7..30c8240 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -119,7 +119,7 @@
mThreadIoHandle(thread ? thread->id() : AUDIO_IO_HANDLE_NONE),
mPortId(portId),
mIsInvalid(false),
- mTrackMetrics(std::move(metricsId), isOut),
+ mTrackMetrics(std::move(metricsId), isOut, clientUid),
mCreatorPid(creatorPid)
{
const uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -163,11 +163,12 @@
}
if (client != 0) {
- mCblkMemory = client->heap()->allocate(size);
+ mCblkMemory = client->allocator().allocate(mediautils::NamedAllocRequest{{size},
+ std::string("Track ID: ").append(std::to_string(mId))});
if (mCblkMemory == 0 ||
(mCblk = static_cast<audio_track_cblk_t *>(mCblkMemory->unsecurePointer())) == NULL) {
ALOGE("%s(%d): not enough memory for AudioTrack size=%zu", __func__, mId, size);
- client->heap()->dump("AudioTrack");
+ ALOGE("%s", client->allocator().dump().c_str());
mCblkMemory.clear();
return;
}
@@ -652,7 +653,8 @@
audio_port_handle_t portId,
size_t frameCountToBeReady,
float speed,
- bool isSpatialized)
+ bool isSpatialized,
+ bool isBitPerfect)
: TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
// TODO: Using unsecurePointer() has some associated security pitfalls
// (see declaration for details).
@@ -687,7 +689,8 @@
mFlushHwPending(false),
mFlags(flags),
mSpeed(speed),
- mIsSpatialized(isSpatialized)
+ mIsSpatialized(isSpatialized),
+ mIsBitPerfect(isBitPerfect)
{
// client == 0 implies sharedBuffer == 0
ALOG_ASSERT(!(client == 0 && sharedBuffer != 0));
@@ -809,7 +812,7 @@
" Format Chn mask SRate "
"ST Usg CT "
" G db L dB R dB VS dB "
- " Server FrmCnt FrmRdy F Underruns Flushed"
+ " Server FrmCnt FrmRdy F Underruns Flushed BitPerfect"
"%s\n",
isServerLatencySupported() ? " Latency" : "");
}
@@ -895,7 +898,7 @@
"%08X %08X %6u "
"%2u %3x %2x "
"%5.2g %5.2g %5.2g %5.2g%c "
- "%08X %6zu%c %6zu %c %9u%c %7u",
+ "%08X %6zu%c %6zu %c %9u%c %7u %10s",
active ? "yes" : "no",
(mClient == 0) ? getpid() : mClient->pid(),
mSessionId,
@@ -924,7 +927,8 @@
fillingStatus,
mAudioTrackServerProxy->getUnderrunFrames(),
nowInUnderrun,
- (unsigned)mAudioTrackServerProxy->framesFlushed() % 10000000
+ (unsigned)mAudioTrackServerProxy->framesFlushed() % 10000000,
+ isBitPerfect() ? "true" : "false"
);
if (isServerLatencySupported()) {
@@ -1135,10 +1139,10 @@
mObservedUnderruns = playbackThread->getFastTrackUnderruns(mFastIndex);
}
status = playbackThread->addTrack_l(this);
- if (status == INVALID_OPERATION || status == PERMISSION_DENIED) {
+ if (status == INVALID_OPERATION || status == PERMISSION_DENIED || status == DEAD_OBJECT) {
triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE);
// restore previous state if start was rejected by policy manager
- if (status == PERMISSION_DENIED) {
+ if (status == PERMISSION_DENIED || status == DEAD_OBJECT) {
mState = state;
}
}
@@ -1177,6 +1181,23 @@
}
if (status == NO_ERROR) {
forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+
+ // send format to AudioManager for playback activity monitoring
+ sp<IAudioManager> audioManager = thread->mAudioFlinger->getOrCreateAudioManager();
+ if (audioManager && mPortId != AUDIO_PORT_HANDLE_NONE) {
+ std::unique_ptr<os::PersistableBundle> bundle =
+ std::make_unique<os::PersistableBundle>();
+ bundle->putBoolean(String16(kExtraPlayerEventSpatializedKey),
+ isSpatialized());
+ bundle->putInt(String16(kExtraPlayerEventSampleRateKey), mSampleRate);
+ bundle->putInt(String16(kExtraPlayerEventChannelMaskKey), mChannelMask);
+ status_t result = audioManager->portEvent(mPortId,
+ PLAYER_UPDATE_FORMAT, bundle);
+ if (result != OK) {
+ ALOGE("%s: unable to send playback format for port ID %d, status error %d",
+ __func__, mPortId, result);
+ }
+ }
}
return status;
}
@@ -1401,8 +1422,11 @@
return mVolumeHandler->getVolumeShaperState(id);
}
-void AudioFlinger::PlaybackThread::Track::setFinalVolume(float volume)
+void AudioFlinger::PlaybackThread::Track::setFinalVolume(float volumeLeft, float volumeRight)
{
+ mFinalVolumeLeft = volumeLeft;
+ mFinalVolumeRight = volumeRight;
+ const float volume = (volumeLeft + volumeRight) * 0.5f;
if (mFinalVolume != volume) { // Compare to an epsilon if too many meaningless updates
mFinalVolume = volume;
setMetadataHasChanged();
@@ -1486,12 +1510,54 @@
*backInserter++ = metadata;
}
-void AudioFlinger::PlaybackThread::Track::setTeePatches(TeePatches teePatches) {
- forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
- mTeePatches = std::move(teePatches);
- if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
- mState == TrackBase::STOPPING_1) {
- forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+void AudioFlinger::PlaybackThread::Track::updateTeePatches() {
+ if (mTeePatchesToUpdate.has_value()) {
+ forEachTeePatchTrack([](auto patchTrack) { patchTrack->destroy(); });
+ mTeePatches = mTeePatchesToUpdate.value();
+ if (mState == TrackBase::ACTIVE || mState == TrackBase::RESUMING ||
+ mState == TrackBase::STOPPING_1) {
+ forEachTeePatchTrack([](auto patchTrack) { patchTrack->start(); });
+ }
+ mTeePatchesToUpdate.reset();
+ }
+}
+
+void AudioFlinger::PlaybackThread::Track::setTeePatchesToUpdate(TeePatches teePatchesToUpdate) {
+ ALOGW_IF(mTeePatchesToUpdate.has_value(),
+ "%s, existing tee patches to update will be ignored", __func__);
+ mTeePatchesToUpdate = std::move(teePatchesToUpdate);
+}
+
+// must be called with player thread lock held
+void AudioFlinger::PlaybackThread::Track::processMuteEvent_l(const sp<
+ IAudioManager>& audioManager, mute_state_t muteState)
+{
+ if (mMuteState == muteState) {
+ // mute state did not change, do nothing
+ return;
+ }
+
+ status_t result = UNKNOWN_ERROR;
+ if (audioManager && mPortId != AUDIO_PORT_HANDLE_NONE) {
+ if (mMuteEventExtras == nullptr) {
+ mMuteEventExtras = std::make_unique<os::PersistableBundle>();
+ }
+ mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey),
+ static_cast<int>(muteState));
+
+ result = audioManager->portEvent(mPortId,
+ PLAYER_UPDATE_MUTED,
+ mMuteEventExtras);
+ }
+
+ if (result == OK) {
+ mMuteState = muteState;
+ } else {
+ ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
+ __func__,
+ id(),
+ mPortId,
+ result);
}
}
@@ -1931,6 +1997,8 @@
PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
if ((mTrack->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
&& playbackThread->mHapticChannelCount > 0) {
+ ALOGD("%s, haptic playback was %s for track %d",
+ __func__, muted ? "muted" : "unmuted", mTrack->id());
mTrack->setHapticPlaybackEnabled(!muted);
return true;
}
@@ -2020,17 +2088,50 @@
ssize_t AudioFlinger::PlaybackThread::OutputTrack::write(void* data, uint32_t frames)
{
+ if (!mActive && frames != 0) {
+ sp<ThreadBase> thread = mThread.promote();
+ if (thread != nullptr && thread->standby()) {
+ // preload one silent buffer to trigger mixer on start()
+ ClientProxy::Buffer buf { .mFrameCount = mClientProxy->getStartThresholdInFrames() };
+ status_t status = mClientProxy->obtainBuffer(&buf);
+ if (status != NO_ERROR && status != NOT_ENOUGH_DATA && status != WOULD_BLOCK) {
+ ALOGE("%s(%d): could not obtain buffer on start", __func__, mId);
+ return 0;
+ }
+ memset(buf.mRaw, 0, buf.mFrameCount * mFrameSize);
+ mClientProxy->releaseBuffer(&buf);
+
+ (void) start();
+
+ // wait for HAL stream to start before sending actual audio. Doing this on each
+ // OutputTrack ensures that playback start on all output streams is synchronized.
+ // If another OutputTrack has already started it can underrun but this is OK
+ // as only silence has been played so far and the retry count is very high on
+ // OutputTrack.
+ auto pt = static_cast<PlaybackThread *>(thread.get());
+ if (!pt->waitForHalStart()) {
+ ALOGW("%s(%d): timeout waiting for thread to exit standby", __func__, mId);
+ stop();
+ return 0;
+ }
+
+ // enqueue the first buffer and exit so that other OutputTracks will also start before
+ // write() is called again and this buffer is actually consumed.
+ Buffer firstBuffer;
+ firstBuffer.frameCount = frames;
+ firstBuffer.raw = data;
+ queueBuffer(firstBuffer);
+ return frames;
+ } else {
+ (void) start();
+ }
+ }
+
Buffer *pInBuffer;
Buffer inBuffer;
inBuffer.frameCount = frames;
inBuffer.raw = data;
-
uint32_t waitTimeLeftMs = mSourceThread->waitTimeMs();
-
- if (!mActive && frames != 0) {
- (void) start();
- }
-
while (waitTimeLeftMs) {
// First write pending buffers, then new data
if (mBufferQueue.size()) {
@@ -2098,25 +2199,7 @@
if (inBuffer.frameCount) {
sp<ThreadBase> thread = mThread.promote();
if (thread != 0 && !thread->standby()) {
- if (mBufferQueue.size() < kMaxOverFlowBuffers) {
- pInBuffer = new Buffer;
- const size_t bufferSize = inBuffer.frameCount * mFrameSize;
- pInBuffer->mBuffer = malloc(bufferSize);
- LOG_ALWAYS_FATAL_IF(pInBuffer->mBuffer == nullptr,
- "%s: Unable to malloc size %zu", __func__, bufferSize);
- pInBuffer->frameCount = inBuffer.frameCount;
- pInBuffer->raw = pInBuffer->mBuffer;
- memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * mFrameSize);
- mBufferQueue.add(pInBuffer);
- ALOGV("%s(%d): thread %d adding overflow buffer %zu", __func__, mId,
- (int)mThreadIoHandle, mBufferQueue.size());
- // audio data is consumed (stored locally); set frameCount to 0.
- inBuffer.frameCount = 0;
- } else {
- ALOGW("%s(%d): thread %d no more overflow buffers",
- __func__, mId, (int)mThreadIoHandle);
- // TODO: return error for this.
- }
+ queueBuffer(inBuffer);
}
}
@@ -2129,6 +2212,29 @@
return frames - inBuffer.frameCount; // number of frames consumed.
}
+void AudioFlinger::PlaybackThread::OutputTrack::queueBuffer(Buffer& inBuffer) {
+
+ if (mBufferQueue.size() < kMaxOverFlowBuffers) {
+ Buffer *pInBuffer = new Buffer;
+ const size_t bufferSize = inBuffer.frameCount * mFrameSize;
+ pInBuffer->mBuffer = malloc(bufferSize);
+ LOG_ALWAYS_FATAL_IF(pInBuffer->mBuffer == nullptr,
+ "%s: Unable to malloc size %zu", __func__, bufferSize);
+ pInBuffer->frameCount = inBuffer.frameCount;
+ pInBuffer->raw = pInBuffer->mBuffer;
+ memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * mFrameSize);
+ mBufferQueue.add(pInBuffer);
+ ALOGV("%s(%d): thread %d adding overflow buffer %zu", __func__, mId,
+ (int)mThreadIoHandle, mBufferQueue.size());
+ // audio data is consumed (stored locally); set frameCount to 0.
+ inBuffer.frameCount = 0;
+ } else {
+ ALOGW("%s(%d): thread %d no more overflow buffers",
+ __func__, mId, (int)mThreadIoHandle);
+ // TODO: return error for this.
+ }
+}
+
void AudioFlinger::PlaybackThread::OutputTrack::copyMetadataTo(MetadataInserter& backInserter) const
{
std::lock_guard<std::mutex> lock(mTrackMetadatasMutex);
@@ -3118,6 +3224,38 @@
{
}
+void AudioFlinger::MmapThread::MmapTrack::processMuteEvent_l(const sp<
+ IAudioManager>& audioManager, mute_state_t muteState)
+{
+ if (mMuteState == muteState) {
+ // mute state did not change, do nothing
+ return;
+ }
+
+ status_t result = UNKNOWN_ERROR;
+ if (audioManager && mPortId != AUDIO_PORT_HANDLE_NONE) {
+ if (mMuteEventExtras == nullptr) {
+ mMuteEventExtras = std::make_unique<os::PersistableBundle>();
+ }
+ mMuteEventExtras->putInt(String16(kExtraPlayerEventMuteKey),
+ static_cast<int>(muteState));
+
+ result = audioManager->portEvent(mPortId,
+ PLAYER_UPDATE_MUTED,
+ mMuteEventExtras);
+ }
+
+ if (result == OK) {
+ mMuteState = muteState;
+ } else {
+ ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
+ __func__,
+ id(),
+ mPortId,
+ result);
+ }
+}
+
void AudioFlinger::MmapThread::MmapTrack::appendDumpHeader(String8& result)
{
result.appendFormat("Client Session Port Id Format Chn mask SRate Flags %s\n",
diff --git a/services/audioflinger/afutils/AllocatorFactory.h b/services/audioflinger/afutils/AllocatorFactory.h
new file mode 100644
index 0000000..7534607
--- /dev/null
+++ b/services/audioflinger/afutils/AllocatorFactory.h
@@ -0,0 +1,95 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#include <mediautils/SharedMemoryAllocator.h>
+
+#pragma once
+
+// TODO how do we appropriately restrict visibility of this header?
+// It should only be included in AudioFlinger.h
+// We will make everything internal linkage for now.
+namespace android {
+namespace AllocatorFactory {
+namespace {
+// TODO make sure these are appropriate
+constexpr inline size_t MAX_MEMORY_SIZE = 1024 * 1024 * 100; // 100 MiB
+constexpr inline size_t DED_SIZE = (MAX_MEMORY_SIZE * 4) / 10; // 40 MiB
+constexpr inline size_t SHARED_SIZE = MAX_MEMORY_SIZE - DED_SIZE; // 60 MiB
+constexpr inline size_t SHARED_SIZE_LARGE = (SHARED_SIZE * 4) / 6; // 40 MiB
+constexpr inline size_t SHARED_SIZE_SMALL = SHARED_SIZE - SHARED_SIZE_LARGE; // 20 MiB
+constexpr inline size_t SMALL_THRESHOLD = 1024 * 40; // 40 KiB
+
+inline auto getDedicated() {
+ using namespace mediautils;
+ static const auto allocator =
+ std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<DED_SIZE>>>();
+ return allocator;
+}
+
+inline auto getSharedLarge() {
+ using namespace mediautils;
+ static const auto allocator = std::make_shared<
+ PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<SHARED_SIZE_LARGE>>>();
+ return allocator;
+}
+
+inline auto getSharedSmall() {
+ using namespace mediautils;
+ static const auto allocator =
+ std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator,
+ SizePolicy<SHARED_SIZE_SMALL, 0, SMALL_THRESHOLD>>>();
+ return allocator;
+}
+
+template <typename Policy, typename Allocator>
+inline auto wrapWithPolicySnooping(Allocator allocator, std::string_view name) {
+ using namespace mediautils;
+ return SnoopingAllocator{PolicyAllocator{IndirectAllocator{allocator}, Policy{}}, name};
+}
+
+// A reasonable upper bound on the number of clients we expect, and the number of pieces
+// to slice the dedicated pool into.
+constexpr inline size_t CLIENT_BOUND = 32;
+// Maximum amount of shared pools a single client can take (50%).
+constexpr inline size_t ADV_THRESHOLD_INV = 2;
+
+inline auto getClientAllocator() {
+ using namespace mediautils;
+ const auto makeDedPool = []() {
+ return wrapWithPolicySnooping<SizePolicy<DED_SIZE / CLIENT_BOUND>>(getDedicated(),
+ "Dedicated Pool");
+ };
+ const auto makeLargeShared = []() {
+ return wrapWithPolicySnooping<SizePolicy<SHARED_SIZE_LARGE / ADV_THRESHOLD_INV>>(
+ getSharedLarge(), "Large Shared");
+ };
+ const auto makeSmallShared = []() {
+ return wrapWithPolicySnooping<
+ SizePolicy<SHARED_SIZE_SMALL / ADV_THRESHOLD_INV>>(
+ getSharedSmall(), "Small Shared");
+ };
+
+ return ScopedAllocator{std::make_shared<
+ FallbackAllocator<decltype(makeDedPool()),
+ decltype(FallbackAllocator(makeLargeShared(), makeSmallShared()))>>(
+ makeDedPool(), FallbackAllocator{makeLargeShared(), makeSmallShared()})};
+}
+
+using ClientAllocator = decltype(getClientAllocator());
+} // namespace
+} // namespace AllocatorFactory
+} // namespace android
diff --git a/services/audioflinger/datapath/TrackMetrics.h b/services/audioflinger/datapath/TrackMetrics.h
index ed3928a..f3425df 100644
--- a/services/audioflinger/datapath/TrackMetrics.h
+++ b/services/audioflinger/datapath/TrackMetrics.h
@@ -17,6 +17,9 @@
#ifndef ANDROID_AUDIO_TRACKMETRICS_H
#define ANDROID_AUDIO_TRACKMETRICS_H
+#include <binder/IActivityManager.h>
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
#include <mutex>
namespace android {
@@ -38,10 +41,13 @@
* We currently deliver metrics based on an AudioIntervalGroup.
*/
class TrackMetrics final {
+
+
public:
- TrackMetrics(std::string metricsId, bool isOut)
+ TrackMetrics(std::string metricsId, bool isOut, int clientUid)
: mMetricsId(std::move(metricsId))
, mIsOut(isOut)
+ , mUid(clientUid)
{} // we don't log a constructor item, we wait for more info in logConstructor().
~TrackMetrics() {
@@ -64,6 +70,18 @@
AMEDIAMETRICS_PROP_EVENT_VALUE_BEGINAUDIOINTERVALGROUP, devices.c_str());
}
++mIntervalCount;
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ if (mIsOut) {
+ mActivityManager->logFgsApiBegin(AUDIO_API,
+ mUid,
+ IPCThreadState::self() -> getCallingPid());
+ } else {
+ mActivityManager->logFgsApiBegin(MICROPHONE_API,
+ mUid,
+ IPCThreadState::self() -> getCallingPid());
+ }
+ }
}
void logConstructor(pid_t creatorPid, uid_t creatorUid, int32_t internalTrackId,
@@ -93,6 +111,18 @@
logVolume_l(mVolume); // flush out the last volume.
mLastVolumeChangeTimeNs = 0;
}
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ if (mIsOut) {
+ mActivityManager->logFgsApiEnd(AUDIO_API,
+ mUid,
+ IPCThreadState::self() -> getCallingPid());
+ } else {
+ mActivityManager->logFgsApiEnd(MICROPHONE_API,
+ mUid,
+ IPCThreadState::self() -> getCallingPid());
+ }
+ }
}
void logInvalidate() const {
@@ -222,9 +252,25 @@
// do not reset mUnderrunCount - it keeps continuously running for tracks.
}
+ // Meyers singleton is thread-safe.
+ static const sp<IActivityManager>& getActivityManager() {
+ static const auto activityManager = []() -> sp<IActivityManager> {
+ const sp<IServiceManager> sm(defaultServiceManager());
+ if (sm != nullptr) {
+ return interface_cast<IActivityManager>(sm->checkService(String16("activity")));
+ }
+ return nullptr;
+ }();
+ return activityManager;
+ }
+
const std::string mMetricsId;
const bool mIsOut; // if true, then a playback track, otherwise used for record.
+ static constexpr int AUDIO_API = 5;
+ static constexpr int MICROPHONE_API = 6;
+ const int mUid;
+
mutable std::mutex mLock;
// Devices in the interval group.
diff --git a/services/audioflinger/fastpath/AutoPark.h b/services/audioflinger/fastpath/AutoPark.h
index 6e68327..d71a305 100644
--- a/services/audioflinger/fastpath/AutoPark.h
+++ b/services/audioflinger/fastpath/AutoPark.h
@@ -25,7 +25,6 @@
// Park the specific FastThread, which can be nullptr, in hot idle if not currently idling
explicit AutoPark(const sp<T>& fastThread) : mFastThread(fastThread)
{
- mPreviousCommand = FastThreadState::HOT_IDLE;
if (fastThread != nullptr) {
auto sq = mFastThread->sq();
FastThreadState *state = sq->begin();
@@ -57,7 +56,7 @@
private:
const sp<T> mFastThread;
// if !&IDLE, holds the FastThread state to restore after new parameters processed
- FastThreadState::Command mPreviousCommand;
+ FastThreadState::Command mPreviousCommand = FastThreadState::HOT_IDLE;
}; // class AutoPark
} // namespace android
diff --git a/services/audioflinger/fastpath/FastMixer.h b/services/audioflinger/fastpath/FastMixer.h
index 48b94a3..6e48df6 100644
--- a/services/audioflinger/fastpath/FastMixer.h
+++ b/services/audioflinger/fastpath/FastMixer.h
@@ -80,12 +80,13 @@
// if sink format is different than mixer output.
size_t mSinkBufferSize = 0;
uint32_t mSinkChannelCount = FCC_2;
- audio_channel_mask_t mSinkChannelMask;
+ audio_channel_mask_t mSinkChannelMask; // set in ctor
void* mMixerBuffer = nullptr; // mixer output buffer.
size_t mMixerBufferSize = 0;
static constexpr audio_format_t mMixerBufferFormat = AUDIO_FORMAT_PCM_FLOAT;
- uint32_t mAudioChannelCount; // audio channel count, excludes haptic channels.
+ // audio channel count, excludes haptic channels. Set in onStateChange().
+ uint32_t mAudioChannelCount = 0;
enum {UNDEFINED, MIXED, ZEROED} mMixerBufferState = UNDEFINED;
NBAIO_Format mFormat{Format_Invalid};
@@ -103,7 +104,7 @@
// accessed without lock between multiple threads.
std::atomic_bool mMasterMono{};
std::atomic<float> mMasterBalance{};
- std::atomic_int_fast64_t mBoottimeOffset;
+ std::atomic_int_fast64_t mBoottimeOffset{};
// parent thread id for debugging purposes
[[maybe_unused]] const audio_io_handle_t mThreadIoHandle;
diff --git a/services/audioflinger/fastpath/FastMixerDumpState.h b/services/audioflinger/fastpath/FastMixerDumpState.h
index 1b0e029..4d2e1a0 100644
--- a/services/audioflinger/fastpath/FastMixerDumpState.h
+++ b/services/audioflinger/fastpath/FastMixerDumpState.h
@@ -56,7 +56,7 @@
struct FastTrackDump {
FastTrackUnderruns mUnderruns;
size_t mFramesReady = 0; // most recent value only; no long-term statistics kept
- int64_t mFramesWritten; // last value from track
+ int64_t mFramesWritten = 0; // last value from track
};
// No virtuals.
diff --git a/services/audioflinger/fastpath/FastMixerState.h b/services/audioflinger/fastpath/FastMixerState.h
index fdf3eaa..c70e42a 100644
--- a/services/audioflinger/fastpath/FastMixerState.h
+++ b/services/audioflinger/fastpath/FastMixerState.h
@@ -37,7 +37,7 @@
// The provider implementation is responsible for validating that the return value is in range.
virtual gain_minifloat_packed_t getVolumeLR() = 0;
protected:
- VolumeProvider() { }
+ VolumeProvider() = default;
virtual ~VolumeProvider() = default;
};
@@ -63,12 +63,12 @@
// Represents a single state of the fast mixer
struct FastMixerState : FastThreadState {
- FastMixerState();
+ FastMixerState();
// These are the minimum, maximum, and default values for maximum number of fast tracks
- static const unsigned kMinFastTracks = 2;
- static const unsigned kMaxFastTracks = 32;
- static const unsigned kDefaultFastTracks = 8;
+ static constexpr unsigned kMinFastTracks = 2;
+ static constexpr unsigned kMaxFastTracks = 32;
+ static constexpr unsigned kDefaultFastTracks = 8;
static unsigned sMaxFastTracks; // Configured maximum number of fast tracks
static pthread_once_t sMaxFastTracksOnce; // Protects initializer for sMaxFastTracks
diff --git a/services/audioflinger/fastpath/StateQueue.h b/services/audioflinger/fastpath/StateQueue.h
index 29d1809..36d986b 100644
--- a/services/audioflinger/fastpath/StateQueue.h
+++ b/services/audioflinger/fastpath/StateQueue.h
@@ -123,11 +123,10 @@
#endif
// manages a FIFO queue of states
-template<typename T> class StateQueue {
+// marked final to prevent derived classes, since there are no virtuals.
+template<typename T> class StateQueue final {
public:
- virtual ~StateQueue() = default; // why is this virtual?
-
// Observer APIs
// Poll for a state change. Returns a pointer to a read-only state,
diff --git a/services/audioflinger/sounddose/Android.bp b/services/audioflinger/sounddose/Android.bp
new file mode 100644
index 0000000..2cab5d1
--- /dev/null
+++ b/services/audioflinger/sounddose/Android.bp
@@ -0,0 +1,80 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_base_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+audioflinger_sounddose_tidy_errors = audioflinger_base_tidy_errors + [
+ "modernize-avoid-c-arrays",
+ "modernize-deprecated-headers",
+ "modernize-pass-by-value",
+ "modernize-use-auto",
+ "modernize-use-nodiscard",
+
+ // TODO(b/275642749) Reenable these warnings
+ "-misc-non-private-member-variables-in-classes",
+]
+
+// Eventually use common tidy defaults
+cc_defaults {
+ name: "audioflinger_sounddose_flags_defaults",
+ // https://clang.llvm.org/docs/UsersManual.html#command-line-options
+ // https://clang.llvm.org/docs/DiagnosticsReference.html
+ cflags: audioflinger_base_cflags,
+ // https://clang.llvm.org/extra/clang-tidy/
+ tidy: true,
+ tidy_checks: audioflinger_sounddose_tidy_errors,
+ tidy_checks_as_errors: audioflinger_sounddose_tidy_errors,
+ tidy_flags: [
+ "-format-style=file",
+ ],
+}
+
+cc_library {
+ name: "libsounddose",
+
+ double_loadable: true,
+
+ defaults: [
+ "audioflinger_sounddose_flags_defaults",
+ "latest_android_media_audio_common_types_ndk_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_hardware_audio_sounddose_ndk_shared",
+ ],
+
+ srcs: [
+ "SoundDoseManager.cpp",
+ ],
+
+ shared_libs: [
+ "audioflinger-aidl-cpp",
+ "libaudio_aidl_conversion_common_ndk",
+ "libaudiofoundation",
+ "libaudioutils",
+ "libbase",
+ "libbinder",
+ "libbinder_ndk",
+ "liblog",
+ "libutils",
+ ],
+
+ header_libs: [
+ "libaudioutils_headers",
+ ],
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-DBACKEND_NDK",
+ ],
+}
+
+cc_library_headers {
+ name: "libsounddose_headers",
+ host_supported: true,
+ device_supported: true,
+ export_include_dirs: ["."],
+}
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
new file mode 100644
index 0000000..21f346e
--- /dev/null
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -0,0 +1,570 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "SoundDoseManager"
+
+#include "SoundDoseManager.h"
+
+#include "android/media/SoundDoseRecord.h"
+#include <android-base/stringprintf.h>
+#include <media/AidlConversionCppNdk.h>
+#include <cinttypes>
+#include <ctime>
+#include <utils/Log.h>
+
+namespace android {
+
+using aidl::android::media::audio::common::AudioDevice;
+
+namespace {
+
+int64_t getMonotonicSecond() {
+ struct timespec now_ts;
+ if (clock_gettime(CLOCK_MONOTONIC, &now_ts) != 0) {
+ ALOGE("%s: cannot get timestamp", __func__);
+ return -1;
+ }
+ return now_ts.tv_sec;
+}
+
+} // namespace
+
+sp<audio_utils::MelProcessor> SoundDoseManager::getOrCreateProcessorForDevice(
+ audio_port_handle_t deviceId, audio_io_handle_t streamHandle, uint32_t sampleRate,
+ size_t channelCount, audio_format_t format) {
+ const std::lock_guard _l(mLock);
+
+ if (mHalSoundDose != nullptr && mEnabledCsd) {
+ ALOGD("%s: using HAL MEL computation, no MelProcessor needed.", __func__);
+ return nullptr;
+ }
+
+ auto streamProcessor = mActiveProcessors.find(streamHandle);
+ if (streamProcessor != mActiveProcessors.end()) {
+ auto processor = streamProcessor->second.promote();
+ // if processor is nullptr it means it was removed by the playback
+ // thread and can be replaced in the mActiveProcessors map
+ if (processor != nullptr) {
+ ALOGV("%s: found callback for stream id %d", __func__, streamHandle);
+ const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
+ if (activeTypeIt != mActiveDeviceTypes.end()) {
+ processor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
+ }
+ processor->setDeviceId(deviceId);
+ processor->setOutputRs2UpperBound(mRs2UpperBound);
+ return processor;
+ }
+ }
+
+ ALOGV("%s: creating new callback for stream id %d", __func__, streamHandle);
+ sp<audio_utils::MelProcessor> melProcessor = sp<audio_utils::MelProcessor>::make(
+ sampleRate, channelCount, format, this, deviceId, mRs2UpperBound);
+ const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
+ if (activeTypeIt != mActiveDeviceTypes.end()) {
+ melProcessor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
+ }
+ mActiveProcessors[streamHandle] = melProcessor;
+ return melProcessor;
+}
+
+bool SoundDoseManager::setHalSoundDoseInterface(const std::shared_ptr<ISoundDose>& halSoundDose) {
+ ALOGV("%s", __func__);
+
+ std::shared_ptr<HalSoundDoseCallback> halSoundDoseCallback;
+ {
+ const std::lock_guard _l(mLock);
+
+ mHalSoundDose = halSoundDose;
+ if (halSoundDose == nullptr) {
+ ALOGI("%s: passed ISoundDose object is null, switching to internal CSD", __func__);
+ return false;
+ }
+
+ if (!mHalSoundDose->setOutputRs2UpperBound(mRs2UpperBound).isOk()) {
+ ALOGW("%s: Cannot set RS2 value for momentary exposure %f",
+ __func__,
+ mRs2UpperBound);
+ }
+
+ // initialize the HAL sound dose callback lazily
+ if (mHalSoundDoseCallback == nullptr) {
+ mHalSoundDoseCallback =
+ ndk::SharedRefBase::make<HalSoundDoseCallback>(this);
+ }
+ halSoundDoseCallback = mHalSoundDoseCallback;
+ }
+
+ auto status = halSoundDose->registerSoundDoseCallback(halSoundDoseCallback);
+
+ if (!status.isOk()) {
+ // Not a warning since this can happen if the callback was registered before
+ ALOGI("%s: Cannot register HAL sound dose callback with status message: %s",
+ __func__,
+ status.getMessage());
+ }
+
+ return true;
+}
+
+void SoundDoseManager::setOutputRs2UpperBound(float rs2Value) {
+ ALOGV("%s", __func__);
+ const std::lock_guard _l(mLock);
+
+ if (mHalSoundDose != nullptr) {
+ // using the HAL sound dose interface
+ if (!mHalSoundDose->setOutputRs2UpperBound(rs2Value).isOk()) {
+ ALOGE("%s: Cannot set RS2 value for momentary exposure %f", __func__, rs2Value);
+ return;
+ }
+ mRs2UpperBound = rs2Value;
+ return;
+ }
+
+ for (auto& streamProcessor : mActiveProcessors) {
+ const sp<audio_utils::MelProcessor> processor = streamProcessor.second.promote();
+ if (processor != nullptr) {
+ const status_t result = processor->setOutputRs2UpperBound(rs2Value);
+ if (result != NO_ERROR) {
+ ALOGW("%s: could not set RS2 upper bound %f for stream %d", __func__, rs2Value,
+ streamProcessor.first);
+ return;
+ }
+ mRs2UpperBound = rs2Value;
+ }
+ }
+}
+
+void SoundDoseManager::removeStreamProcessor(audio_io_handle_t streamHandle) {
+ const std::lock_guard _l(mLock);
+ auto callbackToRemove = mActiveProcessors.find(streamHandle);
+ if (callbackToRemove != mActiveProcessors.end()) {
+ mActiveProcessors.erase(callbackToRemove);
+ }
+}
+
+audio_port_handle_t SoundDoseManager::getIdForAudioDevice(const AudioDevice& audioDevice) const {
+ const std::lock_guard _l(mLock);
+
+ audio_devices_t type;
+ std::string address;
+ auto result = aidl::android::aidl2legacy_AudioDevice_audio_device(
+ audioDevice, &type, &address);
+ if (result != NO_ERROR) {
+ ALOGE("%s: could not convert from AudioDevice to AudioDeviceTypeAddr", __func__);
+ return AUDIO_PORT_HANDLE_NONE;
+ }
+
+ auto adt = AudioDeviceTypeAddr(type, address);
+ auto deviceIt = mActiveDevices.find(adt);
+ if (deviceIt == mActiveDevices.end()) {
+ ALOGI("%s: could not find port id for device %s", __func__, adt.toString().c_str());
+ return AUDIO_PORT_HANDLE_NONE;
+ }
+ return deviceIt->second;
+}
+
+void SoundDoseManager::mapAddressToDeviceId(const AudioDeviceTypeAddr& adt,
+ const audio_port_handle_t deviceId) {
+ const std::lock_guard _l(mLock);
+ ALOGI("%s: map address: %d to device id: %d", __func__, adt.mType, deviceId);
+ mActiveDevices[adt] = deviceId;
+ mActiveDeviceTypes[deviceId] = adt.mType;
+}
+
+void SoundDoseManager::clearMapDeviceIdEntries(audio_port_handle_t deviceId) {
+ const std::lock_guard _l(mLock);
+ for (auto activeDevice = mActiveDevices.begin(); activeDevice != mActiveDevices.end();) {
+ if (activeDevice->second == deviceId) {
+ ALOGI("%s: clear mapping type: %d to deviceId: %d",
+ __func__, activeDevice->first.mType, deviceId);
+ activeDevice = mActiveDevices.erase(activeDevice);
+ continue;
+ }
+ ++activeDevice;
+ }
+ mActiveDeviceTypes.erase(deviceId);
+}
+
+ndk::ScopedAStatus SoundDoseManager::HalSoundDoseCallback::onMomentaryExposureWarning(
+ float in_currentDbA, const AudioDevice& in_audioDevice) {
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager == nullptr) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+ }
+
+ std::shared_ptr<ISoundDose> halSoundDose;
+ soundDoseManager->getHalSoundDose(&halSoundDose);
+    if (halSoundDose == nullptr) {
+ ALOGW("%s: HAL sound dose interface deactivated. Ignoring", __func__);
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+ }
+
+ auto id = soundDoseManager->getIdForAudioDevice(in_audioDevice);
+ if (id == AUDIO_PORT_HANDLE_NONE) {
+ ALOGI("%s: no mapped id for audio device with type %d and address %s",
+ __func__, in_audioDevice.type.type,
+ in_audioDevice.address.toString().c_str());
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ soundDoseManager->onMomentaryExposure(in_currentDbA, id);
+
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus SoundDoseManager::HalSoundDoseCallback::onNewMelValues(
+ const ISoundDose::IHalSoundDoseCallback::MelRecord& in_melRecord,
+ const AudioDevice& in_audioDevice) {
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager == nullptr) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+ }
+
+ std::shared_ptr<ISoundDose> halSoundDose;
+ soundDoseManager->getHalSoundDose(&halSoundDose);
+    if (halSoundDose == nullptr) {
+ ALOGW("%s: HAL sound dose interface deactivated. Ignoring", __func__);
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+ }
+
+ auto id = soundDoseManager->getIdForAudioDevice(in_audioDevice);
+ if (id == AUDIO_PORT_HANDLE_NONE) {
+ ALOGI("%s: no mapped id for audio device with type %d and address %s",
+ __func__, in_audioDevice.type.type,
+ in_audioDevice.address.toString().c_str());
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ // TODO: introduce timestamp in onNewMelValues callback
+ soundDoseManager->onNewMelValues(in_melRecord.melValues, 0,
+ in_melRecord.melValues.size(), id);
+
+ return ndk::ScopedAStatus::ok();
+}
+
+void SoundDoseManager::SoundDose::binderDied(__unused const wp<IBinder>& who) {
+ ALOGV("%s", __func__);
+
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ soundDoseManager->resetSoundDose();
+ }
+}
+
+binder::Status SoundDoseManager::SoundDose::setOutputRs2UpperBound(float value) {
+ ALOGV("%s", __func__);
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ soundDoseManager->setOutputRs2UpperBound(value);
+ }
+ return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::resetCsd(
+ float currentCsd, const std::vector<media::SoundDoseRecord>& records) {
+ ALOGV("%s", __func__);
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ soundDoseManager->resetCsd(currentCsd, records);
+ }
+ return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::updateAttenuation(float attenuationDB, int device) {
+ ALOGV("%s", __func__);
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ soundDoseManager->updateAttenuation(attenuationDB, static_cast<audio_devices_t>(device));
+ }
+ return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::setCsdEnabled(bool enabled) {
+ ALOGV("%s", __func__);
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ soundDoseManager->setCsdEnabled(enabled);
+ }
+ return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::getOutputRs2UpperBound(float* value) {
+ ALOGV("%s", __func__);
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ const std::lock_guard _l(soundDoseManager->mLock);
+ *value = soundDoseManager->mRs2UpperBound;
+ }
+ return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::getCsd(float* value) {
+ ALOGV("%s", __func__);
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ *value = soundDoseManager->mMelAggregator->getCsd();
+ }
+ return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::forceUseFrameworkMel(bool useFrameworkMel) {
+ ALOGV("%s", __func__);
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ soundDoseManager->setUseFrameworkMel(useFrameworkMel);
+ }
+ return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::forceComputeCsdOnAllDevices(
+ bool computeCsdOnAllDevices) {
+ ALOGV("%s", __func__);
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ soundDoseManager->setComputeCsdOnAllDevices(computeCsdOnAllDevices);
+ }
+ return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::isSoundDoseHalSupported(bool* value) {
+ ALOGV("%s", __func__);
+ *value = false;
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ *value = soundDoseManager->isSoundDoseHalSupported();
+ }
+ return binder::Status::ok();
+}
+
+void SoundDoseManager::updateAttenuation(float attenuationDB, audio_devices_t deviceType) {
+ const std::lock_guard _l(mLock);
+ ALOGV("%s: updating MEL processor attenuation for device type %d to %f",
+ __func__, deviceType, attenuationDB);
+ mMelAttenuationDB[deviceType] = attenuationDB;
+ for (const auto& mp : mActiveProcessors) {
+ auto melProcessor = mp.second.promote();
+ if (melProcessor != nullptr) {
+ auto deviceId = melProcessor->getDeviceId();
+ if (mActiveDeviceTypes[deviceId] == deviceType) {
+ ALOGV("%s: set attenuation for deviceId %d to %f",
+ __func__, deviceId, attenuationDB);
+ melProcessor->setAttenuation(attenuationDB);
+ }
+ }
+ }
+}
+
+void SoundDoseManager::setCsdEnabled(bool enabled) {
+ ALOGV("%s", __func__);
+
+ const std::lock_guard _l(mLock);
+ mEnabledCsd = enabled;
+
+ for (auto& activeEntry : mActiveProcessors) {
+ auto melProcessor = activeEntry.second.promote();
+ if (melProcessor != nullptr) {
+ if (enabled) {
+ melProcessor->resume();
+ } else {
+ melProcessor->pause();
+ }
+ }
+ }
+}
+
+bool SoundDoseManager::isCsdEnabled() {
+ const std::lock_guard _l(mLock);
+ return mEnabledCsd;
+}
+
+void SoundDoseManager::setUseFrameworkMel(bool useFrameworkMel) {
+ // invalidate any HAL sound dose interface used
+ setHalSoundDoseInterface(nullptr);
+
+ const std::lock_guard _l(mLock);
+ mUseFrameworkMel = useFrameworkMel;
+}
+
+bool SoundDoseManager::forceUseFrameworkMel() const {
+ const std::lock_guard _l(mLock);
+ return mUseFrameworkMel;
+}
+
+void SoundDoseManager::setComputeCsdOnAllDevices(bool computeCsdOnAllDevices) {
+ const std::lock_guard _l(mLock);
+ mComputeCsdOnAllDevices = computeCsdOnAllDevices;
+}
+
+bool SoundDoseManager::forceComputeCsdOnAllDevices() const {
+ const std::lock_guard _l(mLock);
+ return mComputeCsdOnAllDevices;
+}
+
+bool SoundDoseManager::isSoundDoseHalSupported() const {
+ {
+ const std::lock_guard _l(mLock);
+ if (!mEnabledCsd) return false;
+ }
+
+ std::shared_ptr<ISoundDose> halSoundDose;
+ getHalSoundDose(&halSoundDose);
+ return halSoundDose != nullptr;
+}
+
+void SoundDoseManager::getHalSoundDose(std::shared_ptr<ISoundDose>* halSoundDose) const {
+ const std::lock_guard _l(mLock);
+ *halSoundDose = mHalSoundDose;
+}
+
+void SoundDoseManager::resetSoundDose() {
+ const std::lock_guard lock(mLock);
+ mSoundDose = nullptr;
+}
+
+void SoundDoseManager::resetCsd(float currentCsd,
+ const std::vector<media::SoundDoseRecord>& records) {
+ const std::lock_guard lock(mLock);
+ std::vector<audio_utils::CsdRecord> resetRecords;
+ resetRecords.reserve(records.size());
+ for (const auto& record : records) {
+ resetRecords.emplace_back(record.timestamp, record.duration, record.value,
+ record.averageMel);
+ }
+
+ mMelAggregator->reset(currentCsd, resetRecords);
+}
+
+void SoundDoseManager::onNewMelValues(const std::vector<float>& mels, size_t offset, size_t length,
+ audio_port_handle_t deviceId) const {
+ ALOGV("%s", __func__);
+
+
+ sp<media::ISoundDoseCallback> soundDoseCallback;
+ std::vector<audio_utils::CsdRecord> records;
+ float currentCsd;
+ {
+ const std::lock_guard _l(mLock);
+ if (!mEnabledCsd) {
+ return;
+ }
+
+
+ const int64_t timestampSec = getMonotonicSecond();
+
+ // only for internal callbacks
+ records = mMelAggregator->aggregateAndAddNewMelRecord(audio_utils::MelRecord(
+ deviceId, std::vector<float>(mels.begin() + offset, mels.begin() + offset + length),
+ timestampSec - length));
+
+ currentCsd = mMelAggregator->getCsd();
+ }
+
+ soundDoseCallback = getSoundDoseCallback();
+
+ if (records.size() > 0 && soundDoseCallback != nullptr) {
+ std::vector<media::SoundDoseRecord> newRecordsToReport;
+        newRecordsToReport.reserve(records.size());
+ for (const auto& record : records) {
+ newRecordsToReport.emplace_back(csdRecordToSoundDoseRecord(record));
+ }
+
+ soundDoseCallback->onNewCsdValue(currentCsd, newRecordsToReport);
+ }
+}
+
+sp<media::ISoundDoseCallback> SoundDoseManager::getSoundDoseCallback() const {
+ const std::lock_guard _l(mLock);
+ if (mSoundDose == nullptr) {
+ return nullptr;
+ }
+
+ return mSoundDose->mSoundDoseCallback;
+}
+
+void SoundDoseManager::onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const {
+ ALOGV("%s: Momentary exposure for device %d triggered: %f MEL", __func__, deviceId, currentMel);
+
+ {
+ const std::lock_guard _l(mLock);
+ if (!mEnabledCsd) {
+ return;
+ }
+ }
+
+ auto soundDoseCallback = getSoundDoseCallback();
+ if (soundDoseCallback != nullptr) {
+ soundDoseCallback->onMomentaryExposure(currentMel, deviceId);
+ }
+}
+
+sp<media::ISoundDose> SoundDoseManager::getSoundDoseInterface(
+ const sp<media::ISoundDoseCallback>& callback) {
+ ALOGV("%s: Register ISoundDoseCallback", __func__);
+
+ const std::lock_guard _l(mLock);
+ if (mSoundDose == nullptr) {
+ mSoundDose = sp<SoundDose>::make(this, callback);
+ }
+ return mSoundDose;
+}
+
+std::string SoundDoseManager::dump() const {
+ std::string output;
+ {
+ const std::lock_guard _l(mLock);
+ if (!mEnabledCsd) {
+ base::StringAppendF(&output, "CSD is disabled");
+ return output;
+ }
+ }
+
+ mMelAggregator->foreachCsd([&output](audio_utils::CsdRecord csdRecord) {
+ base::StringAppendF(&output,
+ "CSD %f with average MEL %f in interval [%" PRId64 ", %" PRId64 "]",
+ csdRecord.value, csdRecord.averageMel, csdRecord.timestamp,
+ csdRecord.timestamp + csdRecord.duration);
+ base::StringAppendF(&output, "\n");
+ });
+
+ base::StringAppendF(&output, "\nCached Mel Records:\n");
+ mMelAggregator->foreachCachedMel([&output](const audio_utils::MelRecord& melRecord) {
+ base::StringAppendF(&output, "Continuous MELs for portId=%d, ", melRecord.portId);
+ base::StringAppendF(&output, "starting at timestamp %" PRId64 ": ", melRecord.timestamp);
+
+ for (const auto& mel : melRecord.mels) {
+ base::StringAppendF(&output, "%.2f ", mel);
+ }
+ base::StringAppendF(&output, "\n");
+ });
+
+ return output;
+}
+
+size_t SoundDoseManager::getCachedMelRecordsSize() const {
+ return mMelAggregator->getCachedMelRecordsSize();
+}
+
+media::SoundDoseRecord SoundDoseManager::csdRecordToSoundDoseRecord(
+ const audio_utils::CsdRecord& legacy) {
+ media::SoundDoseRecord soundDoseRecord{};
+ soundDoseRecord.timestamp = legacy.timestamp;
+ soundDoseRecord.duration = legacy.duration;
+ soundDoseRecord.value = legacy.value;
+ soundDoseRecord.averageMel = legacy.averageMel;
+ return soundDoseRecord;
+}
+
+} // namespace android
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
new file mode 100644
index 0000000..9ed0661
--- /dev/null
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -0,0 +1,208 @@
+/*
+**
+** Copyright 2022, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#pragma once
+
+#include <aidl/android/hardware/audio/core/sounddose/ISoundDose.h>
+#include <aidl/android/media/audio/common/AudioDevice.h>
+#include <android/media/BnSoundDose.h>
+#include <android/media/ISoundDoseCallback.h>
+#include <media/AudioDeviceTypeAddr.h>
+#include <audio_utils/MelAggregator.h>
+#include <audio_utils/MelProcessor.h>
+#include <binder/Status.h>
+#include <mutex>
+#include <unordered_map>
+
+namespace android {
+
+using aidl::android::hardware::audio::core::sounddose::ISoundDose;
+
+class SoundDoseManager : public audio_utils::MelProcessor::MelCallback {
+public:
+ /** CSD is computed with a rolling window of 7 days. */
+ static constexpr int64_t kCsdWindowSeconds = 604800; // 60s * 60m * 24h * 7d
+ /** Default RS2 upper bound in dBA as defined in IEC 62368-1 3rd edition. */
+ static constexpr float kDefaultRs2UpperBound = 100.f;
+
+ SoundDoseManager()
+ : mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
+              mRs2UpperBound(kDefaultRs2UpperBound) {}
+
+ /**
+ * \brief Creates or gets the MelProcessor assigned to the streamHandle
+ *
+ * \param deviceId id for the devices where the stream is active.
+ * \param streamHandle handle to the stream
+ * \param sampleRate sample rate for the processor
+ * \param channelCount number of channels to be processed.
+ * \param format format of the input samples.
+ *
+ * \return MelProcessor assigned to the stream and device id.
+ */
+ sp<audio_utils::MelProcessor> getOrCreateProcessorForDevice(audio_port_handle_t deviceId,
+ audio_io_handle_t streamHandle,
+ uint32_t sampleRate,
+ size_t channelCount,
+ audio_format_t format);
+
+ /**
+ * \brief Removes stream processor when MEL computation is not needed anymore
+ *
+ * \param streamHandle handle to the stream
+ */
+ void removeStreamProcessor(audio_io_handle_t streamHandle);
+
+ /**
+ * Sets the output RS2 upper bound for momentary exposure warnings. Must not be
+ * higher than 100dBA and not lower than 80dBA.
+ *
+ * \param rs2Value value to use for momentary exposure
+ */
+ void setOutputRs2UpperBound(float rs2Value);
+
+ /**
+ * \brief Registers the interface for passing callbacks to the AudioService and gets
+ * the ISoundDose interface.
+ *
+ * \returns the sound dose binder to send commands to the SoundDoseManager
+ **/
+ sp<media::ISoundDose> getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback);
+
+ /**
+ * Sets the HAL sound dose interface to use for the MEL computation. Use nullptr
+ * for using the internal MEL computation.
+ *
+ * @return true if setting the HAL sound dose value was successful, false otherwise.
+ */
+ bool setHalSoundDoseInterface(const std::shared_ptr<ISoundDose>& halSoundDose);
+
+ /** Returns the cached audio port id from the active devices. */
+ audio_port_handle_t getIdForAudioDevice(
+ const aidl::android::media::audio::common::AudioDevice& audioDevice) const;
+
+ /** Caches mapping between address, device port id and device type. */
+ void mapAddressToDeviceId(const AudioDeviceTypeAddr& adt, const audio_port_handle_t deviceId);
+
+ /** Clear all map entries with passed audio_port_handle_t. */
+ void clearMapDeviceIdEntries(audio_port_handle_t deviceId);
+
+ /** Returns true if CSD is enabled. */
+ bool isCsdEnabled();
+
+ std::string dump() const;
+
+ // used for testing only
+ size_t getCachedMelRecordsSize() const;
+ bool forceUseFrameworkMel() const;
+ bool forceComputeCsdOnAllDevices() const;
+
+ /** Method for converting from audio_utils::CsdRecord to media::SoundDoseRecord. */
+ static media::SoundDoseRecord csdRecordToSoundDoseRecord(const audio_utils::CsdRecord& legacy);
+
+ // ------ Override audio_utils::MelProcessor::MelCallback ------
+ void onNewMelValues(const std::vector<float>& mels, size_t offset, size_t length,
+ audio_port_handle_t deviceId) const override;
+
+ void onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const override;
+
+private:
+ class SoundDose : public media::BnSoundDose,
+ public IBinder::DeathRecipient {
+ public:
+ SoundDose(SoundDoseManager* manager, const sp<media::ISoundDoseCallback>& callback)
+ : mSoundDoseManager(manager),
+ mSoundDoseCallback(callback) {}
+
+ /** IBinder::DeathRecipient. Listen to the death of ISoundDoseCallback. */
+ void binderDied(const wp<IBinder>& who) override;
+
+ /** BnSoundDose override */
+ binder::Status setOutputRs2UpperBound(float value) override;
+ binder::Status resetCsd(float currentCsd,
+ const std::vector<media::SoundDoseRecord>& records) override;
+ binder::Status updateAttenuation(float attenuationDB, int device) override;
+ binder::Status getOutputRs2UpperBound(float* value) override;
+ binder::Status setCsdEnabled(bool enabled) override;
+
+ binder::Status getCsd(float* value) override;
+ binder::Status forceUseFrameworkMel(bool useFrameworkMel) override;
+ binder::Status forceComputeCsdOnAllDevices(bool computeCsdOnAllDevices) override;
+ binder::Status isSoundDoseHalSupported(bool* value) override;
+
+ wp<SoundDoseManager> mSoundDoseManager;
+ const sp<media::ISoundDoseCallback> mSoundDoseCallback;
+ };
+
+ class HalSoundDoseCallback : public ISoundDose::BnHalSoundDoseCallback {
+ public:
+ explicit HalSoundDoseCallback(SoundDoseManager* manager)
+ : mSoundDoseManager(manager) {}
+
+ ndk::ScopedAStatus onMomentaryExposureWarning(
+ float in_currentDbA,
+ const aidl::android::media::audio::common::AudioDevice& in_audioDevice) override;
+ ndk::ScopedAStatus onNewMelValues(
+ const ISoundDose::IHalSoundDoseCallback::MelRecord& in_melRecord,
+ const aidl::android::media::audio::common::AudioDevice& in_audioDevice) override;
+
+ wp<SoundDoseManager> mSoundDoseManager;
+ };
+
+ void resetSoundDose();
+
+ void resetCsd(float currentCsd, const std::vector<media::SoundDoseRecord>& records);
+
+ sp<media::ISoundDoseCallback> getSoundDoseCallback() const;
+
+ void updateAttenuation(float attenuationDB, audio_devices_t deviceType);
+ void setCsdEnabled(bool enabled);
+ void setUseFrameworkMel(bool useFrameworkMel);
+ void setComputeCsdOnAllDevices(bool computeCsdOnAllDevices);
+ bool isSoundDoseHalSupported() const;
+ /** Returns the HAL sound dose interface or null if internal MEL computation is used. */
+ void getHalSoundDose(std::shared_ptr<ISoundDose>* halSoundDose) const;
+
+ mutable std::mutex mLock;
+
+ // no need for lock since MelAggregator is thread-safe
+ const sp<audio_utils::MelAggregator> mMelAggregator;
+
+ std::unordered_map<audio_io_handle_t, wp<audio_utils::MelProcessor>> mActiveProcessors
+ GUARDED_BY(mLock);
+
+ // map active device address and type to device id, used also for managing the pause/resume
+ // logic for deviceId's that should not report MEL values (e.g.: do not have an active MUSIC
+ // or GAME stream).
+ std::map<AudioDeviceTypeAddr, audio_port_handle_t> mActiveDevices GUARDED_BY(mLock);
+ std::unordered_map<audio_port_handle_t, audio_devices_t> mActiveDeviceTypes GUARDED_BY(mLock);
+
+ float mRs2UpperBound GUARDED_BY(mLock);
+ std::unordered_map<audio_devices_t, float> mMelAttenuationDB GUARDED_BY(mLock);
+
+ sp<SoundDose> mSoundDose GUARDED_BY(mLock);
+
+ std::shared_ptr<ISoundDose> mHalSoundDose GUARDED_BY(mLock);
+ std::shared_ptr<HalSoundDoseCallback> mHalSoundDoseCallback GUARDED_BY(mLock);
+
+ bool mUseFrameworkMel GUARDED_BY(mLock) = true;
+ bool mComputeCsdOnAllDevices GUARDED_BY(mLock) = false;
+
+ bool mEnabledCsd GUARDED_BY(mLock) = true;
+};
+
+} // namespace android
diff --git a/services/audioflinger/sounddose/tests/Android.bp b/services/audioflinger/sounddose/tests/Android.bp
new file mode 100644
index 0000000..2a2addf
--- /dev/null
+++ b/services/audioflinger/sounddose/tests/Android.bp
@@ -0,0 +1,54 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_base_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_services_audioflinger_license"],
+}
+
+cc_test {
+ name: "sounddosemanager_tests",
+
+ srcs: [
+ "sounddosemanager_tests.cpp"
+ ],
+
+ defaults: [
+ "latest_android_media_audio_common_types_ndk_static",
+ "latest_android_hardware_audio_core_sounddose_ndk_static",
+ "latest_android_hardware_audio_sounddose_ndk_static",
+ ],
+
+ shared_libs: [
+ "audioflinger-aidl-cpp",
+ "libaudiofoundation",
+ "libaudioutils",
+ "libbase",
+ "libbinder_ndk",
+ "liblog",
+ "libutils",
+ ],
+
+ static_libs: [
+ "libaudio_aidl_conversion_common_ndk",
+ "libgmock",
+ "libsounddose",
+ ],
+
+ header_libs: [
+ "libaudioutils_headers",
+ "libsounddose_headers",
+ ],
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ "-DBACKEND_NDK",
+ ],
+
+ test_suites: [
+ "general-tests",
+ ],
+}
diff --git a/services/audioflinger/sounddose/tests/TEST_MAPPING b/services/audioflinger/sounddose/tests/TEST_MAPPING
new file mode 100644
index 0000000..dd7fe4d
--- /dev/null
+++ b/services/audioflinger/sounddose/tests/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+ "presubmit": [
+ {
+ "name": "sounddosemanager_tests"
+ }
+ ]
+}
diff --git a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
new file mode 100644
index 0000000..9fab77d
--- /dev/null
+++ b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
@@ -0,0 +1,247 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "SoundDoseManager_tests"
+
+#include <SoundDoseManager.h>
+
+#include <aidl/android/hardware/audio/core/sounddose/BnSoundDose.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <media/AidlConversionCppNdk.h>
+
+namespace android {
+namespace {
+
+using aidl::android::hardware::audio::core::sounddose::BnSoundDose;
+using aidl::android::media::audio::common::AudioDevice;
+using aidl::android::media::audio::common::AudioDeviceAddress;
+
+class HalSoundDoseMock : public BnSoundDose {
+public:
+ MOCK_METHOD(ndk::ScopedAStatus, getOutputRs2UpperBound, (float*), (override));
+ MOCK_METHOD(ndk::ScopedAStatus, setOutputRs2UpperBound, (float), (override));
+ MOCK_METHOD(ndk::ScopedAStatus, registerSoundDoseCallback,
+ (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>&), (override));
+};
+
+class SoundDoseManagerTest : public ::testing::Test {
+protected:
+ void SetUp() override {
+ mSoundDoseManager = sp<SoundDoseManager>::make();
+ mHalSoundDose = ndk::SharedRefBase::make<HalSoundDoseMock>();
+
+ ON_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound)
+ .WillByDefault([] (float rs2) {
+ EXPECT_EQ(rs2, ISoundDose::DEFAULT_MAX_RS2);
+ return ndk::ScopedAStatus::ok();
+ });
+ }
+
+ sp<SoundDoseManager> mSoundDoseManager;
+ std::shared_ptr<HalSoundDoseMock> mHalSoundDose;
+};
+
+TEST_F(SoundDoseManagerTest, GetProcessorForExistingStream) {
+ sp<audio_utils::MelProcessor> processor1 =
+ mSoundDoseManager->getOrCreateProcessorForDevice(/*deviceId=*/1,
+ /*streamHandle=*/1,
+ /*sampleRate*/44100,
+ /*channelCount*/2,
+ /*format*/AUDIO_FORMAT_PCM_FLOAT);
+ sp<audio_utils::MelProcessor> processor2 =
+ mSoundDoseManager->getOrCreateProcessorForDevice(/*deviceId=*/2,
+ /*streamHandle=*/1,
+ /*sampleRate*/44100,
+ /*channelCount*/2,
+ /*format*/AUDIO_FORMAT_PCM_FLOAT);
+
+ EXPECT_EQ(processor1, processor2);
+}
+
+TEST_F(SoundDoseManagerTest, RemoveExistingStream) {
+ sp<audio_utils::MelProcessor> processor1 =
+ mSoundDoseManager->getOrCreateProcessorForDevice(/*deviceId=*/1,
+ /*streamHandle=*/1,
+ /*sampleRate*/44100,
+ /*channelCount*/2,
+ /*format*/AUDIO_FORMAT_PCM_FLOAT);
+
+ mSoundDoseManager->removeStreamProcessor(1);
+ sp<audio_utils::MelProcessor> processor2 =
+ mSoundDoseManager->getOrCreateProcessorForDevice(/*deviceId=*/2,
+ /*streamHandle=*/1,
+ /*sampleRate*/44100,
+ /*channelCount*/2,
+ /*format*/AUDIO_FORMAT_PCM_FLOAT);
+
+ EXPECT_NE(processor1, processor2);
+}
+
+TEST_F(SoundDoseManagerTest, NewMelValuesCacheNewRecord) {
+ std::vector<float>mels{1, 1};
+
+ mSoundDoseManager->onNewMelValues(mels, 0, mels.size(), /*deviceId=*/1);
+
+ EXPECT_EQ(mSoundDoseManager->getCachedMelRecordsSize(), size_t{1});
+}
+
+TEST_F(SoundDoseManagerTest, InvalidHalInterfaceIsNotSet) {
+ EXPECT_FALSE(mSoundDoseManager->setHalSoundDoseInterface(nullptr));
+}
+
+TEST_F(SoundDoseManagerTest, SetHalSoundDoseDisablesNewMelProcessorCallbacks) {
+ EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
+ EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
+ .Times(1)
+ .WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
+ EXPECT_NE(nullptr, callback);
+ return ndk::ScopedAStatus::ok();
+ });
+
+ EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+
+ EXPECT_EQ(nullptr, mSoundDoseManager->getOrCreateProcessorForDevice(/*deviceId=*/2,
+ /*streamHandle=*/1,
+ /*sampleRate*/44100,
+ /*channelCount*/2,
+ /*format*/AUDIO_FORMAT_PCM_FLOAT));
+}
+
+TEST_F(SoundDoseManagerTest, SetHalSoundDoseRegistersHalCallbacks) {
+ EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
+ EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
+ .Times(1)
+ .WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
+ EXPECT_NE(nullptr, callback);
+ return ndk::ScopedAStatus::ok();
+ });
+
+ EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+}
+
+TEST_F(SoundDoseManagerTest, MomentaryExposureFromHalWithNoAddressIllegalArgument) {
+ std::shared_ptr<ISoundDose::IHalSoundDoseCallback> halCallback;
+
+ EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
+ EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
+ .Times(1)
+ .WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
+ halCallback = callback;
+ return ndk::ScopedAStatus::ok();
+ });
+
+ EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+
+ EXPECT_NE(nullptr, halCallback);
+ AudioDevice audioDevice = {};
+ audioDevice.address.set<AudioDeviceAddress::id>("test");
+ auto status = halCallback->onMomentaryExposureWarning(
+ /*in_currentDbA=*/101.f, audioDevice);
+ EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(SoundDoseManagerTest, MomentaryExposureFromHalAfterInternalSelectedReturnsException) {
+ std::shared_ptr<ISoundDose::IHalSoundDoseCallback> halCallback;
+
+ EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
+ EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
+ .Times(1)
+ .WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
+ halCallback = callback;
+ return ndk::ScopedAStatus::ok();
+ });
+
+ EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+ EXPECT_NE(nullptr, halCallback);
+ EXPECT_FALSE(mSoundDoseManager->setHalSoundDoseInterface(nullptr));
+
+ AudioDevice audioDevice = {};
+ audioDevice.address.set<AudioDeviceAddress::id>("test");
+ auto status = halCallback->onMomentaryExposureWarning(
+ /*in_currentDbA=*/101.f, audioDevice);
+ EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(SoundDoseManagerTest, OnNewMelValuesFromHalWithNoAddressIllegalArgument) {
+ std::shared_ptr<ISoundDose::IHalSoundDoseCallback> halCallback;
+
+ EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
+ EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
+ .Times(1)
+ .WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
+ halCallback = callback;
+ return ndk::ScopedAStatus::ok();
+ });
+
+ EXPECT_TRUE(mSoundDoseManager->setHalSoundDoseInterface(mHalSoundDose));
+
+ EXPECT_NE(nullptr, halCallback);
+ AudioDevice audioDevice = {};
+ audioDevice.address.set<AudioDeviceAddress::id>("test");
+ auto status = halCallback->onNewMelValues(/*in_melRecord=*/{}, audioDevice);
+ EXPECT_FALSE(status.isOk());
+}
+
+TEST_F(SoundDoseManagerTest, GetIdReturnsMappedAddress) {
+ const std::string address = "testAddress";
+ const audio_port_handle_t deviceId = 2;
+ const audio_devices_t deviceType = AUDIO_DEVICE_OUT_WIRED_HEADSET;
+ const AudioDeviceTypeAddr adt{deviceType, address};
+ auto audioDevice = aidl::android::legacy2aidl_audio_device_AudioDevice(
+ deviceType, address.c_str());
+ ASSERT_TRUE(audioDevice.ok());
+
+ mSoundDoseManager->mapAddressToDeviceId(adt, deviceId);
+
+ EXPECT_EQ(deviceId, mSoundDoseManager->getIdForAudioDevice(audioDevice.value()));
+}
+
+TEST_F(SoundDoseManagerTest, GetAfterClearIdReturnsNone) {
+ const std::string address = "testAddress";
+ const audio_devices_t deviceType = AUDIO_DEVICE_OUT_WIRED_HEADSET;
+ const AudioDeviceTypeAddr adt{deviceType, address};
+ const audio_port_handle_t deviceId = 2;
+ auto audioDevice = aidl::android::legacy2aidl_audio_device_AudioDevice(
+ deviceType, address.c_str());
+ ASSERT_TRUE(audioDevice.ok());
+
+ mSoundDoseManager->mapAddressToDeviceId(adt, deviceId);
+ mSoundDoseManager->clearMapDeviceIdEntries(deviceId);
+
+ EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, mSoundDoseManager->getIdForAudioDevice(audioDevice.value()));
+}
+
+TEST_F(SoundDoseManagerTest, GetUnmappedIdReturnsHandleNone) {
+ const std::string address = "testAddress";
+ AudioDevice audioDevice;
+ audioDevice.address.set<AudioDeviceAddress::id>(address);
+
+ EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, mSoundDoseManager->getIdForAudioDevice(audioDevice));
+}
+
+TEST_F(SoundDoseManagerTest, GetDefaultForceComputeCsdOnAllDevices) {
+ EXPECT_FALSE(mSoundDoseManager->forceComputeCsdOnAllDevices());
+}
+
+TEST_F(SoundDoseManagerTest, GetDefaultForceUseFrameworkMel) {
+ // TODO: for now dogfooding with internal MEL. Revert to false when using the HAL MELs
+ EXPECT_TRUE(mSoundDoseManager->forceUseFrameworkMel());
+}
+
+} // namespace
+} // namespace android
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index e170713..da0df5f 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -135,17 +135,18 @@
// request an output appropriate for playback of the supplied stream type and parameters
virtual audio_io_handle_t getOutput(audio_stream_type_t stream) = 0;
virtual status_t getOutputForAttr(const audio_attributes_t *attr,
- audio_io_handle_t *output,
- audio_session_t session,
- audio_stream_type_t *stream,
- const AttributionSourceState& attributionSouce,
- const audio_config_t *config,
- audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
- audio_port_handle_t *portId,
- std::vector<audio_io_handle_t> *secondaryOutputs,
- output_type_t *outputType,
- bool *isSpatialized) = 0;
+ audio_io_handle_t *output,
+ audio_session_t session,
+ audio_stream_type_t *stream,
+ const AttributionSourceState& attributionSource,
+ audio_config_t *config,
+ audio_output_flags_t *flags,
+ audio_port_handle_t *selectedDeviceId,
+ audio_port_handle_t *portId,
+ std::vector<audio_io_handle_t> *secondaryOutputs,
+ output_type_t *outputType,
+ bool *isSpatialized,
+ bool *isBitPerfect) = 0;
// indicates to the audio policy manager that the output starts being used by corresponding
// stream.
virtual status_t startOutput(audio_port_handle_t portId) = 0;
@@ -160,8 +161,8 @@
audio_io_handle_t *input,
audio_unique_id_t riid,
audio_session_t session,
- const AttributionSourceState& attributionSouce,
- const audio_config_base_t *config,
+ const AttributionSourceState& attributionSource,
+ audio_config_base_t *config,
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
input_type_t *inputType,
@@ -302,6 +303,8 @@
virtual bool isUltrasoundSupported() = 0;
+ virtual bool isHotwordStreamSupported(bool lookbackAudio) = 0;
+
virtual status_t getHwOffloadFormatsSupportedForBluetoothMedia(
audio_devices_t device, std::vector<audio_format_t> *formats) = 0;
@@ -325,8 +328,11 @@
const AudioDeviceTypeAddrVector &devices) = 0;
virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
- device_role_t role) = 0;
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) = 0;
+ virtual status_t clearDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role) = 0;
virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
device_role_t role,
@@ -410,6 +416,20 @@
// retrieves the list of available direct audio profiles for the given audio attributes
virtual status_t getDirectProfilesForAttributes(const audio_attributes_t* attr,
AudioProfileVector& audioProfiles) = 0;
+
+ virtual status_t getSupportedMixerAttributes(
+ audio_port_handle_t portId, std::vector<audio_mixer_attributes_t>& mixerAttrs) = 0;
+ virtual status_t setPreferredMixerAttributes(
+ const audio_attributes_t* attr,
+ audio_port_handle_t portId,
+ uid_t uid,
+ const audio_mixer_attributes_t* mixerAttributes) = 0;
+ virtual status_t getPreferredMixerAttributes(const audio_attributes_t* attr,
+ audio_port_handle_t portId,
+ audio_mixer_attributes_t* mixerAttributes) = 0;
+ virtual status_t clearPreferredMixerAttributes(const audio_attributes_t* attr,
+ audio_port_handle_t portId,
+ uid_t uid) = 0;
};
// Audio Policy client Interface
@@ -482,9 +502,6 @@
virtual status_t setStreamVolume(audio_stream_type_t stream, float volume,
audio_io_handle_t output, int delayMs = 0) = 0;
- // invalidate a stream type, causing a reroute to an unspecified new output
- virtual status_t invalidateStream(audio_stream_type_t stream) = 0;
-
// function enabling to send proprietary informations directly from audio policy manager to
// audio hardware interface.
virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs,
@@ -555,6 +572,8 @@
virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port,
media::DeviceConnectedState state) = 0;
+
+ virtual status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) = 0;
};
// These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index f130f7c..fa3a5d3 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -16,9 +16,23 @@
"name": "CtsNativeMediaAAudioTestCases",
"options" : [
{
- "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__INPUT"
+ },
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_DEFAULT__OUTPUT"
+ },
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__INPUT"
+ },
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic_TestAAudioBasic_TestBasic_LOW_LATENCY__OUTPUT"
}
]
}
+ ],
+ "auto-presubmit": [
+ {
+ "name": "audiopolicy_tests"
+ }
]
}
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 3d3e0cf..d266e63 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -218,12 +218,15 @@
return *(deviceTypes.begin());
} else {
// Multiple device selection is either:
+ // - dock + one other device: give priority to dock in this case.
// - speaker + one other device: give priority to speaker in this case.
// - one A2DP device + another device: happens with duplicated output. In this case
// retain the device on the A2DP output as the other must not correspond to an active
// selection if not the speaker.
// - HDMI-CEC system audio mode only output: give priority to available item in order.
- if (deviceTypes.count(AUDIO_DEVICE_OUT_SPEAKER) != 0) {
+ if (deviceTypes.count(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET) != 0) {
+ return AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET;
+ } else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPEAKER) != 0) {
return AUDIO_DEVICE_OUT_SPEAKER;
} else if (deviceTypes.count(AUDIO_DEVICE_OUT_SPEAKER_SAFE) != 0) {
return AUDIO_DEVICE_OUT_SPEAKER_SAFE;
@@ -246,3 +249,16 @@
}
}
}
+
+/**
+ * Indicates if two given audio output flags are considered as matched, which means that
+ * 1) `supersetFlags` and `subsetFlags` either both contain or both omit the must-match flags, and
+ * 2) `supersetFlags` contains all flags from `subsetFlags`.
+ */
+static inline bool audio_output_flags_is_subset(audio_output_flags_t supersetFlags,
+ audio_output_flags_t subsetFlags,
+ uint32_t mustMatchFlags)
+{
+ return ((supersetFlags ^ subsetFlags) & mustMatchFlags) == AUDIO_OUTPUT_FLAG_NONE
+ && (supersetFlags & subsetFlags) == subsetFlags;
+}
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 972de02..8b76842 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -29,6 +29,7 @@
"src/HwModule.cpp",
"src/IOProfile.cpp",
"src/PolicyAudioPort.cpp",
+ "src/PreferredMixerAttributesInfo.cpp",
"src/Serializer.cpp",
"src/SoundTriggerSession.cpp",
"src/TypeConverter.cpp",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 0e4bc6c..876911d 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -444,6 +444,10 @@
void setTracksInvalidatedStatusByStrategy(product_strategy_t strategy);
+ bool isConfigurationMatched(const audio_config_base_t& config, audio_output_flags_t flags);
+
+ PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
+
const sp<IOProfile> mProfile; // I/O profile this output derives from
audio_io_handle_t mIoHandle; // output handle
uint32_t mLatency; //
@@ -454,6 +458,7 @@
audio_session_t mDirectClientSession; // session id of the direct output client
bool mPendingReopenToQueryProfiles = false;
audio_channel_mask_t mMixerChannelMask = AUDIO_CHANNEL_NONE;
+ bool mUsePreferredMixerAttributes = false;
};
// Audio output driven by an input device directly.
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 3b19e52..92292e1 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -62,23 +62,42 @@
void closeOutput(sp<SwAudioOutputDescriptor> &desc);
/**
- * Try to find an output descriptor for the given attributes.
+ * Tries to find the best matching audio policy mix
*
- * @param[in] attributes to consider fowr the research of output descriptor.
- * @param[out] desc to return if an primary output could be found.
- * @param[out] secondaryDesc other desc that the audio should be routed to.
+ * @param[in] attributes to consider for the search of the audio policy mix.
+ * @param[in] config to consider for the search of the audio policy mix.
+ * @param[in] uid to consider for the search of the audio policy mix.
+ * @param[in] session to consider for the search of the audio policy mix.
+ * @param[in] flags to consider for the search of the audio policy mix.
+ * @param[in] availableOutputDevices available output devices that can be used during the search
+ * of the audio policy mix
+ * @param[in] requestedDevice currently requested device that can be used to determine if the
+ * matching audio policy mix should be used instead of the currently set preferred device.
+ * @param[out] primaryMix to return in case a matching audio policy mix could be found.
+ * @param[out] secondaryMixes that audio should be routed to in case matching
+ * secondary mixes could be found.
+ * @param[out] usePrimaryOutputFromPolicyMixes to return in case the audio policy mix
+ * should be used, including the case where the requested device is explicitly disallowed
+ * by the audio policy.
+ *
* @return OK if the request is valid
* otherwise if the request is not supported
*/
status_t getOutputForAttr(const audio_attributes_t& attributes,
const audio_config_base_t& config,
- uid_t uid, audio_output_flags_t flags,
+ uid_t uid,
+ audio_session_t session,
+ audio_output_flags_t flags,
+ const DeviceVector &availableOutputDevices,
+ const sp<DeviceDescriptor>& requestedDevice,
sp<AudioPolicyMix> &primaryMix,
- std::vector<sp<AudioPolicyMix>> *secondaryMixes);
+ std::vector<sp<AudioPolicyMix>> *secondaryMixes,
+ bool& usePrimaryOutputFromPolicyMixes);
sp<DeviceDescriptor> getDeviceAndMixForInputSource(const audio_attributes_t& attributes,
const DeviceVector &availableDeviceTypes,
uid_t uid,
+ audio_session_t session,
sp<AudioPolicyMix> *policyMix) const;
/**
@@ -124,11 +143,20 @@
void dump(String8 *dst) const;
private:
- enum class MixMatchStatus { MATCH, NO_MATCH };
- MixMatchStatus mixMatch(const AudioMix* mix, size_t mixIndex,
+ bool mixMatch(const AudioMix* mix, size_t mixIndex,
const audio_attributes_t& attributes,
const audio_config_base_t& config,
- uid_t uid);
+ uid_t uid,
+ audio_session_t session);
+ bool mixDisallowsRequestedDevice(const AudioMix* mix,
+ const sp<DeviceDescriptor>& requestedDevice,
+ const sp<DeviceDescriptor>& mixDevice,
+ const uid_t uid);
+
+ sp<DeviceDescriptor> getOutputDeviceForMix(const AudioMix* mix,
+ const DeviceVector& availableOutputDevices);
};
+std::optional<std::string> extractAddressFromAudioAttributes(const audio_attributes_t& attr);
+
} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 4adc920..80e098b 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -183,6 +183,10 @@
return isSingleDeviceType(mDeviceTypes, deviceType);
}
+ bool onlyContainsDevice(const sp<DeviceDescriptor>& item) const {
+ return this->size() == 1 && contains(item);
+ }
+
bool contains(const sp<DeviceDescriptor>& item) const { return indexOf(item) >= 0; }
/**
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index 90b812d..c489eed 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -35,10 +35,7 @@
class IOProfile : public AudioPort, public PolicyAudioPort
{
public:
- IOProfile(const std::string &name, audio_port_role_t role)
- : AudioPort(name, AUDIO_PORT_TYPE_MIX, role),
- curOpenCount(0),
- curActiveCount(0) {}
+ IOProfile(const std::string &name, audio_port_role_t role);
virtual ~IOProfile() = default;
@@ -66,6 +63,17 @@
if (getRole() == AUDIO_PORT_ROLE_SINK && (flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
maxActiveCount = 0;
}
+ if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
+ mMixerBehaviors.clear();
+ mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_DEFAULT);
+ if (mFlags.output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+ mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_BIT_PERFECT);
+ }
+ }
+ }
+
+ const MixerBehaviorSet& getMixerBehaviors() const {
+ return mMixerBehaviors;
}
/**
@@ -97,6 +105,25 @@
uint32_t flags,
bool exactMatchRequiredForInputFlags = false) const;
+ /**
+ * @brief areAllDevicesSupported: Checks if the given devices are supported by the IO profile.
+ *
+ * @param devices vector of devices to be checked for compatibility
+ * @return true if all devices are supported, false otherwise.
+ */
+ bool areAllDevicesSupported(const DeviceVector &devices) const;
+
+ /**
+ * @brief isCompatibleProfileForFlags: Checks if the IO profile is compatible with
+ * specified flags.
+ *
+ * @param flags to be checked for compatibility
+ * @param exactMatchRequiredForInputFlags true if exact match is required on flags
+ * @return true if the profile is compatible, false otherwise.
+ */
+ bool isCompatibleProfileForFlags(uint32_t flags,
+ bool exactMatchRequiredForInputFlags = false) const;
+
void dump(String8 *dst, int spaces) const;
void log();
@@ -193,6 +220,8 @@
return false;
}
+ void toSupportedMixerAttributes(std::vector<audio_mixer_attributes_t>* mixerAttributes) const;
+
// Number of streams currently opened for this profile.
uint32_t curOpenCount;
// Number of streams currently active for this profile. This is not the number of active clients
@@ -201,6 +230,8 @@
private:
DeviceVector mSupportedDevices; // supported devices: this input/output can be routed from/to
+
+ MixerBehaviorSet mMixerBehaviors;
};
class InputProfile : public IOProfile
diff --git a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
new file mode 100644
index 0000000..9472481
--- /dev/null
+++ b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+
+#include <utils/RefBase.h>
+
+#include "AudioRoute.h"
+#include "HwModule.h"
+#include "IOProfile.h"
+
+namespace android {
+
+class PreferredMixerAttributesInfo : public RefBase {
+public:
+ PreferredMixerAttributesInfo(uid_t uid, audio_port_handle_t devicePortId,
+ const sp<IOProfile>& profile, audio_output_flags_t flags,
+ const audio_mixer_attributes_t& mixerAttributes)
+ : mDevicePortId(devicePortId), mUid(uid), mProfile(profile),
+ mOutputFlags(flags), mMixerAttributes(mixerAttributes) { }
+
+ audio_port_handle_t getDeviceId() const { return mDevicePortId; }
+ const audio_config_base_t& getConfigBase() const { return mMixerAttributes.config; }
+ uid_t getUid() const { return mUid; }
+ int getActiveClientCount() const { return mActiveClientsCount; }
+ const sp<IOProfile> getProfile() const { return mProfile; };
+ audio_output_flags_t getFlags() const { return mOutputFlags; }
+ const audio_mixer_attributes_t& getMixerAttributes() const { return mMixerAttributes; }
+
+ void increaseActiveClient() { mActiveClientsCount++; }
+ void decreaseActiveClient() { mActiveClientsCount--; }
+
+ void dump(String8 *dst);
+
+private:
+ const audio_port_handle_t mDevicePortId;
+ const uid_t mUid;
+ const sp<IOProfile> mProfile;
+ const audio_output_flags_t mOutputFlags;
+ const audio_mixer_attributes_t mMixerAttributes;
+ int mActiveClientsCount = 0;
+};
+
+} // namespace android
\ No newline at end of file
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 14c49ef..4877166 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -26,6 +26,7 @@
#include "Volume.h"
#include "HwModule.h"
#include "TypeConverter.h"
+#include "policy.h"
#include <media/AudioGain.h>
#include <media/AudioParameter.h>
#include <media/AudioPolicy.h>
@@ -322,7 +323,9 @@
mDirectClientSession(AUDIO_SESSION_NONE)
{
if (profile != NULL) {
- mFlags = (audio_output_flags_t)profile->getFlags();
+ // By default, opening the output without immutable flags, the bit-perfect flags should be
+ // applied when the apps explicitly request.
+ mFlags = (audio_output_flags_t)(profile->getFlags() & (~AUDIO_OUTPUT_FLAG_BIT_PERFECT));
}
}
@@ -332,6 +335,9 @@
if (extraInfo != nullptr) {
allExtraInfo.appendFormat("%s; ", extraInfo);
}
+ if (mProfile != nullptr) {
+ allExtraInfo.appendFormat("IOProfile name:%s; ", mProfile->getName().c_str());
+ }
std::string flagsLiteral = toString(mFlags);
allExtraInfo.appendFormat("Latency: %d; 0x%04x", mLatency, mFlags);
if (!flagsLiteral.empty()) {
@@ -948,6 +954,27 @@
return false;
}
+bool SwAudioOutputDescriptor::isConfigurationMatched(const audio_config_base_t &config,
+ audio_output_flags_t flags) {
+ const uint32_t mustMatchOutputFlags =
+ AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+ return audio_output_flags_is_subset(AudioOutputDescriptor::mFlags, flags, mustMatchOutputFlags)
+ && mSamplingRate == config.sample_rate
+ && mChannelMask == config.channel_mask
+ && mFormat == config.format;
+}
+
+PortHandleVector SwAudioOutputDescriptor::getClientsForStream(
+ audio_stream_type_t streamType) const {
+ PortHandleVector clientsForStream;
+ for (const auto& client : getClientIterable()) {
+ if (client->stream() == streamType) {
+ clientsForStream.push_back(client->portId());
+ }
+ }
+ return clientsForStream;
+}
+
void SwAudioOutputCollection::dump(String8 *dst) const
{
dst->appendFormat("\n Outputs (%zu):\n", size());
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index f5cf7e3..b41f86d 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -15,9 +15,12 @@
*/
#define LOG_TAG "APM_AudioPolicyMix"
-//#define LOG_NDEBUG 0
+// #define LOG_NDEBUG 0
#include <algorithm>
+#include <iterator>
+#include <optional>
+#include <regex>
#include "AudioPolicyMix.h"
#include "TypeConverter.h"
#include "HwModule.h"
@@ -28,13 +31,19 @@
namespace android {
namespace {
+bool matchAddressToTags(const audio_attributes_t& attr, const String8& addr) {
+ std::optional<std::string> tagAddress = extractAddressFromAudioAttributes(attr);
+ return tagAddress.has_value() && tagAddress->compare(addr.c_str()) == 0;
+}
+
// Returns true if the criterion matches.
// The exclude criteria are handled in the same way as positive
// ones - only condition is matched (the function will return
// same result both for RULE_MATCH_X and RULE_EXCLUDE_X).
bool isCriterionMatched(const AudioMixMatchCriterion& criterion,
const audio_attributes_t& attr,
- const uid_t uid) {
+ const uid_t uid,
+ const audio_session_t session) {
uint32_t ruleWithoutExclusion = criterion.mRule & ~RULE_EXCLUSION_MASK;
switch(ruleWithoutExclusion) {
case RULE_MATCH_ATTRIBUTE_USAGE:
@@ -48,6 +57,8 @@
userid_t userId = multiuser_get_user_id(uid);
return criterion.mValue.mUserId == userId;
}
+ case RULE_MATCH_AUDIO_SESSION_ID:
+ return criterion.mValue.mAudioSessionId == session;
}
ALOGE("Encountered invalid mix rule 0x%x", criterion.mRule);
return false;
@@ -60,10 +71,11 @@
// for the criteria to match.
bool areMixCriteriaMatched(const std::vector<AudioMixMatchCriterion>& criteria,
const audio_attributes_t& attr,
- const uid_t uid) {
+ const uid_t uid,
+ const audio_session_t session) {
// If any of the exclusion criteria are matched the mix doesn't match.
auto isMatchingExcludeCriterion = [&](const AudioMixMatchCriterion& c) {
- return c.isExcludeCriterion() && isCriterionMatched(c, attr, uid);
+ return c.isExcludeCriterion() && isCriterionMatched(c, attr, uid, session);
};
if (std::any_of(criteria.begin(), criteria.end(), isMatchingExcludeCriterion)) {
return false;
@@ -76,7 +88,7 @@
continue;
}
presentPositiveRules |= criterion.mRule;
- if (isCriterionMatched(criterion, attr, uid)) {
+ if (isCriterionMatched(criterion, attr, uid, session)) {
matchedPositiveRules |= criterion.mRule;
}
}
@@ -150,6 +162,9 @@
case RULE_MATCH_USERID:
ruleValue = std::to_string(criterion.mValue.mUserId);
break;
+ case RULE_MATCH_AUDIO_SESSION_ID:
+ ruleValue = std::to_string(criterion.mValue.mAudioSessionId);
+ break;
default:
unknownRule = true;
}
@@ -168,7 +183,8 @@
for (size_t i = 0; i < size(); i++) {
const sp<AudioPolicyMix>& registeredMix = itemAt(i);
if (mix.mDeviceType == registeredMix->mDeviceType
- && mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0) {
+ && mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0
+ && is_mix_loopback(mix.mRouteFlags)) {
ALOGE("registerMix(): mix already registered for dev=0x%x addr=%s",
mix.mDeviceType, mix.mDeviceAddress.c_str());
return BAD_VALUE;
@@ -241,25 +257,26 @@
}
status_t AudioPolicyMixCollection::getOutputForAttr(
- const audio_attributes_t& attributes, const audio_config_base_t& config, uid_t uid,
+ const audio_attributes_t& attributes, const audio_config_base_t& config, const uid_t uid,
+ const audio_session_t session,
audio_output_flags_t flags,
+ const DeviceVector &availableOutputDevices,
+ const sp<DeviceDescriptor>& requestedDevice,
sp<AudioPolicyMix> &primaryMix,
- std::vector<sp<AudioPolicyMix>> *secondaryMixes)
+ std::vector<sp<AudioPolicyMix>> *secondaryMixes,
+ bool& usePrimaryOutputFromPolicyMixes)
{
ALOGV("getOutputForAttr() querying %zu mixes:", size());
primaryMix.clear();
+ bool mixesDisallowsRequestedDevice = false;
for (size_t i = 0; i < size(); i++) {
sp<AudioPolicyMix> policyMix = itemAt(i);
const bool primaryOutputMix = !is_mix_loopback_render(policyMix->mRouteFlags);
- if (!primaryOutputMix && (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)) {
- // AAudio does not support MMAP_NO_IRQ loopback render, and there is no way with
- // the current MmapStreamInterface::start to reject a specific client added to a shared
- // mmap stream.
- // As a result all MMAP_NOIRQ requests have to be rejected when an loopback render
- // policy is present. That ensures no shared mmap stream is used when an loopback
- // render policy is registered.
- ALOGD("%s: Rejecting MMAP_NOIRQ request due to LOOPBACK|RENDER mix present.", __func__);
- return INVALID_OPERATION;
+ sp<DeviceDescriptor> mixDevice = getOutputDeviceForMix(policyMix.get(),
+ availableOutputDevices);
+ if (mixDisallowsRequestedDevice(policyMix.get(), requestedDevice, mixDevice, uid)) {
+ ALOGV("%s: Mix %zu: does not allows device", __func__, i);
+ mixesDisallowsRequestedDevice = true;
}
if (primaryOutputMix && primaryMix != nullptr) {
@@ -267,11 +284,24 @@
continue; // Primary output already found
}
- if(mixMatch(policyMix.get(), i, attributes, config, uid) == MixMatchStatus::NO_MATCH) {
+ if(!mixMatch(policyMix.get(), i, attributes, config, uid, session)) {
ALOGV("%s: Mix %zu: does not match", __func__, i);
continue; // skip the mix
}
+ if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) && is_mix_loopback(policyMix->mRouteFlags)) {
+ // AAudio MMAP_NOIRQ streams cannot be routed to loopback/loopback+render
+ // using dynamic audio policy.
+ ALOGD("%s: Rejecting MMAP_NOIRQ request matched to loopback dynamic audio policy mix.",
+ __func__);
+ return INVALID_OPERATION;
+ }
+
+ if (mixDevice != nullptr && mixDevice->equals(requestedDevice)) {
+ ALOGV("%s: Mix %zu: requested device mathches", __func__, i);
+ mixesDisallowsRequestedDevice = false;
+ }
+
if (primaryOutputMix) {
primaryMix = policyMix;
ALOGV("%s: Mix %zu: set primary desc", __func__, i);
@@ -282,12 +312,39 @@
}
}
}
+
+ // Explicit routing is higher priority than dynamic policy primary output, but policy may
+ // explicitly deny it
+ usePrimaryOutputFromPolicyMixes =
+ (mixesDisallowsRequestedDevice || requestedDevice == nullptr) && primaryMix != nullptr;
+
return NO_ERROR;
}
-AudioPolicyMixCollection::MixMatchStatus AudioPolicyMixCollection::mixMatch(
- const AudioMix* mix, size_t mixIndex, const audio_attributes_t& attributes,
- const audio_config_base_t& config, uid_t uid) {
+sp<DeviceDescriptor> AudioPolicyMixCollection::getOutputDeviceForMix(const AudioMix* mix,
+ const DeviceVector& availableOutputDevices) {
+ ALOGV("%s: device (0x%x, addr=%s) forced by mix", __func__, mix->mDeviceType,
+ mix->mDeviceAddress.c_str());
+ return availableOutputDevices.getDevice(mix->mDeviceType, mix->mDeviceAddress,
+ AUDIO_FORMAT_DEFAULT);
+}
+
+bool AudioPolicyMixCollection::mixDisallowsRequestedDevice(const AudioMix* mix,
+ const sp<DeviceDescriptor>& requestedDevice,
+ const sp<DeviceDescriptor>& mixDevice,
+ const uid_t uid) {
+ if (requestedDevice == nullptr || mixDevice == nullptr) {
+ return false;
+ }
+
+ return is_mix_disallows_preferred_device(mix->mRouteFlags)
+ && requestedDevice->equals(mixDevice)
+ && mix->hasUserIdRule(false /* match */, multiuser_get_user_id(uid));
+}
+
+bool AudioPolicyMixCollection::mixMatch(const AudioMix* mix, size_t mixIndex,
+ const audio_attributes_t& attributes, const audio_config_base_t& config,
+ uid_t uid, audio_session_t session) {
if (mix->mMixType == MIX_TYPE_PLAYERS) {
// Loopback render mixes are created from a public API and thus restricted
@@ -297,20 +354,20 @@
attributes.usage == AUDIO_USAGE_MEDIA ||
attributes.usage == AUDIO_USAGE_GAME ||
attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION)) {
- return MixMatchStatus::NO_MATCH;
+ return false;
}
auto hasFlag = [](auto flags, auto flag) { return (flags & flag) == flag; };
if (hasFlag(attributes.flags, AUDIO_FLAG_NO_SYSTEM_CAPTURE)) {
- return MixMatchStatus::NO_MATCH;
+ return false;
}
if (attributes.usage == AUDIO_USAGE_VOICE_COMMUNICATION) {
if (!mix->mVoiceCommunicationCaptureAllowed) {
- return MixMatchStatus::NO_MATCH;
+ return false;
}
} else if (!mix->mAllowPrivilegedMediaPlaybackCapture &&
hasFlag(attributes.flags, AUDIO_FLAG_NO_MEDIA_PROJECTION)) {
- return MixMatchStatus::NO_MATCH;
+ return false;
}
}
@@ -320,32 +377,22 @@
!((audio_is_linear_pcm(config.format) && audio_is_linear_pcm(mix->mFormat.format)) ||
(config.format == mix->mFormat.format)) &&
config.format != AUDIO_CONFIG_BASE_INITIALIZER.format) {
- return MixMatchStatus::NO_MATCH;
+ return false;
}
// if there is an address match, prioritize that match
- if (strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
- strncmp(attributes.tags + strlen("addr="),
- mix->mDeviceAddress.c_str(),
- AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
- ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
- return MixMatchStatus::MATCH;
- }
-
- if (areMixCriteriaMatched(mix->mCriteria, attributes, uid)) {
- ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
- return MixMatchStatus::MATCH;
+ if (matchAddressToTags(attributes, mix->mDeviceAddress)
+ || areMixCriteriaMatched(mix->mCriteria, attributes, uid, session)) {
+ ALOGV("\tgetOutputForAttr will use mix %zu", mixIndex);
+ return true;
}
} else if (mix->mMixType == MIX_TYPE_RECORDERS) {
if (attributes.usage == AUDIO_USAGE_VIRTUAL_SOURCE &&
- strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 &&
- strncmp(attributes.tags + strlen("addr="),
- mix->mDeviceAddress.c_str(),
- AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) {
- return MixMatchStatus::MATCH;
+ matchAddressToTags(attributes, mix->mDeviceAddress)) {
+ return true;
}
}
- return MixMatchStatus::NO_MATCH;
+ return false;
}
sp<DeviceDescriptor> AudioPolicyMixCollection::getDeviceAndMixForOutput(
@@ -355,11 +402,7 @@
for (size_t i = 0; i < size(); i++) {
if (itemAt(i)->getOutput() == output) {
// This Desc is involved in a Mix, which has the highest prio
- audio_devices_t deviceType = itemAt(i)->mDeviceType;
- String8 address = itemAt(i)->mDeviceAddress;
- ALOGV("%s: device (0x%x, addr=%s) forced by mix",
- __FUNCTION__, deviceType, address.c_str());
- return availableOutputDevices.getDevice(deviceType, address, AUDIO_FORMAT_DEFAULT);
+ return getOutputDeviceForMix(itemAt(i).get(), availableOutputDevices);
}
}
return nullptr;
@@ -369,6 +412,7 @@
const audio_attributes_t& attributes,
const DeviceVector &availDevices,
uid_t uid,
+ audio_session_t session,
sp<AudioPolicyMix> *policyMix) const
{
for (size_t i = 0; i < size(); i++) {
@@ -376,7 +420,7 @@
if (mix->mMixType != MIX_TYPE_RECORDERS) {
continue;
}
- if (areMixCriteriaMatched(mix->mCriteria, attributes, uid)) {
+ if (areMixCriteriaMatched(mix->mCriteria, attributes, uid, session)) {
// Assuming PolicyMix only for remote submix for input
// so mix->mDeviceType can only be AUDIO_DEVICE_OUT_REMOTE_SUBMIX.
auto mixDevice = availDevices.getDevice(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
@@ -395,14 +439,14 @@
status_t AudioPolicyMixCollection::getInputMixForAttr(
audio_attributes_t attr, sp<AudioPolicyMix> *policyMix)
{
- if (strncmp(attr.tags, "addr=", strlen("addr=")) != 0) {
+ std::optional<std::string> address = extractAddressFromAudioAttributes(attr);
+ if (!address.has_value()) {
return BAD_VALUE;
}
- String8 address(attr.tags + strlen("addr="));
#ifdef LOG_NDEBUG
ALOGV("getInputMixForAttr looking for address %s for source %d\n mixes available:",
- address.c_str(), attr.source);
+ address->c_str(), attr.source);
for (size_t i = 0; i < size(); i++) {
const sp<AudioPolicyMix> audioPolicyMix = itemAt(i);
ALOGV("\tmix %zu address=%s", i, audioPolicyMix->mDeviceAddress.c_str());
@@ -412,20 +456,20 @@
size_t index;
for (index = 0; index < size(); index++) {
const sp<AudioPolicyMix>& registeredMix = itemAt(index);
- if (registeredMix->mDeviceAddress.compare(address) == 0) {
+ if (address->compare(registeredMix->mDeviceAddress.c_str()) == 0) {
ALOGD("getInputMixForAttr found addr=%s dev=0x%x",
registeredMix->mDeviceAddress.c_str(), registeredMix->mDeviceType);
break;
}
}
if (index == size()) {
- ALOGW("getInputMixForAttr() no policy for address %s", address.c_str());
+ ALOGW("getInputMixForAttr() no policy for address %s", address->c_str());
return BAD_VALUE;
}
const sp<AudioPolicyMix> audioPolicyMix = itemAt(index);
if (audioPolicyMix->mMixType != MIX_TYPE_PLAYERS) {
- ALOGW("getInputMixForAttr() bad policy mix type for address %s", address.c_str());
+ ALOGW("getInputMixForAttr() bad policy mix type for address %s", address->c_str());
return BAD_VALUE;
}
if (policyMix != nullptr) {
@@ -561,13 +605,14 @@
break;
}
}
- if (!deviceMatch && !mix->hasMatchUserIdRule()) {
+ if (!deviceMatch && !mix->hasUserIdRule(true /*match*/)) {
// this mix doesn't go to one of the listed devices for the given userId,
// and it's not already restricting the mix on a userId,
// modify its rules to exclude the userId
- if (!mix->hasUserIdRule(false /*match*/, userId)) {
+ if (!mix->hasUserIdRule(false /* match */, userId)) {
// no need to do it again if userId is already excluded
mix->setExcludeUserId(userId);
+ mix->mRouteFlags = mix->mRouteFlags | MIX_ROUTE_FLAG_DISALLOWS_PREFERRED_DEVICE;
}
}
}
@@ -588,6 +633,10 @@
EraseCriteriaIf(mix->mCriteria, [userId](const AudioMixMatchCriterion& c) {
return c.mRule == RULE_EXCLUDE_USERID && c.mValue.mUserId == userId;
});
+
+ if (!mix->hasUserIdRule(false /* match */)) {
+ mix->mRouteFlags = mix->mRouteFlags & ~MIX_ROUTE_FLAG_DISALLOWS_PREFERRED_DEVICE;
+ }
}
return NO_ERROR;
}
@@ -625,4 +674,14 @@
}
}
+std::optional<std::string> extractAddressFromAudioAttributes(const audio_attributes_t& attr) {
+ static const std::regex addrTagRegex("addr=([^;]+)");
+
+ std::cmatch match;
+ if (std::regex_search(attr.tags, match, addrTagRegex)) {
+ return match[1].str();
+ }
+ return std::nullopt;
+}
+
}; //namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 2cbdeaa..03ab3f8 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -24,6 +24,15 @@
namespace android {
+IOProfile::IOProfile(const std::string &name, audio_port_role_t role)
+ : AudioPort(name, AUDIO_PORT_TYPE_MIX, role),
+ curOpenCount(0),
+ curActiveCount(0) {
+ if (role == AUDIO_PORT_ROLE_SOURCE) {
+ mMixerBehaviors.insert(AUDIO_MIXER_BEHAVIOR_DEFAULT);
+ }
+}
+
bool IOProfile::isCompatibleProfile(const DeviceVector &devices,
uint32_t samplingRate,
uint32_t *updatedSamplingRate,
@@ -40,11 +49,9 @@
const bool isRecordThread =
getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
ALOG_ASSERT(isPlaybackThread != isRecordThread);
-
- if (!devices.isEmpty()) {
- if (!mSupportedDevices.containsAllDevices(devices)) {
- return false;
- }
+ if (!areAllDevicesSupported(devices) ||
+ !isCompatibleProfileForFlags(flags, exactMatchRequiredForInputFlags)) {
+ return false;
}
if (!audio_is_valid_format(format) ||
@@ -78,21 +85,6 @@
}
}
- const uint32_t mustMatchOutputFlags =
- AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
- if (isPlaybackThread && (((getFlags() ^ flags) & mustMatchOutputFlags)
- || (getFlags() & flags) != flags)) {
- return false;
- }
- // The only input flag that is allowed to be different is the fast flag.
- // An existing fast stream is compatible with a normal track request.
- // An existing normal stream is compatible with a fast track request,
- // but the fast request will be denied by AudioFlinger and converted to normal track.
- if (isRecordThread && ((getFlags() ^ flags) &
- ~(exactMatchRequiredForInputFlags ? AUDIO_INPUT_FLAG_NONE : AUDIO_INPUT_FLAG_FAST))) {
- return false;
- }
-
if (updatedSamplingRate != NULL) {
*updatedSamplingRate = myUpdatedSamplingRate;
}
@@ -105,6 +97,41 @@
return true;
}
+bool IOProfile::areAllDevicesSupported(const DeviceVector &devices) const {
+ if (devices.empty()) {
+ return true;
+ }
+ return mSupportedDevices.containsAllDevices(devices);
+}
+
+bool IOProfile::isCompatibleProfileForFlags(uint32_t flags,
+ bool exactMatchRequiredForInputFlags) const {
+ const bool isPlaybackThread =
+ getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
+ const bool isRecordThread =
+ getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
+ ALOG_ASSERT(isPlaybackThread != isRecordThread);
+
+ const uint32_t mustMatchOutputFlags =
+ AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+ if (isPlaybackThread &&
+ !audio_output_flags_is_subset((audio_output_flags_t)getFlags(),
+ (audio_output_flags_t)flags,
+ mustMatchOutputFlags)) {
+ return false;
+ }
+ // The only input flag that is allowed to be different is the fast flag.
+ // An existing fast stream is compatible with a normal track request.
+ // An existing normal stream is compatible with a fast track request,
+ // but the fast request will be denied by AudioFlinger and converted to normal track.
+ if (isRecordThread && ((getFlags() ^ flags) &
+ ~(exactMatchRequiredForInputFlags ? AUDIO_INPUT_FLAG_NONE : AUDIO_INPUT_FLAG_FAST))) {
+ return false;
+ }
+
+ return true;
+}
+
bool IOProfile::containsSingleDeviceSupportingEncodedFormats(
const sp<DeviceDescriptor>& device) const {
if (device == nullptr) {
@@ -116,6 +143,34 @@
return device == deviceDesc && deviceDesc->hasCurrentEncodedFormat(); }) == 1;
}
+void IOProfile::toSupportedMixerAttributes(
+ std::vector<audio_mixer_attributes_t> *mixerAttributes) const {
+ if (!hasDynamicAudioProfile()) {
+ // The mixer attributes are only supported when there is a dynamic profile.
+ return;
+ }
+ for (const auto& profile : mProfiles) {
+ if (!profile->isValid()) {
+ continue;
+ }
+ for (const auto sampleRate : profile->getSampleRates()) {
+ for (const auto channelMask : profile->getChannels()) {
+ const audio_config_base_t config = {
+ .format = profile->getFormat(),
+ .sample_rate = sampleRate,
+ .channel_mask = channelMask
+ };
+ for (const auto mixerBehavior : mMixerBehaviors) {
+ mixerAttributes->push_back({
+ .config = config,
+ .mixer_behavior = mixerBehavior
+ });
+ }
+ }
+ }
+ }
+}
+
void IOProfile::dump(String8 *dst, int spaces) const
{
String8 extraInfo;
diff --git a/services/audiopolicy/common/managerdefinitions/src/PreferredMixerAttributesInfo.cpp b/services/audiopolicy/common/managerdefinitions/src/PreferredMixerAttributesInfo.cpp
new file mode 100644
index 0000000..edb2c6d
--- /dev/null
+++ b/services/audiopolicy/common/managerdefinitions/src/PreferredMixerAttributesInfo.cpp
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PreferredMixerAttributesInfo.h"
+
+namespace android {
+
+void PreferredMixerAttributesInfo::dump(String8 *dst) {
+ dst->appendFormat("device port ID: %d; owner uid: %d; profile name: %s; flags: %#x; "
+ "sample rate: %u; channel mask: %#x; format: %#x; mixer behavior: %d; "
+ "active clients count: %d\n",
+ mDevicePortId, mUid, mProfile->getName().c_str(), mOutputFlags,
+ mMixerAttributes.config.sample_rate, mMixerAttributes.config.channel_mask,
+ mMixerAttributes.config.format, mMixerAttributes.mixer_behavior,
+ mActiveClientsCount);
+}
+
+} // namespace android
\ No newline at end of file
diff --git a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
index c5b3546..8a44547 100644
--- a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
@@ -56,10 +56,12 @@
MAKE_STRING_FROM_ENUM(RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET),
MAKE_STRING_FROM_ENUM(RULE_MATCH_UID),
MAKE_STRING_FROM_ENUM(RULE_MATCH_USERID),
+ MAKE_STRING_FROM_ENUM(RULE_MATCH_AUDIO_SESSION_ID),
MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_USAGE),
MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET),
MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_UID),
MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_USERID),
+ MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_AUDIO_SESSION_ID),
TERMINATOR
};
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index 5f4080e..b9c94a4 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -108,7 +108,10 @@
status_t setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
const AudioDeviceTypeAddrVector &devices) override;
- status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) override;
+ status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) override;
+
+ status_t clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) override;
status_t getDevicesForRoleAndStrategy(product_strategy_t strategy, device_role_t role,
AudioDeviceTypeAddrVector &devices) const override;
@@ -172,6 +175,12 @@
void updateDeviceSelectionCache() override;
+protected:
+ DeviceVector getPreferredAvailableDevicesForProductStrategy(
+ const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
+ DeviceVector getDisabledDevicesForProductStrategy(
+ const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
+
private:
engineConfig::ParsingResult processParsingResult(engineConfig::ParsingResult&& rawResult);
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index adb1ca3..218aff8 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -392,13 +392,11 @@
}
switch (role) {
- case DEVICE_ROLE_PREFERRED:
- case DEVICE_ROLE_DISABLED: {
+ case DEVICE_ROLE_PREFERRED: {
tDevicesRoleMap[std::make_pair(t, role)] = devices;
// The preferred devices and disabled devices are mutually exclusive. Once a device is added
// the a list, it must be removed from the other one.
- const device_role_t roleToRemove = role == DEVICE_ROLE_PREFERRED ? DEVICE_ROLE_DISABLED
- : DEVICE_ROLE_PREFERRED;
+ const device_role_t roleToRemove = DEVICE_ROLE_DISABLED;
auto it = tDevicesRoleMap.find(std::make_pair(t, roleToRemove));
if (it != tDevicesRoleMap.end()) {
it->second = excludeDeviceTypeAddrsFrom(it->second, devices);
@@ -407,6 +405,25 @@
}
}
} break;
+ case DEVICE_ROLE_DISABLED: {
+ auto it = tDevicesRoleMap.find(std::make_pair(t, role));
+ if (it != tDevicesRoleMap.end()) {
+ it->second = joinDeviceTypeAddrs(it->second, devices);
+ } else {
+ tDevicesRoleMap[std::make_pair(t, role)] = devices;
+ }
+
+ // The preferred devices and disabled devices are mutually exclusive. Once a device is added
+ // to a list, it must be removed from the other one.
+ const device_role_t roleToRemove = DEVICE_ROLE_PREFERRED;
+ it = tDevicesRoleMap.find(std::make_pair(t, roleToRemove));
+ if (it != tDevicesRoleMap.end()) {
+ it->second = excludeDeviceTypeAddrsFrom(it->second, devices);
+ if (it->second.empty()) {
+ tDevicesRoleMap.erase(it);
+ }
+ }
+ } break;
case DEVICE_ROLE_NONE:
// Intentionally fall-through as it is no need to set device role as none for a strategy.
default:
@@ -417,6 +434,36 @@
}
template <typename T>
+status_t removeDevicesRoleForT(
+ std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
+ T t, device_role_t role, const AudioDeviceTypeAddrVector &devices,
+ const std::string& logStr, std::function<bool(T)> p) {
+ if (!p(t)) {
+ ALOGE("%s invalid %s %u", __func__, logStr.c_str(), t);
+ return BAD_VALUE;
+ }
+
+ switch (role) {
+ case DEVICE_ROLE_PREFERRED:
+ case DEVICE_ROLE_DISABLED: {
+ auto it = tDevicesRoleMap.find(std::make_pair(t, role));
+ if (it != tDevicesRoleMap.end()) {
+ it->second = excludeDeviceTypeAddrsFrom(it->second, devices);
+ if (it->second.empty()) {
+ tDevicesRoleMap.erase(it);
+ }
+ }
+ } break;
+ case DEVICE_ROLE_NONE:
+ // Intentionally fall-through as it is not needed to set device role as none for a strategy.
+ default:
+ ALOGE("%s invalid role %d", __func__, role);
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+}
+
+template <typename T>
status_t removeAllDevicesRoleForT(
std::map<std::pair<T, device_role_t>, AudioDeviceTypeAddrVector>& tDevicesRoleMap,
T t, device_role_t role, const std::string& logStr, std::function<bool(T)> p) {
@@ -485,7 +532,18 @@
mProductStrategyDeviceRoleMap, strategy, role, devices, "strategy" /*logStr*/, p);
}
-status_t EngineBase::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
+status_t EngineBase::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices)
+{
+ std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
+ return mProductStrategies.find(strategy) != mProductStrategies.end();
+ };
+ return removeDevicesRoleForT(
+ mProductStrategyDeviceRoleMap, strategy, role, devices, "strategy" /*logStr*/, p);
+}
+
+status_t EngineBase::clearDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role)
{
std::function<bool(product_strategy_t)> p = [this](product_strategy_t strategy) {
return mProductStrategies.find(strategy) != mProductStrategies.end();
@@ -670,6 +728,38 @@
}
}
+DeviceVector EngineBase::getPreferredAvailableDevicesForProductStrategy(
+ const DeviceVector& availableOutputDevices, product_strategy_t strategy) const {
+ DeviceVector preferredAvailableDevVec = {};
+ AudioDeviceTypeAddrVector preferredStrategyDevices;
+ const status_t status = getDevicesForRoleAndStrategy(
+ strategy, DEVICE_ROLE_PREFERRED, preferredStrategyDevices);
+ if (status == NO_ERROR) {
+ // there is a preferred device, is it available?
+ preferredAvailableDevVec =
+ availableOutputDevices.getDevicesFromDeviceTypeAddrVec(preferredStrategyDevices);
+ if (preferredAvailableDevVec.size() == preferredStrategyDevices.size()) {
+ ALOGV("%s using pref device %s for strategy %u",
+ __func__, preferredAvailableDevVec.toString().c_str(), strategy);
+ return preferredAvailableDevVec;
+ }
+ }
+ return preferredAvailableDevVec;
+}
+
+DeviceVector EngineBase::getDisabledDevicesForProductStrategy(
+ const DeviceVector &availableOutputDevices, product_strategy_t strategy) const {
+ DeviceVector disabledDevices = {};
+ AudioDeviceTypeAddrVector disabledDevicesTypeAddr;
+ const status_t status = getDevicesForRoleAndStrategy(
+ strategy, DEVICE_ROLE_DISABLED, disabledDevicesTypeAddr);
+ if (status == NO_ERROR) {
+ disabledDevices =
+ availableOutputDevices.getDevicesFromDeviceTypeAddrVec(disabledDevicesTypeAddr);
+ }
+ return disabledDevices;
+}
+
void EngineBase::dumpCapturePresetDevicesRoleMap(String8 *dst, int spaces) const
{
dst->appendFormat("\n%*sDevice role per capture preset dump:", spaces, "");
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index f132ced..548a20d 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -140,7 +140,7 @@
* For compatibility reason why apm volume config file, volume group name is the stream type.
*/
const engineConfig::ProductStrategies gOrderedSystemStrategies = {
- {"rerouting",
+ {"STRATEGY_REROUTING",
{
{AUDIO_STREAM_REROUTING, "AUDIO_STREAM_REROUTING",
{{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VIRTUAL_SOURCE, AUDIO_SOURCE_DEFAULT,
@@ -148,7 +148,7 @@
}
},
},
- {"patch",
+ {"STRATEGY_PATCH",
{
{AUDIO_STREAM_PATCH, "AUDIO_STREAM_PATCH",
{{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index 5c37409..70461ad 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -190,10 +190,11 @@
* @param[out] mix to be used if a mix has been installed for the given audio attributes.
* @return selected input device for the audio attributes, may be null if error.
*/
- virtual sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
- uid_t uid = 0,
- sp<AudioPolicyMix> *mix = nullptr)
- const = 0;
+ virtual sp<DeviceDescriptor> getInputDeviceForAttributes(
+ const audio_attributes_t &attr,
+ uid_t uid = 0,
+ audio_session_t session = AUDIO_SESSION_NONE,
+ sp<AudioPolicyMix> *mix = nullptr) const = 0;
/**
* Get the legacy stream type for a given audio attributes.
@@ -342,10 +343,22 @@
* for the given strategy
* @param strategy the audio strategy whose routing will be affected
* @param role the role of the devices for strategy
+ * @param devices the audio devices to be removed
* @return BAD_VALUE if the strategy or role is invalid,
* or NO_ERROR if the devices for this role was removed
*/
- virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
+ virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) = 0;
+
+ /**
+ * @brief clearDevicesRoleForStrategy removes the role of all devices previously set
+ * for the given strategy
+ * @param strategy the audio strategy whose routing will be affected
+ * @param role the role of the devices for strategy
+ * @return BAD_VALUE if the strategy or role is invalid,
+ * or NO_ERROR if the devices for this role was removed
+ */
+ virtual status_t clearDevicesRoleForStrategy(product_strategy_t strategy,
device_role_t role) = 0;
/**
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index f07ce82..ccd4316 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -170,6 +170,21 @@
return mPolicyParameterMgr->getForceUse(usage);
}
+status_t Engine::setOutputDevicesConnectionState(const DeviceVector &devices,
+ audio_policy_dev_state_t state)
+{
+ for (const auto &device : devices) {
+ mPolicyParameterMgr->setDeviceConnectionState(device->type(), device->address(), state);
+ }
+ DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+ if (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
+ availableOutputDevices.remove(devices);
+ } else {
+ availableOutputDevices.add(devices);
+ }
+ return mPolicyParameterMgr->setAvailableOutputDevices(availableOutputDevices.types());
+}
+
status_t Engine::setDeviceConnectionState(const sp<DeviceDescriptor> device,
audio_policy_dev_state_t state)
{
@@ -210,17 +225,126 @@
return result.nbSkippedElement == 0? NO_ERROR : BAD_VALUE;
}
+status_t Engine::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices)
+{
+ DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+ DeviceVector prevDisabledDevices =
+ getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+ status_t status = EngineBase::setDevicesRoleForStrategy(strategy, role, devices);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ DeviceVector newDisabledDevices =
+ getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+ if (role == DEVICE_ROLE_PREFERRED) {
+ DeviceVector reenabledDevices = prevDisabledDevices;
+ reenabledDevices.remove(newDisabledDevices);
+ if (reenabledDevices.empty()) {
+ ALOGD("%s DEVICE_ROLE_PREFERRED empty renabled devices", __func__);
+ return status;
+ }
+ // some devices were moved from disabled to preferred, need to force a resync for these
+ enableDevicesForStrategy(strategy, prevDisabledDevices);
+ }
+ if (newDisabledDevices.empty()) {
+ return status;
+ }
+ return disableDevicesForStrategy(strategy, newDisabledDevices);
+}
+
+status_t Engine::removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices)
+{
+ const auto productStrategies = getProductStrategies();
+ if (productStrategies.find(strategy) == end(productStrategies)) {
+ ALOGE("%s invalid %d", __func__, strategy);
+ return BAD_VALUE;
+ }
+ DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+ DeviceVector prevDisabledDevices =
+ getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+ status_t status = EngineBase::removeDevicesRoleForStrategy(strategy, role, devices);
+ if (status != NO_ERROR || role == DEVICE_ROLE_PREFERRED) {
+ return status;
+ }
+ // Removing ROLE_DISABLED for given devices, need to force a resync for these
+ enableDevicesForStrategy(strategy, prevDisabledDevices);
+
+ DeviceVector remainingDisabledDevices = getDisabledDevicesForProductStrategy(
+ availableOutputDevices, strategy);
+ if (remainingDisabledDevices.empty()) {
+ return status;
+ }
+ return disableDevicesForStrategy(strategy, remainingDisabledDevices);
+}
+
+status_t Engine::clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role)
+{
+ const auto productStrategies = getProductStrategies();
+ if (productStrategies.find(strategy) == end(productStrategies)) {
+ ALOGE("%s invalid %d", __func__, strategy);
+ return BAD_VALUE;
+ }
+ DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+ DeviceVector prevDisabledDevices =
+ getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+ status_t status = EngineBase::clearDevicesRoleForStrategy(strategy, role);
+ if (status != NO_ERROR || role == DEVICE_ROLE_PREFERRED || prevDisabledDevices.empty()) {
+ return status;
+ }
+ // Disabled devices were removed, need to force a resync for these
+ enableDevicesForStrategy(strategy, prevDisabledDevices);
+ return NO_ERROR;
+}
+
+void Engine::enableDevicesForStrategy(product_strategy_t strategy __unused,
+ const DeviceVector &devicesToEnable) {
+ // devices were (re)enabled, need to force a resync for these
+ setOutputDevicesConnectionState(devicesToEnable, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+ setOutputDevicesConnectionState(devicesToEnable, AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+}
+
+status_t Engine::disableDevicesForStrategy(product_strategy_t strategy,
+ const DeviceVector &devicesToDisable) {
+ // Filter out disabled devices for this strategy.
+ // However, to update the output device decision, availability criterion shall be updated,
+ // which may impact other strategies. So, as a workaround, reconsider now and later to
+ // prevent altering the decision for other strategies.
+ setOutputDevicesConnectionState(devicesToDisable, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+
+ DeviceTypeSet deviceTypes = getProductStrategies().getDeviceTypesForProductStrategy(strategy);
+ const std::string address(getProductStrategies().getDeviceAddressForProductStrategy(strategy));
+
+ setOutputDevicesConnectionState(devicesToDisable, AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+
+ // Force reapply devices for given strategy
+ getProductStrategies().at(strategy)->setDeviceTypes(deviceTypes);
+ setDeviceAddressForProductStrategy(strategy, address);
+ return NO_ERROR;
+}
+
DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t ps) const
{
+ DeviceVector selectedDevices = {};
+ DeviceVector disabledDevices = {};
const auto productStrategies = getProductStrategies();
if (productStrategies.find(ps) == productStrategies.end()) {
ALOGE("%s: Trying to get device on invalid strategy %d", __FUNCTION__, ps);
- return {};
+ return selectedDevices;
}
- const DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
+ DeviceVector availableOutputDevices = getApmObserver()->getAvailableOutputDevices();
const SwAudioOutputCollection &outputs = getApmObserver()->getOutputs();
DeviceTypeSet availableOutputDevicesTypes = availableOutputDevices.types();
+ // check if this strategy has a preferred device that is available,
+ // if yes, give priority to it.
+ DeviceVector preferredAvailableDevVec =
+ getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, ps);
+ if (!preferredAvailableDevVec.isEmpty()) {
+ return preferredAvailableDevVec;
+ }
+
/** This is the only case handled programmatically because the PFW is unable to know the
* activity of streams.
*
@@ -232,33 +356,34 @@
* -When media is not playing anymore, fall back on the sonification behavior
*/
DeviceTypeSet deviceTypes;
+ product_strategy_t psOrFallback = ps;
if (ps == getProductStrategyForStream(AUDIO_STREAM_NOTIFICATION) &&
!is_state_in_call(getPhoneState()) &&
!outputs.isActiveRemotely(toVolumeSource(AUDIO_STREAM_MUSIC),
SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY) &&
outputs.isActive(toVolumeSource(AUDIO_STREAM_MUSIC),
SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) {
- product_strategy_t strategyForMedia =
- getProductStrategyForStream(AUDIO_STREAM_MUSIC);
- deviceTypes = productStrategies.getDeviceTypesForProductStrategy(strategyForMedia);
+ psOrFallback = getProductStrategyForStream(AUDIO_STREAM_MUSIC);
} else if (ps == getProductStrategyForStream(AUDIO_STREAM_ACCESSIBILITY) &&
(outputs.isActive(toVolumeSource(AUDIO_STREAM_RING)) ||
outputs.isActive(toVolumeSource(AUDIO_STREAM_ALARM)))) {
// do not route accessibility prompts to a digital output currently configured with a
// compressed format as they would likely not be mixed and dropped.
// Device For Sonification conf file has HDMI, SPDIF and HDMI ARC unreacheable.
- product_strategy_t strategyNotification = getProductStrategyForStream(AUDIO_STREAM_RING);
- deviceTypes = productStrategies.getDeviceTypesForProductStrategy(strategyNotification);
- } else {
- deviceTypes = productStrategies.getDeviceTypesForProductStrategy(ps);
+ psOrFallback = getProductStrategyForStream(AUDIO_STREAM_RING);
}
+ disabledDevices = getDisabledDevicesForProductStrategy(availableOutputDevices, psOrFallback);
+ deviceTypes = productStrategies.getDeviceTypesForProductStrategy(psOrFallback);
+ // In case a fallback is decided on other strategy, prevent from selecting this device if
+ // disabled for current strategy.
+ availableOutputDevices.remove(disabledDevices);
+
if (deviceTypes.empty() ||
Intersection(deviceTypes, availableOutputDevicesTypes).empty()) {
auto defaultDevice = getApmObserver()->getDefaultOutputDevice();
ALOG_ASSERT(defaultDevice != nullptr, "no valid default device defined");
- return DeviceVector(defaultDevice);
- }
- if (/*device_distinguishes_on_address(*deviceTypes.begin())*/ isSingleDeviceType(
+ selectedDevices = DeviceVector(defaultDevice);
+ } else if (/*device_distinguishes_on_address(*deviceTypes.begin())*/ isSingleDeviceType(
deviceTypes, AUDIO_DEVICE_OUT_BUS)) {
// We do expect only one device for these types of devices
// Criterion device address garantee this one is available
@@ -273,12 +398,15 @@
dumpDeviceTypes(deviceTypes).c_str(), address.c_str());
auto defaultDevice = getApmObserver()->getDefaultOutputDevice();
ALOG_ASSERT(defaultDevice != nullptr, "Default Output Device NOT available");
- return DeviceVector(defaultDevice);
+ selectedDevices = DeviceVector(defaultDevice);
+ } else {
+ selectedDevices = DeviceVector(busDevice);
}
- return DeviceVector(busDevice);
+ } else {
+ ALOGV("%s:device %s %d", __FUNCTION__, dumpDeviceTypes(deviceTypes).c_str(), ps);
+ selectedDevices = availableOutputDevices.getDevicesFromTypes(deviceTypes);
}
- ALOGV("%s:device %s %d", __FUNCTION__, dumpDeviceTypes(deviceTypes).c_str(), ps);
- return availableOutputDevices.getDevicesFromTypes(deviceTypes);
+ return selectedDevices;
}
DeviceVector Engine::getOutputDevicesForAttributes(const audio_attributes_t &attributes,
@@ -320,6 +448,7 @@
sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
uid_t uid,
+ audio_session_t session,
sp<AudioPolicyMix> *mix) const
{
const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -341,6 +470,7 @@
device = policyMixes.getDeviceAndMixForInputSource(attr,
availableInputDevices,
uid,
+ session,
mix);
if (device != nullptr) {
return device;
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index 903ab34..4f3e620 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -71,9 +71,17 @@
sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
uid_t uid = 0,
+ audio_session_t session = AUDIO_SESSION_NONE,
sp<AudioPolicyMix> *mix = nullptr)
const override;
+ status_t setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) override;
+
+ status_t removeDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
+ const AudioDeviceTypeAddrVector &devices) override;
+ status_t clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) override;
+
///
/// from AudioPolicyPluginInterface
///
@@ -101,6 +109,12 @@
}
private:
+ android::status_t disableDevicesForStrategy(product_strategy_t strategy,
+ const DeviceVector &devicesToDisable);
+ void enableDevicesForStrategy(product_strategy_t strategy, const DeviceVector &devicesToEnable);
+ android::status_t setOutputDevicesConnectionState(const DeviceVector &devices,
+ audio_policy_dev_state_t state);
+
/* Copy facilities are put private to disable copy. */
Engine(const Engine &object);
Engine &operator=(const Engine &object);
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index f00e4e3..15f7842 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -87,7 +87,7 @@
case AUDIO_POLICY_FORCE_FOR_COMMUNICATION:
if (config != AUDIO_POLICY_FORCE_SPEAKER && config != AUDIO_POLICY_FORCE_BT_SCO &&
config != AUDIO_POLICY_FORCE_NONE) {
- ALOGW("setForceUse() invalid config %d for FOR_COMMUNICATION", config);
+ ALOGW("setForceUse() invalid config %d for COMMUNICATION", config);
return BAD_VALUE;
}
break;
@@ -97,14 +97,14 @@
config != AUDIO_POLICY_FORCE_ANALOG_DOCK &&
config != AUDIO_POLICY_FORCE_DIGITAL_DOCK && config != AUDIO_POLICY_FORCE_NONE &&
config != AUDIO_POLICY_FORCE_NO_BT_A2DP && config != AUDIO_POLICY_FORCE_SPEAKER ) {
- ALOGW("setForceUse() invalid config %d for FOR_MEDIA", config);
+ ALOGW("setForceUse() invalid config %d for MEDIA", config);
return BAD_VALUE;
}
break;
case AUDIO_POLICY_FORCE_FOR_RECORD:
if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY &&
config != AUDIO_POLICY_FORCE_NONE) {
- ALOGW("setForceUse() invalid config %d for FOR_RECORD", config);
+ ALOGW("setForceUse() invalid config %d for RECORD", config);
return BAD_VALUE;
}
break;
@@ -114,19 +114,22 @@
config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY &&
config != AUDIO_POLICY_FORCE_ANALOG_DOCK &&
config != AUDIO_POLICY_FORCE_DIGITAL_DOCK) {
- ALOGW("setForceUse() invalid config %d for FOR_DOCK", config);
+ ALOGW("setForceUse() invalid config %d for DOCK", config);
+ return BAD_VALUE;
}
break;
case AUDIO_POLICY_FORCE_FOR_SYSTEM:
if (config != AUDIO_POLICY_FORCE_NONE &&
config != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) {
- ALOGW("setForceUse() invalid config %d for FOR_SYSTEM", config);
+ ALOGW("setForceUse() invalid config %d for SYSTEM", config);
+ return BAD_VALUE;
}
break;
case AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO:
if (config != AUDIO_POLICY_FORCE_NONE &&
config != AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED) {
ALOGW("setForceUse() invalid config %d for HDMI_SYSTEM_AUDIO", config);
+ return BAD_VALUE;
}
break;
case AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND:
@@ -140,13 +143,13 @@
break;
case AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING:
if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_NONE) {
- ALOGW("setForceUse() invalid config %d for FOR_VIBRATE_RINGING", config);
+ ALOGW("setForceUse() invalid config %d for VIBRATE_RINGING", config);
return BAD_VALUE;
}
break;
default:
ALOGW("setForceUse() invalid usage %d", usage);
- break; // TODO return BAD_VALUE?
+ return BAD_VALUE;
}
return EngineBase::setForceUse(usage, config);
}
@@ -172,8 +175,12 @@
// - cannot route from voice call RX OR
// - audio HAL version is < 3.0 and TX device is on the primary HW module
if (getPhoneState() == AUDIO_MODE_IN_CALL) {
- audio_devices_t txDevice = getDeviceForInputSource(
- AUDIO_SOURCE_VOICE_COMMUNICATION)->type();
+ audio_devices_t txDevice = AUDIO_DEVICE_NONE;
+ sp<DeviceDescriptor> txDeviceDesc =
+ getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+ if (txDeviceDesc != nullptr) {
+ txDevice = txDeviceDesc->type();
+ }
sp<AudioOutputDescriptor> primaryOutput = outputs.getPrimaryOutput();
LOG_ALWAYS_FATAL_IF(primaryOutput == nullptr, "Primary output not found");
DeviceVector availPrimaryInputDevices =
@@ -472,7 +479,7 @@
}
if (devices.isEmpty()) {
- ALOGV("%s no device found for strategy %d", __func__, strategy);
+ ALOGI("%s no device found for strategy %d", __func__, strategy);
sp<DeviceDescriptor> defaultOutputDevice = getApmObserver()->getDefaultOutputDevice();
if (defaultOutputDevice != nullptr) {
devices.add(defaultOutputDevice);
@@ -486,6 +493,37 @@
return devices;
}
+DeviceVector Engine::getPreferredAvailableDevicesForInputSource(
+ const DeviceVector& availableInputDevices, audio_source_t inputSource) const {
+ DeviceVector preferredAvailableDevVec = {};
+ AudioDeviceTypeAddrVector preferredDevices;
+ const status_t status = getDevicesForRoleAndCapturePreset(
+ inputSource, DEVICE_ROLE_PREFERRED, preferredDevices);
+ if (status == NO_ERROR) {
+ // Only use preferred devices when they are all available.
+ preferredAvailableDevVec =
+ availableInputDevices.getDevicesFromDeviceTypeAddrVec(preferredDevices);
+ if (preferredAvailableDevVec.size() == preferredDevices.size()) {
+ ALOGVV("%s using pref device %s for source %u",
+ __func__, preferredAvailableDevVec.toString().c_str(), inputSource);
+ return preferredAvailableDevVec;
+ }
+ }
+ return preferredAvailableDevVec;
+}
+
+DeviceVector Engine::getDisabledDevicesForInputSource(
+ const DeviceVector& availableInputDevices, audio_source_t inputSource) const {
+ DeviceVector disabledDevices = {};
+ AudioDeviceTypeAddrVector disabledDevicesTypeAddr;
+ const status_t status = getDevicesForRoleAndCapturePreset(
+ inputSource, DEVICE_ROLE_DISABLED, disabledDevicesTypeAddr);
+ if (status == NO_ERROR) {
+ disabledDevices =
+ availableInputDevices.getDevicesFromDeviceTypeAddrVec(disabledDevicesTypeAddr);
+ }
+ return disabledDevices;
+}
sp<DeviceDescriptor> Engine::getDeviceForInputSource(audio_source_t inputSource) const
{
@@ -517,6 +555,20 @@
}
}
+ // Use the preferred device for the input source if it is available.
+ DeviceVector preferredInputDevices = getPreferredAvailableDevicesForInputSource(
+ availableDevices, inputSource);
+ if (!preferredInputDevices.isEmpty()) {
+ // Currently, only support single device for input. The public JAVA API also only
+ // support setting single device as preferred device. In that case, returning the
+ // first device is OK here.
+ return preferredInputDevices[0];
+ }
+ // Remove the disabled device for the input source from the available input device list.
+ DeviceVector disabledInputDevices = getDisabledDevicesForInputSource(
+ availableDevices, inputSource);
+ availableDevices.remove(disabledInputDevices);
+
audio_devices_t commDeviceType =
getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE);
@@ -556,22 +608,26 @@
}
}
switch (commDeviceType) {
- case AUDIO_DEVICE_OUT_BLE_HEADSET:
- device = availableDevices.getDevice(
- AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
- break;
case AUDIO_DEVICE_OUT_SPEAKER:
device = availableDevices.getFirstExistingDevice({
AUDIO_DEVICE_IN_BACK_MIC, AUDIO_DEVICE_IN_BUILTIN_MIC,
AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_USB_HEADSET});
break;
+ case AUDIO_DEVICE_OUT_BLE_HEADSET:
+ device = availableDevices.getDevice(
+ AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
+ if (device != nullptr) {
+ break;
+ }
+ ALOGE("%s LE Audio selected for communication but input device not available",
+ __func__);
+ FALLTHROUGH_INTENDED;
default: // FORCE_NONE
device = availableDevices.getFirstExistingDevice({
AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BLUETOOTH_BLE,
AUDIO_DEVICE_IN_BUILTIN_MIC});
break;
-
}
break;
@@ -683,26 +739,6 @@
return AUDIO_DEVICE_NONE;
}
-DeviceVector Engine::getPreferredAvailableDevicesForProductStrategy(
- const DeviceVector& availableOutputDevices, product_strategy_t strategy) const {
- DeviceVector preferredAvailableDevVec = {};
- AudioDeviceTypeAddrVector preferredStrategyDevices;
- const status_t status = getDevicesForRoleAndStrategy(
- strategy, DEVICE_ROLE_PREFERRED, preferredStrategyDevices);
- if (status == NO_ERROR) {
- // there is a preferred device, is it available?
- preferredAvailableDevVec =
- availableOutputDevices.getDevicesFromDeviceTypeAddrVec(preferredStrategyDevices);
- if (preferredAvailableDevVec.size() == preferredStrategyDevices.size()) {
- ALOGVV("%s using pref device %s for strategy %u",
- __func__, preferredAvailableDevVec.toString().c_str(), strategy);
- return preferredAvailableDevVec;
- }
- }
- return preferredAvailableDevVec;
-}
-
-
DeviceVector Engine::getDevicesForProductStrategy(product_strategy_t strategy) const {
const SwAudioOutputCollection& outputs = getApmObserver()->getOutputs();
@@ -725,6 +761,11 @@
return preferredAvailableDevVec;
}
+ // Remove all disabled devices from the available device list.
+ DeviceVector disabledDevVec =
+ getDisabledDevicesForProductStrategy(availableOutputDevices, strategy);
+ availableOutputDevices.remove(disabledDevVec);
+
return getDevicesForStrategyInt(legacyStrategy,
availableOutputDevices,
outputs);
@@ -764,6 +805,7 @@
sp<DeviceDescriptor> Engine::getInputDeviceForAttributes(const audio_attributes_t &attr,
uid_t uid,
+ audio_session_t session,
sp<AudioPolicyMix> *mix) const
{
const auto &policyMixes = getApmObserver()->getAudioPolicyMixCollection();
@@ -786,6 +828,7 @@
device = policyMixes.getDeviceAndMixForInputSource(attr,
availableInputDevices,
uid,
+ session,
mix);
if (device != nullptr) {
return device;
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 66225a1..878bca9 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -73,6 +73,7 @@
sp<DeviceDescriptor> getInputDeviceForAttributes(const audio_attributes_t &attr,
uid_t uid = 0,
+ audio_session_t session = AUDIO_SESSION_NONE,
sp<AudioPolicyMix> *mix = nullptr)
const override;
@@ -103,8 +104,10 @@
product_strategy_t getProductStrategyFromLegacy(legacy_strategy legacyStrategy) const;
audio_devices_t getPreferredDeviceTypeForLegacyStrategy(
const DeviceVector& availableOutputDevices, legacy_strategy legacyStrategy) const;
- DeviceVector getPreferredAvailableDevicesForProductStrategy(
- const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
+ DeviceVector getPreferredAvailableDevicesForInputSource(
+ const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
+ DeviceVector getDisabledDevicesForInputSource(
+ const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
};
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index fba4e0f..8793085 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -263,13 +263,15 @@
*portId = AUDIO_PORT_HANDLE_NONE;
AudioPolicyInterface::output_type_t outputType;
bool isSpatialized;
+ bool isBitPerfect;
// TODO b/182392769: use attribution source util
AttributionSourceState attributionSource;
attributionSource.uid = 0;
attributionSource.token = sp<BBinder>::make();
if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
- &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized) != OK) {
+ &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized,
+ &isBitPerfect) != OK) {
return false;
}
if (*output == AUDIO_IO_HANDLE_NONE || *portId == AUDIO_PORT_HANDLE_NONE) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 87da6fa..1658f40 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -34,6 +34,7 @@
#include <map>
#include <math.h>
#include <set>
+#include <type_traits>
#include <unordered_set>
#include <vector>
@@ -253,6 +254,9 @@
// remove device from mReportedFormatsMap cache
mReportedFormatsMap.erase(device);
+ // remove preferred mixer configurations
+ mPreferredMixerAttrInfos.erase(device->getId());
+
} break;
default:
@@ -309,10 +313,10 @@
checkCloseOutputs();
}
(void)updateCallRouting(false /*fromCache*/);
- std::vector<audio_io_handle_t> outputsToReopen;
const DeviceVector msdOutDevices = getMsdAudioOutDevices();
const DeviceVector activeMediaDevices =
mEngine->getActiveMediaDevices(mAvailableOutputDevices);
+ std::map<audio_io_handle_t, DeviceVector> outputsToReopenWithDevices;
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
if (desc->isActive() && ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) ||
@@ -326,6 +330,13 @@
&& (!device_distinguishes_on_address(device->type())
// always force when disconnecting (a non-duplicated device)
|| (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+ if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
+ // If the device is using preferred mixer attributes, the output need to reopen
+ // with default configuration when the new selected devices are different from
+ // current routing devices
+ outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), newDevices);
+ continue;
+ }
setOutputDevices(desc, newDevices, force, 0);
}
if (!desc->isDuplicated() && desc->mProfile->hasDynamicAudioProfile() &&
@@ -336,7 +347,7 @@
// `mPendingReopenToQueryProfiles` in the SwOutputDescriptor so that the output
// can be reopened to query dynamic profiles when all clients are inactive.
if (areAllActiveTracksRerouted(desc)) {
- outputsToReopen.push_back(mOutputs.keyAt(i));
+ outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), activeMediaDevices);
} else {
desc->mPendingReopenToQueryProfiles = true;
}
@@ -346,11 +357,7 @@
desc->mPendingReopenToQueryProfiles = false;
}
}
- for (const auto& output : outputsToReopen) {
- sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
- closeOutput(output);
- openOutputWithProfileAndDevice(desc->mProfile, activeMediaDevices);
- }
+ reopenOutputsWithDevices(outputsToReopenWithDevices);
if (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
cleanUpForDevice(device);
@@ -671,7 +678,10 @@
audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
- ALOG_ASSERT(txSourceDevice != 0, "%s() input selected device not available", __func__);
+ if (txSourceDevice == nullptr) {
+ ALOGE("%s() selected input device not available", __func__);
+ return INVALID_OPERATION;
+ }
ALOGV("%s device rxDevice %s txDevice %s", __func__,
rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
@@ -824,7 +834,7 @@
if (isStateInCall(oldState)) {
ALOGV("setPhoneState() in call state management: new state is %d", state);
// force reevaluating accessibility routing when call stops
- mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY);
+ invalidateStreams({AUDIO_STREAM_ACCESSIBILITY});
}
/**
@@ -882,23 +892,32 @@
}
}
+ std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
// reevaluate routing on all outputs in case tracks have been started during the call
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
if (state != AUDIO_MODE_IN_CALL || (desc != mPrimaryOutput && !isTelephonyRxOrTx(desc))) {
bool forceRouting = !newDevices.isEmpty();
+ if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
+ // If the device is using preferred mixer attributes, the output need to reopen
+ // with default configuration when the new selected devices are different from
+ // current routing devices.
+ outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+ continue;
+ }
setOutputDevices(desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
}
}
+ reopenOutputsWithDevices(outputsToReopen);
checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, delayMs);
if (isStateInCall(state)) {
ALOGV("setPhoneState() in call state management: new state is %d", state);
// force reevaluating accessibility routing when call starts
- mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY);
+ invalidateStreams({AUDIO_STREAM_ACCESSIBILITY});
}
// Flag that ringtone volume must be limited to music volume until we exit MODE_RINGTONE
@@ -931,8 +950,7 @@
// force client reconnection to reevaluate flag AUDIO_FLAG_AUDIBILITY_ENFORCED
if (usage == AUDIO_POLICY_FORCE_FOR_SYSTEM) {
- mpClientInterface->invalidateStream(AUDIO_STREAM_SYSTEM);
- mpClientInterface->invalidateStream(AUDIO_STREAM_ENFORCED_AUDIBLE);
+ invalidateStreams({AUDIO_STREAM_SYSTEM, AUDIO_STREAM_ENFORCED_AUDIBLE});
}
//FIXME: workaround for truncated touch sounds
@@ -1138,13 +1156,14 @@
const audio_attributes_t *attr,
audio_stream_type_t *stream,
uid_t uid,
- const audio_config_t *config,
+ audio_config_t *config,
audio_output_flags_t *flags,
audio_port_handle_t *selectedDeviceId,
bool *isRequestedDeviceForExclusiveUse,
std::vector<sp<AudioPolicyMix>> *secondaryMixes,
output_type_t *outputType,
- bool *isSpatialized)
+ bool *isSpatialized,
+ bool *isBitPerfect)
{
DeviceVector outputDevices;
const audio_port_handle_t requestedPortId = *selectedDeviceId;
@@ -1167,6 +1186,8 @@
ALOGV("%s() attributes=%s stream=%s session %d selectedDeviceId %d", __func__,
toString(*resultAttr).c_str(), toString(*stream).c_str(), session, requestedPortId);
+ bool usePrimaryOutputFromPolicyMixes = false;
+
// The primary output is the explicit routing (eg. setPreferredDevice) if specified,
// otherwise, fallback to the dynamic policies, if none match, query the engine.
// Secondary outputs are always found by dynamic policies as the engine do not support them
@@ -1175,15 +1196,13 @@
.channel_mask = config->channel_mask,
.format = config->format,
};
- status = mPolicyMixes.getOutputForAttr(*resultAttr, clientConfig, uid, *flags, primaryMix,
- secondaryMixes);
+ status = mPolicyMixes.getOutputForAttr(*resultAttr, clientConfig, uid, session, *flags,
+ mAvailableOutputDevices, requestedDevice, primaryMix,
+ secondaryMixes, usePrimaryOutputFromPolicyMixes);
if (status != OK) {
return status;
}
- // Explicit routing is higher priority then any dynamic policy primary output
- bool usePrimaryOutputFromPolicyMixes = requestedDevice == nullptr && primaryMix != nullptr;
-
// FIXME: in case of RENDER policy, the output capabilities should be checked
if ((secondaryMixes != nullptr && !secondaryMixes->empty())
&& !audio_is_linear_pcm(config->format)) {
@@ -1275,10 +1294,42 @@
}
}
if (*output == AUDIO_IO_HANDLE_NONE) {
+ sp<PreferredMixerAttributesInfo> info = nullptr;
+ if (outputDevices.size() == 1) {
+ info = getPreferredMixerAttributesInfo(
+ outputDevices.itemAt(0)->getId(),
+ mEngine->getProductStrategyForAttributes(*resultAttr));
+ // Only use preferred mixer if the uid matches or the preferred mixer is bit-perfect
+ // and it is currently active.
+ if (info != nullptr && info->getUid() != uid &&
+ ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_NONE ||
+ info->getActiveClientCount() == 0)) {
+ info = nullptr;
+ }
+ }
*output = getOutputForDevices(outputDevices, session, resultAttr, config,
- flags, isSpatialized, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
+ flags, isSpatialized, info, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
+ // The client will be active if the client is currently preferred mixer owner and the
+ // requested configuration matches the preferred mixer configuration.
+ *isBitPerfect = (info != nullptr
+ && (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
+ && info->getUid() == uid
+ && *output != AUDIO_IO_HANDLE_NONE
+ // When bit-perfect output is selected for the preferred mixer attributes owner,
+ // only need to consider the config matches.
+ && mOutputs.valueFor(*output)->isConfigurationMatched(
+ clientConfig, AUDIO_OUTPUT_FLAG_NONE));
}
if (*output == AUDIO_IO_HANDLE_NONE) {
+ AudioProfileVector profiles;
+ status_t ret = getProfilesForDevices(outputDevices, profiles, *flags, false /*isInput*/);
+ if (ret == NO_ERROR && !profiles.empty()) {
+ config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
+ : *profiles[0]->getChannels().begin();
+ config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
+ : *profiles[0]->getSampleRates().begin();
+ config->format = profiles[0]->getFormat();
+ }
return INVALID_OPERATION;
}
@@ -1306,13 +1357,14 @@
audio_session_t session,
audio_stream_type_t *stream,
const AttributionSourceState& attributionSource,
- const audio_config_t *config,
+ audio_config_t *config,
audio_output_flags_t *flags,
audio_port_handle_t *selectedDeviceId,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
output_type_t *outputType,
- bool *isSpatialized)
+ bool *isSpatialized,
+ bool *isBitPerfect)
{
// The supplied portId must be AUDIO_PORT_HANDLE_NONE
if (*portId != AUDIO_PORT_HANDLE_NONE) {
@@ -1334,7 +1386,8 @@
status_t status = getOutputForAttrInt(&resultAttr, output, session, attr, stream, uid,
config, flags, selectedDeviceId, &isRequestedDeviceForExclusiveUse,
- secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType, isSpatialized);
+ secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType, isSpatialized,
+ isBitPerfect);
if (status != NO_ERROR) {
return status;
}
@@ -1478,6 +1531,7 @@
const audio_config_t *config,
audio_output_flags_t *flags,
bool *isSpatialized,
+ sp<PreferredMixerAttributesInfo> prefMixerConfigInfo,
bool forceMutingHaptic)
{
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
@@ -1557,11 +1611,36 @@
// get which output is suitable for the specified stream. The actual
// routing change will happen when startOutput() will be called
SortedVector<audio_io_handle_t> outputs = getOutputsForDevices(devices, mOutputs);
-
- // at this stage we should ignore the DIRECT flag as no direct output could be found earlier
- *flags = (audio_output_flags_t)(*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
- output = selectOutput(
- outputs, *flags, config->format, channelMask, config->sample_rate, session);
+ if (prefMixerConfigInfo != nullptr) {
+ for (audio_io_handle_t outputHandle : outputs) {
+ sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(outputHandle);
+ if (outputDesc->mProfile == prefMixerConfigInfo->getProfile()) {
+ output = outputHandle;
+ break;
+ }
+ }
+ if (output == AUDIO_IO_HANDLE_NONE) {
+ // No output open with the preferred profile. Open a new one.
+ audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+ config.channel_mask = prefMixerConfigInfo->getConfigBase().channel_mask;
+ config.sample_rate = prefMixerConfigInfo->getConfigBase().sample_rate;
+ config.format = prefMixerConfigInfo->getConfigBase().format;
+ sp<SwAudioOutputDescriptor> preferredOutput = openOutputWithProfileAndDevice(
+ prefMixerConfigInfo->getProfile(), devices, nullptr /*mixerConfig*/,
+ &config, prefMixerConfigInfo->getFlags());
+ if (preferredOutput == nullptr) {
+ ALOGE("%s failed to open output with preferred mixer config", __func__);
+ } else {
+ output = preferredOutput->mIoHandle;
+ }
+ }
+ } else {
+ // at this stage we should ignore the DIRECT flag as no direct output could be
+ // found earlier
+ *flags = (audio_output_flags_t) (*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
+ output = selectOutput(
+ outputs, *flags, config->format, channelMask, config->sample_rate, session);
+ }
}
ALOGW_IF((output == 0), "getOutputForDevices() could not find output for stream %d, "
"sampling rate %d, format %#x, channels %#x, flags %#x",
@@ -2035,8 +2114,47 @@
if (status != NO_ERROR) {
outputDesc->stop();
+ if (status == DEAD_OBJECT) {
+ sp<SwAudioOutputDescriptor> desc =
+ reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+ if (desc == nullptr) {
+ // This is not common, it may indicate something wrong with the HAL.
+ ALOGE("%s unable to open output with default config", __func__);
+ return status;
+ }
+ desc->mUsePreferredMixerAttributes = true;
+ }
return status;
}
+
+ // If the client is the first one active on preferred mixer parameters, reopen the output
+ // if the current mixer parameters doesn't match the preferred one.
+ if (outputDesc->devices().size() == 1) {
+ sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
+ outputDesc->devices()[0]->getId(), client->strategy());
+ if (info != nullptr && info->getUid() == client->uid()) {
+ if (info->getActiveClientCount() == 0 && !outputDesc->isConfigurationMatched(
+ info->getConfigBase(), info->getFlags())) {
+ stopSource(outputDesc, client);
+ outputDesc->stop();
+ audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+ config.channel_mask = info->getConfigBase().channel_mask;
+ config.sample_rate = info->getConfigBase().sample_rate;
+ config.format = info->getConfigBase().format;
+ sp<SwAudioOutputDescriptor> desc =
+ reopenOutput(outputDesc, &config, info->getFlags(), __func__);
+ if (desc == nullptr) {
+ return BAD_VALUE;
+ }
+ desc->mUsePreferredMixerAttributes = true;
+ // Intentionally return error to let the client side resending request for
+ // creating and starting.
+ return DEAD_OBJECT;
+ }
+ info->increaseActiveClient();
+ }
+ }
+
if (client->hasPreferredDevice()) {
// playback activity with preferred device impacts routing occurred, inform upper layers
mpClientInterface->onRoutingUpdated();
@@ -2188,8 +2306,14 @@
}
}
+ if (outputDesc->mUsePreferredMixerAttributes && devices != outputDesc->devices()) {
+ // If the output is open with preferred mixer attributes, but the routed device is
+ // changed when calling this function, returning DEAD_OBJECT to indicate routing
+ // changed.
+ return DEAD_OBJECT;
+ }
const uint32_t muteWaitMs =
- setOutputDevices(outputDesc, devices, force, 0, NULL, requiresMuteCheck);
+ setOutputDevices(outputDesc, devices, force, 0, nullptr, requiresMuteCheck);
// apply volume rules for current stream and device if necessary
auto &curves = getVolumeCurves(client->attributes());
@@ -2209,7 +2333,7 @@
// force reevaluating accessibility routing when ringtone or alarm starts
if (followsSameRouting(clientAttr, attributes_initializer(AUDIO_USAGE_ALARM))) {
- mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY);
+ invalidateStreams({AUDIO_STREAM_ACCESSIBILITY});
}
if (waitMs > muteWaitMs) {
@@ -2252,6 +2376,7 @@
bool isUnicastActive = isLeUnicastActive();
if (wasUnicastActive != isUnicastActive) {
+ std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
//reroute all outputs routed to LE broadcast if LE unicast activy changed on any output
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
@@ -2264,6 +2389,13 @@
getAudioDeviceOutLeAudioUnicastSet()).isEmpty()))) {
DeviceVector newDevices = getNewOutputDevices(desc, false /*fromCache*/);
bool force = desc->devices() != newDevices;
+ if (desc->mUsePreferredMixerAttributes && force) {
+                // If the output is using preferred mixer attributes, it needs to be
+                // reopened with the default configuration when the newly selected
+                // devices differ from the current routing devices.
+ outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+ continue;
+ }
setOutputDevices(desc, newDevices, force, delayMs);
// re-apply device specific volume if not done by setOutputDevice()
if (!force) {
@@ -2271,6 +2403,7 @@
}
}
}
+ reopenOutputsWithDevices(outputsToReopen);
}
}
@@ -2297,6 +2430,19 @@
if (status == NO_ERROR ) {
outputDesc->stop();
+ } else {
+ return status;
+ }
+
+ if (outputDesc->devices().size() == 1) {
+ sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
+ outputDesc->devices()[0]->getId(), client->strategy());
+ if (info != nullptr && info->getUid() == client->uid()) {
+ info->decreaseActiveClient();
+ if (info->getActiveClientCount() == 0) {
+ reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+ }
+ }
}
return status;
}
@@ -2357,6 +2503,7 @@
// force restoring the device selection on other active outputs if it differs from the
// one being selected for this output
+ std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
uint32_t delayMs = outputDesc->latency()*2;
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
@@ -2367,6 +2514,13 @@
DeviceVector newDevices2 = getNewOutputDevices(desc, false /*fromCache*/);
bool force = desc->devices() != newDevices2;
+ if (desc->mUsePreferredMixerAttributes && force) {
+ // If the device is using preferred mixer attributes, the output need to
+ // reopen with default configuration when the new selected devices are
+ // different from current routing devices.
+ outputsToReopen.emplace(mOutputs.keyAt(i), newDevices2);
+ continue;
+ }
setOutputDevices(desc, newDevices2, force, delayMs);
// re-apply device specific volume if not done by setOutputDevice()
@@ -2375,6 +2529,7 @@
}
}
}
+ reopenOutputsWithDevices(outputsToReopen);
// update the outputs if stopping one with a stream that can affect notification routing
handleNotificationRoutingForStream(stream);
}
@@ -2453,7 +2608,7 @@
audio_unique_id_t riid,
audio_session_t session,
const AttributionSourceState& attributionSource,
- const audio_config_base_t *config,
+ audio_config_base_t *config,
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
input_type_t *inputType,
@@ -2536,7 +2691,7 @@
*inputType = API_INPUT_INVALID;
if (attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX &&
- strncmp(attributes.tags, "addr=", strlen("addr=")) == 0) {
+ extractAddressFromAudioAttributes(attributes).has_value()) {
status = mPolicyMixes.getInputMixForAttr(attributes, &policyMix);
if (status != NO_ERROR) {
ALOGW("%s could not find input mix for attr %s",
@@ -2564,7 +2719,7 @@
} else {
// Prevent from storing invalid requested device id in clients
requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
- device = mEngine->getInputDeviceForAttributes(attributes, uid, &policyMix);
+ device = mEngine->getInputDeviceForAttributes(attributes, uid, session, &policyMix);
ALOGV_IF(device != nullptr, "%s found device type is 0x%X",
__FUNCTION__, device->type());
}
@@ -2594,6 +2749,16 @@
*input = getInputForDevice(device, session, attributes, config, flags, policyMix);
if (*input == AUDIO_IO_HANDLE_NONE) {
status = INVALID_OPERATION;
+ AudioProfileVector profiles;
+ status_t ret = getProfilesForDevices(
+ DeviceVector(device), profiles, flags, true /*isInput*/);
+ if (ret == NO_ERROR && !profiles.empty()) {
+ config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
+ : *profiles[0]->getChannels().begin();
+ config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
+ : *profiles[0]->getSampleRates().begin();
+ config->format = profiles[0]->getFormat();
+ }
goto error;
}
@@ -2625,7 +2790,7 @@
audio_io_handle_t AudioPolicyManager::getInputForDevice(const sp<DeviceDescriptor> &device,
audio_session_t session,
const audio_attributes_t &attributes,
- const audio_config_base_t *config,
+ audio_config_base_t *config,
audio_input_flags_t flags,
const sp<AudioPolicyMix> &policyMix)
{
@@ -2652,31 +2817,19 @@
flags = (audio_input_flags_t)(flags | AUDIO_INPUT_FLAG_ULTRASOUND);
}
- // find a compatible input profile (not necessarily identical in parameters)
- sp<IOProfile> profile;
// sampling rate and flags may be updated by getInputProfile
uint32_t profileSamplingRate = (config->sample_rate == 0) ?
SAMPLE_RATE_HZ_DEFAULT : config->sample_rate;
- audio_format_t profileFormat;
+ audio_format_t profileFormat = config->format;
audio_channel_mask_t profileChannelMask = config->channel_mask;
audio_input_flags_t profileFlags = flags;
- for (;;) {
- profileFormat = config->format; // reset each time through loop, in case it is updated
- profile = getInputProfile(device, profileSamplingRate, profileFormat, profileChannelMask,
- profileFlags);
- if (profile != 0) {
- break; // success
- } else if (profileFlags & AUDIO_INPUT_FLAG_RAW) {
- profileFlags = (audio_input_flags_t) (profileFlags & ~AUDIO_INPUT_FLAG_RAW); // retry
- } else if (profileFlags != AUDIO_INPUT_FLAG_NONE && audio_is_linear_pcm(config->format)) {
- profileFlags = AUDIO_INPUT_FLAG_NONE; // retry
- } else { // fail
- ALOGW("%s could not find profile for device %s, sampling rate %u, format %#x, "
- "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(),
- config->sample_rate, config->format, config->channel_mask, flags);
- return input;
- }
+ // find a compatible input profile (not necessarily identical in parameters)
+ sp<IOProfile> profile = getInputProfile(
+ device, profileSamplingRate, profileFormat, profileChannelMask, profileFlags);
+ if (profile == nullptr) {
+ return input;
}
+
// Pick input sampling rate if not specified by client
uint32_t samplingRate = config->sample_rate;
if (samplingRate == 0) {
@@ -2973,7 +3126,8 @@
bool close = false;
for (const auto& client : input->clientsList()) {
sp<DeviceDescriptor> device =
- mEngine->getInputDeviceForAttributes(client->attributes(), client->uid());
+ mEngine->getInputDeviceForAttributes(client->attributes(), client->uid(),
+ client->session());
if (!input->supportedDevices().contains(device)) {
close = true;
break;
@@ -3618,11 +3772,12 @@
bool AudioPolicyManager::areAllDevicesSupported(
const AudioDeviceTypeAddrVector& devices,
std::function<bool(audio_devices_t)> predicate,
- const char *context) {
+ const char *context,
+ bool matchAddress) {
for (size_t i = 0; i < devices.size(); i++) {
sp<DeviceDescriptor> devDesc = mHwModules.getDeviceDescriptor(
devices[i].mType, devices[i].getAddress(), String8(),
- AUDIO_FORMAT_DEFAULT, false /*allowToCreate*/, true /*matchAddress*/);
+ AUDIO_FORMAT_DEFAULT, false /*allowToCreate*/, matchAddress);
if (devDesc == nullptr || (predicate != nullptr && !predicate(devices[i].mType))) {
ALOGE("%s: device type %#x address %s not supported or not match predicate",
context, devices[i].mType, devices[i].getAddress());
@@ -3706,6 +3861,7 @@
// Only apply special touch sound delay once
delayMs = 0;
}
+ std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueAt(i);
DeviceVector newDevices = getNewOutputDevices(outputDesc, true /*fromCache*/);
@@ -3715,6 +3871,13 @@
// preventing the force re-routing in case of default dev that distinguishes on address.
// Let's give back to engine full device choice decision however.
bool forceRouting = !newDevices.isEmpty();
+ if (outputDesc->mUsePreferredMixerAttributes && newDevices != outputDesc->devices()) {
+ // If the device is using preferred mixer attributes, the output need to reopen
+ // with default configuration when the new selected devices are different from
+ // current routing devices.
+ outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+ continue;
+ }
waitMs = setOutputDevices(outputDesc, newDevices, forceRouting, delayMs, nullptr,
true /*requiresMuteCheck*/,
!forceRouting /*requiresVolumeCheck*/);
@@ -3725,6 +3888,7 @@
applyStreamVolumes(outputDesc, newDevices.types(), waitMs, true);
}
}
+ reopenOutputsWithDevices(outputsToReopen);
checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, delayMs);
}
@@ -3745,14 +3909,49 @@
}
}
-status_t AudioPolicyManager::removeDevicesRoleForStrategy(product_strategy_t strategy,
-        device_role_t role)
+// Removes only the given devices from the (strategy, role) association in the engine,
+// then re-evaluates device/output routing so the change takes effect immediately.
+// Returns BAD_VALUE if any device is not a supported output device type (the address is
+// intentionally not matched), or forwards the engine's error if the removal fails.
+status_t
+AudioPolicyManager::removeDevicesRoleForStrategy(product_strategy_t strategy,
+                                                 device_role_t role,
+                                                 const AudioDeviceTypeAddrVector &devices) {
+    ALOGV("%s() strategy=%d role=%d %s", __func__, strategy, role,
+            dumpAudioDeviceTypeAddrVector(devices).c_str());
+
+    if (!areAllDevicesSupported(
+            devices, audio_is_output_device, __func__, /*matchAddress*/false)) {
+        return BAD_VALUE;
+    }
+    status_t status = mEngine->removeDevicesRoleForStrategy(strategy, role, devices);
+    if (status != NO_ERROR) {
+        ALOGW("Engine could not remove devices %s for strategy %d role %d",
+                dumpAudioDeviceTypeAddrVector(devices).c_str(), strategy, role);
+        return status;
+    }
+
+    checkForDeviceAndOutputChanges();
+
+    bool forceVolumeReeval = false;
+    // TODO(b/263479999): workaround for truncated touch sounds
+    // to be removed when the problem is handled by system UI
+    uint32_t delayMs = 0;
+    // The communication strategy also impacts capture routing, and needs a volume
+    // re-evaluation combined with the touch sound delay workaround above.
+    if (strategy == mCommunnicationStrategy) {
+        forceVolumeReeval = true;
+        delayMs = TOUCH_SOUND_FIXED_DELAY_MS;
+        updateInputRouting();
+    }
+    updateCallAndOutputRouting(forceVolumeReeval, delayMs);
+
+    return NO_ERROR;
+}
+
+status_t AudioPolicyManager::clearDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role)
{
ALOGV("%s() strategy=%d role=%d", __func__, strategy, role);
- status_t status = mEngine->removeDevicesRoleForStrategy(strategy, role);
+ status_t status = mEngine->clearDevicesRoleForStrategy(strategy, role);
if (status != NO_ERROR) {
- ALOGV("Engine could not remove preferred device for strategy %d status %d",
+ ALOGW_IF(status != NAME_NOT_FOUND,
+ "Engine could not remove device role for strategy %d status %d",
strategy, status);
return status;
}
@@ -3818,16 +4017,18 @@
ALOGV("%s() audioSource=%d role=%d devices=%s", __func__, audioSource, role,
dumpAudioDeviceTypeAddrVector(devices).c_str());
- if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+ if (!areAllDevicesSupported(
+ devices, audio_call_is_input_device, __func__, /*matchAddress*/false)) {
return BAD_VALUE;
}
status_t status = mEngine->removeDevicesRoleForCapturePreset(
audioSource, role, devices);
- ALOGW_IF(status != NO_ERROR,
+ ALOGW_IF(status != NO_ERROR && status != NAME_NOT_FOUND,
"Engine could not remove devices role (%d) for capture preset %d", role, audioSource);
-
- updateInputRouting();
+ if (status == NO_ERROR) {
+ updateInputRouting();
+ }
return status;
}
@@ -3836,10 +4037,11 @@
ALOGV("%s() audioSource=%d role=%d", __func__, audioSource, role);
status_t status = mEngine->clearDevicesRoleForCapturePreset(audioSource, role);
- ALOGW_IF(status != NO_ERROR,
+ ALOGW_IF(status != NO_ERROR && status != NAME_NOT_FOUND,
"Engine could not clear devices role (%d) for capture preset %d", role, audioSource);
-
- updateInputRouting();
+ if (status == NO_ERROR) {
+ updateInputRouting();
+ }
return status;
}
@@ -3929,6 +4131,15 @@
dst->appendFormat(" - uid=%d flag_mask=%#x\n", policy.first, policy.second);
}
+ dst->appendFormat(" Preferred mixer audio configuration:\n");
+ for (const auto it : mPreferredMixerAttrInfos) {
+ dst->appendFormat(" - device port id: %d\n", it.first);
+ for (const auto preferredMixerInfoIt : it.second) {
+ dst->appendFormat(" - strategy: %d; ", preferredMixerInfoIt.first);
+ preferredMixerInfoIt.second->dump(dst);
+ }
+ }
+
dst->appendFormat("\nPolicy Engine dump:\n");
mEngine->dump(dst);
}
@@ -4146,8 +4357,8 @@
if (mEffects.isNonOffloadableEffectEnabled()) {
return OK;
}
- AudioDeviceTypeAddrVector devices;
- status_t status = getDevicesForAttributes(*attr, &devices, false /* forVolume */);
+ DeviceVector devices;
+ status_t status = getDevicesForAttributes(*attr, devices, false /* forVolume */);
if (status != OK) {
return status;
}
@@ -4155,43 +4366,187 @@
if (devices.empty()) {
return OK; // no output devices for the attributes
}
+ return getProfilesForDevices(devices, audioProfilesVector,
+ AUDIO_OUTPUT_FLAG_DIRECT /*flags*/, false /*isInput*/);
+}
+// Fills `mixerAttrs` with the mixer attributes supported for the given output device
+// port. Only USB output devices are currently supported: returns BAD_VALUE if the port
+// is not in mAvailableOutputDevices or is not a USB output device type.
+status_t AudioPolicyManager::getSupportedMixerAttributes(
+        audio_port_handle_t portId, std::vector<audio_mixer_attributes_t> &mixerAttrs) {
+    ALOGV("%s, portId=%d", __func__, portId);
+    sp<DeviceDescriptor> deviceDescriptor = mAvailableOutputDevices.getDeviceFromId(portId);
+    if (deviceDescriptor == nullptr) {
+        ALOGE("%s the requested device is currently unavailable", __func__);
+        return BAD_VALUE;
+    }
+    if (!audio_is_usb_out_device(deviceDescriptor->type())) {
+        ALOGE("%s the requested device(type=%#x) is not usb device", __func__,
+              deviceDescriptor->type());
+        return BAD_VALUE;
+    }
+    // Collect mixer attributes from every output profile, of every HW module, that
+    // supports this device.
     for (const auto& hwModule : mHwModules) {
-        // the MSD module checks for different conditions
-        if (strcmp(hwModule->getName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0) {
+        for (const auto& curProfile : hwModule->getOutputProfiles()) {
+            if (curProfile->supportsDevice(deviceDescriptor)) {
+                curProfile->toSupportedMixerAttributes(&mixerAttrs);
+            }
+        }
+    }
+    return NO_ERROR;
+}
+
+// Records the preferred mixer attributes (owned by `uid`) for the given USB output
+// device port and the product strategy derived from `attr`, then reopens any matching
+// output so the preferred configuration takes effect. Restrictions: only
+// AUDIO_USAGE_MEDIA and USB output devices are accepted, and a compatible dynamic
+// output profile must exist; BAD_VALUE is returned otherwise.
+status_t AudioPolicyManager::setPreferredMixerAttributes(
+        const audio_attributes_t *attr,
+        audio_port_handle_t portId,
+        uid_t uid,
+        const audio_mixer_attributes_t *mixerAttributes) {
+    ALOGV("%s, attr=%s, mixerAttributes={format=%#x, channelMask=%#x, samplingRate=%u, "
+          "mixerBehavior=%d}, uid=%d, portId=%u",
+          __func__, toString(*attr).c_str(), mixerAttributes->config.format,
+          mixerAttributes->config.channel_mask, mixerAttributes->config.sample_rate,
+          mixerAttributes->mixer_behavior, uid, portId);
+    if (attr->usage != AUDIO_USAGE_MEDIA) {
+        ALOGE("%s failed, only media is allowed, the given usage is %d", __func__, attr->usage);
+        return BAD_VALUE;
+    }
+    sp<DeviceDescriptor> deviceDescriptor = mAvailableOutputDevices.getDeviceFromId(portId);
+    if (deviceDescriptor == nullptr) {
+        ALOGE("%s the requested device is currently unavailable", __func__);
+        return BAD_VALUE;
+    }
+    if (!audio_is_usb_out_device(deviceDescriptor->type())) {
+        ALOGE("%s(%d), type=%d, is not a usb output device",
+              __func__, portId, deviceDescriptor->type());
+        return BAD_VALUE;
+    }
+
+    // Derive output flags from both the attributes' flags and the requested mixer
+    // behavior (e.g. bit-perfect), then search all HW modules for a dynamic output
+    // profile compatible with the requested config on this device.
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
+    audio_flags_to_audio_output_flags(attr->flags, &flags);
+    flags = (audio_output_flags_t) (flags |
+            audio_output_flags_from_mixer_behavior(mixerAttributes->mixer_behavior));
+    sp<IOProfile> profile = nullptr;
+    DeviceVector devices(deviceDescriptor);
+    for (const auto& hwModule : mHwModules) {
+        for (const auto& curProfile : hwModule->getOutputProfiles()) {
+            if (curProfile->hasDynamicAudioProfile()
+                    && curProfile->isCompatibleProfile(devices,
+                                                       mixerAttributes->config.sample_rate,
+                                                       nullptr /*updatedSamplingRate*/,
+                                                       mixerAttributes->config.format,
+                                                       nullptr /*updatedFormat*/,
+                                                       mixerAttributes->config.channel_mask,
+                                                       nullptr /*updatedChannelMask*/,
+                                                       flags,
+                                                       false /*exactMatchRequiredForInputFlags*/)) {
+                profile = curProfile;
+                break;
+            }
+        }
+    }
+    if (profile == nullptr) {
+        ALOGE("%s, there is no compatible profile found", __func__);
+        return BAD_VALUE;
+    }
+
+    // Record (or replace) the preferred mixer attributes for this (port, strategy) pair.
+    sp<PreferredMixerAttributesInfo> mixerAttrInfo =
+            sp<PreferredMixerAttributesInfo>::make(
+                    uid, portId, profile, flags, *mixerAttributes);
+    const product_strategy_t strategy = mEngine->getProductStrategyForAttributes(*attr);
+    mPreferredMixerAttrInfos[portId][strategy] = mixerAttrInfo;
+
+    // If 1) there is any client from the preferred mixer configuration owner that is currently
+    // active and matches the strategy and 2) current output is on the preferred device and the
+    // mixer configuration doesn't match the preferred one, reopen output with preferred mixer
+    // configuration.
+    std::vector<audio_io_handle_t> outputsToReopen;
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        const auto output = mOutputs.valueAt(i);
+        if (output->mProfile == profile && output->devices().onlyContainsDevice(deviceDescriptor)) {
+            if (output->isConfigurationMatched(mixerAttributes->config, flags)) {
+                output->mUsePreferredMixerAttributes = true;
+            } else {
+                for (const auto &client: output->getActiveClients()) {
+                    if (client->uid() == uid && client->strategy() == strategy) {
+                        client->setIsInvalid();
+                        outputsToReopen.push_back(output->mIoHandle);
+                    }
+                }
+            }
+        }
+    }
+    // Reopen the collected outputs with the preferred configuration. A failed reopen is
+    // logged and skipped rather than failing the whole call (best effort).
+    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+    config.sample_rate = mixerAttributes->config.sample_rate;
+    config.channel_mask = mixerAttributes->config.channel_mask;
+    config.format = mixerAttributes->config.format;
+    for (const auto output : outputsToReopen) {
+        sp<SwAudioOutputDescriptor> desc =
+                reopenOutput(mOutputs.valueFor(output), &config, flags, __func__);
+        if (desc == nullptr) {
+            ALOGE("%s, failed to reopen output with preferred mixer attributes", __func__);
             continue;
         }
-        for (const auto& outputProfile : hwModule->getOutputProfiles()) {
-            if (!outputProfile->asAudioPort()->isDirectOutput()) {
-                continue;
-            }
-            // allow only profiles that support all the available and routed devices
-            if (outputProfile->getSupportedDevices().getDevicesFromDeviceTypeAddrVec(devices).size()
-                    != devices.size()) {
-                continue;
-            }
-            audioProfilesVector.addAllValidProfiles(
-                    outputProfile->asAudioPort()->getAudioProfiles());
-        }
+        desc->mUsePreferredMixerAttributes = true;
     }
-    // add the direct profiles from MSD if present and has audio patches to all the output(s)
-    const auto& msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
-    if (msdModule != nullptr) {
-        if (msdHasPatchesToAllDevices(devices)) {
-            ALOGV("%s: MSD audio patches set to all output devices.", __func__);
-            for (const auto& outputProfile : msdModule->getOutputProfiles()) {
-                if (!outputProfile->asAudioPort()->isDirectOutput()) {
-                    continue;
-                }
-                audioProfilesVector.addAllValidProfiles(
-                        outputProfile->asAudioPort()->getAudioProfiles());
-            }
-        } else {
-            ALOGV("%s: MSD audio patches NOT set to all output devices.", __func__);
-        }
+    return NO_ERROR;
+}
+
+// Looks up the preferred mixer attributes recorded for the given device port and
+// product strategy in mPreferredMixerAttrInfos. Returns nullptr when no entry exists
+// for either key.
+sp<PreferredMixerAttributesInfo> AudioPolicyManager::getPreferredMixerAttributesInfo(
+        audio_port_handle_t devicePortId, product_strategy_t strategy) {
+    auto it = mPreferredMixerAttrInfos.find(devicePortId);
+    if (it == mPreferredMixerAttrInfos.end()) {
+        return nullptr;
+    }
+    auto mixerAttrInfoIt = it->second.find(strategy);
+    if (mixerAttrInfoIt == it->second.end()) {
+        return nullptr;
+    }
+    return mixerAttrInfoIt->second;
+}
+
+// Copies into *mixerAttributes the preferred mixer attributes previously set for
+// (portId, product strategy of *attr). Returns NAME_NOT_FOUND when none has been set.
+status_t AudioPolicyManager::getPreferredMixerAttributes(
+        const audio_attributes_t *attr,
+        audio_port_handle_t portId,
+        audio_mixer_attributes_t* mixerAttributes) {
+    sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
+            portId, mEngine->getProductStrategyForAttributes(*attr));
+    if (info == nullptr) {
+        return NAME_NOT_FOUND;
+    }
+    *mixerAttributes = info->getMixerAttributes();
+    return NO_ERROR;
+}
+
+// Clears the preferred mixer attributes recorded for (portId, product strategy of
+// *attr). Only the uid that set them may clear them (PERMISSION_DENIED otherwise);
+// returns NAME_NOT_FOUND when no entry exists. Outputs currently opened with the
+// cleared configuration are reopened with the default configuration.
+status_t AudioPolicyManager::clearPreferredMixerAttributes(const audio_attributes_t *attr,
+                                                           audio_port_handle_t portId,
+                                                           uid_t uid) {
+    const product_strategy_t strategy = mEngine->getProductStrategyForAttributes(*attr);
+    const auto preferredMixerAttrInfo = getPreferredMixerAttributesInfo(portId, strategy);
+    if (preferredMixerAttrInfo == nullptr) {
+        return NAME_NOT_FOUND;
+    }
+    if (preferredMixerAttrInfo->getUid() != uid) {
+        ALOGE("%s, requested uid=%d, owned uid=%d",
+              __func__, uid, preferredMixerAttrInfo->getUid());
+        return PERMISSION_DENIED;
+    }
+    // Drop the entry; also drop the per-port map when it becomes empty.
+    mPreferredMixerAttrInfos[portId].erase(strategy);
+    if (mPreferredMixerAttrInfos[portId].empty()) {
+        mPreferredMixerAttrInfos.erase(portId);
     }
+    // Reconfig existing output
+    std::vector<audio_io_handle_t> potentialOutputsToReopen;
+    for (size_t i = 0; i < mOutputs.size(); i++) {
+        if (mOutputs.valueAt(i)->mProfile == preferredMixerAttrInfo->getProfile()) {
+            potentialOutputsToReopen.push_back(mOutputs.keyAt(i));
+        }
+    }
+    for (const auto output : potentialOutputsToReopen) {
+        sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
+        // Only reopen outputs that actually carry the just-cleared configuration.
+        if (desc->isConfigurationMatched(preferredMixerAttrInfo->getConfigBase(),
+                                         preferredMixerAttrInfo->getFlags())) {
+            reopenOutput(desc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        }
+    }
     return NO_ERROR;
 }
@@ -4259,6 +4614,7 @@
*num_ports += numOutputs;
}
}
+
*generation = curAudioPortGeneration();
ALOGV("listAudioPorts() got %zu ports needed %d", portsWritten, *num_ports);
return NO_ERROR;
@@ -4614,17 +4970,22 @@
audio_attributes_t resultAttr;
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
config.sample_rate = sourceDesc->config().sample_rate;
- config.channel_mask = sourceDesc->config().channel_mask;
+ audio_channel_mask_t sourceMask = sourceDesc->config().channel_mask;
+ config.channel_mask =
+ (audio_channel_mask_get_representation(sourceMask)
+ == AUDIO_CHANNEL_REPRESENTATION_INDEX) ? sourceMask
+ : audio_channel_mask_in_to_out(sourceMask);
config.format = sourceDesc->config().format;
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
bool isRequestedDeviceForExclusiveUse = false;
output_type_t outputType;
bool isSpatialized;
+ bool isBitPerfect;
getOutputForAttrInt(&resultAttr, &output, AUDIO_SESSION_NONE, &attributes,
&stream, sourceDesc->uid(), &config, &flags,
&selectedDeviceId, &isRequestedDeviceForExclusiveUse,
- nullptr, &outputType, &isSpatialized);
+ nullptr, &outputType, &isSpatialized, &isBitPerfect);
if (output == AUDIO_IO_HANDLE_NONE) {
ALOGV("%s no output for device %s",
__FUNCTION__, sinkDevice->toString().c_str());
@@ -4916,6 +5277,7 @@
auto attributes = mEngine->getAllAttributesForProductStrategy(ps).front();
DeviceVector devices = mEngine->getOutputDevicesForAttributes(attributes, nullptr, false);
SortedVector<audio_io_handle_t> outputs = getOutputsForDevices(devices, mOutputs);
+ std::map<audio_io_handle_t, DeviceVector> outputsToReopen;
for (size_t j = 0; j < mOutputs.size(); j++) {
if (mOutputs.keyAt(j) == ouptutToSkip) {
continue;
@@ -4928,14 +5290,20 @@
// invalidate all tracks in this strategy to force re connection.
// Otherwise select new device on the output mix.
if (outputs.indexOf(mOutputs.keyAt(j)) < 0) {
- for (auto stream : mEngine->getStreamTypesForProductStrategy(ps)) {
- mpClientInterface->invalidateStream(stream);
- }
+ invalidateStreams(mEngine->getStreamTypesForProductStrategy(ps));
} else {
- setOutputDevices(
- outputDesc, getNewOutputDevices(outputDesc, false /*fromCache*/), false);
+ DeviceVector newDevices = getNewOutputDevices(outputDesc, false /*fromCache*/);
+ if (outputDesc->mUsePreferredMixerAttributes && outputDesc->devices() != newDevices) {
+ // If the device is using preferred mixer attributes, the output need to reopen
+ // with default configuration when the new selected devices are different from
+ // current routing devices.
+ outputsToReopen.emplace(mOutputs.keyAt(j), newDevices);
+ continue;
+ }
+ setOutputDevices(outputDesc, newDevices, false);
}
}
+ reopenOutputsWithDevices(outputsToReopen);
}
void AudioPolicyManager::clearSessionRoutes(uid_t uid)
@@ -5002,7 +5370,11 @@
*session = (audio_session_t)mpClientInterface->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
*ioHandle = (audio_io_handle_t)mpClientInterface->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_INPUT);
audio_attributes_t attr = { .source = AUDIO_SOURCE_HOTWORD };
- *device = mEngine->getInputDeviceForAttributes(attr)->type();
+ sp<DeviceDescriptor> deviceDesc = mEngine->getInputDeviceForAttributes(attr);
+ if (deviceDesc == nullptr) {
+ return INVALID_OPERATION;
+ }
+ *device = deviceDesc->type();
return mSoundTriggerSessions.acquireSession(*session, *ioHandle);
}
@@ -5384,6 +5756,21 @@
return false;
}
+// Returns true if any input profile of any HW module declares
+// AUDIO_INPUT_FLAG_HOTWORD_TAP and, when `lookbackAudio` is requested,
+// AUDIO_INPUT_FLAG_HW_LOOKBACK as well — i.e. the platform can supply a hotword
+// tap stream (optionally with hardware lookback audio).
+bool AudioPolicyManager::isHotwordStreamSupported(bool lookbackAudio)
+{
+    const auto mask = AUDIO_INPUT_FLAG_HOTWORD_TAP |
+        (lookbackAudio ? AUDIO_INPUT_FLAG_HW_LOOKBACK : 0);
+    for (const auto& hwModule : mHwModules) {
+        const InputProfileCollection &inputProfiles = hwModule->getInputProfiles();
+        for (const auto &inputProfile : inputProfiles) {
+            // All bits of the mask must be present in the profile's flags.
+            if ((inputProfile->getFlags() & mask) == mask) {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
bool AudioPolicyManager::isCallScreenModeSupported()
{
return mConfig->isCallScreenModeSupported();
@@ -5490,9 +5877,7 @@
}
}
- for (audio_stream_type_t stream : streamsToInvalidate) {
- mpClientInterface->invalidateStream(stream);
- }
+ invalidateStreams(StreamTypeVector(streamsToInvalidate.begin(), streamsToInvalidate.end()));
}
@@ -6382,6 +6767,7 @@
SortedVector<audio_io_handle_t> dstOutputs = getOutputsForDevices(newDevices, mOutputs);
uint32_t maxLatency = 0;
+ bool unneededUsePrimaryOutputFromPolicyMixes = false;
std::vector<sp<SwAudioOutputDescriptor>> invalidatedOutputs;
// take into account dynamic audio policies related changes: if a client is now associated
// to a different policy mix than at creation time, invalidate corresponding stream
@@ -6396,7 +6782,9 @@
}
sp<AudioPolicyMix> primaryMix;
status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
- client->uid(), client->flags(), primaryMix, nullptr);
+ client->uid(), client->session(), client->flags(), mAvailableOutputDevices,
+ nullptr /* requestedDevice */, primaryMix, nullptr /* secondaryMixes */,
+ unneededUsePrimaryOutputFromPolicyMixes);
if (status != OK) {
continue;
}
@@ -6474,9 +6862,7 @@
}
// Move tracks associated to this stream (and linked) from previous output to new output
if (!invalidatedOutputs.empty()) {
- for (auto stream : mEngine->getStreamTypesForProductStrategy(psId)) {
- mpClientInterface->invalidateStream(stream);
- }
+ invalidateStreams(mEngine->getStreamTypesForProductStrategy(psId));
for (sp<SwAudioOutputDescriptor> desc : invalidatedOutputs) {
desc->setTracksInvalidatedStatusByStrategy(psId);
}
@@ -6494,15 +6880,18 @@
}
void AudioPolicyManager::checkSecondaryOutputs() {
- std::set<audio_stream_type_t> streamsToInvalidate;
+ PortHandleVector clientsToInvalidate;
TrackSecondaryOutputsMap trackSecondaryOutputs;
+ bool unneededUsePrimaryOutputFromPolicyMixes = false;
for (size_t i = 0; i < mOutputs.size(); i++) {
const sp<SwAudioOutputDescriptor>& outputDescriptor = mOutputs[i];
for (const sp<TrackClientDescriptor>& client : outputDescriptor->getClientIterable()) {
sp<AudioPolicyMix> primaryMix;
std::vector<sp<AudioPolicyMix>> secondaryMixes;
status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
- client->uid(), client->flags(), primaryMix, &secondaryMixes);
+ client->uid(), client->session(), client->flags(), mAvailableOutputDevices,
+ nullptr /* requestedDevice */, primaryMix, &secondaryMixes,
+ unneededUsePrimaryOutputFromPolicyMixes);
std::vector<sp<SwAudioOutputDescriptor>> secondaryDescs;
for (auto &secondaryMix : secondaryMixes) {
sp<SwAudioOutputDescriptor> outputDesc = secondaryMix->getOutput();
@@ -6512,8 +6901,11 @@
}
}
- if (status != OK) {
- streamsToInvalidate.insert(client->stream());
+ if (status != OK &&
+ (client->flags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == AUDIO_OUTPUT_FLAG_NONE) {
+                // When querying the secondary output fails, only invalidate clients
+                // that are not MMAP, because MMAP streams do not support secondary outputs.
+ clientsToInvalidate.push_back(client->portId());
} else if (!std::equal(
client->getSecondaryOutputs().begin(),
client->getSecondaryOutputs().end(),
@@ -6521,7 +6913,7 @@
if (!audio_is_linear_pcm(client->config().format)) {
// If the format is not PCM, the tracks should be invalidated to get correct
// behavior when the secondary output is changed.
- streamsToInvalidate.insert(client->stream());
+ clientsToInvalidate.push_back(client->portId());
} else {
std::vector<wp<SwAudioOutputDescriptor>> weakSecondaryDescs;
std::vector<audio_io_handle_t> secondaryOutputIds;
@@ -6538,9 +6930,9 @@
if (!trackSecondaryOutputs.empty()) {
mpClientInterface->updateSecondaryOutputs(trackSecondaryOutputs);
}
- for (audio_stream_type_t stream : streamsToInvalidate) {
- ALOGD("%s Invalidate stream %d due to fail getting output for attr", __func__, stream);
- mpClientInterface->invalidateStream(stream);
+ if (!clientsToInvalidate.empty()) {
+ ALOGD("%s Invalidate clients due to fail getting output for attr", __func__);
+ mpClientInterface->invalidateTracks(clientsToInvalidate);
}
}
@@ -6710,20 +7102,23 @@
// a null sp<>, causing the patch on the input stream to be released.
audio_attributes_t attributes;
uid_t uid;
+ audio_session_t session;
sp<RecordClientDescriptor> topClient = inputDesc->getHighestPriorityClient();
if (topClient != nullptr) {
attributes = topClient->attributes();
uid = topClient->uid();
+ session = topClient->session();
} else {
attributes = { .source = AUDIO_SOURCE_DEFAULT };
uid = 0;
+ session = AUDIO_SESSION_NONE;
}
if (attributes.source == AUDIO_SOURCE_DEFAULT && isInCall()) {
attributes.source = AUDIO_SOURCE_VOICE_COMMUNICATION;
}
if (attributes.source != AUDIO_SOURCE_DEFAULT) {
- device = mEngine->getInputDeviceForAttributes(attributes, uid);
+ device = mEngine->getInputDeviceForAttributes(attributes, uid, session);
}
return device;
@@ -6734,70 +7129,15 @@
return (stream1 == stream2);
}
-// TODO - consider MSD routes b/214971780
status_t AudioPolicyManager::getDevicesForAttributes(
const audio_attributes_t &attr, AudioDeviceTypeAddrVector *devices, bool forVolume) {
if (devices == nullptr) {
return BAD_VALUE;
}
- // Devices are determined in the following precedence:
- //
- // 1) Devices associated with a dynamic policy matching the attributes. This is often
- // a remote submix from MIX_ROUTE_FLAG_LOOP_BACK.
- //
- // If no such dynamic policy then
- // 2) Devices containing an active client using setPreferredDevice
- // with same strategy as the attributes.
- // (from the default Engine::getOutputDevicesForAttributes() implementation).
- //
- // If no corresponding active client with setPreferredDevice then
- // 3) Devices associated with the strategy determined by the attributes
- // (from the default Engine::getOutputDevicesForAttributes() implementation).
- //
- // See related getOutputForAttrInt().
-
- // check dynamic policies but only for primary descriptors (secondary not used for audible
- // audio routing, only used for duplication for playback capture)
- sp<AudioPolicyMix> policyMix;
- status_t status = mPolicyMixes.getOutputForAttr(attr, AUDIO_CONFIG_BASE_INITIALIZER,
- 0 /*uid unknown here*/, AUDIO_OUTPUT_FLAG_NONE, policyMix, nullptr);
- if (status != OK) {
- return status;
- }
-
DeviceVector curDevices;
- if (policyMix != nullptr && policyMix->getOutput() != nullptr &&
- // For volume control, skip LOOPBACK mixes which use AUDIO_DEVICE_OUT_REMOTE_SUBMIX
- // as they are unaffected by device/stream volume
- // (per SwAudioOutputDescriptor::isFixedVolume()).
- (!forVolume || policyMix->mDeviceType != AUDIO_DEVICE_OUT_REMOTE_SUBMIX)
- ) {
- sp<DeviceDescriptor> deviceDesc = mAvailableOutputDevices.getDevice(
- policyMix->mDeviceType, policyMix->mDeviceAddress, AUDIO_FORMAT_DEFAULT);
- curDevices.add(deviceDesc);
- } else {
- // The default Engine::getOutputDevicesForAttributes() uses findPreferredDevice()
- // which selects setPreferredDevice if active. This means forVolume call
- // will take an active setPreferredDevice, if such exists.
-
- curDevices = mEngine->getOutputDevicesForAttributes(
- attr, nullptr /* preferredDevice */, false /* fromCache */);
- }
-
- if (forVolume) {
- // We alias the device AUDIO_DEVICE_OUT_SPEAKER_SAFE to AUDIO_DEVICE_OUT_SPEAKER
- // for single volume control in AudioService (such relationship should exist if
- // SPEAKER_SAFE is present).
- //
- // (This is unrelated to a different device grouping as Volume::getDeviceCategory)
- DeviceVector speakerSafeDevices =
- curDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER_SAFE);
- if (!speakerSafeDevices.isEmpty()) {
- curDevices.merge(
- mAvailableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER));
- curDevices.remove(speakerSafeDevices);
- }
+ if (status_t status = getDevicesForAttributes(attr, curDevices, forVolume); status != OK) {
+ return status;
}
for (const auto& device : curDevices) {
devices->push_back(device->getDeviceTypeAddr());
@@ -6982,6 +7322,7 @@
audio_patch_handle_t *patchHandle,
bool requiresMuteCheck, bool requiresVolumeCheck)
{
+ // TODO(b/262404095): Consider if the output needs to be reopened.
ALOGV("%s device %s delayMs %d", __func__, devices.toString().c_str(), delayMs);
uint32_t muteWaitMs;
@@ -7152,51 +7493,71 @@
{
// Choose an input profile based on the requested capture parameters: select the first available
// profile supporting all requested parameters.
+ // The flags can be ignored if they don't contain a must-match flag.
//
// TODO: perhaps isCompatibleProfile should return a "matching" score so we can return
// the best matching profile, not the first one.
- sp<IOProfile> firstInexact;
- uint32_t updatedSamplingRate = 0;
- audio_format_t updatedFormat = AUDIO_FORMAT_INVALID;
- audio_channel_mask_t updatedChannelMask = AUDIO_CHANNEL_INVALID;
- for (const auto& hwModule : mHwModules) {
- for (const auto& profile : hwModule->getInputProfiles()) {
- // profile->log();
- //updatedFormat = format;
- if (profile->isCompatibleProfile(DeviceVector(device), samplingRate,
- &samplingRate /*updatedSamplingRate*/,
- format,
- &format, /*updatedFormat*/
- channelMask,
- &channelMask /*updatedChannelMask*/,
- // FIXME ugly cast
- (audio_output_flags_t) flags,
- true /*exactMatchRequiredForInputFlags*/)) {
- return profile;
- }
- if (firstInexact == nullptr && profile->isCompatibleProfile(DeviceVector(device),
- samplingRate,
- &updatedSamplingRate,
- format,
- &updatedFormat,
- channelMask,
- &updatedChannelMask,
- // FIXME ugly cast
- (audio_output_flags_t) flags,
- false /*exactMatchRequiredForInputFlags*/)) {
- firstInexact = profile;
- }
+ using underlying_input_flag_t = std::underlying_type_t<audio_input_flags_t>;
+ const underlying_input_flag_t mustMatchFlag = AUDIO_INPUT_FLAG_MMAP_NOIRQ |
+ AUDIO_INPUT_FLAG_HOTWORD_TAP | AUDIO_INPUT_FLAG_HW_LOOKBACK;
+ const underlying_input_flag_t oriFlags = flags;
+
+ for (;;) {
+ sp<IOProfile> firstInexact = nullptr;
+ uint32_t updatedSamplingRate = 0;
+ audio_format_t updatedFormat = AUDIO_FORMAT_INVALID;
+ audio_channel_mask_t updatedChannelMask = AUDIO_CHANNEL_INVALID;
+ for (const auto& hwModule : mHwModules) {
+ for (const auto& profile : hwModule->getInputProfiles()) {
+ // profile->log();
+ //updatedFormat = format;
+ if (profile->isCompatibleProfile(DeviceVector(device), samplingRate,
+ &samplingRate /*updatedSamplingRate*/,
+ format,
+ &format, /*updatedFormat*/
+ channelMask,
+ &channelMask /*updatedChannelMask*/,
+ // FIXME ugly cast
+ (audio_output_flags_t) flags,
+ true /*exactMatchRequiredForInputFlags*/)) {
+ return profile;
+ }
+ if (firstInexact == nullptr && profile->isCompatibleProfile(DeviceVector(device),
+ samplingRate,
+ &updatedSamplingRate,
+ format,
+ &updatedFormat,
+ channelMask,
+ &updatedChannelMask,
+ // FIXME ugly cast
+ (audio_output_flags_t) flags,
+ false /*exactMatchRequiredForInputFlags*/)) {
+ firstInexact = profile;
+ }
+ }
+ }
+
+ if (firstInexact != nullptr) {
+ samplingRate = updatedSamplingRate;
+ format = updatedFormat;
+ channelMask = updatedChannelMask;
+ return firstInexact;
+ } else if (flags & AUDIO_INPUT_FLAG_RAW) {
+ flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_RAW); // retry
+ } else if ((flags & mustMatchFlag) == AUDIO_INPUT_FLAG_NONE &&
+ flags != AUDIO_INPUT_FLAG_NONE && audio_is_linear_pcm(format)) {
+ flags = AUDIO_INPUT_FLAG_NONE;
+ } else { // fail
+ ALOGW("%s could not find profile for device %s, sampling rate %u, format %#x, "
+ "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(),
+ samplingRate, format, channelMask, oriFlags);
+ break;
}
}
- if (firstInexact != nullptr) {
- samplingRate = updatedSamplingRate;
- format = updatedFormat;
- channelMask = updatedChannelMask;
- return firstInexact;
- }
- return NULL;
+
+ return nullptr;
}
float AudioPolicyManager::computeVolume(IVolumeCurves &curves,
@@ -7856,19 +8217,23 @@
sp<SwAudioOutputDescriptor> AudioPolicyManager::openOutputWithProfileAndDevice(
const sp<IOProfile>& profile, const DeviceVector& devices,
- const audio_config_base_t *mixerConfig)
+ const audio_config_base_t *mixerConfig, const audio_config_t *halConfig,
+ audio_output_flags_t flags)
{
for (const auto& device : devices) {
// TODO: This should be checking if the profile supports the device combo.
if (!profile->supportsDevice(device)) {
+ ALOGE("%s profile(%s) doesn't support device %#x", __func__, profile->getName().c_str(),
+ device->type());
return nullptr;
}
}
sp<SwAudioOutputDescriptor> desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- status_t status = desc->open(nullptr /* halConfig */, mixerConfig, devices,
- AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
+ status_t status = desc->open(halConfig, mixerConfig, devices,
+ AUDIO_STREAM_DEFAULT, flags, &output);
if (status != NO_ERROR) {
+ ALOGE("%s failed to open output %d", __func__, status);
return nullptr;
}
@@ -7886,7 +8251,9 @@
ALOGW("%s() missing param", __func__);
desc->close();
return nullptr;
- } else if (profile->hasDynamicAudioProfile()) {
+ } else if (profile->hasDynamicAudioProfile() && halConfig == nullptr) {
+ // Reopen the output with the best audio profile picked by APM when the profile
+ // supports a dynamic audio profile and the HAL config is not specified.
desc->close();
output = AUDIO_IO_HANDLE_NONE;
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
@@ -7896,8 +8263,7 @@
config.offload_info.channel_mask = config.channel_mask;
config.offload_info.format = config.format;
- status = desc->open(&config, mixerConfig, devices,
- AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
+ status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, flags, &output);
if (status != NO_ERROR) {
return nullptr;
}
@@ -7952,4 +8318,153 @@
return desc;
}
+status_t AudioPolicyManager::getDevicesForAttributes(
+ const audio_attributes_t &attr, DeviceVector &devices, bool forVolume) {
+ // Devices are determined in the following precedence:
+ //
+ // 1) Devices associated with a dynamic policy matching the attributes. This is often
+ // a remote submix from MIX_ROUTE_FLAG_LOOP_BACK.
+ //
+ // If no such dynamic policy then
+ // 2) Devices containing an active client using setPreferredDevice
+ // with same strategy as the attributes.
+ // (from the default Engine::getOutputDevicesForAttributes() implementation).
+ //
+ // If no corresponding active client with setPreferredDevice then
+ // 3) Devices associated with the strategy determined by the attributes
+ // (from the default Engine::getOutputDevicesForAttributes() implementation).
+ //
+ // See related getOutputForAttrInt().
+
+ // check dynamic policies but only for primary descriptors (secondary not used for audible
+ // audio routing, only used for duplication for playback capture)
+ sp<AudioPolicyMix> policyMix;
+ bool unneededUsePrimaryOutputFromPolicyMixes = false;
+ status_t status = mPolicyMixes.getOutputForAttr(attr, AUDIO_CONFIG_BASE_INITIALIZER,
+ 0 /*uid unknown here*/, AUDIO_SESSION_NONE, AUDIO_OUTPUT_FLAG_NONE,
+ mAvailableOutputDevices, nullptr /* requestedDevice */, policyMix,
+ nullptr /* secondaryMixes */, unneededUsePrimaryOutputFromPolicyMixes);
+ if (status != OK) {
+ return status;
+ }
+
+ if (policyMix != nullptr && policyMix->getOutput() != nullptr &&
+ // For volume control, skip LOOPBACK mixes which use AUDIO_DEVICE_OUT_REMOTE_SUBMIX
+ // as they are unaffected by device/stream volume
+ // (per SwAudioOutputDescriptor::isFixedVolume()).
+ (!forVolume || policyMix->mDeviceType != AUDIO_DEVICE_OUT_REMOTE_SUBMIX)
+ ) {
+ sp<DeviceDescriptor> deviceDesc = mAvailableOutputDevices.getDevice(
+ policyMix->mDeviceType, policyMix->mDeviceAddress, AUDIO_FORMAT_DEFAULT);
+ devices.add(deviceDesc);
+ } else {
+ // The default Engine::getOutputDevicesForAttributes() uses findPreferredDevice()
+ // which selects setPreferredDevice if active. This means forVolume call
+ // will take an active setPreferredDevice, if such exists.
+
+ devices = mEngine->getOutputDevicesForAttributes(
+ attr, nullptr /* preferredDevice */, false /* fromCache */);
+ }
+
+ if (forVolume) {
+ // We alias the device AUDIO_DEVICE_OUT_SPEAKER_SAFE to AUDIO_DEVICE_OUT_SPEAKER
+ // for single volume control in AudioService (such relationship should exist if
+ // SPEAKER_SAFE is present).
+ //
+ // (This is unrelated to a different device grouping as Volume::getDeviceCategory)
+ DeviceVector speakerSafeDevices =
+ devices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER_SAFE);
+ if (!speakerSafeDevices.isEmpty()) {
+ devices.merge(mAvailableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER));
+ devices.remove(speakerSafeDevices);
+ }
+ }
+
+ return NO_ERROR;
+}
+
+status_t AudioPolicyManager::getProfilesForDevices(const DeviceVector& devices,
+ AudioProfileVector& audioProfiles,
+ uint32_t flags,
+ bool isInput) {
+ for (const auto& hwModule : mHwModules) {
+ // the MSD module checks for different conditions
+ if (strcmp(hwModule->getName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0) {
+ continue;
+ }
+ IOProfileCollection ioProfiles = isInput ? hwModule->getInputProfiles()
+ : hwModule->getOutputProfiles();
+ for (const auto& profile : ioProfiles) {
+ if (!profile->areAllDevicesSupported(devices) ||
+ !profile->isCompatibleProfileForFlags(
+ flags, false /*exactMatchRequiredForInputFlags*/)) {
+ continue;
+ }
+ audioProfiles.addAllValidProfiles(profile->asAudioPort()->getAudioProfiles());
+ }
+ }
+
+ if (!isInput) {
+ // add the direct profiles from MSD if present and it has audio patches to all the output(s)
+ const auto &msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+ if (msdModule != nullptr) {
+ if (msdHasPatchesToAllDevices(devices.toTypeAddrVector())) {
+ ALOGV("%s: MSD audio patches set to all output devices.", __func__);
+ for (const auto &profile: msdModule->getOutputProfiles()) {
+ if (!profile->asAudioPort()->isDirectOutput()) {
+ continue;
+ }
+ audioProfiles.addAllValidProfiles(profile->asAudioPort()->getAudioProfiles());
+ }
+ } else {
+ ALOGV("%s: MSD audio patches NOT set to all output devices.", __func__);
+ }
+ }
+ }
+
+ return NO_ERROR;
+}
+
+sp<SwAudioOutputDescriptor> AudioPolicyManager::reopenOutput(sp<SwAudioOutputDescriptor> outputDesc,
+ const audio_config_t *config,
+ audio_output_flags_t flags,
+ const char* caller) {
+ closeOutput(outputDesc->mIoHandle);
+ sp<SwAudioOutputDescriptor> preferredOutput = openOutputWithProfileAndDevice(
+ outputDesc->mProfile, outputDesc->devices(), nullptr /*mixerConfig*/, config, flags);
+ if (preferredOutput == nullptr) {
+ ALOGE("%s failed to reopen output device=%d, caller=%s",
+ __func__, outputDesc->devices()[0]->getId(), caller);
+ }
+ return preferredOutput;
+}
+
+void AudioPolicyManager::reopenOutputsWithDevices(
+ const std::map<audio_io_handle_t, DeviceVector> &outputsToReopen) {
+ for (const auto& [output, devices] : outputsToReopen) {
+ sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
+ closeOutput(output);
+ openOutputWithProfileAndDevice(desc->mProfile, devices);
+ }
+}
+
+PortHandleVector AudioPolicyManager::getClientsForStream(
+ audio_stream_type_t streamType) const {
+ PortHandleVector clients;
+ for (size_t i = 0; i < mOutputs.size(); ++i) {
+ PortHandleVector clientsForStream = mOutputs.valueAt(i)->getClientsForStream(streamType);
+ clients.insert(clients.end(), clientsForStream.begin(), clientsForStream.end());
+ }
+ return clients;
+}
+
+void AudioPolicyManager::invalidateStreams(StreamTypeVector streams) const {
+ PortHandleVector clients;
+ for (auto stream : streams) {
+ PortHandleVector clientsForStream = getClientsForStream(stream);
+ clients.insert(clients.end(), clientsForStream.begin(), clientsForStream.end());
+ }
+ mpClientInterface->invalidateTracks(clients);
+}
+
} // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 0de5c0e..88bafef 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -48,6 +48,7 @@
#include <AudioOutputDescriptor.h>
#include <AudioPolicyMix.h>
#include <EffectDescriptor.h>
+#include <PreferredMixerAttributesInfo.h>
#include <SoundTriggerSession.h>
#include "EngineLibrary.h"
#include "TypeConverter.h"
@@ -120,13 +121,14 @@
audio_session_t session,
audio_stream_type_t *stream,
const AttributionSourceState& attributionSource,
- const audio_config_t *config,
+ audio_config_t *config,
audio_output_flags_t *flags,
audio_port_handle_t *selectedDeviceId,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
output_type_t *outputType,
- bool *isSpatialized) override;
+ bool *isSpatialized,
+ bool *isBitPerfect) override;
virtual status_t startOutput(audio_port_handle_t portId);
virtual status_t stopOutput(audio_port_handle_t portId);
virtual bool releaseOutput(audio_port_handle_t portId);
@@ -135,7 +137,7 @@
audio_unique_id_t riid,
audio_session_t session,
const AttributionSourceState& attributionSource,
- const audio_config_base_t *config,
+ audio_config_base_t *config,
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
input_type_t *inputType,
@@ -302,8 +304,11 @@
const AudioDeviceTypeAddrVector &devices);
virtual status_t removeDevicesRoleForStrategy(product_strategy_t strategy,
- device_role_t role);
+ device_role_t role,
+ const AudioDeviceTypeAddrVector &devices);
+ virtual status_t clearDevicesRoleForStrategy(product_strategy_t strategy,
+ device_role_t role);
virtual status_t getDevicesForRoleAndStrategy(product_strategy_t strategy,
device_role_t role,
@@ -355,6 +360,8 @@
virtual bool isUltrasoundSupported();
+ bool isHotwordStreamSupported(bool lookbackAudio) override;
+
virtual status_t listAudioProductStrategies(AudioProductStrategyVector &strategies)
{
return mEngine->listAudioProductStrategies(strategies);
@@ -400,6 +407,21 @@
virtual status_t getDirectProfilesForAttributes(const audio_attributes_t* attr,
AudioProfileVector& audioProfiles);
+ status_t getSupportedMixerAttributes(
+ audio_port_handle_t portId,
+ std::vector<audio_mixer_attributes_t>& mixerAttrs) override;
+ status_t setPreferredMixerAttributes(
+ const audio_attributes_t* attr,
+ audio_port_handle_t portId,
+ uid_t uid,
+ const audio_mixer_attributes_t* mixerAttributes) override;
+ status_t getPreferredMixerAttributes(const audio_attributes_t* attr,
+ audio_port_handle_t portId,
+ audio_mixer_attributes_t* mixerAttributes) override;
+ status_t clearPreferredMixerAttributes(const audio_attributes_t* attr,
+ audio_port_handle_t portId,
+ uid_t uid) override;
+
bool isCallScreenModeSupported() override;
void onNewAudioModulesAvailable() override;
@@ -971,6 +993,10 @@
sp<SourceClientDescriptor> mCallRxSourceClient;
sp<SourceClientDescriptor> mCallTxSourceClient;
+ std::map<audio_port_handle_t,
+ std::map<product_strategy_t,
+ sp<PreferredMixerAttributesInfo>>> mPreferredMixerAttrInfos;
+
// Support for Multi-Stream Decoder (MSD) module
sp<DeviceDescriptor> getMsdAudioInDevice() const;
DeviceVector getMsdAudioOutDevices() const;
@@ -1045,13 +1071,14 @@
const audio_attributes_t *attr,
audio_stream_type_t *stream,
uid_t uid,
- const audio_config_t *config,
+ audio_config_t *config,
audio_output_flags_t *flags,
audio_port_handle_t *selectedDeviceId,
bool *isRequestedDeviceForExclusiveUse,
std::vector<sp<AudioPolicyMix>> *secondaryMixes,
output_type_t *outputType,
- bool *isSpatialized);
+ bool *isSpatialized,
+ bool *isBitPerfect);
// internal method to return the output handle for the given device and format
audio_io_handle_t getOutputForDevices(
const DeviceVector &devices,
@@ -1060,6 +1087,7 @@
const audio_config_t *config,
audio_output_flags_t *flags,
bool *isSpatialized,
+ sp<PreferredMixerAttributesInfo> prefMixerAttrInfo = nullptr,
bool forceMutingHaptic = false);
// Internal method checking if a direct output can be opened matching the requested
@@ -1126,7 +1154,9 @@
* @param session requester session id
* @param uid requester uid
* @param attributes requester audio attributes (e.g. input source and tags matter)
- * @param config requester audio configuration (e.g. sample rate, format, channel mask).
+ * @param config requested audio configuration (e.g. sample rate, format, channel mask),
+ * will be updated if current configuration doesn't support but another
+ * one does
* @param flags requester input flags
* @param policyMix may be null, policy rules to be followed by the requester
* @return input io handle aka unique input identifier selected for this device.
@@ -1134,7 +1164,7 @@
audio_io_handle_t getInputForDevice(const sp<DeviceDescriptor> &device,
audio_session_t session,
const audio_attributes_t &attributes,
- const audio_config_base_t *config,
+ audio_config_base_t *config,
audio_input_flags_t flags,
const sp<AudioPolicyMix> &policyMix);
@@ -1208,7 +1238,8 @@
bool areAllDevicesSupported(
const AudioDeviceTypeAddrVector& devices,
std::function<bool(audio_devices_t)> predicate,
- const char* context);
+ const char* context,
+ bool matchAddress = true);
bool isScoRequestedForComm() const;
@@ -1225,11 +1256,15 @@
* @param[in] profile IOProfile to use as template
* @param[in] devices initial route to apply to this output stream
* @param[in] mixerConfig if not null, use this to configure the mixer
+ * @param[in] halConfig if not null, use this to configure the HAL
+ * @param[in] flags the flags to be used to open the output
* @return an output descriptor for the newly opened stream or null in case of error.
*/
sp<SwAudioOutputDescriptor> openOutputWithProfileAndDevice(
const sp<IOProfile>& profile, const DeviceVector& devices,
- const audio_config_base_t *mixerConfig = nullptr);
+ const audio_config_base_t *mixerConfig = nullptr,
+ const audio_config_t *halConfig = nullptr,
+ audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE);
bool isOffloadPossible(const audio_offload_info_t& offloadInfo,
bool durationIgnored = false);
@@ -1253,6 +1288,30 @@
// Filters only the relevant flags for getProfileForOutput
audio_output_flags_t getRelevantFlags (audio_output_flags_t flags, bool directOnly);
+
+ status_t getDevicesForAttributes(const audio_attributes_t &attr,
+ DeviceVector &devices,
+ bool forVolume);
+
+ status_t getProfilesForDevices(const DeviceVector& devices,
+ AudioProfileVector& audioProfiles,
+ uint32_t flags,
+ bool isInput);
+
+ sp<PreferredMixerAttributesInfo> getPreferredMixerAttributesInfo(
+ audio_port_handle_t devicePortId, product_strategy_t strategy);
+
+ sp<SwAudioOutputDescriptor> reopenOutput(
+ sp<SwAudioOutputDescriptor> outputDesc,
+ const audio_config_t *config,
+ audio_output_flags_t flags,
+ const char* caller);
+
+ void reopenOutputsWithDevices(
+ const std::map<audio_io_handle_t, DeviceVector>& outputsToReopen);
+
+ PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
+ void invalidateStreams(StreamTypeVector streams) const;
};
};
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 734bf9e..f4fc8f1 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -37,6 +37,7 @@
"libaudiopolicy",
"libaudiopolicycomponents",
"libaudiopolicymanagerdefault",
+ "libaudiousecasevalidation",
"libaudioutils",
"libbinder",
"libcutils",
@@ -86,6 +87,7 @@
export_shared_lib_headers: [
"libactivitymanager_aidl",
+ "libaudiousecasevalidation",
"libheadtracking",
"libheadtracking-binding",
"libsensorprivacy",
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index dd06bd5..2874824 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -81,6 +81,12 @@
*halConfig = VALUE_OR_RETURN_STATUS(
aidl2legacy_AudioConfig_audio_config_t(response.config, false /*isInput*/));
*latencyMs = VALUE_OR_RETURN_STATUS(convertIntegral<uint32_t>(response.latencyMs));
+
+ audio_config_base_t config = {.sample_rate = halConfig->sample_rate,
+ .channel_mask = halConfig->channel_mask,
+ .format = halConfig->format,
+ };
+ mAudioPolicyService->registerOutput(*output, config, flags);
}
return status;
}
@@ -103,7 +109,7 @@
if (af == 0) {
return PERMISSION_DENIED;
}
-
+ mAudioPolicyService->unregisterOutput(output);
return af->closeOutput(output);
}
@@ -180,16 +186,6 @@
delay_ms);
}
-status_t AudioPolicyService::AudioPolicyClient::invalidateStream(audio_stream_type_t stream)
-{
- sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
- if (af == 0) {
- return PERMISSION_DENIED;
- }
-
- return af->invalidateStream(stream);
-}
-
void AudioPolicyService::AudioPolicyClient::setParameters(audio_io_handle_t io_handle,
const String8& keyValuePairs,
int delay_ms)
@@ -334,4 +330,14 @@
return af->setDeviceConnectedState(port, state);
}
+status_t AudioPolicyService::AudioPolicyClient::invalidateTracks(
+ const std::vector<audio_port_handle_t>& portIds) {
+ sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+ if (af == 0) {
+ return PERMISSION_DENIED;
+ }
+
+ return af->invalidateTracks(portIds);
+}
+
} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index d283007..5d86e7c 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -374,6 +374,7 @@
AutoCallerClear acc;
AudioPolicyInterface::output_type_t outputType;
bool isSpatialized = false;
+ bool isBitPerfect = false;
status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
&stream,
attributionSource,
@@ -381,7 +382,8 @@
&flags, &selectedDeviceId, &portId,
&secondaryOutputs,
&outputType,
- &isSpatialized);
+ &isSpatialized,
+ &isBitPerfect);
// FIXME: Introduce a way to check for the the telephony device before opening the output
if (result == NO_ERROR) {
@@ -416,6 +418,9 @@
}
if (result == NO_ERROR) {
+ attr = VALUE_OR_RETURN_BINDER_STATUS(
+ mUsecaseValidator->verifyAudioAttributes(output, attributionSource, attr));
+
sp<AudioPlaybackClient> client =
new AudioPlaybackClient(attr, output, attributionSource, session,
portId, selectedDeviceId, stream, isSpatialized);
@@ -433,6 +438,16 @@
convertContainer<std::vector<int32_t>>(secondaryOutputs,
legacy2aidl_audio_io_handle_t_int32_t));
_aidl_return->isSpatialized = isSpatialized;
+ _aidl_return->isBitPerfect = isBitPerfect;
+ _aidl_return->attr = VALUE_OR_RETURN_BINDER_STATUS(
+ legacy2aidl_audio_attributes_t_AudioAttributes(attr));
+ } else {
+ _aidl_return->configBase.format = VALUE_OR_RETURN_BINDER_STATUS(
+ legacy2aidl_audio_format_t_AudioFormatDescription(config.format));
+ _aidl_return->configBase.channelMask = VALUE_OR_RETURN_BINDER_STATUS(
+ legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+ config.channel_mask, false /*isInput*/));
+ _aidl_return->configBase.sampleRate = config.sample_rate;
}
return binderStatusFromStatusT(result);
}
@@ -477,6 +492,10 @@
AutoCallerClear acc;
status_t status = mAudioPolicyManager->startOutput(portId);
if (status == NO_ERROR) {
+ //TODO b/257922898: decide if/how we need to handle attributes update when playback starts
+ // or during playback
+ (void)mUsecaseValidator->startClient(client->io, client->portId, client->attributionSource,
+ client->attributes, nullptr /* callback */);
client->active = true;
onUpdateActiveSpatializerTracks_l();
}
@@ -517,6 +536,7 @@
if (status == NO_ERROR) {
client->active = false;
onUpdateActiveSpatializerTracks_l();
+ mUsecaseValidator->stopClient(client->io, client->portId);
}
return status;
}
@@ -648,7 +668,9 @@
return binderStatusFromStatusT(PERMISSION_DENIED);
}
- if (((flags & AUDIO_INPUT_FLAG_HW_HOTWORD) != 0)
+ if (((flags & (AUDIO_INPUT_FLAG_HW_HOTWORD |
+ AUDIO_INPUT_FLAG_HOTWORD_TAP |
+ AUDIO_INPUT_FLAG_HW_LOOKBACK)) != 0)
&& !canCaptureHotword) {
ALOGE("%s: permission denied: hotword mode not allowed"
" for uid %d pid %d", __func__, attributionSource.uid, attributionSource.pid);
@@ -720,6 +742,9 @@
if (status == PERMISSION_DENIED) {
AutoCallerClear acc;
mAudioPolicyManager->releaseInput(portId);
+ } else {
+ _aidl_return->config = VALUE_OR_RETURN_BINDER_STATUS(
+ legacy2aidl_audio_config_base_t_AudioConfigBase(config, true /*isInput*/));
}
return binderStatusFromStatusT(status);
}
@@ -793,8 +818,29 @@
Mutex::Autolock _l(mLock);
+ ALOGW_IF(client->silenced, "startInput on silenced input for port %d, uid %d. Unsilencing.",
+ portIdAidl,
+ client->attributionSource.uid);
+
+ if (client->active) {
+ ALOGE("Client should never be active before startInput. Uid %d port %d",
+ client->attributionSource.uid, portId);
+ finishRecording(client->attributionSource, client->attributes.source);
+ return binderStatusFromStatusT(INVALID_OPERATION);
+ }
+
+ // Force the possibly silenced client to be unsilenced since we just called
+ // startRecording (i.e. we have assumed it is unsilenced).
+ // At this point in time, the client is inactive, so no calls to appops are sent in
+ // setAppState_l.
+ // This ensures existing clients have the same behavior as new clients (starting unsilenced).
+ // TODO(b/282076713)
+ setAppState_l(client, APP_STATE_TOP);
+
client->active = true;
client->startTimeNs = systemTime();
+ // This call updates the silenced state, and since we are active, appropriately notifies appops
+ // if we silence the track.
updateUidStates_l();
status_t status;
@@ -2033,6 +2079,17 @@
return Status::ok();
}
+Status AudioPolicyService::isHotwordStreamSupported(bool lookbackAudio, bool* _aidl_return)
+{
+ if (mAudioPolicyManager == nullptr) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+ Mutex::Autolock _l(mLock);
+ AutoCallerClear acc;
+ *_aidl_return = mAudioPolicyManager->isHotwordStreamSupported(lookbackAudio);
+ return Status::ok();
+}
+
Status AudioPolicyService::listAudioProductStrategies(
std::vector<media::AudioProductStrategy>* _aidl_return) {
AudioProductStrategyVector strategies;
@@ -2144,8 +2201,31 @@
return binderStatusFromStatusT(status);
}
-Status AudioPolicyService::removeDevicesRoleForStrategy(int32_t strategyAidl,
- media::DeviceRole roleAidl) {
+Status AudioPolicyService::removeDevicesRoleForStrategy(
+ int32_t strategyAidl,
+ media::DeviceRole roleAidl,
+ const std::vector<AudioDevice>& devicesAidl) {
+ product_strategy_t strategy = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_int32_t_product_strategy_t(strategyAidl));
+ device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_DeviceRole_device_role_t(roleAidl));
+ AudioDeviceTypeAddrVector devices = VALUE_OR_RETURN_BINDER_STATUS(
+ convertContainer<AudioDeviceTypeAddrVector>(devicesAidl,
+ aidl2legacy_AudioDeviceTypeAddress));
+
+ if (mAudioPolicyManager == NULL) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+ Mutex::Autolock _l(mLock);
+ status_t status = mAudioPolicyManager->removeDevicesRoleForStrategy(strategy, role, devices);
+ if (status == NO_ERROR) {
+ onCheckSpatializer_l();
+ }
+ return binderStatusFromStatusT(status);
+}
+
+Status AudioPolicyService::clearDevicesRoleForStrategy(int32_t strategyAidl,
+ media::DeviceRole roleAidl) {
product_strategy_t strategy = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_int32_t_product_strategy_t(strategyAidl));
device_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
@@ -2154,7 +2234,7 @@
return binderStatusFromStatusT(NO_INIT);
}
Mutex::Autolock _l(mLock);
- status_t status = mAudioPolicyManager->removeDevicesRoleForStrategy(strategy, role);
+ status_t status = mAudioPolicyManager->clearDevicesRoleForStrategy(strategy, role);
if (status == NO_ERROR) {
onCheckSpatializer_l();
}
@@ -2370,4 +2450,89 @@
return Status::ok();
}
+Status AudioPolicyService::getSupportedMixerAttributes(
+ int32_t portIdAidl, std::vector<media::AudioMixerAttributesInternal>* _aidl_return) {
+ if (mAudioPolicyManager == nullptr) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+
+ audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+ std::vector<audio_mixer_attributes_t> mixerAttrs;
+ Mutex::Autolock _l(mLock);
+ RETURN_IF_BINDER_ERROR(
+ binderStatusFromStatusT(mAudioPolicyManager->getSupportedMixerAttributes(
+ portId, mixerAttrs)));
+ *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+ convertContainer<std::vector<media::AudioMixerAttributesInternal>>(
+ mixerAttrs,
+ legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal));
+ return Status::ok();
+}
+
+Status AudioPolicyService::setPreferredMixerAttributes(
+ const media::audio::common::AudioAttributes& attrAidl,
+ int32_t portIdAidl,
+ int32_t uidAidl,
+ const media::AudioMixerAttributesInternal& mixerAttrAidl) {
+ if (mAudioPolicyManager == nullptr) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+
+ audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
+ audio_mixer_attributes_t mixerAttr = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioMixerAttributesInternal_audio_mixer_attributes_t(mixerAttrAidl));
+ uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
+ audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+ Mutex::Autolock _l(mLock);
+ return binderStatusFromStatusT(
+ mAudioPolicyManager->setPreferredMixerAttributes(&attr, portId, uid, &mixerAttr));
+}
+
+Status AudioPolicyService::getPreferredMixerAttributes(
+ const media::audio::common::AudioAttributes& attrAidl,
+ int32_t portIdAidl,
+ std::optional<media::AudioMixerAttributesInternal>* _aidl_return) {
+ if (mAudioPolicyManager == nullptr) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+
+ audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
+ audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+ Mutex::Autolock _l(mLock);
+ audio_mixer_attributes_t mixerAttr = AUDIO_MIXER_ATTRIBUTES_INITIALIZER;
+ RETURN_IF_BINDER_ERROR(
+ binderStatusFromStatusT(mAudioPolicyManager->getPreferredMixerAttributes(
+ &attr, portId, &mixerAttr)));
+ *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
+ legacy2aidl_audio_mixer_attributes_t_AudioMixerAttributesInternal(mixerAttr));
+ return Status::ok();
+}
+
+Status AudioPolicyService::clearPreferredMixerAttributes(
+ const media::audio::common::AudioAttributes& attrAidl,
+ int32_t portIdAidl,
+ int32_t uidAidl) {
+ if (mAudioPolicyManager == nullptr) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+
+ audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
+ uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
+ audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_int32_t_audio_port_handle_t(portIdAidl));
+
+ Mutex::Autolock _l(mLock);
+ return binderStatusFromStatusT(
+ mAudioPolicyManager->clearPreferredMixerAttributes(&attr, portId, uid));
+}
+
} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 5ec12bc..68092d7 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -138,6 +138,7 @@
BINDER_METHOD_ENTRY(setCurrentImeUid) \
BINDER_METHOD_ENTRY(isHapticPlaybackSupported) \
BINDER_METHOD_ENTRY(isUltrasoundSupported) \
+BINDER_METHOD_ENTRY(isHotwordStreamSupported) \
BINDER_METHOD_ENTRY(listAudioProductStrategies) \
BINDER_METHOD_ENTRY(getProductStrategyFromAudioAttributes) \
BINDER_METHOD_ENTRY(listAudioVolumeGroups) \
@@ -146,6 +147,7 @@
BINDER_METHOD_ENTRY(isCallScreenModeSupported) \
BINDER_METHOD_ENTRY(setDevicesRoleForStrategy) \
BINDER_METHOD_ENTRY(removeDevicesRoleForStrategy) \
+BINDER_METHOD_ENTRY(clearDevicesRoleForStrategy) \
BINDER_METHOD_ENTRY(getDevicesForRoleAndStrategy) \
BINDER_METHOD_ENTRY(setDevicesRoleForCapturePreset) \
BINDER_METHOD_ENTRY(addDevicesRoleForCapturePreset) \
@@ -156,7 +158,11 @@
BINDER_METHOD_ENTRY(getSpatializer) \
BINDER_METHOD_ENTRY(canBeSpatialized) \
BINDER_METHOD_ENTRY(getDirectPlaybackSupport) \
-BINDER_METHOD_ENTRY(getDirectProfilesForAttributes) \
+BINDER_METHOD_ENTRY(getDirectProfilesForAttributes) \
+BINDER_METHOD_ENTRY(getSupportedMixerAttributes) \
+BINDER_METHOD_ENTRY(setPreferredMixerAttributes) \
+BINDER_METHOD_ENTRY(getPreferredMixerAttributes) \
+BINDER_METHOD_ENTRY(clearPreferredMixerAttributes) \
// singleton for Binder Method Statistics for IAudioPolicyService
static auto& getIAudioPolicyServiceStatistics() {
@@ -218,7 +224,8 @@
mPhoneState(AUDIO_MODE_INVALID),
mCaptureStateNotifier(false),
mCreateAudioPolicyManager(createAudioPolicyManager),
- mDestroyAudioPolicyManager(destroyAudioPolicyManager) {
+ mDestroyAudioPolicyManager(destroyAudioPolicyManager),
+ mUsecaseValidator(media::createUsecaseValidator()) {
setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
}
@@ -1025,10 +1032,11 @@
// AND is on TOP or latest started
// AND there is no active privacy sensitive capture or call
// OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+ bool allowSensitiveCapture =
+ !isSensitiveActive || isTopOrLatestSensitive || current->canCaptureOutput;
bool allowCapture = !isAssistantOnTop
&& (isTopOrLatestActive || isTopOrLatestSensitive)
- && !(isSensitiveActive
- && !(isTopOrLatestSensitive || current->canCaptureOutput))
+ && allowSensitiveCapture
&& canCaptureIfInCallOrCommunication(current);
if (!current->hasOp()) {
@@ -1051,7 +1059,7 @@
if (source == AUDIO_SOURCE_HOTWORD || source == AUDIO_SOURCE_VOICE_RECOGNITION) {
allowCapture = true;
}
- } else if (!(isSensitiveActive && !current->canCaptureOutput)
+ } else if (allowSensitiveCapture
&& canCaptureIfInCallOrCommunication(current)) {
if (isTopOrLatestAssistant
&& (source == AUDIO_SOURCE_VOICE_RECOGNITION
@@ -1072,7 +1080,7 @@
if (source == AUDIO_SOURCE_HOTWORD || source == AUDIO_SOURCE_VOICE_RECOGNITION) {
allowCapture = true;
}
- } else if (!(isSensitiveActive && !current->canCaptureOutput)
+ } else if (allowSensitiveCapture
&& canCaptureIfInCallOrCommunication(current)) {
if ((source == AUDIO_SOURCE_VOICE_RECOGNITION) || (source == AUDIO_SOURCE_HOTWORD))
{
@@ -1087,7 +1095,7 @@
// OR
// Is on TOP AND the source is VOICE_RECOGNITION or HOTWORD
if (!isAssistantOnTop
- && !(isSensitiveActive && !current->canCaptureOutput)
+ && allowSensitiveCapture
&& canCaptureIfInCallOrCommunication(current)) {
allowCapture = true;
}
@@ -1325,11 +1333,13 @@
case TRANSACTION_getVolumeGroupFromAudioAttributes:
case TRANSACTION_acquireSoundTriggerSession:
case TRANSACTION_releaseSoundTriggerSession:
+ case TRANSACTION_isHotwordStreamSupported:
case TRANSACTION_setRttEnabled:
case TRANSACTION_isCallScreenModeSupported:
case TRANSACTION_setDevicesRoleForStrategy:
case TRANSACTION_setSupportedSystemUsages:
case TRANSACTION_removeDevicesRoleForStrategy:
+ case TRANSACTION_clearDevicesRoleForStrategy:
case TRANSACTION_getDevicesForRoleAndStrategy:
case TRANSACTION_getDevicesForAttributes:
case TRANSACTION_setAllowedCapturePolicy:
@@ -1341,7 +1351,9 @@
case TRANSACTION_removeDevicesRoleForCapturePreset:
case TRANSACTION_clearDevicesRoleForCapturePreset:
case TRANSACTION_getDevicesForRoleAndCapturePreset:
- case TRANSACTION_getSpatializer: {
+ case TRANSACTION_getSpatializer:
+ case TRANSACTION_setPreferredMixerAttributes:
+ case TRANSACTION_clearPreferredMixerAttributes: {
if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
__func__, code, IPCThreadState::self()->getCallingPid(),
@@ -1548,6 +1560,16 @@
" help print this message\n");
}
+status_t AudioPolicyService::registerOutput(audio_io_handle_t output,
+ const audio_config_base_t& config,
+ const audio_output_flags_t flags) {
+ return mUsecaseValidator->registerStream(output, config, flags);
+}
+
+status_t AudioPolicyService::unregisterOutput(audio_io_handle_t output) {
+ return mUsecaseValidator->unregisterStream(output);
+}
+
// ----------- AudioPolicyService::UidPolicy implementation ----------
void AudioPolicyService::UidPolicy::registerSelf() {
@@ -1690,7 +1712,7 @@
}
}
-void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused) {
+void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused, int32_t adj __unused) {
}
void AudioPolicyService::UidPolicy::updateOverrideUid(uid_t uid, bool active, bool insert) {
@@ -1908,6 +1930,7 @@
// since it controls the mic permission for legacy apps.
mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
mAttributionSource.packageName.value_or(""))),
+ AppOpsManager::WATCH_FOREGROUND_CHANGES,
mOpCallback);
}
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 8c85bff..d0cde64 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -36,6 +36,7 @@
#include <media/ToneGenerator.h>
#include <media/AudioEffect.h>
#include <media/AudioPolicy.h>
+#include <media/UsecaseValidator.h>
#include <mediautils/ServiceUtilities.h>
#include "AudioPolicyEffects.h"
#include "CaptureStateNotifier.h"
@@ -225,6 +226,7 @@
binder::Status setCurrentImeUid(int32_t uid) override;
binder::Status isHapticPlaybackSupported(bool* _aidl_return) override;
binder::Status isUltrasoundSupported(bool* _aidl_return) override;
+ binder::Status isHotwordStreamSupported(bool lookbackAudio, bool* _aidl_return) override;
binder::Status listAudioProductStrategies(
std::vector<media::AudioProductStrategy>* _aidl_return) override;
binder::Status getProductStrategyFromAudioAttributes(
@@ -242,7 +244,12 @@
binder::Status setDevicesRoleForStrategy(
int32_t strategy, media::DeviceRole role,
const std::vector<AudioDevice>& devices) override;
- binder::Status removeDevicesRoleForStrategy(int32_t strategy, media::DeviceRole role) override;
+ binder::Status removeDevicesRoleForStrategy(
+ int32_t strategy, media::DeviceRole role,
+ const std::vector<AudioDevice>& devices) override;
+ binder::Status clearDevicesRoleForStrategy(
+ int32_t strategy,
+ media::DeviceRole role) override;
binder::Status getDevicesForRoleAndStrategy(
int32_t strategy, media::DeviceRole role,
std::vector<AudioDevice>* _aidl_return) override;
@@ -282,6 +289,22 @@
binder::Status getDirectProfilesForAttributes(const media::audio::common::AudioAttributes& attr,
std::vector<media::audio::common::AudioProfile>* _aidl_return) override;
+ binder::Status getSupportedMixerAttributes(
+ int32_t portId,
+ std::vector<media::AudioMixerAttributesInternal>* _aidl_return) override;
+ binder::Status setPreferredMixerAttributes(
+ const media::audio::common::AudioAttributes& attr,
+ int32_t portId,
+ int32_t uid,
+ const media::AudioMixerAttributesInternal& mixerAttr) override;
+ binder::Status getPreferredMixerAttributes(
+ const media::audio::common::AudioAttributes& attr,
+ int32_t portId,
+ std::optional<media::AudioMixerAttributesInternal>* _aidl_return) override;
+ binder::Status clearPreferredMixerAttributes(const media::audio::common::AudioAttributes& attr,
+ int32_t portId,
+ int32_t uid) override;
+
status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
// IBinder::DeathRecipient
@@ -420,6 +443,11 @@
*/
static bool isAppOpSource(audio_source_t source);
+ status_t registerOutput(audio_io_handle_t output,
+ const audio_config_base_t& config,
+ const audio_output_flags_t flags);
+ status_t unregisterOutput(audio_io_handle_t output);
+
// If recording we need to make sure the UID is allowed to do that. If the UID is idle
// then it cannot record and gets buffers with zeros - silence. As soon as the UID
// transitions to an active state we will start reporting buffers with data. This approach
@@ -459,7 +487,7 @@
void onUidIdle(uid_t uid, bool disabled) override;
void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
int32_t capability) override;
- void onUidProcAdjChanged(uid_t uid) override;
+ void onUidProcAdjChanged(uid_t uid, int32_t adj) override;
void addOverrideUid(uid_t uid, bool active) { updateOverrideUid(uid, active, true); }
void removeOverrideUid(uid_t uid) { updateOverrideUid(uid, false, false); }
@@ -767,9 +795,6 @@
// for each output (destination device) it is attached to.
virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, audio_io_handle_t output, int delayMs = 0);
- // invalidate a stream type, causing a reroute to an unspecified new output
- virtual status_t invalidateStream(audio_stream_type_t stream);
-
// function enabling to send proprietary informations directly from audio policy manager to audio hardware interface.
virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs, int delayMs = 0);
// function enabling to receive proprietary informations directly from audio hardware interface to audio policy manager.
@@ -829,6 +854,8 @@
status_t setDeviceConnectedState(
const struct audio_port_v7 *port, media::DeviceConnectedState state) override;
+ status_t invalidateTracks(const std::vector<audio_port_handle_t>& portIds) override;
+
private:
AudioPolicyService *mAudioPolicyService;
};
@@ -894,7 +921,7 @@
const audio_attributes_t attributes; // source, flags ...
const audio_io_handle_t io; // audio HAL stream IO handle
- const AttributionSourceState& attributionSource; //client attributionsource
+ const AttributionSourceState attributionSource; //client attributionsource
const audio_session_t session; // audio session ID
const audio_port_handle_t portId;
const audio_port_handle_t deviceId; // selected input device port ID
@@ -1073,6 +1100,7 @@
void *mLibraryHandle = nullptr;
CreateAudioPolicyManagerInstance mCreateAudioPolicyManager;
DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
+ std::unique_ptr<media::UsecaseValidator> mUsecaseValidator;
};
} // namespace android
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 08c974d..1245b1e 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -862,9 +862,10 @@
// create FX instance on output
AttributionSourceState attributionSource = AttributionSourceState();
mEngine = new AudioEffect(attributionSource);
- mEngine->set(nullptr, &mEngineDescriptor.uuid, 0, Spatializer::engineCallback /* cbf */,
- this /* user */, AUDIO_SESSION_OUTPUT_STAGE, output, {} /* device */,
- false /* probe */, true /* notifyFramesProcessed */);
+ mEngine->set(nullptr /* type */, &mEngineDescriptor.uuid, 0 /* priority */,
+ wp<AudioEffect::IAudioEffectCallback>::fromExisting(this),
+ AUDIO_SESSION_OUTPUT_STAGE, output, {} /* device */, false /* probe */,
+ true /* notifyFramesProcessed */);
status_t status = mEngine->initCheck();
ALOGV("%s mEngine create status %d", __func__, (int)status);
if (status != NO_ERROR) {
@@ -1042,27 +1043,10 @@
}
}
-void Spatializer::engineCallback(int32_t event, void *user, void *info) {
- if (user == nullptr) {
- return;
- }
- Spatializer* const me = reinterpret_cast<Spatializer *>(user);
- switch (event) {
- case AudioEffect::EVENT_FRAMES_PROCESSED: {
- int frames = info == nullptr ? 0 : *(int*)info;
- ALOGV("%s frames processed %d for me %p", __func__, frames, me);
- me->postFramesProcessedMsg(frames);
- } break;
- default:
- ALOGV("%s event %d", __func__, event);
- break;
- }
-}
-
-void Spatializer::postFramesProcessedMsg(int frames) {
+void Spatializer::onFramesProcessed(int32_t framesProcessed) {
sp<AMessage> msg =
new AMessage(EngineCallbackHandler::kWhatOnFramesProcessed, mHandler);
- msg->setInt32(EngineCallbackHandler::kNumFramesKey, frames);
+ msg->setInt32(EngineCallbackHandler::kNumFramesKey, framesProcessed);
msg->post();
}
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index a657b7f..0d4d3f6 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -91,6 +91,7 @@
* spatializer mixer thread is destroyed.
*/
class Spatializer : public media::BnSpatializer,
+ public AudioEffect::IAudioEffectCallback,
public IBinder::DeathRecipient,
private SpatializerPoseController::Listener,
public virtual AudioSystem::SupportedLatencyModesCallback {
@@ -307,7 +308,7 @@
return NO_ERROR;
}
- void postFramesProcessedMsg(int frames);
+ virtual void onFramesProcessed(int32_t framesProcessed) override;
/**
* Checks if head and screen sensors must be actively monitored based on
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index b9ee8dd..a4a0cd4 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -31,6 +31,7 @@
"liblog",
"libmedia_helper",
"libutils",
+ "libcutils",
"libxml2",
],
@@ -55,7 +56,10 @@
"-Wall",
],
- test_suites: ["device-tests"],
+ test_suites: [
+ "device-tests",
+ "automotive-tests",
+ ],
}
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index c11d7fc..3629c16 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -132,8 +132,6 @@
size_t getAudioPortListUpdateCount() const { return mAudioPortListUpdateCount; }
- virtual void addSupportedFormat(audio_format_t /* format */) {}
-
void onRoutingUpdated() override {
mRoutingUpdatedUpdateCount++;
}
@@ -179,6 +177,38 @@
return &(*it);
}
+ String8 getParameters(audio_io_handle_t /* ioHandle */, const String8& /* keys*/ ) override {
+ AudioParameter mAudioParameters;
+ std::string formats;
+ for (const auto& f : mSupportedFormats) {
+ if (!formats.empty()) formats += AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
+ formats += audio_format_to_string(f);
+ }
+ mAudioParameters.add(
+ String8(AudioParameter::keyStreamSupportedFormats),
+ String8(formats.c_str()));
+ mAudioParameters.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), 48000);
+ std::string channelMasks;
+ for (const auto& cm : mSupportedChannelMasks) {
+ if (!audio_channel_mask_is_valid(cm)) {
+ continue;
+ }
+ if (!channelMasks.empty()) channelMasks += AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
+ channelMasks += audio_channel_mask_to_string(cm);
+ }
+ mAudioParameters.add(
+ String8(AudioParameter::keyStreamSupportedChannels), String8(channelMasks.c_str()));
+ return mAudioParameters.toString();
+ }
+
+ void addSupportedFormat(audio_format_t format) {
+ mSupportedFormats.insert(format);
+ }
+
+ void addSupportedChannelMask(audio_channel_mask_t channelMask) {
+ mSupportedChannelMasks.insert(channelMask);
+ }
+
private:
audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
@@ -189,6 +219,8 @@
size_t mRoutingUpdatedUpdateCount = 0;
std::vector<struct audio_port_v7> mConnectedDevicePorts;
std::vector<struct audio_port_v7> mDisconnectedDevicePorts;
+ std::set<audio_format_t> mSupportedFormats;
+ std::set<audio_channel_mask_t> mSupportedChannelMasks;
};
} // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h b/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
deleted file mode 100644
index 7343b9b..0000000
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClientForHdmi.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <map>
-#include <set>
-
-#include <system/audio.h>
-#include <utils/Log.h>
-#include <utils/String8.h>
-
-#include "AudioPolicyTestClient.h"
-
-namespace android {
-
-class AudioPolicyManagerTestClientForHdmi : public AudioPolicyManagerTestClient {
-public:
- String8 getParameters(audio_io_handle_t /* ioHandle */, const String8& /* keys*/ ) override {
- AudioParameter mAudioParameters;
- std::string formats;
- for (const auto& f : mSupportedFormats) {
- if (!formats.empty()) formats += AUDIO_PARAMETER_VALUE_LIST_SEPARATOR;
- formats += audio_format_to_string(f);
- }
- mAudioParameters.add(
- String8(AudioParameter::keyStreamSupportedFormats),
- String8(formats.c_str()));
- mAudioParameters.addInt(String8(AudioParameter::keyStreamSupportedSamplingRates), 48000);
- mAudioParameters.add(String8(AudioParameter::keyStreamSupportedChannels), String8(""));
- return mAudioParameters.toString();
- }
-
- void addSupportedFormat(audio_format_t format) override {
- mSupportedFormats.insert(format);
- }
-
-private:
- std::set<audio_format_t> mSupportedFormats;
-};
-
-} // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index b212a32..2ae0e97 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -57,7 +57,6 @@
float /*volume*/,
audio_io_handle_t /*output*/,
int /*delayMs*/) override { return NO_INIT; }
- status_t invalidateStream(audio_stream_type_t /*stream*/) override { return NO_INIT; }
void setParameters(audio_io_handle_t /*ioHandle*/,
const String8& /*keyValuePairs*/,
int /*delayMs*/) override { }
@@ -104,6 +103,9 @@
media::DeviceConnectedState state __unused) override {
return NO_INIT;
}
+ status_t invalidateTracks(const std::vector<audio_port_handle_t>& /*portIds*/) override {
+ return NO_INIT;
+ }
};
} // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 4486ce6..5e58dbb 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include <cstring>
#include <memory>
#include <string>
#include <sys/wait.h>
@@ -31,10 +32,10 @@
#include <media/RecordingActivityTracker.h>
#include <utils/Log.h>
#include <utils/Vector.h>
+#include <cutils/multiuser.h>
#include "AudioPolicyInterface.h"
#include "AudioPolicyManagerTestClient.h"
-#include "AudioPolicyManagerTestClientForHdmi.h"
#include "AudioPolicyTestClient.h"
#include "AudioPolicyTestManager.h"
@@ -51,6 +52,13 @@
return criterion;
}
+AudioMixMatchCriterion createUserIdCriterion(int userId, bool exclude = false) {
+ AudioMixMatchCriterion criterion;
+ criterion.mValue.mUserId = userId;
+ criterion.mRule = exclude ? RULE_EXCLUDE_USERID : RULE_MATCH_USERID;
+ return criterion;
+}
+
AudioMixMatchCriterion createUsageCriterion(audio_usage_t usage, bool exclude = false) {
AudioMixMatchCriterion criterion;
criterion.mValue.mUsage = usage;
@@ -66,6 +74,22 @@
return criterion;
}
+AudioMixMatchCriterion createSessionIdCriterion(audio_session_t session, bool exclude = false) {
+ AudioMixMatchCriterion criterion;
+ criterion.mValue.mAudioSessionId = session;
+ criterion.mRule = exclude ?
+ RULE_EXCLUDE_AUDIO_SESSION_ID : RULE_MATCH_AUDIO_SESSION_ID;
+ return criterion;
+}
+
+// TODO b/182392769: use attribution source util
+AttributionSourceState createAttributionSourceState(uid_t uid) {
+ AttributionSourceState attributionSourceState;
+ attributionSourceState.uid = uid;
+ attributionSourceState.token = sp<BBinder>::make();
+ return attributionSourceState;
+}
+
} // namespace
TEST(AudioPolicyConfigTest, DefaultConfigForTestsIsEmpty) {
@@ -155,9 +179,13 @@
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
audio_io_handle_t *output = nullptr,
audio_port_handle_t *portId = nullptr,
- audio_attributes_t attr = {});
+ audio_attributes_t attr = {},
+ audio_session_t session = AUDIO_SESSION_NONE,
+ int uid = 0,
+ bool* isBitPerfect = nullptr);
void getInputForAttr(
const audio_attributes_t &attr,
+ audio_session_t session,
audio_unique_id_t riid,
audio_port_handle_t *selectedDeviceId,
audio_format_t format,
@@ -239,7 +267,10 @@
audio_output_flags_t flags,
audio_io_handle_t *output,
audio_port_handle_t *portId,
- audio_attributes_t attr) {
+ audio_attributes_t attr,
+ audio_session_t session,
+ int uid,
+ bool* isBitPerfect) {
audio_io_handle_t localOutput;
if (!output) output = &localOutput;
*output = AUDIO_IO_HANDLE_NONE;
@@ -253,19 +284,19 @@
*portId = AUDIO_PORT_HANDLE_NONE;
AudioPolicyInterface::output_type_t outputType;
bool isSpatialized;
- // TODO b/182392769: use attribution source util
- AttributionSourceState attributionSource = AttributionSourceState();
- attributionSource.uid = 0;
- attributionSource.token = sp<BBinder>::make();
+ bool isBitPerfectInternal;
+ AttributionSourceState attributionSource = createAttributionSourceState(uid);
ASSERT_EQ(OK, mManager->getOutputForAttr(
- &attr, output, AUDIO_SESSION_NONE, &stream, attributionSource, &config, &flags,
- selectedDeviceId, portId, {}, &outputType, &isSpatialized));
+ &attr, output, session, &stream, attributionSource, &config, &flags,
+ selectedDeviceId, portId, {}, &outputType, &isSpatialized,
+ isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
}
void AudioPolicyManagerTest::getInputForAttr(
const audio_attributes_t &attr,
+ const audio_session_t session,
audio_unique_id_t riid,
audio_port_handle_t *selectedDeviceId,
audio_format_t format,
@@ -282,12 +313,9 @@
if (!portId) portId = &localPortId;
*portId = AUDIO_PORT_HANDLE_NONE;
AudioPolicyInterface::input_type_t inputType;
- // TODO b/182392769: use attribution source util
- AttributionSourceState attributionSource = AttributionSourceState();
- attributionSource.uid = 0;
- attributionSource.token = sp<BBinder>::make();
+ AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
ASSERT_EQ(OK, mManager->getInputForAttr(
- &attr, &input, riid, AUDIO_SESSION_NONE, attributionSource, &config, flags,
+ &attr, &input, riid, session, attributionSource, &config, flags,
selectedDeviceId, &inputType, portId));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
}
@@ -919,8 +947,8 @@
audio_source_t source = AUDIO_SOURCE_VOICE_COMMUNICATION;
audio_attributes_t attr = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
- ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, 1, &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
- AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX, &mixPortId));
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO, 8000, AUDIO_INPUT_FLAG_VOIP_TX, &mixPortId));
std::vector<audio_port_v7> ports;
ASSERT_NO_FATAL_FAILURE(
@@ -974,6 +1002,251 @@
}
}
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferredMixerAttributes) {
+ mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+ mClient->addSupportedChannelMask(AUDIO_CHANNEL_OUT_STEREO);
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+ auto devices = mManager->getAvailableOutputDevices();
+ audio_port_handle_t maxPortId = 0;
+ audio_port_handle_t speakerPortId;
+ audio_port_handle_t usbPortId;
+ for (auto device : devices) {
+ maxPortId = std::max(maxPortId, device->getId());
+ if (device->type() == AUDIO_DEVICE_OUT_SPEAKER) {
+ speakerPortId = device->getId();
+ } else if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+ usbPortId = device->getId();
+ }
+ }
+
+ const uid_t uid = 1234;
+ const uid_t otherUid = 4321;
+ const audio_attributes_t mediaAttr = {
+ .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+ .usage = AUDIO_USAGE_MEDIA,
+ };
+ const audio_attributes_t alarmAttr = {
+ .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+ .usage = AUDIO_USAGE_ALARM,
+ };
+
+ std::vector<audio_mixer_attributes_t> mixerAttributes;
+ EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
+ for (const auto attrToSet : mixerAttributes) {
+ audio_mixer_attributes_t attrFromQuery = AUDIO_MIXER_ATTRIBUTES_INITIALIZER;
+
+ // The given device is not available
+ EXPECT_EQ(BAD_VALUE,
+ mManager->setPreferredMixerAttributes(
+ &mediaAttr, maxPortId + 1, uid, &attrToSet));
+ // The only allowed device is USB
+ EXPECT_EQ(BAD_VALUE,
+ mManager->setPreferredMixerAttributes(
+ &mediaAttr, speakerPortId, uid, &attrToSet));
+ // The only allowed usage is media
+ EXPECT_EQ(BAD_VALUE,
+ mManager->setPreferredMixerAttributes(&alarmAttr, usbPortId, uid, &attrToSet));
+ // Nothing set yet, must get null when query
+ EXPECT_EQ(NAME_NOT_FOUND,
+ mManager->getPreferredMixerAttributes(&mediaAttr, usbPortId, &attrFromQuery));
+ EXPECT_EQ(NO_ERROR,
+ mManager->setPreferredMixerAttributes(
+ &mediaAttr, usbPortId, uid, &attrToSet));
+ EXPECT_EQ(NO_ERROR,
+ mManager->getPreferredMixerAttributes(&mediaAttr, usbPortId, &attrFromQuery));
+ EXPECT_EQ(attrToSet.config.format, attrFromQuery.config.format);
+ EXPECT_EQ(attrToSet.config.sample_rate, attrFromQuery.config.sample_rate);
+ EXPECT_EQ(attrToSet.config.channel_mask, attrFromQuery.config.channel_mask);
+ EXPECT_EQ(attrToSet.mixer_behavior, attrFromQuery.mixer_behavior);
+ EXPECT_EQ(NAME_NOT_FOUND,
+ mManager->clearPreferredMixerAttributes(&mediaAttr, speakerPortId, uid));
+ EXPECT_EQ(PERMISSION_DENIED,
+ mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, otherUid));
+ EXPECT_EQ(NO_ERROR,
+ mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
+ }
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ "", "", AUDIO_FORMAT_LDAC));
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, RoutingChangedWithPreferredMixerAttributes) {
+ mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+ mClient->addSupportedChannelMask(AUDIO_CHANNEL_OUT_STEREO);
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+ auto devices = mManager->getAvailableOutputDevices();
+ audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
+ for (auto device : devices) {
+ if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+ usbPortId = device->getId();
+ break;
+ }
+ }
+ EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
+
+ const uid_t uid = 1234;
+ const audio_attributes_t mediaAttr = {
+ .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+ .usage = AUDIO_USAGE_MEDIA,
+ };
+
+ std::vector<audio_mixer_attributes_t> mixerAttributes;
+ EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
+ EXPECT_GT(mixerAttributes.size(), 0);
+ EXPECT_EQ(NO_ERROR,
+ mManager->setPreferredMixerAttributes(
+ &mediaAttr, usbPortId, uid, &mixerAttributes[0]));
+
+ audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ 48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
+ AUDIO_SESSION_NONE, uid);
+ status_t status = mManager->startOutput(portId);
+ if (status == DEAD_OBJECT) {
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ 48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
+ AUDIO_SESSION_NONE, uid);
+ status = mManager->startOutput(portId);
+ }
+ EXPECT_EQ(NO_ERROR, status);
+ EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
+ EXPECT_NE(nullptr, mManager->getOutputs().valueFor(output));
+ EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ "", "", AUDIO_FORMAT_LDAC));
+ // When BT device is connected, it will be selected as media device and trigger routing changed.
+ // When this happens, existing output that is opened with preferred mixer attributes will be
+ // closed and reopened with default config.
+ EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(output));
+
+ EXPECT_EQ(NO_ERROR,
+ mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
+
+ EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ "", "", AUDIO_FORMAT_LDAC));
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ "", "", AUDIO_FORMAT_LDAC));
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, BitPerfectPlayback) {
+ const audio_format_t bitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
+ const audio_channel_mask_t bitPerfectChannelMask = AUDIO_CHANNEL_OUT_QUAD;
+ const uint32_t bitPerfectSampleRate = 48000;
+ mClient->addSupportedFormat(bitPerfectFormat);
+ mClient->addSupportedChannelMask(bitPerfectChannelMask);
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+ auto devices = mManager->getAvailableOutputDevices();
+ audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
+ for (auto device : devices) {
+ if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+ usbPortId = device->getId();
+ break;
+ }
+ }
+ EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
+
+ const uid_t uid = 1234;
+ const uid_t anotherUid = 5678;
+ const audio_attributes_t mediaAttr = {
+ .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+ .usage = AUDIO_USAGE_MEDIA,
+ };
+
+ std::vector<audio_mixer_attributes_t> mixerAttributes;
+ EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
+ EXPECT_GT(mixerAttributes.size(), 0);
+ size_t bitPerfectIndex = 0;
+ for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
+ if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
+ break;
+ }
+ }
+ EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
+ EXPECT_EQ(bitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
+ EXPECT_EQ(bitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
+ EXPECT_EQ(bitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
+ EXPECT_EQ(NO_ERROR,
+ mManager->setPreferredMixerAttributes(
+ &mediaAttr, usbPortId, uid, &mixerAttributes[bitPerfectIndex]));
+
+ audio_io_handle_t bitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+ audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ audio_port_handle_t bitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+ audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+ bool isBitPerfect;
+
+ // When there is no active bit-perfect playback, the output selection will follow default
+ // routing strategy.
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ 48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
+ uid, &isBitPerfect);
+ EXPECT_FALSE(isBitPerfect);
+ EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
+ const auto outputDesc = mManager->getOutputs().valueFor(output);
+ EXPECT_NE(nullptr, outputDesc);
+ EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+
+ // Start bit-perfect playback
+ getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
+ bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
+ mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
+ status_t status = mManager->startOutput(bitPerfectPortId);
+ if (status == DEAD_OBJECT) {
+ getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
+ bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
+ mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
+ status = mManager->startOutput(bitPerfectPortId);
+ }
+ EXPECT_EQ(NO_ERROR, status);
+ EXPECT_TRUE(isBitPerfect);
+ EXPECT_NE(AUDIO_IO_HANDLE_NONE, bitPerfectOutput);
+ const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(bitPerfectOutput);
+ EXPECT_NE(nullptr, bitPerfectOutputDesc);
+ EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
+ bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+
+ // If the playback is from preferred mixer attributes owner but the request doesn't match
+ // preferred mixer attributes, it will not be bit-perfect.
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ 48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
+ uid, &isBitPerfect);
+ EXPECT_FALSE(isBitPerfect);
+ EXPECT_EQ(bitPerfectOutput, output);
+
+ // When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ 48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
+ anotherUid, &isBitPerfect);
+ EXPECT_FALSE(isBitPerfect);
+ EXPECT_EQ(bitPerfectOutput, output);
+
+ // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
+ // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
+ getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
+ bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
+ AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+ EXPECT_FALSE(isBitPerfect);
+ EXPECT_EQ(bitPerfectOutput, output);
+
+ EXPECT_EQ(NO_ERROR,
+ mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ "", "", AUDIO_FORMAT_LDAC));
+}
+
class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
protected:
void TearDown() override;
@@ -1064,13 +1337,14 @@
AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig);
ASSERT_EQ(INVALID_OPERATION, ret);
- // The first time to register valid policy mixes should succeed.
+ // The first time to register valid loopback policy mix should succeed.
clearPolicyMix();
- ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_SPEAKER, "", audioConfig);
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "addr", audioConfig);
ASSERT_EQ(NO_ERROR, ret);
- // Registering the same policy mixes should fail.
- ret = mManager->registerPolicyMixes(mAudioMixes);
+ // Registering the render policy for the loopback address should succeed.
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "addr", audioConfig);
ASSERT_EQ(INVALID_OPERATION, ret);
}
@@ -1135,9 +1409,6 @@
std::map<audio_format_t, bool> getSurroundFormatsHelper();
std::vector<audio_format_t> getReportedSurroundFormatsHelper();
std::unordered_set<audio_format_t> getFormatsFromPorts();
- AudioPolicyManagerTestClient* getClient() override {
- return new AudioPolicyManagerTestClientForHdmi;
- }
void TearDown() override;
static const std::string sTvConfig;
@@ -1357,19 +1628,45 @@
ASSERT_EQ(INVALID_OPERATION, ret);
}
+struct DPTestParam {
+ DPTestParam(const std::vector<AudioMixMatchCriterion>& mixCriteria,
+ bool expected_match = false)
+ : mixCriteria(mixCriteria), attributes(defaultAttr), session(AUDIO_SESSION_NONE),
+ expected_match(expected_match) {}
+
+ DPTestParam& withUsage(audio_usage_t usage) {
+ attributes.usage = usage;
+ return *this;
+ }
+
+ DPTestParam& withTags(const char *tags) {
+ std::strncpy(attributes.tags, tags, sizeof(attributes.tags));
+ return *this;
+ }
+
+ DPTestParam& withSource(audio_source_t source) {
+ attributes.source = source;
+ return *this;
+ }
+
+ DPTestParam& withSessionId(audio_session_t sessionId) {
+ session = sessionId;
+ return *this;
+ }
+
+ std::vector<AudioMixMatchCriterion> mixCriteria;
+ audio_attributes_t attributes;
+ audio_session_t session;
+ bool expected_match;
+};
+
class AudioPolicyManagerTestDPPlaybackReRouting : public AudioPolicyManagerTestDynamicPolicy,
- public testing::WithParamInterface<audio_attributes_t> {
+ public testing::WithParamInterface<DPTestParam> {
protected:
void SetUp() override;
void TearDown() override;
std::unique_ptr<RecordingActivityTracker> mTracker;
-
- std::vector<AudioMixMatchCriterion> mUsageRules = {
- createUsageCriterion(AUDIO_USAGE_MEDIA),
- createUsageCriterion(AUDIO_USAGE_ALARM)
- };
-
struct audio_port_v7 mInjectionPort;
audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
};
@@ -1383,8 +1680,10 @@
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
audioConfig.sample_rate = k48000SamplingRate;
+
+ DPTestParam param = GetParam();
status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
- AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig, mUsageRules);
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig, param.mixCriteria);
ASSERT_EQ(NO_ERROR, ret);
struct audio_port_v7 extractionPort;
@@ -1397,8 +1696,9 @@
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, source, AUDIO_FLAG_NONE, ""};
std::string tags = "addr=" + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- getInputForAttr(attr, mTracker->getRiid(), &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
- AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &mPortId);
+ getInputForAttr(attr, param.session, mTracker->getRiid(), &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+ AUDIO_INPUT_FLAG_NONE, &mPortId);
ASSERT_EQ(NO_ERROR, mManager->startInput(mPortId));
ASSERT_EQ(extractionPort.id, selectedDeviceId);
@@ -1411,152 +1711,261 @@
AudioPolicyManagerTestDynamicPolicy::TearDown();
}
-TEST_F(AudioPolicyManagerTestDPPlaybackReRouting, InitSuccess) {
- // SetUp must finish with no assertions
-}
-
-TEST_F(AudioPolicyManagerTestDPPlaybackReRouting, Dump) {
- dumpToLog();
-}
-
TEST_P(AudioPolicyManagerTestDPPlaybackReRouting, PlaybackReRouting) {
- const audio_attributes_t attr = GetParam();
- const audio_usage_t usage = attr.usage;
+ const DPTestParam param = GetParam();
+ const audio_attributes_t& attr = param.attributes;
audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
getOutputForAttr(&playbackRoutedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, nullptr /*portId*/,
- attr);
- if (std::find_if(begin(mUsageRules), end(mUsageRules),
- [&usage](const AudioMixMatchCriterion &c) {
- return c.mRule == RULE_MATCH_ATTRIBUTE_USAGE &&
- c.mValue.mUsage == usage;}) != end(mUsageRules) ||
- (strncmp(attr.tags, "addr=", strlen("addr=")) == 0 &&
- strncmp(attr.tags + strlen("addr="), mMixAddress.c_str(),
- AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0)) {
+ attr, param.session);
+ if (param.expected_match) {
EXPECT_EQ(mInjectionPort.id, playbackRoutedPortId);
} else {
EXPECT_NE(mInjectionPort.id, playbackRoutedPortId);
}
}
-INSTANTIATE_TEST_CASE_P(
- PlaybackReroutingUsageMatch,
- AudioPolicyManagerTestDPPlaybackReRouting,
- testing::Values(
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
- )
- );
+const std::vector<AudioMixMatchCriterion> USAGE_MEDIA_ALARM_CRITERIA = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA),
+ createUsageCriterion(AUDIO_USAGE_ALARM)
+};
-INSTANTIATE_TEST_CASE_P(
- PlaybackReroutingAddressPriorityMatch,
- AudioPolicyManagerTestDPPlaybackReRouting,
- testing::Values(
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_SONIFICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VIRTUAL_SOURCE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, "addr=remote_submix_media"}
- )
- );
+INSTANTIATE_TEST_SUITE_P(
+ PlaybackReroutingUsageMatch,
+ AudioPolicyManagerTestDPPlaybackReRouting,
+ testing::Values(
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_MEDIA),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_MEDIA).withTags("addr=other"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ALARM),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_EVENT),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_SONIFICATION),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_GAME),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ false)
+ .withUsage(AUDIO_USAGE_ASSISTANT)));
-INSTANTIATE_TEST_CASE_P(
- PlaybackReroutingUnHandledUsages,
- AudioPolicyManagerTestDPPlaybackReRouting,
- testing::Values(
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_VOICE_COMMUNICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_NOTIFICATION_EVENT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC,
- AUDIO_USAGE_ASSISTANCE_SONIFICATION,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_GAME,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_MUSIC, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
- )
- );
+INSTANTIATE_TEST_SUITE_P(
+ PlaybackReroutingAddressPriorityMatch,
+ AudioPolicyManagerTestDPPlaybackReRouting,
+ testing::Values(
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_MEDIA).withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION).withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ALARM)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_NOTIFICATION_EVENT)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANCE_SONIFICATION)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_GAME)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_VIRTUAL_SOURCE)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANT)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANT)
+ .withTags("sometag;addr=remote_submix_media;othertag=somevalue"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANT)
+ .withTags("addr=remote_submix_media;othertag"),
+ DPTestParam(USAGE_MEDIA_ALARM_CRITERIA, /*expected_match=*/ true)
+ .withUsage(AUDIO_USAGE_ASSISTANT)
+ .withTags("sometag;othertag;addr=remote_submix_media")));
+
+static constexpr audio_session_t TEST_SESSION_ID = static_cast<audio_session_t>(42);
+static constexpr audio_session_t OTHER_SESSION_ID = static_cast<audio_session_t>(77);
+
+INSTANTIATE_TEST_SUITE_P(
+ PlaybackReRoutingWithSessionId,
+ AudioPolicyManagerTestDPPlaybackReRouting,
+ testing::Values(
+ // Mix is matched because the session id matches the one specified by the mix rule.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ true)
+ .withSessionId(TEST_SESSION_ID),
+ // Mix is not matched because the session id doesn't match the one specified
+ // by the mix rule.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ false)
+ .withSessionId(OTHER_SESSION_ID),
+ // Mix is matched, the session id doesn't match the one specified by rule,
+ // but there's an address specified in the tags which takes precedence.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ true)
+ .withSessionId(OTHER_SESSION_ID).withTags("addr=remote_submix_media"),
+ // Mix is matched, both the session id and the usage match ones specified by mix rule.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+ createUsageCriterion(AUDIO_USAGE_MEDIA)},
+ /*expected_match=*/ true)
+ .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_MEDIA),
+ // Mix is not matched, the session id matches the one specified by mix rule,
+ // but usage does not.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+ createUsageCriterion(AUDIO_USAGE_MEDIA)},
+ /*expected_match=*/ false)
+ .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_GAME),
+ // Mix is not matched, the usage matches the one specified by mix rule,
+ // but the session id is excluded.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID, /*exclude=*/ true),
+ createUsageCriterion(AUDIO_USAGE_MEDIA)},
+ /*expected_match=*/ false)
+ .withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_MEDIA)));
+
+struct DPMmapTestParam {
+ DPMmapTestParam(int mixRouteFlags, audio_devices_t deviceType, const std::string& deviceAddress)
+ : mixRouteFlags(mixRouteFlags), deviceType(deviceType), deviceAddress(deviceAddress) {}
+
+ int mixRouteFlags;
+ audio_devices_t deviceType;
+ std::string deviceAddress;
+};
+
+class AudioPolicyManagerTestMMapPlaybackRerouting
+ : public AudioPolicyManagerTestDynamicPolicy,
+ public ::testing::WithParamInterface<DPMmapTestParam> {
+ protected:
+ void SetUp() override {
+ AudioPolicyManagerTestDynamicPolicy::SetUp();
+ audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ }
+
+ audio_config_t audioConfig;
+ audio_io_handle_t mOutput;
+ audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
+ audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ audio_port_handle_t mPortId;
+ AudioPolicyInterface::output_type_t mOutputType;
+ audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+ bool mIsSpatialized;
+ bool mIsBitPerfect;
+};
+
+TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting, MmapPlaybackStreamMatchingLoopbackDapMixFails) {
+ // Add mix matching the test uid.
+ const int testUid = 12345;
+ const auto param = GetParam();
+ status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, param.mixRouteFlags, param.deviceType,
+ param.deviceAddress, audioConfig, {createUidCriterion(testUid)});
+ ASSERT_EQ(NO_ERROR, ret);
+
+ // Getting output for matching uid and mmapped stream should fail.
+ audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+ ASSERT_EQ(INVALID_OPERATION,
+ mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+ createAttributionSourceState(testUid), &audioConfig,
+ &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
+TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting,
+ NonMmapPlaybackStreamMatchingLoopbackDapMixSucceeds) {
+ // Add mix matching the test uid.
+ const int testUid = 12345;
+ const auto param = GetParam();
+ status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, param.mixRouteFlags, param.deviceType,
+ param.deviceAddress, audioConfig, {createUidCriterion(testUid)});
+ ASSERT_EQ(NO_ERROR, ret);
+
+ // Getting output for matching uid should succeed for non-mmapped stream.
+ audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_NONE;
+ ASSERT_EQ(NO_ERROR,
+ mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+ createAttributionSourceState(testUid), &audioConfig,
+ &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
+TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
+ MmapPlaybackStreamMatchingRenderDapMixSucceeds) {
+ // Add render-only mix matching the test uid.
+ const int testUid = 12345;
+ status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER, AUDIO_DEVICE_OUT_SPEAKER,
+ /*mixAddress=*/"", audioConfig, {createUidCriterion(testUid)});
+ ASSERT_EQ(NO_ERROR, ret);
+
+ // Getting output for matching uid should succeed for mmapped stream.
+ audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+ ASSERT_EQ(NO_ERROR,
+ mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+ createAttributionSourceState(testUid), &audioConfig,
+ &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ MmapPlaybackRerouting, AudioPolicyManagerTestMMapPlaybackRerouting,
+ testing::Values(DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+ /*deviceAddress=*/"remote_submix_media"),
+ DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER,
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+ /*deviceAddress=*/"remote_submix_media")));
class AudioPolicyManagerTestDPMixRecordInjection : public AudioPolicyManagerTestDynamicPolicy,
- public testing::WithParamInterface<audio_attributes_t> {
+ public testing::WithParamInterface<DPTestParam> {
protected:
void SetUp() override;
void TearDown() override;
std::unique_ptr<RecordingActivityTracker> mTracker;
-
- std::vector<AudioMixMatchCriterion> mSourceRules = {
- createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER),
- createCapturePresetCriterion(AUDIO_SOURCE_MIC),
- createCapturePresetCriterion(AUDIO_SOURCE_VOICE_COMMUNICATION)
- };
-
struct audio_port_v7 mExtractionPort;
audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
};
@@ -1570,8 +1979,10 @@
audioConfig.channel_mask = AUDIO_CHANNEL_IN_STEREO;
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
audioConfig.sample_rate = k48000SamplingRate;
+
+ DPTestParam param = GetParam();
status_t ret = addPolicyMix(MIX_TYPE_RECORDERS, MIX_ROUTE_FLAG_LOOP_BACK,
- AUDIO_DEVICE_IN_REMOTE_SUBMIX, mMixAddress, audioConfig, mSourceRules);
+ AUDIO_DEVICE_IN_REMOTE_SUBMIX, mMixAddress, audioConfig, param.mixCriteria);
ASSERT_EQ(NO_ERROR, ret);
struct audio_port_v7 injectionPort;
@@ -1598,73 +2009,94 @@
AudioPolicyManagerTestDynamicPolicy::TearDown();
}
-TEST_F(AudioPolicyManagerTestDPMixRecordInjection, InitSuccess) {
- // SetUp mush finish with no assertions.
-}
-
-TEST_F(AudioPolicyManagerTestDPMixRecordInjection, Dump) {
- dumpToLog();
-}
-
TEST_P(AudioPolicyManagerTestDPMixRecordInjection, RecordingInjection) {
- const audio_attributes_t attr = GetParam();
- const audio_source_t source = attr.source;
+ const DPTestParam param = GetParam();
audio_port_handle_t captureRoutedPortId = AUDIO_PORT_HANDLE_NONE;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
- getInputForAttr(attr, mTracker->getRiid(), &captureRoutedPortId, AUDIO_FORMAT_PCM_16_BIT,
- AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &portId);
- if (std::find_if(begin(mSourceRules), end(mSourceRules),
- [&source](const AudioMixMatchCriterion &c) {
- return c.mRule == RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET &&
- c.mValue.mSource == source;})
- != end(mSourceRules)) {
+ getInputForAttr(param.attributes, param.session, mTracker->getRiid(), &captureRoutedPortId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+ AUDIO_INPUT_FLAG_NONE, &portId);
+ if (param.expected_match) {
EXPECT_EQ(mExtractionPort.id, captureRoutedPortId);
} else {
EXPECT_NE(mExtractionPort.id, captureRoutedPortId);
}
}
+const std::vector<AudioMixMatchCriterion> SOURCE_CAM_MIC_VOICE_CRITERIA = {
+ createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER),
+ createCapturePresetCriterion(AUDIO_SOURCE_MIC),
+ createCapturePresetCriterion(AUDIO_SOURCE_VOICE_COMMUNICATION)
+};
+
// No address priority rule for remote recording, address is a "don't care"
INSTANTIATE_TEST_CASE_P(
- RecordInjectionSourceMatch,
+ RecordInjectionSource,
AudioPolicyManagerTestDPMixRecordInjection,
testing::Values(
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_CAMCORDER, AUDIO_FLAG_NONE,
- "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE,
- "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_MIC, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE,
- "addr=remote_submix_media"}
- )
- );
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_CAMCORDER),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_CAMCORDER)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_MIC),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_MIC)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_VOICE_COMMUNICATION),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ true)
+ .withSource(AUDIO_SOURCE_VOICE_COMMUNICATION)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+ .withSource(AUDIO_SOURCE_VOICE_RECOGNITION),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+ .withSource(AUDIO_SOURCE_VOICE_RECOGNITION)
+ .withTags("addr=remote_submix_media"),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+ .withSource(AUDIO_SOURCE_HOTWORD),
+ DPTestParam(SOURCE_CAM_MIC_VOICE_CRITERIA, /*expected_match=*/ false)
+ .withSource(AUDIO_SOURCE_HOTWORD)
+ .withTags("addr=remote_submix_media")));
-// No address priority rule for remote recording
INSTANTIATE_TEST_CASE_P(
- RecordInjectionSourceNotMatch,
+ RecordInjectionWithSessionId,
AudioPolicyManagerTestDPMixRecordInjection,
testing::Values(
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE, ""},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_FLAG_NONE,
- "addr=remote_submix_media"},
- (audio_attributes_t){AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_HOTWORD, AUDIO_FLAG_NONE,
- "addr=remote_submix_media"}
- )
- );
+ // Mix is matched because the session id matches the one specified by the mix rule.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ true)
+ .withSessionId(TEST_SESSION_ID),
+ // Mix is not matched because the session id doesn't match the one specified
+ // by the mix rule.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ false)
+ .withSessionId(OTHER_SESSION_ID),
+ // Mix is not matched, the session id doesn't match the one specified by rule,
+ // but the address specified in the tags is ignored for recorder mix.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID)},
+ /*expected_match=*/ false)
+ .withSessionId(OTHER_SESSION_ID).withTags("addr=remote_submix_media"),
+ // Mix is matched, both the session id and the source match ones specified by mix rule
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+ createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER)},
+ /*expected_match=*/ true)
+ .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_CAMCORDER),
+ // Mix is not matched, the session id matches the one specified by mix rule,
+ // but source does not.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID),
+ createCapturePresetCriterion(AUDIO_SOURCE_CAMCORDER)},
+ /*expected_match=*/ false)
+ .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_MIC),
+ // Mix is not matched, the source matches the one specified by mix rule,
+ // but the session id is excluded.
+ DPTestParam(/*mixCriteria=*/ {createSessionIdCriterion(TEST_SESSION_ID,
+ /*exclude=*/ true),
+ createCapturePresetCriterion(AUDIO_SOURCE_MIC)},
+ /*expected_match=*/ false)
+ .withSessionId(TEST_SESSION_ID).withSource(AUDIO_SOURCE_MIC)));
using DeviceConnectionTestParams =
std::tuple<audio_devices_t /*type*/, std::string /*name*/, std::string /*address*/>;
@@ -1767,8 +2199,9 @@
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE);
} else if (audio_is_input_device(type)) {
RecordingActivityTracker tracker;
- getInputForAttr({}, tracker.getRiid(), &routedPortId, AUDIO_FORMAT_PCM_16_BIT,
- AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE);
+ getInputForAttr({}, AUDIO_SESSION_NONE, tracker.getRiid(), &routedPortId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
+ AUDIO_INPUT_FLAG_NONE);
}
ASSERT_EQ(devicePort.id, routedPortId);
@@ -1836,11 +2269,23 @@
std::string getConfigFile() override { return sCarConfig; }
static const std::string sCarConfig;
+ static const std::string sCarBusMediaOutput;
+ static const std::string sCarBusNavigationOutput;
+ static const std::string sCarRearZoneOneOutput;
+ static const std::string sCarRearZoneTwoOutput;
};
const std::string AudioPolicyManagerCarTest::sCarConfig =
AudioPolicyManagerCarTest::sExecutableDir + "test_car_ap_atmos_offload_configuration.xml";
+const std::string AudioPolicyManagerCarTest::sCarBusMediaOutput = "bus0_media_out";
+
+const std::string AudioPolicyManagerCarTest::sCarBusNavigationOutput = "bus1_navigation_out";
+
+const std::string AudioPolicyManagerCarTest::sCarRearZoneOneOutput = "bus100_audio_zone_1";
+
+const std::string AudioPolicyManagerCarTest::sCarRearZoneTwoOutput = "bus200_audio_zone_2";
+
TEST_F(AudioPolicyManagerCarTest, InitSuccess) {
// SetUp must finish with no assertions.
}
@@ -1852,9 +2297,8 @@
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrAtmosOutputAfterRegisteringPolicyMix) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
- const std::string kTestBusMediaOutput = "bus0_media_out";
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, kTestBusMediaOutput, audioConfig);
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig);
ASSERT_EQ(NO_ERROR, ret);
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
@@ -1882,6 +2326,415 @@
ASSERT_EQ(k48000SamplingRate, outDesc->getSamplingRate());
}
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrAfterRegisteringPolicyMix) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ audio_port_v7 mediaDevicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+ sCarBusMediaOutput, &mediaDevicePort));
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ const audio_attributes_t mediaAttribute = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
+
+ ASSERT_EQ(mediaDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputAfterRegisteringPolicyMix) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ audio_port_v7 navDevicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+ sCarBusNavigationOutput, &navDevicePort));
+ audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ const audio_attributes_t mediaAttribute = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
+
+ ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputAfterUserAffinities) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+ const AudioDeviceTypeAddrVector outputDevices = {mediaOutputDevice};
+ mManager->setUserIdDeviceAffinities(/* userId */ 0, outputDevices);
+ audio_port_v7 navDevicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+ sCarBusNavigationOutput, &navDevicePort));
+ audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ const audio_attributes_t mediaAttribute = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
+
+ ASSERT_NE(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithExcludeUserIdCriteria) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ /*exclude=*/ false),
+ createUserIdCriterion(/* userId */ 0, /* exclude */ true)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ audio_port_v7 navDevicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+ sCarBusNavigationOutput, &navDevicePort));
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ const audio_attributes_t navigationAttribute = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, navigationAttribute);
+
+ ASSERT_NE(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputExcludeUserIdCriteria) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ /*exclude=*/ false),
+ createUserIdCriterion(0 /* userId */, /* exclude */ true)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ audio_port_v7 navDevicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+ sCarBusNavigationOutput, &navDevicePort));
+ audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ const audio_attributes_t mediaAttribute = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
+
+ ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest,
+ GetOutputForAttrWithMatchingMixAndSelectedOutputAfterUserAffinities) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+ const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
+ const AudioDeviceTypeAddrVector outputDevices = {mediaOutputDevice, navOutputDevice};
+ mManager->setUserIdDeviceAffinities(/* userId */ 0, outputDevices);
+ audio_port_v7 navDevicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+ sCarBusNavigationOutput, &navDevicePort));
+ audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ const audio_attributes_t mediaAttribute = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
+
+ ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest,
+ GetOutputForAttrWithNoMatchingMaxAndSelectedOutputAfterUserAffinities) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+ const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
+ const AudioDeviceTypeAddrVector outputDevices = {mediaOutputDevice, navOutputDevice};
+ mManager->setUserIdDeviceAffinities(/* userId */ 0, outputDevices);
+ audio_port_v7 navDevicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+ sCarBusNavigationOutput, &navDevicePort));
+ audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ const audio_attributes_t alarmAttribute = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, alarmAttribute);
+
+ ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest,
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForOneUser) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarRearZoneOneOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarRearZoneTwoOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+ const AudioDeviceTypeAddrVector primaryZoneDevices = {mediaOutputDevice};
+ mManager->setUserIdDeviceAffinities(/* userId */ 0, primaryZoneDevices);
+ audio_port_v7 primaryZoneDevicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+ sCarBusMediaOutput, &primaryZoneDevicePort));
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ const audio_attributes_t mediaAttribute = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ uid_t user11AppUid = multiuser_get_uid(/* user_id */ 11, /* app_id */ 12345);
+
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
+ AUDIO_SESSION_NONE, user11AppUid);
+
+ ASSERT_EQ(primaryZoneDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest,
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForTwoUsers) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarRearZoneOneOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarRearZoneTwoOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+ const AudioDeviceTypeAddrVector primaryZoneDevices = {mediaOutputDevice};
+ mManager->setUserIdDeviceAffinities(/* userId */ 0, primaryZoneDevices);
+ const AudioDeviceTypeAddr secondaryOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarRearZoneOneOutput);
+ const AudioDeviceTypeAddrVector secondaryZoneDevices = {secondaryOutputDevice};
+ mManager->setUserIdDeviceAffinities(/* userId */ 11, secondaryZoneDevices);
+ audio_port_v7 secondaryZoneDevicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+ sCarRearZoneOneOutput, &secondaryZoneDevicePort));
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ const audio_attributes_t mediaAttribute = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ uid_t user11AppUid = multiuser_get_uid(/* user_id */ 11, /* app_id */ 12345);
+
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
+ AUDIO_SESSION_NONE, user11AppUid);
+
+ ASSERT_EQ(secondaryZoneDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest,
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForThreeUsers) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarRearZoneOneOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarRearZoneTwoOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+ const AudioDeviceTypeAddrVector primaryZoneDevices = {mediaOutputDevice};
+ mManager->setUserIdDeviceAffinities(/* userId */ 0, primaryZoneDevices);
+ const AudioDeviceTypeAddr secondaryOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarRearZoneOneOutput);
+ const AudioDeviceTypeAddrVector secondaryZoneDevices = {secondaryOutputDevice};
+ mManager->setUserIdDeviceAffinities(/* userId */ 11, secondaryZoneDevices);
+ const AudioDeviceTypeAddr tertiaryOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarRearZoneTwoOutput);
+ const AudioDeviceTypeAddrVector tertiaryZoneDevices = {tertiaryOutputDevice};
+ mManager->setUserIdDeviceAffinities(/* userId */ 15, tertiaryZoneDevices);
+ audio_port_v7 tertiaryZoneDevicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+ sCarRearZoneTwoOutput, &tertiaryZoneDevicePort));
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ const audio_attributes_t mediaAttribute = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ uid_t user15AppUid = multiuser_get_uid(/* user_id */ 15, /* app_id */ 12345);
+
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
+ AUDIO_SESSION_NONE, user15AppUid);
+
+ ASSERT_EQ(tertiaryZoneDevicePort.id, selectedDeviceId);
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithNoMatchingMix) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ std::vector<AudioMixMatchCriterion> navMatchCriteria = {
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ /*exclude=*/ false)};
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+ const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
+ const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
+ const AudioDeviceTypeAddrVector outputDevices = {mediaOutputDevice, navOutputDevice};
+ mManager->setUserIdDeviceAffinities(/* userId */ 0, outputDevices);
+ audio_port_v7 navDevicePort;
+ ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
+ sCarBusNavigationOutput, &navDevicePort));
+ audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ const audio_attributes_t alarmAttribute = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, alarmAttribute);
+
+ ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+}
+
class AudioPolicyManagerTVTest : public AudioPolicyManagerTestWithConfigurationFile {
protected:
std::string getConfigFile() override { return sTvConfig; }
@@ -2080,6 +2933,108 @@
mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
}
+TEST_F(AudioPolicyManagerDevicesRoleForCapturePresetTest, PreferredDeviceUsedForInput) {
+ const audio_source_t source = AUDIO_SOURCE_MIC;
+ const device_role_t role = DEVICE_ROLE_PREFERRED;
+ const std::string address = "card=1;device=0";
+ const std::string deviceName = "randomName";
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+ AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+ auto availableDevices = mManager->getAvailableInputDevices();
+ ASSERT_GT(availableDevices.size(), 1);
+
+ audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+ attr.source = source;
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
+ ASSERT_NE(nullptr, selectedDevice);
+
+ sp<DeviceDescriptor> preferredDevice = nullptr;
+ for (const auto& device : availableDevices) {
+ if (device != selectedDevice) {
+ preferredDevice = device;
+ break;
+ }
+ }
+ ASSERT_NE(nullptr, preferredDevice);
+ // After setting preferred device for capture preset, the selected device for input should be
+ // the preferred device.
+ ASSERT_EQ(NO_ERROR,
+ mManager->setDevicesRoleForCapturePreset(source, role,
+ {preferredDevice->getDeviceTypeAddr()}));
+ selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ ASSERT_EQ(preferredDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+ // After clearing preferred device for capture preset, the selected device for input should be
+ // the same as original one.
+ ASSERT_EQ(NO_ERROR,
+ mManager->clearDevicesRoleForCapturePreset(source, role));
+ selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+ AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+}
+
+TEST_F(AudioPolicyManagerDevicesRoleForCapturePresetTest, DisabledDeviceNotUsedForInput) {
+ const audio_source_t source = AUDIO_SOURCE_MIC;
+ const device_role_t role = DEVICE_ROLE_DISABLED;
+ const std::string address = "card=1;device=0";
+ const std::string deviceName = "randomName";
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+ AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+ auto availableDevices = mManager->getAvailableInputDevices();
+ ASSERT_GT(availableDevices.size(), 1);
+
+ audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+ attr.source = source;
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
+ ASSERT_NE(nullptr, selectedDevice);
+
+ // After setting disabled device for capture preset, the disabled device must not be
+ // selected for input.
+ ASSERT_EQ(NO_ERROR,
+ mManager->setDevicesRoleForCapturePreset(source, role,
+ {selectedDevice->getDeviceTypeAddr()}));
+ selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ ASSERT_NE(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+ // After clearing disabled device for capture preset, the selected device for input should be
+ // the original one.
+ ASSERT_EQ(NO_ERROR,
+ mManager->clearDevicesRoleForCapturePreset(source, role));
+ selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+ AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+}
+
INSTANTIATE_TEST_CASE_P(
DevicesRoleForCapturePresetOperation,
AudioPolicyManagerDevicesRoleForCapturePresetTest,
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index d342aea..50ca26a 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -54,6 +54,7 @@
<profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
</mixPort>
+ <mixPort name="hifi_output" role="source" flags="AUDIO_OUTPUT_FLAG_BIT_PERFECT"/>
</mixPorts>
<devicePorts>
<devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -74,12 +75,16 @@
<devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink"
encodedFormats="AUDIO_FORMAT_LDAC AUDIO_FORMAT_APTX AUDIO_FORMAT_APTX_HD AUDIO_FORMAT_AAC AUDIO_FORMAT_SBC">
</devicePort>
+ <devicePort tagName="USB Device Out" type="AUDIO_DEVICE_OUT_USB_DEVICE" role="sink">
+ </devicePort>
+ <devicePort tagName="USB Device In" type="AUDIO_DEVICE_IN_USB_DEVICE" role="source">
+ </devicePort>
</devicePorts>
<routes>
<route type="mix" sink="Speaker"
sources="primary output,voip_rx"/>
<route type="mix" sink="primary input"
- sources="Built-In Mic,Hdmi-In Mic"/>
+ sources="Built-In Mic,Hdmi-In Mic,USB Device In"/>
<route type="mix" sink="voip_tx"
sources="Built-In Mic"/>
<route type="mix" sink="Hdmi"
@@ -89,7 +94,9 @@
<route type="mix" sink="mixport_bt_hfp_input"
sources="BT SCO Headset Mic"/>
<route type="mix" sink="BT A2DP Out"
- sources="primary output"/>
+ sources="primary output,hifi_output"/>
+ <route type="mix" sink="USB Device Out"
+ sources="primary output,hifi_output"/>
</routes>
</module>
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 981c569..a45365a 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -57,7 +57,6 @@
"api1/client2/StreamingProcessor.cpp",
"api1/client2/JpegProcessor.cpp",
"api1/client2/CallbackProcessor.cpp",
- "api1/client2/JpegCompressor.cpp",
"api1/client2/CaptureSequencer.cpp",
"api1/client2/ZslProcessor.cpp",
"api2/CameraDeviceClient.cpp",
@@ -66,6 +65,7 @@
"api2/DepthCompositeStream.cpp",
"api2/HeicEncoderInfoManager.cpp",
"api2/HeicCompositeStream.cpp",
+ "api2/JpegRCompositeStream.cpp",
"device3/BufferUtils.cpp",
"device3/Camera3Device.cpp",
"device3/Camera3OfflineSession.cpp",
@@ -95,6 +95,12 @@
"hidl/HidlCameraDeviceUser.cpp",
"hidl/HidlCameraService.cpp",
"hidl/Utils.cpp",
+ "aidl/AidlCameraDeviceCallbacks.cpp",
+ "aidl/AidlCameraDeviceUser.cpp",
+ "aidl/AidlCameraService.cpp",
+ "aidl/AidlCameraServiceListener.cpp",
+ "aidl/AidlUtils.cpp",
+ "aidl/DeathPipe.cpp",
"utils/CameraServiceProxyWrapper.cpp",
"utils/CameraThreadState.cpp",
"utils/CameraTraces.cpp",
@@ -114,6 +120,7 @@
],
shared_libs: [
+ "libactivitymanager_aidl",
"libbase",
"libdl",
"libexif",
@@ -137,6 +144,7 @@
"libhidlbase",
"libimage_io",
"libjpeg",
+ "libultrahdr",
"libmedia_codeclist",
"libmedia_omx",
"libmemunreachable",
@@ -151,19 +159,22 @@
"android.frameworks.cameraservice.service@2.2",
"android.frameworks.cameraservice.device@2.0",
"android.frameworks.cameraservice.device@2.1",
+ "android.frameworks.cameraservice.common-V1-ndk",
+ "android.frameworks.cameraservice.service-V1-ndk",
+ "android.frameworks.cameraservice.device-V1-ndk",
"android.hardware.camera.common@1.0",
"android.hardware.camera.provider@2.4",
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
"android.hardware.camera.device@3.4",
"android.hardware.camera.device@3.5",
"android.hardware.camera.device@3.6",
"android.hardware.camera.device@3.7",
- "android.hardware.camera.device-V1-ndk",
+ "android.hardware.camera.device-V2-ndk",
"media_permission-aidl-cpp",
],
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 28bb781..3e7af3d 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -35,8 +35,10 @@
#include <android/hardware/ICamera.h>
#include <android/hardware/ICameraClient.h>
+#include <aidl/AidlCameraService.h>
#include <android-base/macros.h>
#include <android-base/parseint.h>
+#include <android/permission/PermissionChecker.h>
#include <binder/ActivityManager.h>
#include <binder/AppOpsManager.h>
#include <binder/IPCThreadState.h>
@@ -68,6 +70,8 @@
#include <private/android_filesystem_config.h>
#include <system/camera_vendor_tags.h>
#include <system/camera_metadata.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
#include <camera/StringUtils.h>
#include <system/camera.h>
@@ -82,6 +86,9 @@
namespace {
const char* kPermissionServiceName = "permission";
+ const char* kActivityServiceName = "activity";
+ const char* kSensorPrivacyServiceName = "sensor_privacy";
+ const char* kAppopsServiceName = "appops";
}; // namespace anonymous
namespace android {
@@ -89,6 +96,7 @@
using binder::Status;
using namespace camera3;
using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
+using frameworks::cameraservice::service::implementation::AidlCameraService;
using hardware::ICamera;
using hardware::ICameraClient;
using hardware::ICameraServiceListener;
@@ -121,6 +129,8 @@
"android.permission.CAMERA_OPEN_CLOSE_LISTENER");
static const std::string
sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+// Constant integer for FGS Logging, used to denote the API type for logger
+static const int LOG_FGS_CAMERA_API = 1;
const char *sFileName = "lastOpenSessionDumpFile";
static constexpr int32_t kSystemNativeClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
static constexpr int32_t kSystemNativeClientState =
@@ -133,7 +143,10 @@
// Set to keep track of logged service error events.
static std::set<std::string> sServiceErrorEventSet;
-CameraService::CameraService() :
+CameraService::CameraService(
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper == nullptr ?
+ std::make_shared<CameraServiceProxyWrapper>() : cameraServiceProxyWrapper),
mEventLog(DEFAULT_EVENT_LOG_LENGTH),
mNumberOfCameras(0),
mNumberOfCamerasWithoutSystemCamera(0),
@@ -153,6 +166,20 @@
return (CameraThreadState::getCallingUid() < AID_APP_START);
}
+// Enable processes with isolated AID to request the binder
+void CameraService::instantiate() {
+ CameraService::publish(true);
+}
+
+void CameraService::onServiceRegistration(const String16& name, const sp<IBinder>&) {
+ if (name != toString16(kAppopsServiceName)) {
+ return;
+ }
+
+ ALOGV("appops service registered. setting camera audio restriction");
+ mAppOps.setCameraAudioRestriction(mAudioRestriction);
+}
+
void CameraService::onFirstRef()
{
@@ -177,16 +204,33 @@
mSensorPrivacyPolicy = new SensorPrivacyPolicy(this);
mSensorPrivacyPolicy->registerSelf();
mInjectionStatusListener = new InjectionStatusListener(this);
- mAppOps.setCameraAudioRestriction(mAudioRestriction);
+
+ // appops function setCameraAudioRestriction uses getService which
+ // is blocking till the appops service is ready. To enable early
+ // boot availability for cameraservice, use checkService which is
+ // non blocking and register for notifications
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(toString16(kAppopsServiceName));
+ if (!binder) {
+ sm->registerForNotifications(toString16(kAppopsServiceName), this);
+ } else {
+ mAppOps.setCameraAudioRestriction(mAudioRestriction);
+ }
+
sp<HidlCameraService> hcs = HidlCameraService::getInstance(this);
if (hcs->registerAsService() != android::OK) {
- ALOGE("%s: Failed to register default android.frameworks.cameraservice.service@1.0",
+ // Deprecated, so it will fail to register on newer devices
+ ALOGW("%s: Did not register default android.frameworks.cameraservice.service@2.2",
__FUNCTION__);
}
+ if (!AidlCameraService::registerService(this)) {
+ ALOGE("%s: Failed to register default AIDL VNDK CameraService", __FUNCTION__);
+ }
+
// This needs to be last call in this function, so that it's as close to
// ServiceManager::addService() as possible.
- CameraServiceProxyWrapper::pingCameraServiceProxy();
+ mCameraServiceProxyWrapper->pingCameraServiceProxy();
ALOGI("CameraService pinged cameraservice proxy");
}
@@ -265,7 +309,10 @@
cameraId.c_str());
continue;
}
- i->getListener()->onTorchStatusChanged(mapToInterface(status), cameraId);
+ auto ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
+ cameraId);
+ i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
+ __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
}
}
@@ -532,8 +579,12 @@
id.c_str());
continue;
}
- listener->getListener()->onPhysicalCameraStatusChanged(mapToInterface(newStatus),
- id, physicalId);
+ auto ret = listener->getListener()->onPhysicalCameraStatusChanged(
+ mapToInterface(newStatus), id, physicalId);
+ listener->handleBinderStatus(ret,
+ "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
+ __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+ ret.exceptionCode());
}
}
}
@@ -574,7 +625,10 @@
int32_t newStrengthLevel) {
Mutex::Autolock lock(mStatusListenerLock);
for (auto& i : mListenerList) {
- i->getListener()->onTorchStrengthLevelChanged(cameraId, newStrengthLevel);
+ auto ret = i->getListener()->onTorchStrengthLevelChanged(cameraId, newStrengthLevel);
+ i->handleBinderStatus(ret,
+ "%s: Failed to trigger onTorchStrengthLevelChanged for %d:%d: %d", __FUNCTION__,
+ i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
}
}
@@ -631,11 +685,18 @@
broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
}
-static bool hasPermissionsForSystemCamera(int callingPid, int callingUid,
- bool logPermissionFailure = false) {
- return checkPermission(toString16(sSystemCameraPermission), callingPid, callingUid,
- logPermissionFailure) &&
- checkPermission(toString16(sCameraPermission), callingPid, callingUid);
+static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sSystemCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ return checkPermissionForSystemCamera && checkPermissionForCamera;
}
Status CameraService::getNumberOfCameras(int32_t type, int32_t* numCameras) {
@@ -714,8 +775,14 @@
const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
auto callingPid = CameraThreadState::getCallingPid();
auto callingUid = CameraThreadState::getCallingUid();
- if (checkPermission(toString16(sSystemCameraPermission), callingPid, callingUid,
- /*logPermissionFailure*/false) || getpid() == callingPid) {
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sSystemCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ if (checkPermissionForSystemCamera || getpid() == callingPid) {
deviceIds = &mNormalDeviceIds;
}
if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(deviceIds->size())) {
@@ -784,9 +851,16 @@
// If it's not calling from cameraserver, check the permission only if
// android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
// it would've already been checked in shouldRejectSystemCameraConnection.
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
if ((callingPid != getpid()) &&
(deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
- !checkPermission(toString16(sCameraPermission), callingPid, callingUid)) {
+ !checkPermissionForCamera) {
res = cameraInfo->removePermissionEntries(
mCameraProviderManager->getProviderTagIdLocked(cameraId),
&tagsRemoved);
@@ -965,17 +1039,20 @@
}
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
- *client = new Camera2Client(cameraService, tmp, packageName, featureId,
- cameraId, api1CameraId, facing, sensorOrientation, clientPid, clientUid,
- servicePid, overrideForPerfClass, overrideToPortrait, forceSlowJpegMode);
+ *client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
+ packageName, featureId, cameraId,
+ api1CameraId, facing, sensorOrientation,
+ clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+ forceSlowJpegMode);
ALOGI("%s: Camera1 API (legacy), override to portrait %d, forceSlowJpegMode %d",
__FUNCTION__, overrideToPortrait, forceSlowJpegMode);
} else { // Camera2 API route
sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
- *client = new CameraDeviceClient(cameraService, tmp, packageName,
- systemNativeClient, featureId, cameraId, facing, sensorOrientation,
- clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait);
+ *client = new CameraDeviceClient(cameraService, tmp,
+ cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
+ featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
+ overrideForPerfClass, overrideToPortrait);
ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
}
return Status::ok();
@@ -1218,6 +1295,9 @@
Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
const std::string& clientName, int& clientUid, int& clientPid,
/*out*/int& originalClientPid) const {
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+
int callingPid = CameraThreadState::getCallingPid();
int callingUid = CameraThreadState::getCallingUid();
@@ -1264,9 +1344,14 @@
// If it's not calling from cameraserver, check the permission if the
// device isn't a system only camera (shouldRejectSystemCameraConnection already checks for
// android.permission.SYSTEM_CAMERA for system only camera devices).
+ attributionSource.pid = clientPid;
+ attributionSource.uid = clientUid;
+ attributionSource.packageName = clientName;
+ bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
if (callingPid != getpid() &&
- (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
- !checkPermission(toString16(sCameraPermission), clientPid, clientUid)) {
+ (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
"Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" without camera permission",
@@ -1605,6 +1690,15 @@
}
*device = client;
+
+ const sp<IServiceManager> sm(defaultServiceManager());
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+ CameraThreadState::getCallingUid(),
+ CameraThreadState::getCallingPid());
+ }
+
return ret;
}
@@ -1656,7 +1750,7 @@
// characteristics) even if clients don't have android.permission.CAMERA. We do not want the
// same behavior for system camera devices.
if (!systemClient && systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
- !hasPermissionsForSystemCamera(cPid, cUid, /*logPermissionFailure*/true)) {
+ !hasPermissionsForSystemCamera(cPid, cUid)) {
ALOGW("Rejecting access to system only camera %s, inadequete permissions",
cameraId.c_str());
return true;
@@ -1703,7 +1797,7 @@
clientUserId = multiuser_get_user_id(callingUid);
}
- if (CameraServiceProxyWrapper::isCameraDisabled(clientUserId)) {
+ if (mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
std::string msg = "Camera disabled by device policy";
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(ERROR_DISABLED, msg.c_str());
@@ -1711,7 +1805,8 @@
// enforce system camera permissions
if (oomScoreOffset > 0 &&
- !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid())) {
+ !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid()) &&
+ !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
"camera id %s without SYSTEM_CAMERA permissions",
clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
@@ -1747,6 +1842,13 @@
ALOGE("%s: Error while creating the file: %s", __FUNCTION__, sFileName);
}
}
+ const sp<IServiceManager> sm(defaultServiceManager());
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+ CameraThreadState::getCallingUid(),
+ CameraThreadState::getCallingPid());
+ }
return ret;
}
@@ -1980,8 +2082,17 @@
client->setRotateAndCropOverride(rotateAndCropMode);
} else {
client->setRotateAndCropOverride(
- CameraServiceProxyWrapper::getRotateAndCropOverride(
- clientPackageName, facing, multiuser_get_user_id(clientUid)));
+ mCameraServiceProxyWrapper->getRotateAndCropOverride(
+ clientPackageName, facing, multiuser_get_user_id(clientUid)));
+ }
+
+ // Set autoframing override behaviour
+ if (mOverrideAutoframingMode != ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+ client->setAutoframingOverride(mOverrideAutoframingMode);
+ } else {
+ client->setAutoframingOverride(
+ mCameraServiceProxyWrapper->getAutoframingOverride(
+ clientPackageName));
}
// Set camera muting behavior
@@ -2024,6 +2135,7 @@
client->setImageDumpMask(mImageDumpMask);
client->setStreamUseCaseOverrides(mStreamUseCaseOverrides);
+ client->setZoomOverride(mZoomOverrideValue);
} // lock is destroyed, allow further connect calls
// Important: release the mutex here so the client can call back into the service from its
@@ -2031,7 +2143,7 @@
device = client;
int32_t openLatencyMs = ns2ms(systemTime() - openTimeNs);
- CameraServiceProxyWrapper::logOpen(cameraId, facing, clientPackageName,
+ mCameraServiceProxyWrapper->logOpen(cameraId, facing, clientPackageName,
effectiveApiLevel, isNonSystemNdk, openLatencyMs);
{
@@ -2098,6 +2210,10 @@
onlineClientDesc->getOwnerId(), onlinePriority.getState(),
// native clients don't have offline processing support.
/*ommScoreOffset*/ 0, /*systemNativeClient*/false);
+ if (offlineClientDesc == nullptr) {
+ ALOGE("%s: Offline client descriptor was NULL", __FUNCTION__);
+ return BAD_VALUE;
+ }
// Allow only one offline device per camera
auto incompatibleClients = mActiveClientManager.getIncompatibleClients(offlineClientDesc);
@@ -2461,9 +2577,20 @@
for (const auto& it : mListenerList) {
auto ret = it->getListener()->onCameraAccessPrioritiesChanged();
- if (!ret.isOk()) {
- ALOGE("%s: Failed to trigger permission callback: %d", __FUNCTION__,
- ret.exceptionCode());
+ it->handleBinderStatus(ret, "%s: Failed to trigger permission callback for %d:%d: %d",
+ __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
+ }
+}
+
+void CameraService::notifyMonitoredUids(const std::unordered_set<uid_t> ¬ifyUidSet) {
+ Mutex::Autolock lock(mStatusListenerLock);
+
+ for (const auto& it : mListenerList) {
+ if (notifyUidSet.find(it->getListenerUid()) != notifyUidSet.end()) {
+ ALOGV("%s: notifying uid %d", __FUNCTION__, it->getListenerUid());
+ auto ret = it->getListener()->onCameraAccessPrioritiesChanged();
+ it->handleBinderStatus(ret, "%s: Failed to trigger permission callback for %d:%d: %d",
+ __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
}
}
}
@@ -2528,7 +2655,7 @@
const auto basicClient = current->getValue();
if (basicClient.get() != nullptr && !basicClient->getOverrideToPortrait()) {
basicClient->setRotateAndCropOverride(
- CameraServiceProxyWrapper::getRotateAndCropOverride(
+ mCameraServiceProxyWrapper->getRotateAndCropOverride(
basicClient->getPackageName(),
basicClient->getCameraFacing(),
multiuser_get_user_id(basicClient->getClientUid())));
@@ -2599,8 +2726,14 @@
// Check for camera permissions
int callingPid = CameraThreadState::getCallingPid();
int callingUid = CameraThreadState::getCallingUid();
- if ((callingPid != getpid()) && !checkPermission(toString16(sCameraPermission), callingPid,
- callingUid)) {
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ if ((callingPid != getpid()) && !checkPermissionForCamera) {
ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
return STATUS_ERROR(ERROR_PERMISSION_DENIED,
"android.permission.CAMERA needed to call"
@@ -2647,8 +2780,13 @@
auto clientUid = CameraThreadState::getCallingUid();
auto clientPid = CameraThreadState::getCallingPid();
- bool openCloseCallbackAllowed = checkPermission(toString16(sCameraOpenCloseListenerPermission),
- clientPid, clientUid, /*logPermissionFailure*/false);
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+ attributionSource.uid = clientUid;
+ attributionSource.pid = clientPid;
+ bool openCloseCallbackAllowed = permissionChecker.checkPermissionForPreflight(
+ toString16(sCameraOpenCloseListenerPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
Mutex::Autolock lock(mServiceLock);
@@ -2677,7 +2815,7 @@
// permissions the listener process has / whether it is a vendor listener. Since it might be
// eligible to listen to other camera ids.
mListenerList.emplace_back(serviceListener);
- mUidPolicy->registerMonitorUid(clientUid);
+ mUidPolicy->registerMonitorUid(clientUid, /*openCamera*/false);
}
/* Collect current devices and status */
@@ -2745,7 +2883,7 @@
Mutex::Autolock lock(mStatusListenerLock);
for (auto it = mListenerList.begin(); it != mListenerList.end(); it++) {
if (IInterface::asBinder((*it)->getListener()) == IInterface::asBinder(listener)) {
- mUidPolicy->unregisterMonitorUid((*it)->getListenerUid());
+ mUidPolicy->unregisterMonitorUid((*it)->getListenerUid(), /*closeCamera*/false);
IInterface::asBinder(listener)->unlinkToDeath(*it);
mListenerList.erase(it);
return Status::ok();
@@ -2910,6 +3048,13 @@
return binder::Status::ok();
}
+Status CameraService::reportExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/) {
+ ALOGV("%s: reported %s", __FUNCTION__, stats.toString().c_str());
+ *sessionKey = mCameraServiceProxyWrapper->updateExtensionStats(stats);
+ return Status::ok();
+}
+
void CameraService::removeByClient(const BasicClient* client) {
Mutex::Autolock lock(mServiceLock);
for (auto& i : mActiveClientManager.getAll()) {
@@ -3386,6 +3531,13 @@
// client shouldn't be able to call into us anymore
mClientPid = 0;
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ mActivityManager->logFgsApiEnd(LOG_FGS_CAMERA_API,
+ CameraThreadState::getCallingUid(),
+ CameraThreadState::getCallingPid());
+ }
+
return res;
}
@@ -3478,6 +3630,11 @@
mClientPackageName);
bool isCameraPrivacyEnabled =
sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+ // We don't want to return EACCES if the CameraPrivacy is enabled.
+ // We prefer to successfully open the camera and perform camera muting
+ // or blocking in connectHelper as handleAppOpMode can be called before the
+ // connection has been fully established and at that time camera muting
+ // capabilities are unknown.
if (!isUidActive || !isCameraPrivacyEnabled) {
ALOGI("Camera %s: Access for \"%s\" has been restricted",
mCameraIdStr.c_str(), mClientPackageName.c_str());
@@ -3516,7 +3673,7 @@
// Transition device availability listeners from PRESENT -> NOT_AVAILABLE
sCameraService->updateStatus(StatusInternal::NOT_AVAILABLE, mCameraIdStr);
- sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+ sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
// Notify listeners of camera open/close status
sCameraService->updateOpenCloseStatus(mCameraIdStr, true/*open*/, mClientPackageName);
@@ -3625,7 +3782,7 @@
}
mOpsCallback.clear();
- sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+ sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
// Notify listeners of camera open/close status
sCameraService->updateOpenCloseStatus(mCameraIdStr, false/*open*/, mClientPackageName);
@@ -3732,20 +3889,44 @@
// UidPolicy
// ----------------------------------------------------------------------------
-void CameraService::UidPolicy::registerSelf() {
+void CameraService::UidPolicy::registerWithActivityManager() {
Mutex::Autolock _l(mUidLock);
+ int32_t emptyUidArray[] = { };
if (mRegistered) return;
status_t res = mAm.linkToDeath(this);
- mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE
+ mAm.registerUidObserverForUids(this, ActivityManager::UID_OBSERVER_GONE
| ActivityManager::UID_OBSERVER_IDLE
| ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE
| ActivityManager::UID_OBSERVER_PROC_OOM_ADJ,
ActivityManager::PROCESS_STATE_UNKNOWN,
- toString16(kServiceName));
+ toString16(kServiceName), emptyUidArray, 0, mObserverToken);
if (res == OK) {
mRegistered = true;
ALOGV("UidPolicy: Registered with ActivityManager");
+ } else {
+ ALOGE("UidPolicy: Failed to register with ActivityManager: 0x%08x", res);
+ }
+}
+
+void CameraService::UidPolicy::onServiceRegistration(const String16& name, const sp<IBinder>&) {
+ if (name != toString16(kActivityServiceName)) {
+ return;
+ }
+
+ registerWithActivityManager();
+}
+
+void CameraService::UidPolicy::registerSelf() {
+ // Use check service to see if the activity service is available
+ // If not available then register for notifications, instead of blocking
+ // till the service is ready
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(toString16(kActivityServiceName));
+ if (!binder) {
+ sm->registerForNotifications(toString16(kActivityServiceName), this);
+ } else {
+ registerWithActivityManager();
}
}
@@ -3804,24 +3985,54 @@
}
}
-void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid) {
- bool procAdjChange = false;
+/**
+ * When the OOM adj of the uid owning the camera changes, a different uid waiting on camera
+ * privileges may take precedence if the owner's new OOM adj is greater than the waiting package.
+ * Here, we track which monitoredUid has the camera, and track its adj relative to other
+ * monitoredUids. If it is revised above some other monitoredUid, signal
+ * onCameraAccessPrioritiesChanged. This only needs to capture the case where there are two
+ * foreground apps in split screen - state changes will capture all other cases.
+ */
+void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid, int32_t adj) {
+ std::unordered_set<uid_t> notifyUidSet;
{
Mutex::Autolock _l(mUidLock);
- if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
- procAdjChange = true;
+ auto it = mMonitoredUids.find(uid);
+
+ if (it != mMonitoredUids.end()) {
+ if (it->second.hasCamera) {
+ for (auto &monitoredUid : mMonitoredUids) {
+ if (monitoredUid.first != uid && adj > monitoredUid.second.procAdj) {
+ ALOGV("%s: notify uid %d", __FUNCTION__, monitoredUid.first);
+ notifyUidSet.emplace(monitoredUid.first);
+ }
+ }
+ ALOGV("%s: notify uid %d", __FUNCTION__, uid);
+ notifyUidSet.emplace(uid);
+ } else {
+ for (auto &monitoredUid : mMonitoredUids) {
+ if (monitoredUid.second.hasCamera && adj < monitoredUid.second.procAdj) {
+ ALOGV("%s: notify uid %d", __FUNCTION__, uid);
+ notifyUidSet.emplace(uid);
+ }
+ }
+ }
+ it->second.procAdj = adj;
}
}
- if (procAdjChange) {
+ if (notifyUidSet.size() > 0) {
sp<CameraService> service = mService.promote();
if (service != nullptr) {
- service->notifyMonitoredUids();
+ service->notifyMonitoredUids(notifyUidSet);
}
}
}
-void CameraService::UidPolicy::registerMonitorUid(uid_t uid) {
+/**
+ * Register a uid for monitoring, and note whether it owns a camera.
+ */
+void CameraService::UidPolicy::registerMonitorUid(uid_t uid, bool openCamera) {
Mutex::Autolock _l(mUidLock);
auto it = mMonitoredUids.find(uid);
if (it != mMonitoredUids.end()) {
@@ -3829,18 +4040,36 @@
} else {
MonitoredUid monitoredUid;
monitoredUid.procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
+ monitoredUid.procAdj = resource_policy::UNKNOWN_ADJ;
monitoredUid.refCount = 1;
- mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid));
+ it = mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid)).first;
+ status_t res = mAm.addUidToObserver(mObserverToken, toString16(kServiceName), uid);
+ if (res != OK) {
+ ALOGE("UidPolicy: Failed to add uid to observer: 0x%08x", res);
+ }
+ }
+
+ if (openCamera) {
+ it->second.hasCamera = true;
}
}
-void CameraService::UidPolicy::unregisterMonitorUid(uid_t uid) {
+/**
+ * Unregister a uid for monitoring, and note whether it lost ownership of a camera.
+ */
+void CameraService::UidPolicy::unregisterMonitorUid(uid_t uid, bool closeCamera) {
Mutex::Autolock _l(mUidLock);
auto it = mMonitoredUids.find(uid);
if (it != mMonitoredUids.end()) {
it->second.refCount--;
if (it->second.refCount == 0) {
mMonitoredUids.erase(it);
+ status_t res = mAm.removeUidFromObserver(mObserverToken, toString16(kServiceName), uid);
+ if (res != OK) {
+ ALOGE("UidPolicy: Failed to remove uid from observer: 0x%08x", res);
+ }
+ } else if (closeCamera) {
+ it->second.hasCamera = false;
}
} else {
ALOGE("%s: Trying to unregister uid: %d which is not monitored!", __FUNCTION__, uid);
@@ -3962,7 +4191,9 @@
// ----------------------------------------------------------------------------
// SensorPrivacyPolicy
// ----------------------------------------------------------------------------
-void CameraService::SensorPrivacyPolicy::registerSelf() {
+
+void CameraService::SensorPrivacyPolicy::registerWithSensorPrivacyManager()
+{
Mutex::Autolock _l(mSensorPrivacyLock);
if (mRegistered) {
return;
@@ -3977,6 +4208,27 @@
}
}
+void CameraService::SensorPrivacyPolicy::onServiceRegistration(const String16& name,
+ const sp<IBinder>&) {
+ if (name != toString16(kSensorPrivacyServiceName)) {
+ return;
+ }
+
+ registerWithSensorPrivacyManager();
+}
+
+void CameraService::SensorPrivacyPolicy::registerSelf() {
+ // Use checkservice to see if the sensor_privacy service is available
+ // If service is not available then register for notification
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(toString16(kSensorPrivacyServiceName));
+ if (!binder) {
+ sm->registerForNotifications(toString16(kSensorPrivacyServiceName),this);
+ } else {
+ registerWithSensorPrivacyManager();
+ }
+}
+
void CameraService::SensorPrivacyPolicy::unregisterSelf() {
Mutex::Autolock _l(mSensorPrivacyLock);
mSpm.removeSensorPrivacyListener(this);
@@ -3986,6 +4238,10 @@
}
bool CameraService::SensorPrivacyPolicy::isSensorPrivacyEnabled() {
+ if (!mRegistered) {
+ registerWithSensorPrivacyManager();
+ }
+
Mutex::Autolock _l(mSensorPrivacyLock);
return mSensorPrivacyEnabled;
}
@@ -4692,8 +4948,12 @@
cameraId.c_str());
continue;
}
- listener->getListener()->onStatusChanged(mapToInterface(status),
+ auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
cameraId);
+ listener->handleBinderStatus(ret,
+ "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+ __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+ ret.exceptionCode());
}
});
}
@@ -4725,10 +4985,10 @@
} else {
ret = it->getListener()->onCameraClosed(cameraId);
}
- if (!ret.isOk()) {
- ALOGE("%s: Failed to trigger onCameraOpened/onCameraClosed callback: %d", __FUNCTION__,
- ret.exceptionCode());
- }
+
+ it->handleBinderStatus(ret,
+ "%s: Failed to trigger onCameraOpened/onCameraClosed callback for %d:%d: %d",
+ __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
}
}
@@ -4829,8 +5089,12 @@
physicalCameraId.c_str());
continue;
}
- listener->getListener()->onPhysicalCameraStatusChanged(status,
+ auto ret = listener->getListener()->onPhysicalCameraStatusChanged(status,
logicalCameraId, physicalCameraId);
+ listener->handleBinderStatus(ret,
+ "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
+ __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+ ret.exceptionCode());
}
}
}
@@ -4878,6 +5142,10 @@
return handleSetRotateAndCrop(args);
} else if (args.size() >= 1 && args[0] == toString16("get-rotate-and-crop")) {
return handleGetRotateAndCrop(out);
+ } else if (args.size() >= 2 && args[0] == toString16("set-autoframing")) {
+ return handleSetAutoframing(args);
+ } else if (args.size() >= 1 && args[0] == toString16("get-autoframing")) {
+ return handleGetAutoframing(out);
} else if (args.size() >= 2 && args[0] == toString16("set-image-dump-mask")) {
return handleSetImageDumpMask(args);
} else if (args.size() >= 1 && args[0] == toString16("get-image-dump-mask")) {
@@ -4887,7 +5155,10 @@
} else if (args.size() >= 2 && args[0] == toString16("set-stream-use-case-override")) {
return handleSetStreamUseCaseOverrides(args);
} else if (args.size() >= 1 && args[0] == toString16("clear-stream-use-case-override")) {
- return handleClearStreamUseCaseOverrides();
+ handleClearStreamUseCaseOverrides();
+ return OK;
+ } else if (args.size() >= 1 && args[0] == toString16("set-zoom-override")) {
+ return handleSetZoomOverride(args);
} else if (args.size() >= 2 && args[0] == toString16("watch")) {
return handleWatchCommand(args, in, out);
} else if (args.size() >= 2 && args[0] == toString16("set-watchdog")) {
@@ -4985,6 +5256,34 @@
return OK;
}
+status_t CameraService::handleSetAutoframing(const Vector<String16>& args) {
+ char* end;
+ int autoframingValue = (int) strtol(toStdString(args[1]).c_str(), &end, /*base=*/10);
+ if ((*end != '\0') ||
+ (autoframingValue != ANDROID_CONTROL_AUTOFRAMING_OFF &&
+ autoframingValue != ANDROID_CONTROL_AUTOFRAMING_ON &&
+ autoframingValue != ANDROID_CONTROL_AUTOFRAMING_AUTO)) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mServiceLock);
+ mOverrideAutoframingMode = autoframingValue;
+
+ if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) return OK;
+
+ const auto clients = mActiveClientManager.getAll();
+ for (auto& current : clients) {
+ if (current != nullptr) {
+ const auto basicClient = current->getValue();
+ if (basicClient.get() != nullptr) {
+ basicClient->setAutoframingOverride(autoframingValue);
+ }
+ }
+ }
+
+ return OK;
+}
+
status_t CameraService::handleSetCameraServiceWatchdog(const Vector<String16>& args) {
int enableWatchdog = atoi(toStdString(args[1]).c_str());
@@ -5013,6 +5312,12 @@
return dprintf(out, "rotateAndCrop override: %d\n", mOverrideRotateAndCropMode);
}
+status_t CameraService::handleGetAutoframing(int out) {
+ Mutex::Autolock lock(mServiceLock);
+
+ return dprintf(out, "autoframing override: %d\n", mOverrideAutoframingMode);
+}
+
status_t CameraService::handleSetImageDumpMask(const Vector<String16>& args) {
char *endPtr;
errno = 0;
@@ -5077,6 +5382,8 @@
useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL;
} else if (arg == "VIDEO_CALL") {
useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
+ } else if (arg == "CROPPED_RAW") {
+ useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
} else {
ALOGE("%s: Invalid stream use case %s", __FUNCTION__, arg.c_str());
return BAD_VALUE;
@@ -5090,9 +5397,35 @@
return OK;
}
-status_t CameraService::handleClearStreamUseCaseOverrides() {
+void CameraService::handleClearStreamUseCaseOverrides() {
Mutex::Autolock lock(mServiceLock);
mStreamUseCaseOverrides.clear();
+}
+
+status_t CameraService::handleSetZoomOverride(const Vector<String16>& args) {
+ char* end;
+ int zoomOverrideValue = strtol(toStdString(args[1]).c_str(), &end, /*base=*/10);
+ if ((*end != '\0') ||
+ (zoomOverrideValue != -1 &&
+ zoomOverrideValue != ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF &&
+ zoomOverrideValue != ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM)) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mServiceLock);
+ mZoomOverrideValue = zoomOverrideValue;
+
+ const auto clients = mActiveClientManager.getAll();
+ for (auto& current : clients) {
+ if (current != nullptr) {
+ const auto basicClient = current->getValue();
+ if (basicClient.get() != nullptr) {
+ if (basicClient->supportsZoomOverride()) {
+ basicClient->setZoomOverride(mZoomOverrideValue);
+ }
+ }
+ }
+ }
return OK;
}
@@ -5433,6 +5766,9 @@
" set-rotate-and-crop <ROTATION> overrides the rotate-and-crop value for AUTO backcompat\n"
" Valid values 0=0 deg, 1=90 deg, 2=180 deg, 3=270 deg, 4=No override\n"
" get-rotate-and-crop returns the current override rotate-and-crop value\n"
+ " set-autoframing <VALUE> overrides the autoframing value for AUTO\n"
+ " Valid values 0=false, 1=true, 2=auto\n"
+ " get-autoframing returns the current override autoframing value\n"
" set-image-dump-mask <MASK> specifies the formats to be saved to disk\n"
" Valid values 0=OFF, 1=ON for JPEG\n"
" get-image-dump-mask returns the current image-dump-mask value\n"
@@ -5444,8 +5780,10 @@
" last use case is assigned to all the remaining streams. In case of multiple\n"
" streams with the same resolution, the tie-breaker is (JPEG, RAW, YUV, and PRIV)\n"
" Valid values are (case sensitive): DEFAULT, PREVIEW, STILL_CAPTURE, VIDEO_RECORD,\n"
- " PREVIEW_VIDEO_STILL, VIDEO_CALL\n"
+ " PREVIEW_VIDEO_STILL, VIDEO_CALL, CROPPED_RAW\n"
" clear-stream-use-case-override clear the stream use case override\n"
+ " set-zoom-override <-1/0/1> enable or disable zoom override\n"
+ " Valid values -1: do not override, 0: override to OFF, 1: override to ZOOM\n"
" watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
" help print this message\n");
}
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 3ee1e6e..bc65293 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -29,6 +29,8 @@
#include <binder/ActivityManager.h>
#include <binder/AppOpsManager.h>
#include <binder/BinderService.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
#include <binder/IAppOpsCallback.h>
#include <binder/IUidObserver.h>
#include <hardware/camera.h>
@@ -48,6 +50,7 @@
#include "utils/AutoConditionLock.h"
#include "utils/ClientManager.h"
#include "utils/IPCTransport.h"
+#include "utils/CameraServiceProxyWrapper.h"
#include <set>
#include <string>
@@ -70,7 +73,8 @@
public BinderService<CameraService>,
public virtual ::android::hardware::BnCameraService,
public virtual IBinder::DeathRecipient,
- public virtual CameraProviderManager::StatusListener
+ public virtual CameraProviderManager::StatusListener,
+ public virtual IServiceManager::LocalRegistrationCallback
{
friend class BinderService<CameraService>;
friend class CameraOfflineSessionClient;
@@ -97,10 +101,19 @@
// Event log ID
static const int SN_EVENT_LOG_ID = 0x534e4554;
+ // Register camera service
+ static void instantiate();
+
// Implementation of BinderService<T>
static char const* getServiceName() { return "media.camera"; }
- CameraService();
+ // Implementation of IServiceManager::LocalRegistrationCallback
+ virtual void onServiceRegistration(const String16& name, const sp<IBinder>& binder) override;
+
+ // Non-null arguments for cameraServiceProxyWrapper should be provided for
+ // testing purposes only.
+ CameraService(std::shared_ptr<CameraServiceProxyWrapper>
+ cameraServiceProxyWrapper = nullptr);
virtual ~CameraService();
/////////////////////////////////////////////////////////////////////
@@ -206,6 +219,9 @@
/*out*/
sp<hardware::camera2::ICameraInjectionSession>* cameraInjectionSession);
+ virtual binder::Status reportExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/);
+
// Extra permissions checks
virtual status_t onTransact(uint32_t code, const Parcel& data,
Parcel* reply, uint32_t flags);
@@ -221,6 +237,7 @@
// Monitored UIDs availability notification
void notifyMonitoredUids();
+ void notifyMonitoredUids(const std::unordered_set<uid_t> ¬ifyUidSet);
// Stores current open session device info in temp file.
void cacheDump();
@@ -340,6 +357,9 @@
// Override rotate-and-crop AUTO behavior
virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop) = 0;
+ // Override autoframing AUTO behaviour
+ virtual status_t setAutoframingOverride(uint8_t autoframingValue) = 0;
+
// Whether the client supports camera muting (black only output)
virtual bool supportsCameraMute() = 0;
@@ -356,6 +376,12 @@
// Clear stream use case overrides
virtual void clearStreamUseCaseOverrides() = 0;
+ // Whether the client supports camera zoom override
+ virtual bool supportsZoomOverride() = 0;
+
+ // Set/reset zoom override
+ virtual status_t setZoomOverride(int32_t zoomOverride) = 0;
+
// The injection camera session to replace the internal camera
// session.
virtual status_t injectCamera(const std::string& injectedCamId,
@@ -510,7 +536,6 @@
virtual bool canCastToApiClient(apiLevel level) const;
void setImageDumpMask(int /*mask*/) { }
- void setStreamUseCaseOverrides(const std::vector<int64_t>& /*usecaseOverrides*/) { }
protected:
// Initialized in constructor
@@ -585,6 +610,20 @@
private:
+ // TODO: b/263304156 update this to make use of a death callback for more
+ // robust/fault tolerant logging
+ static const sp<IActivityManager>& getActivityManager() {
+ static const char* kActivityService = "activity";
+ static const auto activityManager = []() -> sp<IActivityManager> {
+ const sp<IServiceManager> sm(defaultServiceManager());
+ if (sm != nullptr) {
+ return interface_cast<IActivityManager>(sm->checkService(String16(kActivityService)));
+ }
+ return nullptr;
+ }();
+ return activityManager;
+ }
+
/**
* Typesafe version of device status, containing both the HAL-layer and the service interface-
* layer values.
@@ -705,7 +744,10 @@
// Observer for UID lifecycle enforcing that UIDs in idle
// state cannot use the camera to protect user privacy.
- class UidPolicy : public BnUidObserver, public virtual IBinder::DeathRecipient {
+ class UidPolicy :
+ public BnUidObserver,
+ public virtual IBinder::DeathRecipient,
+ public virtual IServiceManager::LocalRegistrationCallback {
public:
explicit UidPolicy(sp<CameraService> service)
: mRegistered(false), mService(service) {}
@@ -722,23 +764,30 @@
void onUidIdle(uid_t uid, bool disabled) override;
void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
int32_t capability) override;
- void onUidProcAdjChanged(uid_t uid) override;
+ void onUidProcAdjChanged(uid_t uid, int adj) override;
void addOverrideUid(uid_t uid, const std::string &callingPackage, bool active);
void removeOverrideUid(uid_t uid, const std::string &callingPackage);
- void registerMonitorUid(uid_t uid);
- void unregisterMonitorUid(uid_t uid);
+ void registerMonitorUid(uid_t uid, bool openCamera);
+ void unregisterMonitorUid(uid_t uid, bool closeCamera);
+ // Implementation of IServiceManager::LocalRegistrationCallback
+ virtual void onServiceRegistration(const String16& name,
+ const sp<IBinder>& binder) override;
// IBinder::DeathRecipient implementation
virtual void binderDied(const wp<IBinder> &who);
private:
bool isUidActiveLocked(uid_t uid, const std::string &callingPackage);
int32_t getProcStateLocked(uid_t uid);
- void updateOverrideUid(uid_t uid, const std::string &callingPackage, bool active, bool insert);
+ void updateOverrideUid(uid_t uid, const std::string &callingPackage, bool active,
+ bool insert);
+ void registerWithActivityManager();
struct MonitoredUid {
int32_t procState;
+ int32_t procAdj;
+ bool hasCamera;
size_t refCount;
};
@@ -750,12 +799,14 @@
// Monitored uid map
std::unordered_map<uid_t, MonitoredUid> mMonitoredUids;
std::unordered_map<uid_t, bool> mOverrideUids;
+ sp<IBinder> mObserverToken;
}; // class UidPolicy
// If sensor privacy is enabled then all apps, including those that are active, should be
// prevented from accessing the camera.
class SensorPrivacyPolicy : public hardware::BnSensorPrivacyListener,
- public virtual IBinder::DeathRecipient {
+ public virtual IBinder::DeathRecipient,
+ public virtual IServiceManager::LocalRegistrationCallback {
public:
explicit SensorPrivacyPolicy(wp<CameraService> service)
: mService(service), mSensorPrivacyEnabled(false), mRegistered(false) {}
@@ -769,6 +820,9 @@
binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
bool enabled);
+ // Implementation of IServiceManager::LocalRegistrationCallback
+ virtual void onServiceRegistration(const String16& name,
+ const sp<IBinder>& binder) override;
// IBinder::DeathRecipient implementation
virtual void binderDied(const wp<IBinder> &who);
@@ -780,12 +834,15 @@
bool mRegistered;
bool hasCameraPrivacyFeature();
+ void registerWithSensorPrivacyManager();
};
sp<UidPolicy> mUidPolicy;
sp<SensorPrivacyPolicy> mSensorPrivacyPolicy;
+ std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
// Delay-load the Camera HAL module
virtual void onFirstRef();
@@ -1086,6 +1143,29 @@
return IInterface::asBinder(mListener)->linkToDeath(this);
}
+ template<typename... args_t>
+ void handleBinderStatus(const binder::Status &ret, const char *logOnError,
+ args_t... args) {
+ if (!ret.isOk() &&
+ (ret.exceptionCode() != binder::Status::Exception::EX_TRANSACTION_FAILED
+ || !mLastTransactFailed)) {
+ ALOGE(logOnError, args...);
+ }
+
+ // If the transaction failed, the process may have died (or other things, see
+ // b/28321379). Mute consecutive errors from this listener to avoid log spam.
+ if (ret.exceptionCode() == binder::Status::Exception::EX_TRANSACTION_FAILED) {
+ if (!mLastTransactFailed) {
+ ALOGE("%s: Muting similar errors from listener %d:%d", __FUNCTION__,
+ mListenerUid, mListenerPid);
+ }
+ mLastTransactFailed = true;
+ } else {
+ // Reset mLastTransactFailed when binder becomes healthy again.
+ mLastTransactFailed = false;
+ }
+ }
+
virtual void binderDied(const wp<IBinder> &/*who*/) {
auto parent = mParent.promote();
if (parent.get() != nullptr) {
@@ -1106,6 +1186,9 @@
int mListenerPid = -1;
bool mIsVendorListener = false;
bool mOpenCloseCallbackAllowed = false;
+
+ // Flag for preventing log spam when binder becomes unhealthy
+ bool mLastTransactFailed = false;
};
// Guarded by mStatusListenerMutex
@@ -1218,6 +1301,12 @@
// Get the rotate-and-crop AUTO override behavior
status_t handleGetRotateAndCrop(int out);
+ // Set the autoframing AUTO override behaviour.
+ status_t handleSetAutoframing(const Vector<String16>& args);
+
+ // Get the autoframing AUTO override behaviour
+ status_t handleGetAutoframing(int out);
+
// Set the mask for image dump to disk
status_t handleSetImageDumpMask(const Vector<String16>& args);
@@ -1231,7 +1320,10 @@
status_t handleSetStreamUseCaseOverrides(const Vector<String16>& args);
// Clear the stream use case overrides
- status_t handleClearStreamUseCaseOverrides();
+ void handleClearStreamUseCaseOverrides();
+
+ // Set or clear the zoom override flag
+ status_t handleSetZoomOverride(const Vector<String16>& args);
// Handle 'watch' command as passed through 'cmd'
status_t handleWatchCommand(const Vector<String16> &args, int inFd, int outFd);
@@ -1318,6 +1410,9 @@
// Current override cmd rotate-and-crop mode; AUTO means no override
uint8_t mOverrideRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
+ // Current autoframing mode
+ uint8_t mOverrideAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_AUTO;
+
// Current image dump mask
uint8_t mImageDumpMask = 0;
@@ -1330,6 +1425,9 @@
// Current stream use case overrides
std::vector<int64_t> mStreamUseCaseOverrides;
+ // Current zoom override value
+ int32_t mZoomOverrideValue = -1;
+
/**
* A listener class that implements the IBinder::DeathRecipient interface
* for use to call back the error state injected by the external camera, and
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.cpp b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
index e101dd3..1c1bd24 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.cpp
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
@@ -17,6 +17,8 @@
#define LOG_TAG "CameraServiceWatchdog"
#include "CameraServiceWatchdog.h"
+#include "android/set_abort_message.h"
+#include "utils/CameraServiceProxyWrapper.h"
namespace android {
@@ -35,13 +37,18 @@
{
AutoMutex _l(mWatchdogLock);
- for (auto it = tidToCycleCounterMap.begin(); it != tidToCycleCounterMap.end(); it++) {
+ for (auto it = mTidMap.begin(); it != mTidMap.end(); it++) {
uint32_t currentThreadId = it->first;
- tidToCycleCounterMap[currentThreadId]++;
+ mTidMap[currentThreadId].cycles++;
- if (tidToCycleCounterMap[currentThreadId] >= mMaxCycles) {
- ALOGW("CameraServiceWatchdog triggering abort for pid: %d", getpid());
+ if (mTidMap[currentThreadId].cycles >= mMaxCycles) {
+ std::string abortMessage = getAbortMessage(mTidMap[currentThreadId].functionName);
+ android_set_abort_message(abortMessage.c_str());
+ ALOGW("CameraServiceWatchdog triggering abort for pid: %d tid: %d", getpid(),
+ currentThreadId);
+ mCameraServiceProxyWrapper->logClose(mCameraId, 0 /*latencyMs*/,
+ true /*deviceError*/);
// We use abort here so we can get a tombstone for better
// debugging.
abort();
@@ -52,13 +59,19 @@
return true;
}
+std::string CameraServiceWatchdog::getAbortMessage(const std::string& functionName) {
+ std::string res = "CameraServiceWatchdog triggering abort during "
+ + functionName;
+ return res;
+}
+
void CameraServiceWatchdog::requestExit()
{
Thread::requestExit();
AutoMutex _l(mWatchdogLock);
- tidToCycleCounterMap.clear();
+ mTidMap.clear();
if (mPause) {
mPause = false;
@@ -81,18 +94,21 @@
{
AutoMutex _l(mWatchdogLock);
- tidToCycleCounterMap.erase(tid);
+ mTidMap.erase(tid);
- if (tidToCycleCounterMap.empty()) {
+ if (mTidMap.empty()) {
mPause = true;
}
}
-void CameraServiceWatchdog::start(uint32_t tid)
+void CameraServiceWatchdog::start(uint32_t tid, const char* functionName)
{
AutoMutex _l(mWatchdogLock);
- tidToCycleCounterMap[tid] = 0;
+ MonitoredFunction monitoredFunction = {};
+ monitoredFunction.cycles = 0;
+ monitoredFunction.functionName = functionName;
+ mTidMap[tid] = monitoredFunction;
if (mPause) {
mPause = false;
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index e35d69e..9f25865 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -36,10 +36,12 @@
#include <utils/Log.h>
#include <unordered_map>
+#include "utils/CameraServiceProxyWrapper.h"
+
// Used to wrap the call of interest in start and stop calls
-#define WATCH(toMonitor) watchThread([&]() { return toMonitor;}, gettid())
+#define WATCH(toMonitor) watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__)
#define WATCH_CUSTOM_TIMER(toMonitor, cycles, cycleLength) \
- watchThread([&]() { return toMonitor;}, gettid(), cycles, cycleLength);
+ watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__, cycles, cycleLength);
// Default cycles and cycle length values used to calculate permitted elapsed time
const static size_t kMaxCycles = 100;
@@ -49,13 +51,24 @@
class CameraServiceWatchdog : public Thread {
-public:
- explicit CameraServiceWatchdog() : mPause(true), mMaxCycles(kMaxCycles),
- mCycleLengthMs(kCycleLengthMs), mEnabled(true) {};
+struct MonitoredFunction {
+ uint32_t cycles;
+ std::string functionName;
+};
- explicit CameraServiceWatchdog (size_t maxCycles, uint32_t cycleLengthMs, bool enabled) :
- mPause(true), mMaxCycles(maxCycles), mCycleLengthMs(cycleLengthMs), mEnabled(enabled)
- {};
+public:
+ explicit CameraServiceWatchdog(const std::string &cameraId,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+ mCameraId(cameraId), mPause(true), mMaxCycles(kMaxCycles),
+ mCycleLengthMs(kCycleLengthMs), mEnabled(true),
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper) {};
+
+ explicit CameraServiceWatchdog (const std::string &cameraId, size_t maxCycles,
+ uint32_t cycleLengthMs, bool enabled,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+ mCameraId(cameraId), mPause(true), mMaxCycles(maxCycles),
+ mCycleLengthMs(cycleLengthMs), mEnabled(enabled),
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper) {};
virtual ~CameraServiceWatchdog() {};
@@ -66,7 +79,8 @@
/** Used to wrap monitored calls in start and stop functions using custom timer values */
template<typename T>
- auto watchThread(T func, uint32_t tid, uint32_t cycles, uint32_t cycleLength) {
+ auto watchThread(T func, uint32_t tid, const char* functionName, uint32_t cycles,
+ uint32_t cycleLength) {
decltype(func()) res;
if (cycles != mMaxCycles || cycleLength != mCycleLengthMs) {
@@ -75,24 +89,24 @@
// Lock for mEnabled
mEnabledLock.lock();
- sp<CameraServiceWatchdog> tempWatchdog =
- new CameraServiceWatchdog(cycles, cycleLength, mEnabled);
+ sp<CameraServiceWatchdog> tempWatchdog = new CameraServiceWatchdog(
+ mCameraId, cycles, cycleLength, mEnabled, mCameraServiceProxyWrapper);
mEnabledLock.unlock();
status_t status = tempWatchdog->run("CameraServiceWatchdog");
if (status != OK) {
ALOGE("Unable to watch thread: %s (%d)", strerror(-status), status);
- res = watchThread(func, tid);
+ res = watchThread(func, tid, functionName);
return res;
}
- res = tempWatchdog->watchThread(func, tid);
+ res = tempWatchdog->watchThread(func, tid, functionName);
tempWatchdog->requestExit();
tempWatchdog.clear();
} else {
// If custom timer values are equivalent to set class timer values, use
// current thread
- res = watchThread(func, tid);
+ res = watchThread(func, tid, functionName);
}
return res;
@@ -100,12 +114,12 @@
/** Used to wrap monitored calls in start and stop functions using class timer values */
template<typename T>
- auto watchThread(T func, uint32_t tid) {
+ auto watchThread(T func, uint32_t tid, const char* functionName) {
decltype(func()) res;
AutoMutex _l(mEnabledLock);
if (mEnabled) {
- start(tid);
+ start(tid, functionName);
res = func();
stop(tid);
} else {
@@ -121,7 +135,7 @@
* Start adds a cycle counter for the calling thread. When threadloop is blocked/paused,
* start() unblocks and starts the watchdog
*/
- void start(uint32_t tid);
+ void start(uint32_t tid, const char* functionName);
/**
* If there are no calls left to be monitored, stop blocks/pauses threadloop
@@ -129,17 +143,24 @@
*/
void stop(uint32_t tid);
+ std::string getAbortMessage(const std::string& functionName);
+
virtual bool threadLoop();
Mutex mWatchdogLock; // Lock for condition variable
Mutex mEnabledLock; // Lock for enabled status
Condition mWatchdogCondition; // Condition variable for stop/start
+ std::string mCameraId; // Camera Id the watchdog belongs to
bool mPause; // True if tid map is empty
uint32_t mMaxCycles; // Max cycles
uint32_t mCycleLengthMs; // Length of time elapsed per cycle
bool mEnabled; // True if watchdog is enabled
- std::unordered_map<uint32_t, uint32_t> tidToCycleCounterMap; // Thread Id to cycle counter map
+ std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
+ std::unordered_map<uint32_t, MonitoredFunction> mTidMap; // Thread Id to MonitoredFunction type
+ // which retrieves the num of cycles
+ // and name of the function
};
} // namespace android
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
new file mode 100644
index 0000000..e648a36
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
@@ -0,0 +1,212 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCameraDeviceCallbacks"
+
+#include <aidl/AidlCameraDeviceCallbacks.h>
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <hidl/Utils.h>
+#include <utility>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+// VNDK classes
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCaptureResultExtras =
+ ::aidl::android::frameworks::cameraservice::device::CaptureResultExtras;
+using SPhysicalCaptureResultInfo =
+ ::aidl::android::frameworks::cameraservice::device::PhysicalCaptureResultInfo;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+// NDK classes
+using UCaptureResultExtras = ::android::hardware::camera2::impl::CaptureResultExtras;
+using UPhysicalCaptureResultInfo = ::android::hardware::camera2::impl::PhysicalCaptureResultInfo;
+
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+
+const char *AidlCameraDeviceCallbacks::kResultKey = "CaptureResult";
+
+
+bool AidlCameraDeviceCallbacks::initializeLooper(int vndkVersion) {
+ mCbLooper = new ALooper;
+ mCbLooper->setName("cs-looper");
+ status_t err = mCbLooper->start(/*runOnCallingThread*/ false, /*canCallJava*/ false,
+ PRIORITY_DEFAULT);
+ if (err !=OK) {
+ ALOGE("Unable to start camera device callback looper");
+ return false;
+ }
+ mHandler = new CallbackHandler(this, vndkVersion);
+ mCbLooper->registerHandler(mHandler);
+ return true;
+}
+
+AidlCameraDeviceCallbacks::AidlCameraDeviceCallbacks(
+ const std::shared_ptr<SICameraDeviceCallback>& base):
+ mBase(base), mDeathPipe(this, base->asBinder()) {}
+
+AidlCameraDeviceCallbacks::~AidlCameraDeviceCallbacks() {
+ if (mCbLooper != nullptr) {
+ if (mHandler != nullptr) {
+ mCbLooper->unregisterHandler(mHandler->id());
+ }
+ mCbLooper->stop();
+ }
+ mCbLooper.clear();
+ mHandler.clear();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onDeviceError(
+ int32_t errorCode, const CaptureResultExtras& resultExtras) {
+ using hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+ SCaptureResultExtras cre = convertToAidl(resultExtras);
+ auto ret = mBase->onDeviceError(convertToAidl(errorCode), cre);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onDeviceError")
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onDeviceIdle() {
+ auto ret = mBase->onDeviceIdle();
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onDeviceIdle")
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onCaptureStarted(
+ const CaptureResultExtras& resultExtras, int64_t timestamp) {
+ using hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+ SCaptureResultExtras hCaptureResultExtras = convertToAidl(resultExtras);
+ auto ret = mBase->onCaptureStarted(hCaptureResultExtras, timestamp);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onCaptureStarted")
+ return binder::Status::ok();
+}
+
+void AidlCameraDeviceCallbacks::convertResultMetadataToAidl(const camera_metadata_t* src,
+ SCaptureMetadataInfo* dst) {
+ // First try writing to fmq.
+ size_t metadata_size = get_camera_metadata_size(src);
+ if ((metadata_size > 0) &&
+ (mCaptureResultMetadataQueue->availableToWrite() > 0)) {
+ if (mCaptureResultMetadataQueue->write((int8_t *)src, metadata_size)) {
+ dst->set<SCaptureMetadataInfo::fmqMetadataSize>(metadata_size);
+ } else {
+ ALOGW("%s Couldn't use fmq, falling back to hwbinder", __FUNCTION__);
+ SCameraMetadata metadata;
+ hardware::cameraservice::utils::conversion::aidl::cloneToAidl(src, &metadata);
+ dst->set<SCaptureMetadataInfo::metadata>(std::move(metadata));
+ }
+ }
+}
+
+void AidlCameraDeviceCallbacks::CallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
+ sp<RefBase> obj = nullptr;
+ sp<ResultWrapper> resultWrapper = nullptr;
+ bool found = false;
+ switch (msg->what()) {
+ case kWhatResultReceived:
+ found = msg->findObject(kResultKey, &obj);
+ if (!found || obj == nullptr) {
+ ALOGE("Cannot find result object in callback message");
+ return;
+ }
+ resultWrapper = static_cast<ResultWrapper*>(obj.get());
+ processResultMessage(resultWrapper);
+ break;
+ default:
+ ALOGE("Unknown callback sent");
+ break;
+ }
+ }
+
+void AidlCameraDeviceCallbacks::CallbackHandler::processResultMessage(
+ sp<ResultWrapper> &resultWrapper) {
+ sp<AidlCameraDeviceCallbacks> converter = mConverter.promote();
+ if (converter == nullptr) {
+ ALOGE("Callback wrapper has died, result callback cannot be made");
+ return;
+ }
+ CameraMetadataNative &result = resultWrapper->mResult;
+ auto resultExtras = resultWrapper->mResultExtras;
+ SCaptureResultExtras convResultExtras =
+ hardware::cameraservice::utils::conversion::aidl::convertToAidl(resultExtras);
+
+ // Convert Metadata into HCameraMetadata;
+ SCaptureMetadataInfo captureMetadataInfo;
+ if (filterVndkKeys(mVndkVersion, result, /*isStatic*/false) != OK) {
+ ALOGE("%s: filtering vndk keys from result failed, not sending onResultReceived callback",
+ __FUNCTION__);
+ return;
+ }
+ const camera_metadata_t *rawMetadata = result.getAndLock();
+ converter->convertResultMetadataToAidl(rawMetadata, &captureMetadataInfo);
+ result.unlock(rawMetadata);
+
+ auto &physicalCaptureResultInfos = resultWrapper->mPhysicalCaptureResultInfos;
+ std::vector<SPhysicalCaptureResultInfo> stableCaptureResInfo =
+ convertToAidl(physicalCaptureResultInfos, converter->mCaptureResultMetadataQueue);
+ auto ret = converter->mBase->onResultReceived(captureMetadataInfo,
+ convResultExtras,
+ stableCaptureResInfo);
+
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "OnResultReceived")
+}
+
+binder::Status AidlCameraDeviceCallbacks::onResultReceived(
+ const CameraMetadataNative& result,
+ const UCaptureResultExtras& resultExtras,
+ const ::std::vector<UPhysicalCaptureResultInfo>& physicalCaptureResultInfos) {
+ // Wrap CameraMetadata, resultExtras and physicalCaptureResultInfos in on
+ // sp<RefBase>-able structure and post it.
+ sp<ResultWrapper> resultWrapper = new ResultWrapper(const_cast<CameraMetadataNative &>(result),
+ resultExtras, physicalCaptureResultInfos);
+ sp<AMessage> msg = new AMessage(kWhatResultReceived, mHandler);
+ msg->setObject(kResultKey, resultWrapper);
+ msg->post();
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onPrepared(int32_t streamId) {
+ auto ret = mBase->onPrepared(streamId);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onPrepared")
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onRepeatingRequestError(
+ int64_t lastFrameNumber,
+ int32_t repeatingRequestId) {
+ auto ret =
+ mBase->onRepeatingRequestError(lastFrameNumber, repeatingRequestId);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onRepeatingRequestError")
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onRequestQueueEmpty() {
+ // not implemented
+ return binder::Status::ok();
+}
+
+status_t AidlCameraDeviceCallbacks::linkToDeath(const sp<DeathRecipient>& recipient,
+ void* cookie, uint32_t flags) {
+ return mDeathPipe.linkToDeath(recipient, cookie, flags);
+}
+status_t AidlCameraDeviceCallbacks::unlinkToDeath(const wp<DeathRecipient>& recipient,
+ void* cookie,
+ uint32_t flags,
+ wp<DeathRecipient>* outRecipient) {
+ return mDeathPipe.unlinkToDeath(recipient, cookie, flags, outRecipient);
+}
+
+} // namespace android::frameworks::cameraservice::device::implementation
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
new file mode 100644
index 0000000..5cff5b3
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICECALLBACKS_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICECALLBACKS_H_
+
+#include <CameraService.h>
+#include <aidl/DeathPipe.h>
+#include <aidl/android/frameworks/cameraservice/device/BnCameraDeviceCallback.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCaptureResultInfo.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
+#include <fmq/AidlMessageQueue.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <mutex>
+#include <thread>
+#include <utility>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+// VNDK classes
+using SCaptureMetadataInfo = ::aidl::android::frameworks::cameraservice::device::CaptureMetadataInfo;
+using SICameraDeviceCallback =
+ ::aidl::android::frameworks::cameraservice::device::ICameraDeviceCallback;
+// NDK classes
+using UBnCameraDeviceCallbacks = ::android::hardware::camera2::BnCameraDeviceCallbacks;
+
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using ::android::frameworks::cameraservice::utils::DeathPipe;
+using ::android::hardware::camera2::impl::CameraMetadataNative;
+
+using CaptureResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+
+class AidlCameraDeviceCallbacks : public UBnCameraDeviceCallbacks {
+ public:
+ explicit AidlCameraDeviceCallbacks(const std::shared_ptr<SICameraDeviceCallback>& base);
+
+ ~AidlCameraDeviceCallbacks() override;
+
+ bool initializeLooper(int vndkVersion);
+
+ binder::Status onDeviceError(int32_t errorCode,
+ const CaptureResultExtras& resultExtras) override;
+
+ binder::Status onDeviceIdle() override;
+
+ binder::Status onCaptureStarted(const CaptureResultExtras& resultExtras,
+ int64_t timestamp) override;
+
+ binder::Status onResultReceived(
+ const CameraMetadataNative& result, const CaptureResultExtras& resultExtras,
+ const std::vector<PhysicalCaptureResultInfo>& physicalCaptureResultInfos) override;
+
+ binder::Status onPrepared(int32_t streamId) override;
+
+ binder::Status onRepeatingRequestError(int64_t lastFrameNumber,
+ int32_t repeatingRequestId) override;
+
+ binder::Status onRequestQueueEmpty() override;
+
+ status_t linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
+ uint32_t flags) override;
+ status_t unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie, uint32_t flags,
+ wp<DeathRecipient>* outRecipient) override;
+
+ void setCaptureResultMetadataQueue(std::shared_ptr<CaptureResultMetadataQueue> metadataQueue) {
+ mCaptureResultMetadataQueue = std::move(metadataQueue);
+ }
+
+ private:
+ // Wrapper struct so that parameters to onResultReceived callback may be
+ // sent through an AMessage.
+ struct ResultWrapper : public RefBase {
+ CameraMetadataNative mResult;
+ CaptureResultExtras mResultExtras;
+ std::vector<PhysicalCaptureResultInfo> mPhysicalCaptureResultInfos;
+
+ ResultWrapper(CameraMetadataNative &result,
+ CaptureResultExtras resultExtras,
+ std::vector<PhysicalCaptureResultInfo> physicalCaptureResultInfos) :
+ // TODO: make this std::movable
+ mResult(result),
+ mResultExtras(std::move(resultExtras)),
+ mPhysicalCaptureResultInfos(std::move(physicalCaptureResultInfos)) { }
+ };
+
+ struct CallbackHandler : public AHandler {
+ public:
+ void onMessageReceived(const sp<AMessage> &msg) override;
+ CallbackHandler(AidlCameraDeviceCallbacks *converter, int vndkVersion) :
+ mConverter(converter), mVndkVersion(vndkVersion) { }
+ private:
+ void processResultMessage(sp<ResultWrapper> &resultWrapper);
+ wp<AidlCameraDeviceCallbacks> mConverter = nullptr;
+ int mVndkVersion = -1;
+ };
+
+ void convertResultMetadataToAidl(const camera_metadata * src, SCaptureMetadataInfo * dst);
+ enum {
+ kWhatResultReceived,
+ };
+
+ static const char *kResultKey;
+
+ private:
+ std::shared_ptr<SICameraDeviceCallback> mBase;
+ std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
+ sp<CallbackHandler> mHandler = nullptr;
+ sp<ALooper> mCbLooper = nullptr;
+
+ // Pipes death subscription from current NDK interface to VNDK mBase.
+ // Should consume calls to linkToDeath and unlinkToDeath.
+ DeathPipe mDeathPipe;
+};
+
+} // namespace android::frameworks::cameraservice::device::implementation
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICECALLBACKS_H_
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
new file mode 100644
index 0000000..402f8a2
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
@@ -0,0 +1,280 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCameraDeviceUser"
+
+#include "AidlCameraDeviceUser.h"
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <android-base/properties.h>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+// VNDK classes
+using SCaptureMetadataInfo = ::aidl::android::frameworks::cameraservice::device::CaptureMetadataInfo;
+// NDK classes
+using UOutputConfiguration = ::android::hardware::camera2::params::OutputConfiguration;
+using USessionConfiguration = ::android::hardware::camera2::params::SessionConfiguration;
+using UStatus = ::android::binder::Status;
+using USubmitInfo = ::android::hardware::camera2::utils::SubmitInfo;
+
+using ::android::CameraMetadata;
+using ::android::hardware::cameraservice::utils::conversion::aidl::cloneFromAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertFromAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+using ::ndk::ScopedAStatus;
+
+namespace {
+constexpr int32_t CAMERA_REQUEST_METADATA_QUEUE_SIZE = 1 << 20 /* 1 MB */;
+constexpr int32_t CAMERA_RESULT_METADATA_QUEUE_SIZE = 1 << 20 /* 1 MB */;
+
+inline ScopedAStatus fromSStatus(const SStatus& s) {
+ return s == SStatus::NO_ERROR ? ScopedAStatus::ok()
+ : ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(s));
+}
+inline ScopedAStatus fromUStatus(const UStatus& status) {
+ return status.isOk() ? ScopedAStatus::ok() : fromSStatus(convertToAidl(status));
+}
+} // anonymous namespace
+
+AidlCameraDeviceUser::AidlCameraDeviceUser(const sp<UICameraDeviceUser>& deviceRemote):
+ mDeviceRemote(deviceRemote) {
+ mInitSuccess = initDevice();
+ mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+}
+
+bool AidlCameraDeviceUser::initDevice() {
+ // TODO: Get request and result metadata queue size from a system property.
+ int32_t reqFMQSize = CAMERA_REQUEST_METADATA_QUEUE_SIZE;
+
+ mCaptureRequestMetadataQueue =
+ std::make_unique<CaptureRequestMetadataQueue>(static_cast<size_t>(reqFMQSize),
+ false /* non blocking */);
+ if (!mCaptureRequestMetadataQueue->isValid()) {
+ ALOGE("%s: invalid request fmq", __FUNCTION__);
+ return false;
+ }
+
+ int32_t resFMQSize = CAMERA_RESULT_METADATA_QUEUE_SIZE;
+ mCaptureResultMetadataQueue =
+ std::make_shared<CaptureResultMetadataQueue>(static_cast<size_t>(resFMQSize),
+ false /* non blocking */);
+ if (!mCaptureResultMetadataQueue->isValid()) {
+ ALOGE("%s: invalid result fmq", __FUNCTION__);
+ return false;
+ }
+ return true;
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::getCaptureRequestMetadataQueue(
+ MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
+ if (mInitSuccess) {
+ *_aidl_return = mCaptureRequestMetadataQueue->dupeDesc();
+ }
+ return ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::getCaptureResultMetadataQueue(
+ MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
+ if (mInitSuccess) {
+ *_aidl_return = mCaptureResultMetadataQueue->dupeDesc();
+ }
+ return ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::prepare(int32_t in_streamId) {
+ UStatus ret = mDeviceRemote->prepare(in_streamId);
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::submitRequestList(
+ const std::vector<SCaptureRequest>& in_requestList, bool in_isRepeating,
+ SSubmitInfo* _aidl_return) {
+ USubmitInfo submitInfo;
+ std::vector<UCaptureRequest> requests;
+ for (const auto& req: in_requestList) {
+ requests.emplace_back();
+ if (!convertRequestFromAidl(req, &requests.back())) {
+ ALOGE("%s: Failed to convert AIDL CaptureRequest.", __FUNCTION__);
+ return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+ }
+ }
+ UStatus ret = mDeviceRemote->submitRequestList(requests,
+ in_isRepeating, &submitInfo);
+ if (!ret.isOk()) {
+ ALOGE("%s: Failed submitRequestList to cameraservice: %s",
+ __FUNCTION__, ret.toString8().string());
+ return fromUStatus(ret);
+ }
+ mRequestId = submitInfo.mRequestId;
+ convertToAidl(submitInfo, _aidl_return);
+ return ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::cancelRepeatingRequest(int64_t* _aidl_return) {
+ UStatus ret = mDeviceRemote->cancelRequest(mRequestId, _aidl_return);
+ return fromUStatus(ret);
+}
+
+ScopedAStatus AidlCameraDeviceUser::beginConfigure() {
+ UStatus ret = mDeviceRemote->beginConfigure();
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::endConfigure(SStreamConfigurationMode in_operatingMode,
+ const SCameraMetadata& in_sessionParams,
+ int64_t in_startTimeNs) {
+ CameraMetadata metadata;
+ if (!cloneFromAidl(in_sessionParams, &metadata)) {
+ return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+ }
+
+ std::vector<int32_t> offlineStreamIds;
+ UStatus ret = mDeviceRemote->endConfigure(convertFromAidl(in_operatingMode),
+ metadata, in_startTimeNs,
+ &offlineStreamIds);
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::createStream(
+ const SOutputConfiguration& in_outputConfiguration, int32_t* _aidl_return) {
+ UOutputConfiguration outputConfig = convertFromAidl(in_outputConfiguration);
+ int32_t newStreamId;
+ UStatus ret = mDeviceRemote->createStream(outputConfig, &newStreamId);
+ if (!ret.isOk()) {
+ ALOGE("%s: Failed to create stream: %s", __FUNCTION__, ret.toString8().string());
+ }
+ *_aidl_return = newStreamId;
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::createDefaultRequest(STemplateId in_templateId,
+ SCameraMetadata* _aidl_return) {
+ CameraMetadata metadata;
+ UStatus ret = mDeviceRemote->createDefaultRequest(convertFromAidl(in_templateId),
+ &metadata);
+ if (!ret.isOk()) {
+ ALOGE("%s: Failed to create default request: %s", __FUNCTION__, ret.toString8().string());
+ return fromUStatus(ret);
+ }
+
+ if (filterVndkKeys(mVndkVersion, metadata, /*isStatic*/false) != OK) {
+ ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+ __FUNCTION__, mVndkVersion);
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+
+ const camera_metadata_t* rawMetadata = metadata.getAndLock();
+ cloneToAidl(rawMetadata, _aidl_return);
+ metadata.unlock(rawMetadata);
+ return ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::waitUntilIdle() {
+ UStatus ret = mDeviceRemote->waitUntilIdle();
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::flush(int64_t* _aidl_return) {
+ UStatus ret = mDeviceRemote->flush(_aidl_return);
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::updateOutputConfiguration(
+ int32_t in_streamId, const SOutputConfiguration& in_outputConfiguration) {
+ UOutputConfiguration outputConfig = convertFromAidl(in_outputConfiguration);
+ UStatus ret = mDeviceRemote->updateOutputConfiguration(in_streamId, outputConfig);
+ if (!ret.isOk()) {
+ ALOGE("%s: Failed to update output config for stream id: %d: %s",
+ __FUNCTION__, in_streamId, ret.toString8().string());
+ }
+ return fromUStatus(ret);
+}
+ndk::ScopedAStatus AidlCameraDeviceUser::isSessionConfigurationSupported(
+ const SSessionConfiguration& in_sessionConfiguration, bool* _aidl_return) {
+ USessionConfiguration sessionConfig = convertFromAidl(in_sessionConfiguration);
+ UStatus ret = mDeviceRemote->isSessionConfigurationSupported(sessionConfig,
+ _aidl_return);
+ return fromUStatus(ret);
+}
+ndk::ScopedAStatus AidlCameraDeviceUser::deleteStream(int32_t in_streamId) {
+ UStatus ret = mDeviceRemote->deleteStream(in_streamId);
+ return fromUStatus(ret);
+}
+ndk::ScopedAStatus AidlCameraDeviceUser::disconnect() {
+ UStatus ret = mDeviceRemote->disconnect();
+ return fromUStatus(ret);
+}
+bool AidlCameraDeviceUser::convertRequestFromAidl(
+ const SCaptureRequest& src, UCaptureRequest* dst) {
+ dst->mIsReprocess = false;
+ for (const auto& streamAndWindowId : src.streamAndWindowIds) {
+ dst->mStreamIdxList.push_back(streamAndWindowId.streamId);
+ dst->mSurfaceIdxList.push_back(streamAndWindowId.windowId);
+ }
+
+ return copyPhysicalCameraSettings(src.physicalCameraSettings,
+ &(dst->mPhysicalCameraSettings));
+}
+bool AidlCameraDeviceUser::copyPhysicalCameraSettings(
+ const std::vector<SPhysicalCameraSettings>& src,
+ std::vector<UCaptureRequest::PhysicalCameraSettings>* dst) {
+ bool converted = false;
+ for (auto &e : src) {
+ dst->emplace_back();
+ CaptureRequest::PhysicalCameraSettings &physicalCameraSetting =
+ dst->back();
+ physicalCameraSetting.id = e.id;
+
+ // Read the settings either from the fmq or straightaway from the
+ // request. We don't need any synchronization, since submitRequestList
+ // is guaranteed to be called serially by the client if it decides to
+ // use fmq.
+ if (e.settings.getTag() == SCaptureMetadataInfo::fmqMetadataSize) {
+ /**
+ * Get settings from the fmq.
+ */
+ SCameraMetadata settingsFmq;
+ int64_t metadataSize = e.settings.get<SCaptureMetadataInfo::fmqMetadataSize>();
+ settingsFmq.metadata.resize(metadataSize);
+ int8_t* metadataPtr = (int8_t*) settingsFmq.metadata.data();
+ bool read = mCaptureRequestMetadataQueue->read(metadataPtr,
+ metadataSize);
+ if (!read) {
+ ALOGE("%s capture request settings couldn't be read from fmq", __FUNCTION__);
+ converted = false;
+ } else {
+ converted = cloneFromAidl(settingsFmq, &physicalCameraSetting.settings);
+ }
+ } else {
+ /**
+ * The settings metadata is contained in request settings field.
+ */
+ converted = cloneFromAidl(e.settings.get<SCaptureMetadataInfo::metadata>(),
+ &physicalCameraSetting.settings);
+ }
+ if (!converted) {
+ ALOGE("%s: Unable to convert physicalCameraSettings from AIDL to NDK.", __FUNCTION__);
+ return false;
+ }
+ }
+ return true;
+}
+
+} // namespace android::frameworks::cameraservice::device::implementation
\ No newline at end of file
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
new file mode 100644
index 0000000..8014951
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICEUSER_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICEUSER_H_
+
+#include <CameraService.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/device/BnCameraDeviceUser.h>
+#include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCameraSettings.h>
+#include <aidl/android/frameworks/cameraservice/device/SessionConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/StreamConfigurationMode.h>
+#include <aidl/android/frameworks/cameraservice/device/SubmitInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/TemplateId.h>
+#include <aidl/android/hardware/common/fmq/MQDescriptor.h>
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+#include <fmq/AidlMessageQueue.h>
+#include <memory>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using CaptureRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+using CaptureResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+
+// Stable NDK classes
+using SBnCameraDeviceUser = ::aidl::android::frameworks::cameraservice::device::BnCameraDeviceUser;
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCaptureRequest = ::aidl::android::frameworks::cameraservice::device::CaptureRequest;
+using SOutputConfiguration =
+ ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using SPhysicalCameraSettings =
+ ::aidl::android::frameworks::cameraservice::device::PhysicalCameraSettings;
+using SSessionConfiguration =
+ ::aidl::android::frameworks::cameraservice::device::SessionConfiguration;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+using SStreamConfigurationMode =
+ ::aidl::android::frameworks::cameraservice::device::StreamConfigurationMode;
+using SSubmitInfo = ::aidl::android::frameworks::cameraservice::device::SubmitInfo;
+using STemplateId = ::aidl::android::frameworks::cameraservice::device::TemplateId;
+// Unstable NDK classes
+using UCaptureRequest = ::android::hardware::camera2::CaptureRequest;
+using UICameraDeviceUser = ::android::hardware::camera2::ICameraDeviceUser;
+
+static constexpr int32_t REQUEST_ID_NONE = -1;
+
+class AidlCameraDeviceUser final : public SBnCameraDeviceUser {
+ public:
+ explicit AidlCameraDeviceUser(const sp<UICameraDeviceUser> &deviceRemote);
+ ~AidlCameraDeviceUser() override = default;
+
+ ndk::ScopedAStatus beginConfigure() override;
+ ndk::ScopedAStatus cancelRepeatingRequest(int64_t* _aidl_return) override;
+ ndk::ScopedAStatus createDefaultRequest(STemplateId in_templateId,
+ SCameraMetadata* _aidl_return) override;
+ ndk::ScopedAStatus createStream(const SOutputConfiguration& in_outputConfiguration,
+ int32_t* _aidl_return) override;
+ ndk::ScopedAStatus deleteStream(int32_t in_streamId) override;
+ ndk::ScopedAStatus disconnect() override;
+ ndk::ScopedAStatus endConfigure(SStreamConfigurationMode in_operatingMode,
+ const SCameraMetadata& in_sessionParams,
+ int64_t in_startTimeNs) override;
+ ndk::ScopedAStatus flush(int64_t* _aidl_return) override;
+ ndk::ScopedAStatus getCaptureRequestMetadataQueue(
+ MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
+ ndk::ScopedAStatus getCaptureResultMetadataQueue(
+ MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
+ ndk::ScopedAStatus isSessionConfigurationSupported(
+ const SSessionConfiguration& in_sessionConfiguration, bool* _aidl_return) override;
+ ndk::ScopedAStatus prepare(int32_t in_streamId) override;
+ ndk::ScopedAStatus submitRequestList(const std::vector<SCaptureRequest>& in_requestList,
+ bool in_isRepeating, SSubmitInfo* _aidl_return) override;
+ ndk::ScopedAStatus updateOutputConfiguration(
+ int32_t in_streamId, const SOutputConfiguration& in_outputConfiguration) override;
+ ndk::ScopedAStatus waitUntilIdle() override;
+
+ [[nodiscard]] bool initStatus() const { return mInitSuccess; }
+
+ std::shared_ptr<CaptureResultMetadataQueue> getCaptureResultMetadataQueue() {
+ return mCaptureResultMetadataQueue;
+ }
+
+ private:
+ bool initDevice();
+
+ bool convertRequestFromAidl(const SCaptureRequest &src, UCaptureRequest *dst);
+ bool copyPhysicalCameraSettings(const std::vector<SPhysicalCameraSettings> &src,
+ std::vector<CaptureRequest::PhysicalCameraSettings> *dst);
+
+ const sp<UICameraDeviceUser> mDeviceRemote;
+ std::unique_ptr<CaptureRequestMetadataQueue> mCaptureRequestMetadataQueue = nullptr;
+ std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
+ bool mInitSuccess = false;
+ int32_t mRequestId = REQUEST_ID_NONE;
+ int mVndkVersion = -1;
+};
+
+} // namespace android::frameworks::cameraservice::device::implementation
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICEUSER_H_
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
new file mode 100644
index 0000000..a62f6de
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -0,0 +1,328 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCameraService"
+
+#include "AidlCameraService.h"
+#include <aidl/AidlCameraDeviceCallbacks.h>
+#include <aidl/AidlCameraDeviceUser.h>
+#include <aidl/AidlCameraServiceListener.h>
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/common/CameraMetadataType.h>
+#include <android-base/properties.h>
+#include <android/binder_ibinder.h>
+#include <android/binder_manager.h>
+#include <binder/Status.h>
+#include <hidl/HidlTransportSupport.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+using ::android::frameworks::cameraservice::device::implementation::AidlCameraDeviceCallbacks;
+using ::android::frameworks::cameraservice::device::implementation::AidlCameraDeviceUser;
+using ::android::hardware::cameraservice::utils::conversion::aidl::areBindersEqual;
+using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+using ::ndk::ScopedAStatus;
+
+// VNDK classes
+using SCameraMetadataType = ::aidl::android::frameworks::cameraservice::common::CameraMetadataType;
+using SVendorTag = ::aidl::android::frameworks::cameraservice::common::VendorTag;
+using SVendorTagSection = ::aidl::android::frameworks::cameraservice::common::VendorTagSection;
+// NDK classes
+using UICameraService = ::android::hardware::ICameraService;
+using UStatus = ::android::binder::Status;
+
+namespace {
+inline ScopedAStatus fromSStatus(const SStatus& s) {
+ return s == SStatus::NO_ERROR ? ScopedAStatus::ok()
+ : ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(s));
+}
+inline ScopedAStatus fromUStatus(const UStatus& s) {
+ return s.isOk() ? ScopedAStatus::ok() : fromSStatus(convertToAidl(s));
+}
+} // anonymous namespace
+
+std::shared_ptr<AidlCameraService> kCameraService;
+
+bool AidlCameraService::registerService(::android::CameraService* cameraService) {
+ kCameraService = SharedRefBase::make<AidlCameraService>(cameraService);
+ std::string serviceName = SBnCameraService::descriptor;
+ serviceName += "/default";
+ bool isDeclared = AServiceManager_isDeclared(serviceName.c_str());
+ if (!isDeclared) {
+ ALOGI("%s: AIDL vndk not declared.", __FUNCTION__);
+ return false;
+ }
+
+ binder_exception_t registered = AServiceManager_addService(
+ kCameraService->asBinder().get(), serviceName.c_str());
+ ALOGE_IF(registered != EX_NONE,
+ "%s: AIDL VNDK declared, but failed to register service: %d",
+ __FUNCTION__, registered);
+ return registered == EX_NONE;
+}
+
+AidlCameraService::AidlCameraService(::android::CameraService* cameraService):
+ mCameraService(cameraService) {
+ mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+}
+ScopedAStatus AidlCameraService::getCameraCharacteristics(const std::string& in_cameraId,
+ SCameraMetadata* _aidl_return) {
+ if (_aidl_return == nullptr) { return fromSStatus(SStatus::ILLEGAL_ARGUMENT); }
+
+ ::android::CameraMetadata cameraMetadata;
+ UStatus ret = mCameraService->getCameraCharacteristics(in_cameraId,
+ mVndkVersion,
+ /* overrideToPortrait= */ false,
+ &cameraMetadata);
+ if (!ret.isOk()) {
+ if (ret.exceptionCode() != EX_SERVICE_SPECIFIC) {
+ ALOGE("%s: Transaction error when getting camera characteristics"
+ " from camera service: %d.",
+ __FUNCTION__ , ret.exceptionCode());
+ return fromUStatus(ret);
+ }
+ switch (ret.serviceSpecificErrorCode()) {
+ case UICameraService::ERROR_ILLEGAL_ARGUMENT:
+ ALOGE("%s: Camera ID %s does not exist!", __FUNCTION__, in_cameraId.c_str());
+ return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+ default:
+ ALOGE("Get camera characteristics from camera service failed: %s",
+ ret.toString8().string());
+ return fromUStatus(ret);
+ }
+ }
+
+ if (filterVndkKeys(mVndkVersion, cameraMetadata) != OK) {
+ ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+ __FUNCTION__, mVndkVersion);
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+
+ const camera_metadata_t* rawMetadata = cameraMetadata.getAndLock();
+ cloneToAidl(rawMetadata, _aidl_return);
+ cameraMetadata.unlock(rawMetadata);
+
+ return ScopedAStatus::ok();
+}
+ndk::ScopedAStatus AidlCameraService::connectDevice(
+ const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+ const std::string& in_cameraId,
+ std::shared_ptr<SICameraDeviceUser>* _aidl_return) {
+ // Here, we first get NDK ICameraDeviceUser from mCameraService, then save
+ // that interface in the newly created AidlCameraDeviceUser impl class.
+ if (mCameraService == nullptr) {
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+ sp<hardware::camera2::ICameraDeviceUser> unstableDevice = nullptr;
+ // Create a hardware::camera2::ICameraDeviceCallback object which internally
+ // calls callback functions passed through hCallback.
+ sp<AidlCameraDeviceCallbacks> hybridCallbacks = new AidlCameraDeviceCallbacks(in_callback);
+ if (!hybridCallbacks->initializeLooper(mVndkVersion)) {
+ ALOGE("Unable to handle callbacks on device, cannot connect");
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+ sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+ binder::Status serviceRet = mCameraService->connectDevice(
+ callbacks,
+ in_cameraId,
+ std::string(),
+ /* clientFeatureId= */{},
+ hardware::ICameraService::USE_CALLING_UID,
+ /* scoreOffset= */ 0,
+ /* targetSdkVersion= */ __ANDROID_API_FUTURE__,
+ /* overrideToPortrait= */ false,
+ &unstableDevice);
+ if (!serviceRet.isOk()) {
+ ALOGE("%s: Unable to connect to camera device: %s", __FUNCTION__,
+ serviceRet.toString8().c_str());
+ return fromUStatus(serviceRet);
+ }
+
+ // Now we create a AidlCameraDeviceUser class, store the unstableDevice in it,
+ // and return that back. All calls on that interface will be forwarded to
+ // the NDK AIDL interface.
+ std::shared_ptr<AidlCameraDeviceUser> stableDevice =
+ ndk::SharedRefBase::make<AidlCameraDeviceUser>(unstableDevice);
+ if (!stableDevice->initStatus()) {
+ ALOGE("%s: Unable to initialize camera device AIDL wrapper", __FUNCTION__);
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+ hybridCallbacks->setCaptureResultMetadataQueue(
+ stableDevice->getCaptureResultMetadataQueue());
+ *_aidl_return = stableDevice;
+ return ScopedAStatus::ok();
+}
+void AidlCameraService::addToListenerCacheLocked(
+ std::shared_ptr<SICameraServiceListener> stableCsListener,
+ sp<UICameraServiceListener> csListener) {
+ mListeners.emplace_back(std::make_pair(stableCsListener, csListener));
+}
+sp<UICameraServiceListener> AidlCameraService::searchListenerCacheLocked(
+ const std::shared_ptr<SICameraServiceListener>& listener, bool removeIfFound) {
+ // Go through the mListeners list and compare the listener with the VNDK AIDL
+ // listener registered.
+ if (listener == nullptr) {
+ return nullptr;
+ }
+
+ auto it = mListeners.begin();
+ sp<UICameraServiceListener> csListener = nullptr;
+ for (;it != mListeners.end(); it++) {
+ if (areBindersEqual(listener->asBinder(), it->first->asBinder())) {
+ break;
+ }
+ }
+ if (it != mListeners.end()) {
+ csListener = it->second;
+ if (removeIfFound) {
+ mListeners.erase(it);
+ }
+ }
+ return csListener;
+}
+ndk::ScopedAStatus AidlCameraService::addListener(
+ const std::shared_ptr<SICameraServiceListener>& in_listener,
+ std::vector<SCameraStatusAndId>* _aidl_return) {
+ std::vector<hardware::CameraStatus> cameraStatusAndIds{};
+ SStatus status = addListenerInternal(
+ in_listener, &cameraStatusAndIds);
+ if (status != SStatus::NO_ERROR) {
+ return fromSStatus(status);
+ }
+
+ // Convert cameraStatusAndIds to VNDK AIDL
+ convertToAidl(cameraStatusAndIds, _aidl_return);
+ return ScopedAStatus::ok();
+}
+SStatus AidlCameraService::addListenerInternal(
+ const std::shared_ptr<SICameraServiceListener>& listener,
+ std::vector<hardware::CameraStatus>* cameraStatusAndIds) {
+ if (mCameraService == nullptr) {
+ return SStatus::UNKNOWN_ERROR;
+ }
+ if (listener == nullptr || cameraStatusAndIds == nullptr) {
+ ALOGE("%s listener and cameraStatusAndIds must not be NULL", __FUNCTION__);
+ return SStatus::ILLEGAL_ARGUMENT;
+ }
+ sp<UICameraServiceListener> csListener = nullptr;
+ // Check the cache for previously registered callbacks
+ {
+ Mutex::Autolock l(mListenerListLock);
+ csListener = searchListenerCacheLocked(listener);
+ if (csListener == nullptr) {
+ // Wrap a listener with AidlCameraServiceListener and pass it to
+ // CameraService.
+ csListener = sp<AidlCameraServiceListener>::make(listener);
+ // Add to cache
+ addToListenerCacheLocked(listener, csListener);
+ } else {
+ ALOGE("%s: Trying to add a listener %p already registered",
+ __FUNCTION__, listener.get());
+ return SStatus::ILLEGAL_ARGUMENT;
+ }
+ }
+ binder::Status serviceRet =
+ mCameraService->addListenerHelper(csListener, cameraStatusAndIds, true);
+ if (!serviceRet.isOk()) {
+ ALOGE("%s: Unable to add camera device status listener", __FUNCTION__);
+ return convertToAidl(serviceRet);
+ }
+
+ cameraStatusAndIds->erase(std::remove_if(cameraStatusAndIds->begin(),
+ cameraStatusAndIds->end(),
+ [this](const hardware::CameraStatus& s) {
+ bool supportsHAL3 = false;
+ binder::Status sRet =
+ mCameraService->supportsCameraApi(s.cameraId,
+ UICameraService::API_VERSION_2, &supportsHAL3);
+ return !sRet.isOk() || !supportsHAL3;
+ }), cameraStatusAndIds->end());
+
+ return SStatus::NO_ERROR;
+}
+ndk::ScopedAStatus AidlCameraService::removeListener(
+ const std::shared_ptr<SICameraServiceListener>& in_listener) {
+ if (in_listener == nullptr) {
+ ALOGE("%s listener must not be NULL", __FUNCTION__);
+ return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+ }
+ sp<UICameraServiceListener> csListener = nullptr;
+ {
+ Mutex::Autolock l(mListenerListLock);
+ csListener = searchListenerCacheLocked(in_listener, /*removeIfFound*/true);
+ }
+ if (csListener != nullptr) {
+ mCameraService->removeListener(csListener);
+ } else {
+ ALOGE("%s Removing unregistered listener %p", __FUNCTION__, in_listener.get());
+ return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+ }
+ return ScopedAStatus::ok();
+}
+ndk::ScopedAStatus AidlCameraService::getCameraVendorTagSections(
+ std::vector<SProviderIdAndVendorTagSections>* _aidl_return) {
+ sp<VendorTagDescriptorCache> gCache = VendorTagDescriptorCache::getGlobalVendorTagCache();
+ if (gCache == nullptr) {
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+
+ const std::unordered_map<metadata_vendor_id_t, sp<android::VendorTagDescriptor>>
+ &vendorIdsAndTagDescs = gCache->getVendorIdsAndTagDescriptors();
+ if (vendorIdsAndTagDescs.empty()) {
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+
+ std::vector<SProviderIdAndVendorTagSections>& tagIdAndVendorTagSections = *_aidl_return;
+ tagIdAndVendorTagSections.resize(vendorIdsAndTagDescs.size());
+ size_t j = 0;
+ for (auto &vendorIdAndTagDescs : vendorIdsAndTagDescs) {
+ std::vector<SVendorTagSection> vendorTagSections;
+ sp<VendorTagDescriptor> desc = vendorIdAndTagDescs.second;
+ const SortedVector<String8>* sectionNames = desc->getAllSectionNames();
+ size_t numSections = sectionNames->size();
+ std::vector<std::vector<SVendorTag>> tagsBySection(numSections);
+ int tagCount = desc->getTagCount();
+ if (tagCount <= 0) {
+ continue;
+ }
+ std::vector<uint32_t> tags(tagCount);
+ desc->getTagArray(tags.data());
+ for (int i = 0; i < tagCount; i++) {
+ SVendorTag vt;
+ vt.tagId = tags[i];
+ vt.tagName = desc->getTagName(tags[i]);
+ vt.tagType = (SCameraMetadataType) desc->getTagType(tags[i]);
+ ssize_t sectionIdx = desc->getSectionIndex(tags[i]);
+ tagsBySection[sectionIdx].push_back(vt);
+ }
+ vendorTagSections.resize(numSections);
+ for (size_t s = 0; s < numSections; s++) {
+ vendorTagSections[s].sectionName = (*sectionNames)[s].string();
+ vendorTagSections[s].tags = tagsBySection[s];
+ }
+ SProviderIdAndVendorTagSections & prvdrIdAndVendorTagSection =
+ tagIdAndVendorTagSections[j];
+ prvdrIdAndVendorTagSection.providerId = vendorIdAndTagDescs.first;
+ prvdrIdAndVendorTagSection.vendorTagSections = std::move(vendorTagSections);
+ j++;
+ }
+ return ScopedAStatus::ok();
+}
+
+} // namespace android::frameworks::cameraservice::service::implementation
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.h b/services/camera/libcameraservice/aidl/AidlCameraService.h
new file mode 100644
index 0000000..4c67ac7
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICE_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICE_H_
+
+#include <CameraService.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/service/BnCameraService.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+// VNDK classes
+using SBnCameraService = ::aidl::android::frameworks::cameraservice::service::BnCameraService;
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCameraStatusAndId = ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using SICameraDeviceCallback =
+ ::aidl::android::frameworks::cameraservice::device::ICameraDeviceCallback;
+using SICameraDeviceUser = ::aidl::android::frameworks::cameraservice::device::ICameraDeviceUser;
+using SICameraServiceListener =
+ ::aidl::android::frameworks::cameraservice::service::ICameraServiceListener;
+using SProviderIdAndVendorTagSections =
+ ::aidl::android::frameworks::cameraservice::common::ProviderIdAndVendorTagSections;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+// NDK classes
+using UICameraServiceListener = ::android::hardware::ICameraServiceListener;
+
+class AidlCameraService: public SBnCameraService {
+ public:
+ static bool registerService(::android::CameraService* cameraService);
+
+ explicit AidlCameraService(::android::CameraService* cameraService);
+ ~AidlCameraService() override = default;
+ ndk::ScopedAStatus getCameraCharacteristics(const std::string& in_cameraId,
+ SCameraMetadata* _aidl_return) override;
+
+ ndk::ScopedAStatus connectDevice(const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+ const std::string& in_cameraId,
+ std::shared_ptr<SICameraDeviceUser>* _aidl_return) override;
+
+ ndk::ScopedAStatus addListener(const std::shared_ptr<SICameraServiceListener>& in_listener,
+ std::vector<SCameraStatusAndId>* _aidl_return) override;
+
+ ndk::ScopedAStatus getCameraVendorTagSections(
+ std::vector<SProviderIdAndVendorTagSections>* _aidl_return) override;
+
+ ndk::ScopedAStatus removeListener(
+ const std::shared_ptr<SICameraServiceListener>& in_listener) override;
+
+ private:
+ void addToListenerCacheLocked(std::shared_ptr<SICameraServiceListener> stableCsListener,
+ sp<hardware::ICameraServiceListener> csListener);
+
+ sp<UICameraServiceListener> searchListenerCacheLocked(
+ const std::shared_ptr<SICameraServiceListener>& listener, bool removeIfFound = false);
+
+ SStatus addListenerInternal(const std::shared_ptr<SICameraServiceListener>& listener,
+ std::vector<hardware::CameraStatus>* cameraStatusAndIds);
+
+
+ ::android::CameraService* mCameraService;
+
+ Mutex mListenerListLock;
+ std::list<std::pair<std::shared_ptr<SICameraServiceListener>,
+ sp<UICameraServiceListener>>> mListeners;
+ int mVndkVersion = -1;
+
+};
+
+} // namespace android::frameworks::cameraservice::service::implementation
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICE_H_
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
new file mode 100644
index 0000000..d7ab0d9
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <aidl/AidlCameraServiceListener.h>
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <camera/StringUtils.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertCameraStatusToAidl;
+// VNDK classes
+using SCameraStatusAndId = ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+
+binder::Status AidlCameraServiceListener::onStatusChanged(
+ int32_t status, const std::string& cameraId) {
+ SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
+ auto ret = mBase->onStatusChanged(sStatus, cameraId);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onStatusChanged")
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraServiceListener::onPhysicalCameraStatusChanged(
+ int32_t status, const std::string& cameraId,
+ const std::string& physicalCameraId) {
+ SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
+
+ auto ret = mBase->onPhysicalCameraStatusChanged(sStatus, cameraId, physicalCameraId);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onPhysicalCameraStatusChanged")
+ return binder::Status::ok();
+}
+
+::android::binder::Status AidlCameraServiceListener::onTorchStatusChanged(
+ int32_t, const std::string&) {
+ // We don't implement onTorchStatusChanged
+ return binder::Status::ok();
+}
+
+::android::binder::Status AidlCameraServiceListener::onTorchStrengthLevelChanged(
+ const std::string&, int32_t) {
+ // We don't implement onTorchStrengthLevelChanged
+ return binder::Status::ok();
+}
+status_t AidlCameraServiceListener::linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
+ uint32_t flags) {
+ return mDeathPipe.linkToDeath(recipient, cookie, flags);
+}
+status_t AidlCameraServiceListener::unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie,
+ uint32_t flags,
+ wp<DeathRecipient>* outRecipient) {
+ return mDeathPipe.unlinkToDeath(recipient, cookie, flags, outRecipient);
+}
+
+} // namespace android::frameworks::cameraservice::service::implementation
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
new file mode 100644
index 0000000..6483fe1
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
+
+
+#include <aidl/DeathPipe.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraDeviceStatus.h>
+#include <aidl/android/frameworks/cameraservice/service/ICameraServiceListener.h>
+#include <android/hardware/BnCameraServiceListener.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+using ::android::frameworks::cameraservice::utils::DeathPipe;
+
+// VNDK classes
+using SCameraDeviceStatus = ::aidl::android::frameworks::cameraservice::service::CameraDeviceStatus;
+using SICameraServiceListener =
+ ::aidl::android::frameworks::cameraservice::service::ICameraServiceListener;
+// NDK classes
+using UBnCameraServiceListener = ::android::hardware::BnCameraServiceListener;
+
+/**
+ * A simple shim to pass calls from CameraService to VNDK client.
+ */
+class AidlCameraServiceListener : public UBnCameraServiceListener {
+ public:
+ AidlCameraServiceListener(const std::shared_ptr<SICameraServiceListener>& base):
+ mBase(base), mDeathPipe(this, base->asBinder()) {}
+
+ ~AidlCameraServiceListener() = default;
+
+ ::android::binder::Status onStatusChanged(int32_t status,
+ const std::string& cameraId) override;
+ ::android::binder::Status onPhysicalCameraStatusChanged(int32_t status,
+ const std::string& cameraId,
+ const std::string& physicalCameraId) override;
+
+ ::android::binder::Status onTorchStatusChanged(
+ int32_t status, const std::string& cameraId) override;
+ ::android::binder::Status onTorchStrengthLevelChanged(
+ const std::string& cameraId, int32_t newStrengthLevel) override;
+ binder::Status onCameraAccessPrioritiesChanged() override {
+ // TODO: no implementation yet.
+ return binder::Status::ok();
+ }
+ binder::Status onCameraOpened(const std::string& /*cameraId*/,
+ const std::string& /*clientPackageId*/) override {
+ // empty implementation
+ return binder::Status::ok();
+ }
+ binder::Status onCameraClosed(const std::string& /*cameraId*/) override {
+ // empty implementation
+ return binder::Status::ok();
+ }
+
+ status_t linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
+ uint32_t flags) override;
+ status_t unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie, uint32_t flags,
+ wp<DeathRecipient>* outRecipient) override;
+
+ private:
+ std::shared_ptr<SICameraServiceListener> mBase;
+
+    // Pipes death subscriptions from the current NDK AIDL interface to the VNDK mBase.
+ // Should consume calls to linkToDeath and unlinkToDeath.
+ DeathPipe mDeathPipe;
+};
+
+} // namespace android::frameworks::cameraservice::service::implementation
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
\ No newline at end of file
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
new file mode 100644
index 0000000..7291c5f
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -0,0 +1,305 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlUtils"
+
+#include <aidl/AidlUtils.h>
+#include <aidl/VndkVersionMetadataTags.h>
+#include <aidlcommonsupport/NativeHandle.h>
+#include <device3/Camera3StreamInterface.h>
+#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <mediautils/AImageReaderUtils.h>
+#include <camera/StringUtils.h>
+
+namespace android::hardware::cameraservice::utils::conversion::aidl {
+
+using aimg::AImageReader_getHGBPFromHandle;
+using hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
+
+// Note: existing data in dst will be gone. Caller still owns the memory of src
+void cloneToAidl(const camera_metadata_t* src, SCameraMetadata* dst) {
+ if (src == nullptr) {
+ ALOGW("%s:attempt to convert empty metadata to AIDL", __FUNCTION__);
+ return;
+ }
+ size_t size = get_camera_metadata_size(src);
+ uint8_t* startPtr = (uint8_t*)src;
+ uint8_t* endPtr = startPtr + size;
+ dst->metadata.assign(startPtr, endPtr);
+}
+
+// The camera metadata here is cloned. Since we're reading metadata over
+// the binder we would need to clone it in order to avoid alignment issues.
+bool cloneFromAidl(const SCameraMetadata &src, CameraMetadata *dst) {
+ const camera_metadata_t *buffer =
+ reinterpret_cast<const camera_metadata_t*>(src.metadata.data());
+ size_t expectedSize = src.metadata.size();
+ if (buffer != nullptr) {
+ int res = validate_camera_metadata_structure(buffer, &expectedSize);
+ if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
+ *dst = buffer;
+ } else {
+ ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+ return false;
+ }
+ }
+ return true;
+}
+
+int32_t convertFromAidl(SStreamConfigurationMode streamConfigurationMode) {
+ switch (streamConfigurationMode) {
+ case SStreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE:
+ return camera2::ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE;
+ case SStreamConfigurationMode::NORMAL_MODE:
+ return camera2::ICameraDeviceUser::NORMAL_MODE;
+ default:
+ // TODO: Fix this
+ return camera2::ICameraDeviceUser::VENDOR_MODE_START;
+ }
+}
+
+UOutputConfiguration convertFromAidl(const SOutputConfiguration &src) {
+ std::vector<sp<IGraphicBufferProducer>> iGBPs;
+ auto &windowHandles = src.windowHandles;
+ iGBPs.reserve(windowHandles.size());
+
+ for (auto &handle : windowHandles) {
+ native_handle_t* nh = makeFromAidl(handle);
+ iGBPs.push_back(new H2BGraphicBufferProducer(AImageReader_getHGBPFromHandle(nh)));
+ native_handle_delete(nh);
+ }
+ UOutputConfiguration outputConfiguration(
+ iGBPs, convertFromAidl(src.rotation), src.physicalCameraId,
+ src.windowGroupId, OutputConfiguration::SURFACE_TYPE_UNKNOWN, 0, 0,
+ (windowHandles.size() > 1));
+ return outputConfiguration;
+}
+
+USessionConfiguration convertFromAidl(const SSessionConfiguration &src) {
+ USessionConfiguration sessionConfig(src.inputWidth, src.inputHeight,
+ src.inputFormat, static_cast<int>(src.operationMode));
+
+ for (const auto& os : src.outputStreams) {
+ UOutputConfiguration config = convertFromAidl(os);
+ sessionConfig.addOutputConfiguration(config);
+ }
+
+ return sessionConfig;
+}
+
+int convertFromAidl(SOutputConfiguration::Rotation rotation) {
+ switch(rotation) {
+ case SOutputConfiguration::Rotation::R270:
+ return android::camera3::CAMERA_STREAM_ROTATION_270;
+ case SOutputConfiguration::Rotation::R180:
+ return android::camera3::CAMERA_STREAM_ROTATION_180;
+ case SOutputConfiguration::Rotation::R90:
+ return android::camera3::CAMERA_STREAM_ROTATION_90;
+ case SOutputConfiguration::Rotation::R0:
+ default:
+ return android::camera3::CAMERA_STREAM_ROTATION_0;
+ }
+}
+
+int32_t convertFromAidl(STemplateId templateId) {
+ switch(templateId) {
+ case STemplateId::PREVIEW:
+ return camera2::ICameraDeviceUser::TEMPLATE_PREVIEW;
+ case STemplateId::STILL_CAPTURE:
+ return camera2::ICameraDeviceUser::TEMPLATE_STILL_CAPTURE;
+ case STemplateId::RECORD:
+ return camera2::ICameraDeviceUser::TEMPLATE_RECORD;
+ case STemplateId::VIDEO_SNAPSHOT:
+ return camera2::ICameraDeviceUser::TEMPLATE_VIDEO_SNAPSHOT;
+ case STemplateId::ZERO_SHUTTER_LAG:
+ return camera2::ICameraDeviceUser::TEMPLATE_ZERO_SHUTTER_LAG;
+ case STemplateId::MANUAL:
+ return camera2::ICameraDeviceUser::TEMPLATE_MANUAL;
+ }
+}
+
+void convertToAidl(const camera2::utils::SubmitInfo& submitInfo, SSubmitInfo* hSubmitInfo) {
+ hSubmitInfo->requestId = submitInfo.mRequestId;
+ hSubmitInfo->lastFrameNumber = submitInfo.mLastFrameNumber;
+}
+
+
+SStatus convertToAidl(const binder::Status &status) {
+ if (status.isOk()) {
+ return SStatus::NO_ERROR;
+ }
+ if (status.exceptionCode() != EX_SERVICE_SPECIFIC) {
+ return SStatus::UNKNOWN_ERROR;
+ }
+
+ switch (status.serviceSpecificErrorCode()) {
+ case hardware::ICameraService::ERROR_DISCONNECTED:
+ return SStatus::DISCONNECTED;
+ case hardware::ICameraService::ERROR_CAMERA_IN_USE:
+ return SStatus::CAMERA_IN_USE;
+ case hardware::ICameraService::ERROR_MAX_CAMERAS_IN_USE:
+ return SStatus::MAX_CAMERAS_IN_USE;
+ case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
+ return SStatus::ILLEGAL_ARGUMENT;
+ case hardware::ICameraService::ERROR_DEPRECATED_HAL:
+ // Should not reach here since we filtered legacy HALs earlier
+ return SStatus::DEPRECATED_HAL;
+ case hardware::ICameraService::ERROR_DISABLED:
+ return SStatus::DISABLED;
+ case hardware::ICameraService::ERROR_PERMISSION_DENIED:
+ return SStatus::PERMISSION_DENIED;
+ case hardware::ICameraService::ERROR_INVALID_OPERATION:
+ return SStatus::INVALID_OPERATION;
+ default:
+ return SStatus::UNKNOWN_ERROR;
+ }
+}
+
+SCaptureResultExtras convertToAidl(const UCaptureResultExtras &src) {
+ SCaptureResultExtras dst;
+ dst.requestId = src.requestId;
+ dst.burstId = src.burstId;
+ dst.frameNumber = src.frameNumber;
+ dst.partialResultCount = src.partialResultCount;
+ dst.errorStreamId = src.errorStreamId;
+ dst.errorPhysicalCameraId = src.errorPhysicalCameraId;
+ return dst;
+}
+
+SErrorCode convertToAidl(int32_t errorCode) {
+ switch(errorCode) {
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED:
+ return SErrorCode::CAMERA_DISCONNECTED;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE :
+ return SErrorCode::CAMERA_DEVICE;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE:
+ return SErrorCode::CAMERA_SERVICE;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
+ return SErrorCode::CAMERA_REQUEST;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
+ return SErrorCode::CAMERA_RESULT;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
+ return SErrorCode::CAMERA_BUFFER;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED:
+ return SErrorCode::CAMERA_DISABLED;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR:
+ return SErrorCode::CAMERA_INVALID_ERROR;
+ default:
+ return SErrorCode::CAMERA_UNKNOWN_ERROR;
+ }
+}
+
+std::vector<SPhysicalCaptureResultInfo> convertToAidl(
+ const std::vector<UPhysicalCaptureResultInfo>& src,
+ std::shared_ptr<CaptureResultMetadataQueue>& fmq) {
+ std::vector<SPhysicalCaptureResultInfo> dst;
+ dst.resize(src.size());
+ size_t i = 0;
+ for (auto &physicalCaptureResultInfo : src) {
+ dst[i++] = convertToAidl(physicalCaptureResultInfo, fmq);
+ }
+ return dst;
+}
+
+SPhysicalCaptureResultInfo convertToAidl(const UPhysicalCaptureResultInfo & src,
+ std::shared_ptr<CaptureResultMetadataQueue> & fmq) {
+ SPhysicalCaptureResultInfo dst;
+ dst.physicalCameraId = src.mPhysicalCameraId;
+
+ const camera_metadata_t *rawMetadata = src.mPhysicalCameraMetadata.getAndLock();
+ // Try using fmq at first.
+ size_t metadata_size = get_camera_metadata_size(rawMetadata);
+ if ((metadata_size > 0) && (fmq->availableToWrite() > 0)) {
+ if (fmq->write((int8_t *)rawMetadata, metadata_size)) {
+ dst.physicalCameraMetadata.set<SCaptureMetadataInfo::fmqMetadataSize>(metadata_size);
+ } else {
+ ALOGW("%s Couldn't use fmq, falling back to hwbinder", __FUNCTION__);
+ SCameraMetadata metadata;
+ cloneToAidl(rawMetadata, &metadata);
+ dst.physicalCameraMetadata.set<SCaptureMetadataInfo::metadata>(std::move(metadata));
+ }
+ }
+ src.mPhysicalCameraMetadata.unlock(rawMetadata);
+ return dst;
+}
+
+void convertToAidl(const std::vector<hardware::CameraStatus> &src,
+ std::vector<SCameraStatusAndId>* dst) {
+ dst->resize(src.size());
+ size_t i = 0;
+ for (const auto &statusAndId : src) {
+ auto &a = (*dst)[i++];
+ a.cameraId = statusAndId.cameraId;
+ a.deviceStatus = convertCameraStatusToAidl(statusAndId.status);
+ size_t numUnvailPhysicalCameras = statusAndId.unavailablePhysicalIds.size();
+ a.unavailPhysicalCameraIds.resize(numUnvailPhysicalCameras);
+ for (size_t j = 0; j < numUnvailPhysicalCameras; j++) {
+ a.unavailPhysicalCameraIds[j] = statusAndId.unavailablePhysicalIds[j];
+ }
+ }
+}
+
+SCameraDeviceStatus convertCameraStatusToAidl(int32_t src) {
+ SCameraDeviceStatus deviceStatus = SCameraDeviceStatus::STATUS_UNKNOWN;
+ switch(src) {
+ case hardware::ICameraServiceListener::STATUS_NOT_PRESENT:
+ deviceStatus = SCameraDeviceStatus::STATUS_NOT_PRESENT;
+ break;
+ case hardware::ICameraServiceListener::STATUS_PRESENT:
+ deviceStatus = SCameraDeviceStatus::STATUS_PRESENT;
+ break;
+ case hardware::ICameraServiceListener::STATUS_ENUMERATING:
+ deviceStatus = SCameraDeviceStatus::STATUS_ENUMERATING;
+ break;
+ case hardware::ICameraServiceListener::STATUS_NOT_AVAILABLE:
+ deviceStatus = SCameraDeviceStatus::STATUS_NOT_AVAILABLE;
+ break;
+ default:
+ break;
+ }
+ return deviceStatus;
+}
+
+bool areBindersEqual(const ndk::SpAIBinder& b1, const ndk::SpAIBinder& b2) {
+ return !AIBinder_lt(b1.get(), b2.get()) && !AIBinder_lt(b2.get(), b1.get());
+}
+
+status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic) {
+ if (vndkVersion == __ANDROID_API_FUTURE__) {
+ // VNDK version in ro.vndk.version is a version code-name that
+ // corresponds to the current version.
+ return OK;
+ }
+ const auto &apiLevelToKeys =
+ isStatic ? static_api_level_to_keys : dynamic_api_level_to_keys;
+ // Find the vndk versions above the given vndk version. All the vndk
+ // versions above the given one, need to have their keys filtered from the
+ // metadata in order to avoid metadata invalidation.
+ auto it = apiLevelToKeys.upper_bound(vndkVersion);
+ while (it != apiLevelToKeys.end()) {
+ for (const auto &key : it->second) {
+ status_t res = metadata.erase(key);
+ if (res != OK) {
+ ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
+ return res;
+ }
+ }
+ it++;
+ }
+ return OK;
+}
+
+} // namespace android::hardware::cameraservice::utils::conversion::aidl
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.h b/services/camera/libcameraservice/aidl/AidlUtils.h
new file mode 100644
index 0000000..c89d7ff
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlUtils.h
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
+
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureResultExtras.h>
+#include <aidl/android/frameworks/cameraservice/device/ErrorCode.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCaptureResultInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/SessionConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/StreamConfigurationMode.h>
+#include <aidl/android/frameworks/cameraservice/device/SubmitInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/TemplateId.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraDeviceStatus.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <android/hardware/ICameraService.h>
+#include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <android/hardware/graphics/bufferqueue/1.0/IGraphicBufferProducer.h>
+#include <camera/CameraMetadata.h>
+#include <fmq/AidlMessageQueue.h>
+#include <hardware/camera.h>
+
+namespace android::hardware::cameraservice::utils::conversion::aidl {
+
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using ::android::CameraMetadata;
+using CaptureResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+
+// VNDK classes
+using SCameraDeviceStatus = ::aidl::android::frameworks::cameraservice::service::CameraDeviceStatus;
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCameraStatusAndId = ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using SCaptureResultExtras =
+ ::aidl::android::frameworks::cameraservice::device::CaptureResultExtras;
+using SErrorCode = ::aidl::android::frameworks::cameraservice::device::ErrorCode;
+using SCaptureMetadataInfo = ::aidl::android::frameworks::cameraservice::device::CaptureMetadataInfo;
+using SOutputConfiguration =
+ ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using SPhysicalCaptureResultInfo =
+ ::aidl::android::frameworks::cameraservice::device::PhysicalCaptureResultInfo;
+using SSessionConfiguration =
+ ::aidl::android::frameworks::cameraservice::device::SessionConfiguration;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+using SStreamConfigurationMode =
+ ::aidl::android::frameworks::cameraservice::device::StreamConfigurationMode;
+using SSubmitInfo = ::aidl::android::frameworks::cameraservice::device::SubmitInfo;
+using STemplateId = ::aidl::android::frameworks::cameraservice::device::TemplateId;
+// NDK classes
+using UCaptureResultExtras = ::android::hardware::camera2::impl::CaptureResultExtras;
+using UOutputConfiguration = ::android::hardware::camera2::params::OutputConfiguration;
+using UPhysicalCaptureResultInfo = ::android::hardware::camera2::impl::PhysicalCaptureResultInfo;
+using USessionConfiguration = ::android::hardware::camera2::params::SessionConfiguration;
+
+// Common macro to log errors returned from stable AIDL calls
+#define LOG_STATUS_ERROR_IF_NOT_OK(status, callName) \
+ if (!(status).isOk()) { \
+ if ((status).getExceptionCode() == EX_SERVICE_SPECIFIC) { \
+ SStatus errStatus = static_cast<SStatus>((status).getServiceSpecificError()); \
+ ALOGE("%s: %s callback failed: %s", __FUNCTION__, callName, \
+ toString(errStatus).c_str()); \
+ } else { \
+ ALOGE("%s: Transaction failed during %s: %d", __FUNCTION__, callName, \
+ (status).getExceptionCode()); \
+ } \
+ }
+
+// Note: existing data in dst will be gone. Caller still owns the memory of src
+void cloneToAidl(const camera_metadata_t *src, SCameraMetadata* dst);
+
+bool cloneFromAidl(const SCameraMetadata &src, CameraMetadata *dst);
+
+int32_t convertFromAidl(SStreamConfigurationMode streamConfigurationMode);
+
+UOutputConfiguration convertFromAidl(const SOutputConfiguration &src);
+
+USessionConfiguration convertFromAidl(const SSessionConfiguration &src);
+
+int convertFromAidl(SOutputConfiguration::Rotation rotation);
+
+int32_t convertFromAidl(STemplateId templateId);
+
+void convertToAidl(const hardware::camera2::utils::SubmitInfo &submitInfo,
+ SSubmitInfo *hSubmitInfo);
+
+SStatus convertToAidl(const binder::Status &status);
+
+SCaptureResultExtras convertToAidl(const UCaptureResultExtras &captureResultExtras);
+
+SErrorCode convertToAidl(int32_t errorCode);
+
+std::vector<SPhysicalCaptureResultInfo> convertToAidl(
+ const std::vector<UPhysicalCaptureResultInfo>& src,
+ std::shared_ptr<CaptureResultMetadataQueue>& fmq);
+
+SPhysicalCaptureResultInfo convertToAidl(const UPhysicalCaptureResultInfo& src,
+ std::shared_ptr<CaptureResultMetadataQueue>& fmq);
+
+void convertToAidl(const std::vector<hardware::CameraStatus> &src,
+ std::vector<SCameraStatusAndId>* dst);
+
+SCameraDeviceStatus convertCameraStatusToAidl(int32_t src);
+
+bool areBindersEqual(const ndk::SpAIBinder& b1, const ndk::SpAIBinder& b2);
+
+status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic = true);
+
+} // namespace android::hardware::cameraservice::utils::conversion::aidl
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
diff --git a/services/camera/libcameraservice/aidl/DeathPipe.cpp b/services/camera/libcameraservice/aidl/DeathPipe.cpp
new file mode 100644
index 0000000..de46411
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/DeathPipe.cpp
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "DeathPipe"
+
+#include "DeathPipe.h"
+
+namespace android::frameworks::cameraservice::utils {
+
+DeathPipe::DeathPipe(IBinder* parent, const ::ndk::SpAIBinder& binder):
+ mParent(parent), mAIBinder(binder) {
+ mDeathRecipient = ::ndk::ScopedAIBinder_DeathRecipient(
+ AIBinder_DeathRecipient_new(DeathPipe::onDeathCallback));
+ // Set an unlinked callback that allows Obituaries to be deallocated
+ AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(),
+ DeathPipe::onUnlinkedCallback);
+}
+
+status_t DeathPipe::linkToDeath(const sp<IBinder::DeathRecipient>& recipient,
+ void* cookie, uint32_t flags) {
+ LOG_ALWAYS_FATAL_IF(recipient == nullptr, "%s: recipient must be non-nullptr", __FUNCTION__);
+ std::lock_guard<std::mutex> _l(mLock);
+
+ // Create and immortalize an obituary before linking it to death.
+ // The created Obituary can now only be garbage collected if it is unlinked from death
+ std::shared_ptr<Obituary> obituary = std::make_shared<Obituary>(recipient, cookie,
+ flags, /* who= */ mParent);
+ obituary->immortalize();
+
+ // Ensure that "cookie" is a pointer to an immortal obituary.
+ // AIBinder_linkToDeath calls DeathPipe::onUnlinkedCallback if linking to death fails, marking
+ // it for garbage collection
+ binder_status_t ret = AIBinder_linkToDeath(mAIBinder.get(),
+ mDeathRecipient.get(),
+ /* cookie= */ obituary.get());
+ if (ret != STATUS_OK) {
+ return DEAD_OBJECT;
+ }
+ mObituaries.emplace_back(obituary);
+ return NO_ERROR;
+}
+
+status_t DeathPipe::unlinkToDeath(const wp<IBinder::DeathRecipient>& recipient,
+ void* cookie, uint32_t flags,
+ wp<IBinder::DeathRecipient>* outRecipient) {
+ std::lock_guard<std::mutex> _l(mLock);
+ // Temporary Obituary for checking equality
+ std::shared_ptr<Obituary> inObituary = std::make_shared<Obituary>(recipient, cookie,
+ flags, mParent);
+ for (auto it = mObituaries.begin(); it != mObituaries.end(); it++) {
+ if ((*inObituary) == (**it)) {
+ if (outRecipient != nullptr) {
+ *outRecipient = (*it)->recipient;
+ }
+ // Unlink the found Obituary from death. AIBinder_unlinkToDeath calls
+ // DeathPipe::onUnlinkedCallback with the given cookie when unlinking is done
+ binder_status_t ret = AIBinder_unlinkToDeath(mAIBinder.get(),
+ mDeathRecipient.get(),
+ /* cookie= */ (*it).get());
+ mObituaries.erase(it);
+ return ret == STATUS_OK ? NO_ERROR : DEAD_OBJECT;
+ }
+ }
+ return NAME_NOT_FOUND;
+}
+
+DeathPipe::~DeathPipe() = default;
+
+
+void DeathPipe::onDeathCallback(void* cookie) {
+ // Cookie will always be a pointer to a valid immortal Obituary
+ Obituary* obituary = static_cast<Obituary*>(cookie);
+ obituary->onDeath();
+ // Don't call Obituary::clear() because VNDK Binder will call DeathPipe::onUnlinkedCallback()
+ // when it is ready
+}
+
+void DeathPipe::onUnlinkedCallback(void* cookie) {
+ // Cookie will always be a pointer to a valid immortal Obituary.
+ Obituary* obituary = static_cast<Obituary*>(cookie);
+ // Mark obituary to be garbage collected if needed. onDeathCallback won't be called with
+ // this particular cookie after this.
+ obituary->clear();
+}
+
+} // namespace android::frameworks::cameraservice::utils
\ No newline at end of file
diff --git a/services/camera/libcameraservice/aidl/DeathPipe.h b/services/camera/libcameraservice/aidl/DeathPipe.h
new file mode 100644
index 0000000..a816dd0
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/DeathPipe.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_DEATHPIPE_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_DEATHPIPE_H_
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_ibinder.h>
+#include <binder/Parcel.h>
+#include <list>
+
+namespace android::frameworks::cameraservice::utils {
+
+/**
+ * This is a helper class to pipe death notifications from VNDK {@code AIBinder} to
+ * S/NDK {@code IBinder}.
+ *
+ * To use this class, create a DeathPipe member object as a field of NDK interface
+ * implementation, and forward functions {@code BBinder::linkToDeath} and
+ * {@code BBinder::unlinkToDeath} to corresponding DeathPipe functions.
+ */
+class DeathPipe {
+ public:
+ /**
+ * @param parent the NDK Binder object. Assumed to live longer than the DeathPipe
+ * object
+ * @param binder the VNDK Binder object which DeathPipe with subscribe to.
+ */
+ explicit DeathPipe(IBinder* parent, const ::ndk::SpAIBinder& binder);
+ ~DeathPipe();
+
+ status_t linkToDeath(const sp<IBinder::DeathRecipient>& recipient, void* cookie,
+ uint32_t flags);
+ status_t unlinkToDeath(const wp<IBinder::DeathRecipient>& recipient,
+ void* cookie, uint32_t flags, wp<IBinder::DeathRecipient>* outRecipient);
+
+ // Static functions that will be called by VNDK binder upon death or unlinking
+ static void onDeathCallback(void* cookie);
+ static void onUnlinkedCallback(void* cookie);
+
+ private:
+ /**
+ * {@code Obituary} is a tiny container that contains some metadata to pass VNDK binder's
+ * death notification to the NDK binder. A pointer to the Obituary is used as the
+ * {@code cookie} in VNDK binder's death notification.
+ *
+ * Theoretically, the VNDK binder might send out death notification after the DeathPipe
+ * object is destroyed, so care must be taken to ensure that Obituaries aren't accidentally
+ * destroyed before VNDK binder stops using its cookies.
+ *
+ */
+ struct Obituary: public std::enable_shared_from_this<Obituary> {
+ wp<IBinder::DeathRecipient> recipient; // NDK death recipient
+ void *cookie; // cookie sent by the NDK recipient
+ uint32_t flags; // flags sent by the NDK recipient
+ wp<IBinder> who; // NDK binder whose death 'recipient' subscribed to
+
+ // Self ptr to ensure we don't destroy this obituary while it can still be notified by the
+ // VNDK Binder. When populated with Obituary::immortalize, this Obituary won't be
+ // garbage collected until Obituary::clear is called.
+ std::shared_ptr<Obituary> mSelfPtr;
+
+ Obituary(const wp<IBinder::DeathRecipient>& recipient, void* cookie,
+ uint32_t flags, IBinder* who) :
+ recipient(recipient), cookie(cookie), flags(flags),
+ who(who), mSelfPtr(nullptr) {}
+
+ // Function to be called when the VNDK Binder dies. Pipes the notification to the relevant
+ // NDK recipient if it still exists
+ void onDeath() const {
+ sp<IBinder::DeathRecipient> r = recipient.promote();
+ if (r == nullptr) { return; }
+ r->binderDied(who);
+ };
+
+ // Should be called before calling AIBinder_linkToDeath. Once this function returns this
+ // Obituary won't be garbage collected until Obituary::clear is called.
+ void immortalize() {
+ mSelfPtr = shared_from_this();
+ }
+
+ // Should be called when this Obituary can be garbage collected.
+ // Typically, after the Obituary is no longer linked to a VNDK DeathRecipient
+ void clear() {
+ mSelfPtr = nullptr;
+ }
+
+ bool operator==(const Obituary& rhs) const {
+ return recipient == rhs.recipient &&
+ cookie == rhs.cookie &&
+ flags == rhs.flags &&
+ who == rhs.who;
+ }
+ };
+
+ // Parent to which the cameraservice wants to subscribe to for death notification
+ IBinder* mParent;
+
+ // VNDK Binder object to which the death notification will be bound to. If it dies,
+ // cameraservice will be notified as if mParent died.
+ ::ndk::SpAIBinder mAIBinder;
+
+ // Owning VNDK's deathRecipient ensures that all linked death notifications are cleaned up
+ // when this class destructs.
+ ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+ // Lock to protect access to fields below.
+ std::mutex mLock;
+ // List of all obituaries created by DeathPipe, used to unlink death subscription
+ std::list<std::shared_ptr<Obituary>> mObituaries;
+
+};
+
+} // namespace android::frameworks::cameraservice::utils
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_DEATHPIPE_H_
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
similarity index 80%
rename from services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
rename to services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index ae4d5dd..48c804d 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -21,7 +21,7 @@
* ! Do not edit this file directly !
*
* Generated automatically from vndk_camera_metadata_tags.mako. To be included in libcameraservice
- * only by hidl/Utils.cpp.
+ * only by aidl/AidlUtils.cpp.
*/
/**
@@ -74,6 +74,17 @@
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
ANDROID_SENSOR_READOUT_TIMESTAMP,
} },
+ {34, {
+ ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
+ ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+ } },
};
/**
@@ -90,4 +101,13 @@
ANDROID_SENSOR_PIXEL_MODE,
ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
} },
+ {34, {
+ ANDROID_CONTROL_AUTOFRAMING,
+ ANDROID_CONTROL_AUTOFRAMING_STATE,
+ ANDROID_CONTROL_SETTINGS_OVERRIDE,
+ ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
+ ANDROID_EXTENSION_CURRENT_TYPE,
+ ANDROID_EXTENSION_STRENGTH,
+ ANDROID_SCALER_RAW_CROP_REGION,
+ } },
};
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 9dead7f..b388e5a 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -55,6 +55,7 @@
Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraDeviceId,
@@ -67,7 +68,7 @@
bool overrideForPerfClass,
bool overrideToPortrait,
bool forceSlowJpegMode):
- Camera2ClientBase(cameraService, cameraClient, clientPackageName,
+ Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
@@ -82,9 +83,7 @@
SharedParameters::Lock l(mParameters);
l.mParameters.state = Parameters::DISCONNECTED;
- if (forceSlowJpegMode) {
- l.mParameters.isSlowJpegModeForced = true;
- }
+ l.mParameters.isSlowJpegModeForced = forceSlowJpegMode;
}
status_t Camera2Client::initialize(sp<CameraProviderManager> manager,
@@ -144,19 +143,44 @@
std::string threadName = std::string("C2-") + std::to_string(mCameraId);
mFrameProcessor = new FrameProcessor(mDevice, this);
- mFrameProcessor->run((threadName + "-FrameProc").c_str());
+ res = mFrameProcessor->run((threadName + "-FrameProc").c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mCaptureSequencer = new CaptureSequencer(this);
- mCaptureSequencer->run((threadName + "-CaptureSeq").c_str());
+ res = mCaptureSequencer->run((threadName + "-CaptureSeq").c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start capture sequencer thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
- mJpegProcessor->run((threadName + "-JpegProc").c_str());
+ res = mJpegProcessor->run((threadName + "-JpegProc").c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start jpeg processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
- mZslProcessor->run((threadName + "-ZslProc").c_str());
+ res = mZslProcessor->run((threadName + "-ZslProc").c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start zsl processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mCallbackProcessor = new CallbackProcessor(this);
- mCallbackProcessor->run((threadName + "-CallbkProc").c_str());
+ res = mCallbackProcessor->run((threadName + "-CallbkProc").c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start callback processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
if (gLogLevel >= 1) {
SharedParameters::Lock l(mParameters);
@@ -471,12 +495,13 @@
ALOGV("Camera %d: Disconnecting device", mCameraId);
+ bool hasDeviceError = mDevice->hasDeviceError();
mDevice->disconnect();
CameraService::Client::disconnect();
int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
- CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+ mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
return res;
}
@@ -2332,6 +2357,13 @@
static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
}
+status_t Camera2Client::setAutoframingOverride(uint8_t autoframingValue) {
+ if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
+
+ return mDevice->setAutoframingAutoBehavior(
+ static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
+}
+
bool Camera2Client::supportsCameraMute() {
return mDevice->supportsCameraMute();
}
@@ -2349,6 +2381,14 @@
mDevice->clearStreamUseCaseOverrides();
}
+bool Camera2Client::supportsZoomOverride() {
+ return mDevice->supportsZoomOverride();
+}
+
+status_t Camera2Client::setZoomOverride(int zoomOverride) {
+ return mDevice->setZoomOverride(zoomOverride);
+}
+
status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
if (activeRequestId != 0) {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 5b4d547..fe12690 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -86,6 +86,7 @@
virtual status_t setAudioRestriction(int mode);
virtual int32_t getGlobalAudioRestriction();
virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop);
+ virtual status_t setAutoframingOverride(uint8_t autoframingMode);
virtual bool supportsCameraMute();
virtual status_t setCameraMute(bool enabled);
@@ -96,12 +97,16 @@
const std::vector<int64_t>& useCaseOverrides);
virtual void clearStreamUseCaseOverrides();
+ virtual bool supportsZoomOverride();
+ virtual status_t setZoomOverride(int32_t zoomOverride);
+
/**
* Interface used by CameraService
*/
Camera2Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraDeviceId,
diff --git a/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp b/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp
deleted file mode 100644
index 01951a0..0000000
--- a/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "Camera2-JpegCompressor"
-
-#include <utils/Log.h>
-#include <ui/GraphicBufferMapper.h>
-
-#include "JpegCompressor.h"
-
-namespace android {
-namespace camera2 {
-
-JpegCompressor::JpegCompressor():
- Thread(false),
- mIsBusy(false),
- mCaptureTime(0) {
-}
-
-JpegCompressor::~JpegCompressor() {
- ALOGV("%s", __FUNCTION__);
- Mutex::Autolock lock(mMutex);
-}
-
-status_t JpegCompressor::start(const Vector<CpuConsumer::LockedBuffer*>& buffers,
- nsecs_t captureTime) {
- ALOGV("%s", __FUNCTION__);
- Mutex::Autolock busyLock(mBusyMutex);
-
- if (mIsBusy) {
- ALOGE("%s: Already processing a buffer!", __FUNCTION__);
- return INVALID_OPERATION;
- }
-
- mIsBusy = true;
-
- mBuffers = buffers;
- mCaptureTime = captureTime;
-
- status_t res;
- res = run("JpegCompressor");
- if (res != OK) {
- ALOGE("%s: Unable to start up compression thread: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- //delete mBuffers; // necessary?
- }
- return res;
-}
-
-status_t JpegCompressor::cancel() {
- ALOGV("%s", __FUNCTION__);
- requestExitAndWait();
- return OK;
-}
-
-status_t JpegCompressor::readyToRun() {
- ALOGV("%s", __FUNCTION__);
- return OK;
-}
-
-bool JpegCompressor::threadLoop() {
- ALOGV("%s", __FUNCTION__);
-
- mAuxBuffer = mBuffers[0]; // input
- mJpegBuffer = mBuffers[1]; // output
-
- // Set up error management
- mJpegErrorInfo = NULL;
- JpegError error;
- error.parent = this;
-
- mCInfo.err = jpeg_std_error(&error);
- mCInfo.err->error_exit = jpegErrorHandler;
-
- jpeg_create_compress(&mCInfo);
- if (checkError("Error initializing compression")) return false;
-
- // Route compressed data straight to output stream buffer
- JpegDestination jpegDestMgr;
- jpegDestMgr.parent = this;
- jpegDestMgr.init_destination = jpegInitDestination;
- jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer;
- jpegDestMgr.term_destination = jpegTermDestination;
-
- mCInfo.dest = &jpegDestMgr;
-
- // Set up compression parameters
- mCInfo.image_width = mAuxBuffer->width;
- mCInfo.image_height = mAuxBuffer->height;
- mCInfo.input_components = 1; // 3;
- mCInfo.in_color_space = JCS_GRAYSCALE; // JCS_RGB
-
- ALOGV("%s: image_width = %d, image_height = %d", __FUNCTION__, mCInfo.image_width, mCInfo.image_height);
-
- jpeg_set_defaults(&mCInfo);
- if (checkError("Error configuring defaults")) return false;
-
- // Do compression
- jpeg_start_compress(&mCInfo, TRUE);
- if (checkError("Error starting compression")) return false;
-
- size_t rowStride = mAuxBuffer->stride;// * 3;
- const size_t kChunkSize = 32;
- while (mCInfo.next_scanline < mCInfo.image_height) {
- JSAMPROW chunk[kChunkSize];
- for (size_t i = 0 ; i < kChunkSize; i++) {
- chunk[i] = (JSAMPROW)
- (mAuxBuffer->data + (i + mCInfo.next_scanline) * rowStride);
- }
- jpeg_write_scanlines(&mCInfo, chunk, kChunkSize);
- if (checkError("Error while compressing")) return false;
- if (exitPending()) {
- ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
- cleanUp();
- return false;
- }
- }
-
- jpeg_finish_compress(&mCInfo);
- if (checkError("Error while finishing compression")) return false;
-
- cleanUp();
- return false;
-}
-
-bool JpegCompressor::isBusy() {
- ALOGV("%s", __FUNCTION__);
- Mutex::Autolock busyLock(mBusyMutex);
- return mIsBusy;
-}
-
-// old function -- TODO: update for new buffer type
-bool JpegCompressor::isStreamInUse(uint32_t /*id*/) {
- ALOGV("%s", __FUNCTION__);
- Mutex::Autolock lock(mBusyMutex);
-
- if (mBuffers.size() && mIsBusy) {
- for (size_t i = 0; i < mBuffers.size(); i++) {
-// if ( mBuffers[i].streamId == (int)id ) return true;
- }
- }
- return false;
-}
-
-bool JpegCompressor::waitForDone(nsecs_t timeout) {
- ALOGV("%s", __FUNCTION__);
- Mutex::Autolock lock(mBusyMutex);
- status_t res = OK;
- if (mIsBusy) {
- res = mDone.waitRelative(mBusyMutex, timeout);
- }
- return (res == OK);
-}
-
-bool JpegCompressor::checkError(const char *msg) {
- ALOGV("%s", __FUNCTION__);
- if (mJpegErrorInfo) {
- char errBuffer[JMSG_LENGTH_MAX];
- mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer);
- ALOGE("%s: %s: %s",
- __FUNCTION__, msg, errBuffer);
- cleanUp();
- mJpegErrorInfo = NULL;
- return true;
- }
- return false;
-}
-
-void JpegCompressor::cleanUp() {
- ALOGV("%s", __FUNCTION__);
- jpeg_destroy_compress(&mCInfo);
- Mutex::Autolock lock(mBusyMutex);
- mIsBusy = false;
- mDone.signal();
-}
-
-void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) {
- ALOGV("%s", __FUNCTION__);
- JpegError *error = static_cast<JpegError*>(cinfo->err);
- error->parent->mJpegErrorInfo = cinfo;
-}
-
-void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) {
- ALOGV("%s", __FUNCTION__);
- JpegDestination *dest= static_cast<JpegDestination*>(cinfo->dest);
- ALOGV("%s: Setting destination to %p, size %zu",
- __FUNCTION__, dest->parent->mJpegBuffer->data, kMaxJpegSize);
- dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer->data);
- dest->free_in_buffer = kMaxJpegSize;
-}
-
-boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr /*cinfo*/) {
- ALOGV("%s", __FUNCTION__);
- ALOGE("%s: JPEG destination buffer overflow!",
- __FUNCTION__);
- return true;
-}
-
-void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) {
- (void) cinfo; // TODO: clean up
- ALOGV("%s", __FUNCTION__);
- ALOGV("%s: Done writing JPEG data. %zu bytes left in buffer",
- __FUNCTION__, cinfo->dest->free_in_buffer);
-}
-
-}; // namespace camera2
-}; // namespace android
diff --git a/services/camera/libcameraservice/api1/client2/JpegCompressor.h b/services/camera/libcameraservice/api1/client2/JpegCompressor.h
deleted file mode 100644
index 589a2fd..0000000
--- a/services/camera/libcameraservice/api1/client2/JpegCompressor.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/**
- * This class simulates a hardware JPEG compressor. It receives image buffers
- * in RGBA_8888 format, processes them in a worker thread, and then pushes them
- * out to their destination stream.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H
-#define ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H
-
-#include "utils/Thread.h"
-#include "utils/Mutex.h"
-#include "utils/Timers.h"
-#include "utils/Vector.h"
-//#include "Base.h"
-#include <stdio.h>
-#include <gui/CpuConsumer.h>
-
-extern "C" {
-#include <jpeglib.h>
-}
-
-
-namespace android {
-namespace camera2 {
-
-class JpegCompressor: private Thread, public virtual RefBase {
- public:
-
- JpegCompressor();
- ~JpegCompressor();
-
- // Start compressing COMPRESSED format buffers; JpegCompressor takes
- // ownership of the Buffers vector.
- status_t start(const Vector<CpuConsumer::LockedBuffer*>& buffers,
- nsecs_t captureTime);
-
- status_t cancel();
-
- bool isBusy();
- bool isStreamInUse(uint32_t id);
-
- bool waitForDone(nsecs_t timeout);
-
- // TODO: Measure this
- static const size_t kMaxJpegSize = 300000;
-
- private:
- Mutex mBusyMutex;
- Mutex mMutex;
- bool mIsBusy;
- Condition mDone;
- nsecs_t mCaptureTime;
-
- Vector<CpuConsumer::LockedBuffer*> mBuffers;
- CpuConsumer::LockedBuffer *mJpegBuffer;
- CpuConsumer::LockedBuffer *mAuxBuffer;
-
- jpeg_compress_struct mCInfo;
-
- struct JpegError : public jpeg_error_mgr {
- JpegCompressor *parent;
- };
- j_common_ptr mJpegErrorInfo;
-
- struct JpegDestination : public jpeg_destination_mgr {
- JpegCompressor *parent;
- };
-
- static void jpegErrorHandler(j_common_ptr cinfo);
-
- static void jpegInitDestination(j_compress_ptr cinfo);
- static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo);
- static void jpegTermDestination(j_compress_ptr cinfo);
-
- bool checkError(const char *msg);
- void cleanUp();
-
- /**
- * Inherited Thread virtual overrides
- */
- private:
- virtual status_t readyToRun();
- virtual bool threadLoop();
-};
-
-}; // namespace camera2
-}; // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index d5ea689..aa3d1bb 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -43,6 +43,7 @@
int cameraFacing) :
cameraId(cameraId),
cameraFacing(cameraFacing),
+ isSlowJpegModeForced(false),
info(NULL),
mDefaultSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED) {
}
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index d54ba46..c60f327 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -32,12 +32,12 @@
#include "device3/Camera3Device.h"
#include "device3/Camera3OutputStream.h"
#include "api2/CameraDeviceClient.h"
-#include "utils/CameraServiceProxyWrapper.h"
#include <camera_metadata_hidden.h>
#include "DepthCompositeStream.h"
#include "HeicCompositeStream.h"
+#include "JpegRCompositeStream.h"
// Convenience methods for constructing binder::Status objects for error returns
@@ -88,6 +88,7 @@
CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -99,9 +100,10 @@
int servicePid,
bool overrideForPerfClass,
bool overrideToPortrait) :
- Camera2ClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
- clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
- clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait),
+ Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
+ systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
+ sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
+ overrideToPortrait),
mInputStream(),
mStreamingRequestId(REQUEST_ID_NONE),
mRequestIdCounter(0),
@@ -129,7 +131,12 @@
mFrameProcessor = new FrameProcessorBase(mDevice);
std::string threadName = std::string("CDU-") + mCameraIdStr + "-FrameProc";
- mFrameProcessor->run(threadName.c_str());
+ res = mFrameProcessor->run(threadName.c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
@@ -183,11 +190,11 @@
// Cache physical camera ids corresponding to this device and also the high
// resolution sensors in this device + physical camera ids
mProviderManager->isLogicalCamera(mCameraIdStr, &mPhysicalCameraIds);
- if (isUltraHighResolutionSensor(mCameraIdStr)) {
+ if (supportsUltraHighResolutionCapture(mCameraIdStr)) {
mHighResolutionSensors.insert(mCameraIdStr);
}
for (auto &physicalId : mPhysicalCameraIds) {
- if (isUltraHighResolutionSensor(physicalId)) {
+ if (supportsUltraHighResolutionCapture(physicalId)) {
mHighResolutionSensors.insert(physicalId);
}
}
@@ -694,7 +701,7 @@
nsecs_t configureEnd = systemTime();
int32_t configureDurationMs = ns2ms(configureEnd) - startTimeMs;
- CameraServiceProxyWrapper::logStreamConfigured(mCameraIdStr, operatingMode,
+ mCameraServiceProxyWrapper->logStreamConfigured(mCameraIdStr, operatingMode,
false /*internalReconfig*/, configureDurationMs);
}
@@ -882,6 +889,8 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
+ bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
outputConfiguration.getSurfaceType());
@@ -927,7 +936,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -949,19 +958,26 @@
bool isDepthCompositeStream =
camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
- if (isDepthCompositeStream || isHeicCompositeStream) {
+ bool isJpegRCompositeStream =
+ camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]) &&
+ !mDevice->isCompositeJpegRDisabled();
+ if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
sp<CompositeStream> compositeStream;
if (isDepthCompositeStream) {
compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
- } else {
+ } else if (isHeicCompositeStream) {
compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
+ } else {
+ compositeStream = new camera3::JpegRCompositeStream(mDevice, getRemoteCallback());
}
err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format,
static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
- outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
+ outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
+ streamInfo.colorSpace, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
+ useReadoutTimestamp);
if (err == OK) {
Mutex::Autolock l(mCompositeLock);
mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
@@ -974,7 +990,8 @@
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
/*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
- streamInfo.timestampBase, streamInfo.mirrorMode);
+ streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace,
+ useReadoutTimestamp);
}
if (err != OK) {
@@ -1025,6 +1042,7 @@
int width, height, format, surfaceType;
uint64_t consumerUsage;
android_dataspace dataSpace;
+ int32_t colorSpace;
status_t err;
binder::Status res;
@@ -1038,6 +1056,7 @@
surfaceType = outputConfiguration.getSurfaceType();
format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
// Hardcode consumer usage flags: SurfaceView--0x900, SurfaceTexture--0x100.
consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
@@ -1055,7 +1074,7 @@
outputConfiguration.getSensorPixelModesUsed();
if (SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
sensorPixelModesUsed, format, width, height, getStaticInfo(cameraIdUsed),
- /*allowRounding*/ false, &overriddenSensorPixelModesUsed) != OK) {
+ &overriddenSensorPixelModesUsed) != OK) {
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
"sensor pixel modes used not valid for deferred stream");
}
@@ -1070,7 +1089,8 @@
outputConfiguration.isMultiResolution(), consumerUsage,
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
- outputConfiguration.getMirrorMode());
+ outputConfiguration.getMirrorMode(),
+ outputConfiguration.useReadoutTimestamp());
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -1087,7 +1107,8 @@
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
outputConfiguration.getTimestampBase(),
- outputConfiguration.getMirrorMode()));
+ outputConfiguration.getMirrorMode(),
+ colorSpace));
ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
" (%d x %d) stream with format 0x%x.",
@@ -1277,6 +1298,7 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
int mirrorMode = outputConfiguration.getMirrorMode();
for (size_t i = 0; i < newOutputsMap.size(); i++) {
@@ -1285,7 +1307,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -1460,7 +1482,7 @@
binder::Status CameraDeviceClient::prepare(int streamId) {
ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
+ ALOGV("%s stream id %d", __FUNCTION__, streamId);
binder::Status res;
if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -1500,7 +1522,7 @@
binder::Status CameraDeviceClient::prepare2(int maxCount, int streamId) {
ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
+ ALOGV("%s stream id %d", __FUNCTION__, streamId);
binder::Status res;
if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -1644,7 +1666,8 @@
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
- int64_t streamUseCase= outputConfiguration.getStreamUseCase();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
+ int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
for (auto& bufferProducer : bufferProducers) {
@@ -1660,7 +1683,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -1744,6 +1767,13 @@
static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
}
+status_t CameraDeviceClient::setAutoframingOverride(uint8_t autoframingValue) {
+ if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
+
+ return mDevice->setAutoframingAutoBehavior(
+ static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
+}
+
bool CameraDeviceClient::supportsCameraMute() {
return mDevice->supportsCameraMute();
}
@@ -1761,6 +1791,14 @@
mDevice->clearStreamUseCaseOverrides();
}
+bool CameraDeviceClient::supportsZoomOverride() {
+ return mDevice->supportsZoomOverride();
+}
+
+status_t CameraDeviceClient::setZoomOverride(int32_t zoomOverride) {
+ return mDevice->setZoomOverride(zoomOverride);
+}
+
binder::Status CameraDeviceClient::switchToOffline(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
const std::vector<int>& offlineOutputIds,
@@ -1813,7 +1851,9 @@
for (const auto& gbp : mConfiguredOutputs.valueAt(index).getGraphicBufferProducers()) {
sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
- camera3::HeicCompositeStream::isHeicCompositeStream(s);
+ camera3::HeicCompositeStream::isHeicCompositeStream(s) ||
+ (camera3::JpegRCompositeStream::isJpegRCompositeStream(s) &&
+ !mDevice->isCompositeJpegRDisabled());
if (isCompositeStream) {
auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
if (compositeIdx == NAME_NOT_FOUND) {
@@ -1996,8 +2036,20 @@
if (remoteCb != 0) {
remoteCb->onDeviceIdle();
}
+
+ std::vector<hardware::CameraStreamStats> fullStreamStats = streamStats;
+ {
+ Mutex::Autolock l(mCompositeLock);
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ hardware::CameraStreamStats compositeStats;
+ mCompositeStreamMap.valueAt(i)->getStreamStats(&compositeStats);
+ if (compositeStats.mWidth > 0) {
+ fullStreamStats.push_back(compositeStats);
+ }
+ }
+ }
Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
- streamStats, mUserTag, mVideoStabilizationMode);
+ fullStreamStats, mUserTag, mVideoStabilizationMode);
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -2020,6 +2072,7 @@
// Thread safe. Don't bother locking.
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
if (remoteCb != 0) {
+ ALOGV("%s: stream id %d", __FUNCTION__, streamId);
remoteCb->onPrepared(streamId);
}
}
@@ -2074,10 +2127,11 @@
mCompositeStreamMap.clear();
}
+ bool hasDeviceError = mDevice->hasDeviceError();
Camera2ClientBase::detachDevice();
int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
- CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+ mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
}
/** Device-related methods */
@@ -2215,9 +2269,9 @@
return mDevice->infoPhysical(cameraId);
}
-bool CameraDeviceClient::isUltraHighResolutionSensor(const std::string &cameraId) {
+bool CameraDeviceClient::supportsUltraHighResolutionCapture(const std::string &cameraId) {
const CameraMetadata &deviceInfo = getStaticInfo(cameraId);
- return SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+ return SessionConfigurationUtils::supportsUltraHighResolutionCapture(deviceInfo);
}
bool CameraDeviceClient::isSensorPixelModeConsistent(
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 4b330f3..45c904a 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -29,6 +29,7 @@
#include "common/FrameProcessorBase.h"
#include "common/Camera2ClientBase.h"
#include "CompositeStream.h"
+#include "utils/CameraServiceProxyWrapper.h"
#include "utils/SessionConfigurationUtils.h"
using android::camera3::OutputStreamInfo;
@@ -179,6 +180,7 @@
CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
bool clientPackageOverride,
const std::optional<std::string>& clientFeatureId,
@@ -197,9 +199,14 @@
virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop) override;
+ virtual status_t setAutoframingOverride(uint8_t autoframingValue) override;
+
virtual bool supportsCameraMute();
virtual status_t setCameraMute(bool enabled);
+ virtual bool supportsZoomOverride() override;
+ virtual status_t setZoomOverride(int32_t zoomOverride) override;
+
virtual status_t dump(int fd, const Vector<String16>& args);
virtual status_t dumpClient(int fd, const Vector<String16>& args);
@@ -238,7 +245,7 @@
// Calculate the ANativeWindow transform from android.sensor.orientation
status_t getRotationTransformLocked(int mirrorMode, /*out*/int32_t* transform);
- bool isUltraHighResolutionSensor(const std::string &cameraId);
+ bool supportsUltraHighResolutionCapture(const std::string &cameraId);
bool isSensorPixelModeConsistent(const std::list<int> &streamIdList,
const CameraMetadata &settings);
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 89c05b0..99bdb0e 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -49,7 +49,12 @@
mFrameProcessor = new camera2::FrameProcessorBase(mOfflineSession);
std::string threadName = fmt::sprintf("Offline-%s-FrameProc", mCameraIdStr.c_str());
- mFrameProcessor->run(threadName.c_str());
+ res = mFrameProcessor->run(threadName.c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
@@ -81,6 +86,10 @@
return OK;
}
+status_t CameraOfflineSessionClient::setAutoframingOverride(uint8_t) {
+ return OK;
+}
+
bool CameraOfflineSessionClient::supportsCameraMute() {
// Offline mode doesn't support muting
return false;
@@ -97,6 +106,14 @@
void CameraOfflineSessionClient::clearStreamUseCaseOverrides() {
}
+bool CameraOfflineSessionClient::supportsZoomOverride() {
+ return false;
+}
+
+status_t CameraOfflineSessionClient::setZoomOverride(int32_t /*zoomOverride*/) {
+ return INVALID_OPERATION;
+}
+
status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
return BasicClient::dump(fd, args);
}
@@ -247,7 +264,7 @@
mOpsActive = true;
// Transition device state to OPEN
- sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+ sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
return OK;
}
@@ -271,7 +288,7 @@
}
mOpsCallback.clear();
- sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+ sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
return OK;
}
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 4a5b1f2..70bad03 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -82,6 +82,8 @@
status_t setRotateAndCropOverride(uint8_t rotateAndCrop) override;
+ status_t setAutoframingOverride(uint8_t autoframingValue) override;
+
bool supportsCameraMute() override;
status_t setCameraMute(bool enabled) override;
@@ -92,6 +94,10 @@
void clearStreamUseCaseOverrides() override;
+ bool supportsZoomOverride() override;
+
+ status_t setZoomOverride(int32_t zoomOverride) override;
+
// permissions management
status_t startCameraOps() override;
status_t finishCameraOps() override;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 3221d74..8f53458 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -49,7 +49,8 @@
camera_stream_rotation_t rotation, int * id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> * surfaceIds,
- int streamSetId, bool isShared, bool isMultiResolution) {
+ int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
if (hasDeferredConsumer) {
ALOGE("%s: Deferred consumers not supported in case of composite streams!",
__FUNCTION__);
@@ -75,7 +76,8 @@
}
return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation,
- id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared);
+ id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared,
+ colorSpace, dynamicProfile, streamUseCase, useReadoutTimestamp);
}
status_t CompositeStream::deleteStream() {
@@ -85,6 +87,7 @@
mCaptureResults.clear();
mFrameNumberMap.clear();
mErrorFrameNumbers.clear();
+ mRequestTimeMap.clear();
}
return deleteInternalStreams();
@@ -95,6 +98,8 @@
Mutex::Autolock l(mMutex);
if (!mErrorState && (streamId == getStreamId())) {
mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
+ auto ts = systemTime();
+ mRequestTimeMap.emplace(frameNumber, ts);
}
}
@@ -109,6 +114,11 @@
void CompositeStream::eraseResult(int64_t frameNumber) {
Mutex::Autolock l(mMutex);
+ auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+ if (requestTimeIt != mRequestTimeMap.end()) {
+ mRequestTimeMap.erase(requestTimeIt);
+ }
+
auto it = mPendingCaptureResults.find(frameNumber);
if (it == mPendingCaptureResults.end()) {
return;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index ec16dde..1b7fc6e 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -46,7 +46,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int streamSetId, bool isShared, bool isMultiResolution);
+ int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp);
status_t deleteStream();
@@ -59,7 +60,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int streamSetId, bool isShared) = 0;
+ int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) = 0;
// Release all internal streams and corresponding resources.
virtual status_t deleteInternalStreams() = 0;
@@ -81,6 +83,9 @@
// Notify when shutter notify is triggered
virtual void onShutter(const CaptureResultExtras& /*resultExtras*/, nsecs_t /*timestamp*/) {}
+ // Get composite stream stats
+ virtual void getStreamStats(hardware::CameraStreamStats* streamStats /*out*/) = 0;
+
void onResultAvailable(const CaptureResult& result);
bool onError(int32_t errorCode, const CaptureResultExtras& resultExtras);
@@ -138,6 +143,9 @@
// Keeps a set buffer/result frame numbers for any errors detected during processing.
std::set<int64_t> mErrorFrameNumbers;
+ // Frame number to request time map
+ std::unordered_map<int64_t, nsecs_t> mRequestTimeMap;
+
};
}; //namespace camera3
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 01fe78b..1bd0b85 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -99,7 +99,7 @@
}
getSupportedDepthSizes(staticInfo, /*maxResolution*/false, &mSupportedDepthSizes);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(staticInfo)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(staticInfo)) {
getSupportedDepthSizes(staticInfo, true, &mSupportedDepthSizesMaximumResolution);
}
}
@@ -582,7 +582,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int /*streamSetId*/, bool /*isShared*/) {
+ int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
+ int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
if (mSupportedDepthSizes.empty()) {
ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
return INVALID_OPERATION;
@@ -613,7 +614,14 @@
mBlobSurface = new Surface(producer);
ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
- id, physicalCameraId, sensorPixelModesUsed, surfaceIds);
+ id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+ camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ useReadoutTimestamp);
if (ret == OK) {
mBlobStreamId = *id;
mBlobSurfaceId = (*surfaceIds)[0];
@@ -630,7 +638,14 @@
std::vector<int> depthSurfaceId;
ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
- &depthSurfaceId);
+ &depthSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false,
+ /*isMultiResolution*/false, /*consumerUsage*/0,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ useReadoutTimestamp);
if (ret == OK) {
mDepthSurfaceId = depthSurfaceId[0];
} else {
@@ -887,7 +902,7 @@
return BAD_VALUE;
}
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(ch)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(ch)) {
getSupportedDepthSizes(ch, /*maxResolution*/true, &depthSizesMaximumResolution);
if (depthSizesMaximumResolution.empty()) {
ALOGE("%s: No depth stream configurations for maximum resolution present",
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index a8c40ae..f797f9c 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -53,7 +53,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int streamSetId, bool isShared) override;
+ int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
status_t deleteInternalStreams() override;
status_t configureStream() override;
status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
@@ -68,6 +69,9 @@
static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats*) override {};
+
protected:
bool threadLoop() override;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 97c1ae1..68e9ad4 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -121,8 +121,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int /*streamSetId*/, bool /*isShared*/) {
-
+ int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
+ int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
sp<CameraDeviceBase> device = mDevice.promote();
if (!device.get()) {
ALOGE("%s: Invalid camera device!", __FUNCTION__);
@@ -148,7 +148,14 @@
res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
- sensorPixelModesUsed,surfaceIds);
+ sensorPixelModesUsed, surfaceIds, camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ colorSpace,
+ useReadoutTimestamp);
if (res == OK) {
mAppSegmentSurfaceId = (*surfaceIds)[0];
} else {
@@ -184,7 +191,14 @@
int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
- rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId);
+ rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId,
+ camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ colorSpace,
+ useReadoutTimestamp);
if (res == OK) {
mMainImageSurfaceId = sourceSurfaceId[0];
mMainImageStreamId = *id;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 78c5f02..b539cdd 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -47,8 +47,8 @@
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- std::vector<int> *surfaceIds,
- int streamSetId, bool isShared) override;
+ std::vector<int> *surfaceIds, int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
status_t deleteInternalStreams() override;
@@ -75,6 +75,9 @@
static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats*) override {};
+
static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr);
static bool isInMemoryTempFileSupported();
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
new file mode 100644
index 0000000..988446b
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -0,0 +1,879 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "hardware/gralloc.h"
+#include "system/graphics-base-v1.0.h"
+#include "system/graphics-base-v1.1.h"
+#define LOG_TAG "Camera3-JpegRCompositeStream"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+
+#include "common/CameraProviderManager.h"
+#include <gui/Surface.h>
+#include <ultrahdr/jpegr.h>
+#include <utils/ExifUtils.h>
+#include <utils/Log.h>
+#include "utils/SessionConfigurationUtils.h"
+#include <utils/Trace.h>
+
+#include "JpegRCompositeStream.h"
+
+namespace android {
+namespace camera3 {
+
+using aidl::android::hardware::camera::device::CameraBlob;
+using aidl::android::hardware::camera::device::CameraBlobId;
+
+JpegRCompositeStream::JpegRCompositeStream(sp<CameraDeviceBase> device,
+ wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
+ CompositeStream(device, cb),
+ mBlobStreamId(-1),
+ mBlobSurfaceId(-1),
+ mP010StreamId(-1),
+ mP010SurfaceId(-1),
+ mBlobWidth(0),
+ mBlobHeight(0),
+ mP010BufferAcquired(false),
+ mBlobBufferAcquired(false),
+ mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
+ mOutputStreamUseCase(0),
+ mFirstRequestLatency(-1),
+ mProducerListener(new ProducerListener()),
+ mMaxJpegBufferSize(-1),
+ mUHRMaxJpegBufferSize(-1),
+ mStaticInfo(device->info()) {
+ auto entry = mStaticInfo.find(ANDROID_JPEG_MAX_SIZE);
+ if (entry.count > 0) {
+ mMaxJpegBufferSize = entry.data.i32[0];
+ } else {
+ ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
+ }
+
+ mUHRMaxJpegSize =
+ SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+ /*ultraHighResolution*/true);
+ mDefaultMaxJpegSize =
+ SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+ /*isUltraHighResolution*/false);
+
+ mUHRMaxJpegBufferSize =
+ SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
+ mMaxJpegBufferSize);
+}
+
+JpegRCompositeStream::~JpegRCompositeStream() {
+ mBlobConsumer.clear(),
+ mBlobSurface.clear(),
+ mBlobStreamId = -1;
+ mBlobSurfaceId = -1;
+ mP010Consumer.clear();
+ mP010Surface.clear();
+ mP010Consumer = nullptr;
+ mP010Surface = nullptr;
+}
+
+void JpegRCompositeStream::compilePendingInputLocked() {
+ CpuConsumer::LockedBuffer imgBuffer;
+
+ while (mSupportInternalJpeg && !mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
+ auto it = mInputJpegBuffers.begin();
+ auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
+ if (res == NOT_ENOUGH_DATA) {
+ // Can not lock any more buffers.
+ break;
+ } else if (res != OK) {
+ ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ mPendingInputFrames[*it].error = true;
+ mInputJpegBuffers.erase(it);
+ continue;
+ }
+
+ if (*it != imgBuffer.timestamp) {
+ ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
+ "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+ }
+
+ if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+ (mPendingInputFrames[imgBuffer.timestamp].error)) {
+ mBlobConsumer->unlockBuffer(imgBuffer);
+ } else {
+ mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
+ mBlobBufferAcquired = true;
+ }
+ mInputJpegBuffers.erase(it);
+ }
+
+ while (!mInputP010Buffers.empty() && !mP010BufferAcquired) {
+ auto it = mInputP010Buffers.begin();
+ auto res = mP010Consumer->lockNextBuffer(&imgBuffer);
+ if (res == NOT_ENOUGH_DATA) {
+ // Can not lock any more buffers.
+ break;
+ } else if (res != OK) {
+ ALOGE("%s: Error receiving P010 image buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ mPendingInputFrames[*it].error = true;
+ mInputP010Buffers.erase(it);
+ continue;
+ }
+
+ if (*it != imgBuffer.timestamp) {
+ ALOGW("%s: Expecting P010 buffer with time stamp: %" PRId64 " received buffer with "
+ "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+ }
+
+ if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+ (mPendingInputFrames[imgBuffer.timestamp].error)) {
+ mP010Consumer->unlockBuffer(imgBuffer);
+ } else {
+ mPendingInputFrames[imgBuffer.timestamp].p010Buffer = imgBuffer;
+ mP010BufferAcquired = true;
+ }
+ mInputP010Buffers.erase(it);
+ }
+
+ while (!mCaptureResults.empty()) {
+ auto it = mCaptureResults.begin();
+ // Negative timestamp indicates that something went wrong during the capture result
+ // collection process.
+ if (it->first >= 0) {
+ auto frameNumber = std::get<0>(it->second);
+ mPendingInputFrames[it->first].frameNumber = frameNumber;
+ mPendingInputFrames[it->first].result = std::get<1>(it->second);
+ mSessionStatsBuilder.incResultCounter(false /*dropped*/);
+ }
+ mCaptureResults.erase(it);
+ }
+
+ while (!mFrameNumberMap.empty()) {
+ auto it = mFrameNumberMap.begin();
+ auto frameNumber = it->first;
+ mPendingInputFrames[it->second].frameNumber = frameNumber;
+ auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+ if (requestTimeIt != mRequestTimeMap.end()) {
+ mPendingInputFrames[it->second].requestTimeNs = requestTimeIt->second;
+ mRequestTimeMap.erase(requestTimeIt);
+ }
+ mFrameNumberMap.erase(it);
+ }
+
+ auto it = mErrorFrameNumbers.begin();
+ while (it != mErrorFrameNumbers.end()) {
+ bool frameFound = false;
+ for (auto &inputFrame : mPendingInputFrames) {
+ if (inputFrame.second.frameNumber == *it) {
+ inputFrame.second.error = true;
+ frameFound = true;
+ break;
+ }
+ }
+
+ if (frameFound) {
+ mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/,
+ 0 /*captureLatencyMs*/);
+ it = mErrorFrameNumbers.erase(it);
+ } else {
+ ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
+ *it);
+ it++;
+ }
+ }
+}
+
+bool JpegRCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
+ if (currentTs == nullptr) {
+ return false;
+ }
+
+ bool newInputAvailable = false;
+ for (const auto& it : mPendingInputFrames) {
+ if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
+ (it.second.requestTimeNs != -1) &&
+ ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
+ (it.first < *currentTs)) {
+ *currentTs = it.first;
+ newInputAvailable = true;
+ }
+ }
+
+ return newInputAvailable;
+}
+
+int64_t JpegRCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
+ int64_t ret = -1;
+ if (currentTs == nullptr) {
+ return ret;
+ }
+
+ for (const auto& it : mPendingInputFrames) {
+ if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
+ *currentTs = it.first;
+ ret = it.second.frameNumber;
+ }
+ }
+
+ return ret;
+}
+
+status_t JpegRCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
+ status_t res;
+ sp<ANativeWindow> outputANW = mOutputSurface;
+ ANativeWindowBuffer *anb;
+ int fenceFd;
+ void *dstBuffer;
+
+ size_t maxJpegRBufferSize = 0;
+ if (mMaxJpegBufferSize > 0) {
+ // If this is an ultra high resolution sensor and the input frames size
+ // is > default res jpeg.
+ if (mUHRMaxJpegSize.width != 0 &&
+ inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
+ mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
+ maxJpegRBufferSize = mUHRMaxJpegBufferSize;
+ } else {
+ maxJpegRBufferSize = mMaxJpegBufferSize;
+ }
+ } else {
+ maxJpegRBufferSize = inputFrame.p010Buffer.width * inputFrame.p010Buffer.height;
+ }
+
+ uint8_t jpegQuality = 100;
+ auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
+ if (entry.count > 0) {
+ jpegQuality = entry.data.u8[0];
+ }
+
+ if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegRBufferSize, 1))
+ != OK) {
+ ALOGE("%s: Unable to configure stream buffer dimensions"
+ " %zux%u for stream %d", __FUNCTION__, maxJpegRBufferSize, 1U, mP010StreamId);
+ return res;
+ }
+
+ res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
+ if (res != OK) {
+ ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
+ res);
+ return res;
+ }
+
+ sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
+ GraphicBufferLocker gbLocker(gb);
+ res = gbLocker.lockAsync(&dstBuffer, fenceFd);
+ if (res != OK) {
+ ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+ return res;
+ }
+
+ if ((gb->getWidth() < maxJpegRBufferSize) || (gb->getHeight() != 1)) {
+ ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__,
+ maxJpegRBufferSize, 1, gb->getWidth(), gb->getHeight());
+ outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+ return BAD_VALUE;
+ }
+
+ size_t actualJpegRSize = 0;
+ ultrahdr::jpegr_uncompressed_struct p010;
+ ultrahdr::jpegr_compressed_struct jpegR;
+ ultrahdr::JpegR jpegREncoder;
+
+ p010.height = inputFrame.p010Buffer.height;
+ p010.width = inputFrame.p010Buffer.width;
+ p010.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
+ p010.data = inputFrame.p010Buffer.data;
+ p010.chroma_data = inputFrame.p010Buffer.dataCb;
+ // Strides are expected to be in pixels not bytes
+ p010.luma_stride = inputFrame.p010Buffer.stride / 2;
+ p010.chroma_stride = inputFrame.p010Buffer.chromaStride / 2;
+
+ jpegR.data = dstBuffer;
+ jpegR.maxLength = maxJpegRBufferSize;
+
+ ultrahdr::ultrahdr_transfer_function transferFunction;
+ switch (mP010DynamicRange) {
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+ transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_PQ;
+ break;
+ default:
+ transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;
+ }
+
+ if (mSupportInternalJpeg) {
+ ultrahdr::jpegr_compressed_struct jpeg;
+
+ jpeg.data = inputFrame.jpegBuffer.data;
+ jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
+ inputFrame.jpegBuffer.width);
+ if (jpeg.length == 0) {
+ ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!",
+ __FUNCTION__);
+ jpeg.length = inputFrame.jpegBuffer.width;
+ }
+
+ if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
+ jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_P3;
+ } else {
+ jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT709;
+ }
+
+ res = jpegREncoder.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR);
+ } else {
+ const uint8_t* exifBuffer = nullptr;
+ size_t exifBufferSize = 0;
+ std::unique_ptr<ExifUtils> utils(ExifUtils::create());
+ utils->initializeEmpty();
+ utils->setFromMetadata(inputFrame.result, mStaticInfo, inputFrame.p010Buffer.width,
+ inputFrame.p010Buffer.height);
+ if (utils->generateApp1()) {
+ exifBuffer = utils->getApp1Buffer();
+ exifBufferSize = utils->getApp1Length();
+ } else {
+ ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
+ }
+
+ ultrahdr::jpegr_exif_struct exif;
+ exif.data = reinterpret_cast<void*>(const_cast<uint8_t*>(exifBuffer));
+ exif.length = exifBufferSize;
+
+ res = jpegREncoder.encodeJPEGR(&p010, transferFunction, &jpegR, jpegQuality, &exif);
+ }
+
+ if (res != OK) {
+ ALOGE("%s: Error trying to encode JPEG/R: %s (%d)", __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ actualJpegRSize = jpegR.length;
+
+ size_t finalJpegRSize = actualJpegRSize + sizeof(CameraBlob);
+ if (finalJpegRSize > maxJpegRBufferSize) {
+ ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
+ outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+ return NO_MEMORY;
+ }
+
+ res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
+ getStreamId(), strerror(-res), res);
+ return res;
+ }
+
+ ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegRSize);
+ uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
+ (gb->getWidth() - sizeof(CameraBlob));
+ CameraBlob blobHeader = {
+ .blobId = CameraBlobId::JPEG,
+ .blobSizeBytes = static_cast<int32_t>(actualJpegRSize)
+ };
+ memcpy(header, &blobHeader, sizeof(CameraBlob));
+
+ if (inputFrame.requestTimeNs != -1) {
+ auto captureLatency = ns2ms(systemTime() - inputFrame.requestTimeNs);
+ mSessionStatsBuilder.incCounter(mP010StreamId, false /*dropped*/, captureLatency);
+ if (mFirstRequestLatency == -1) {
+ mFirstRequestLatency = captureLatency;
+ }
+ }
+ outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+
+ return res;
+}
+
+void JpegRCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
+ if (inputFrame == nullptr) {
+ return;
+ }
+
+ if (inputFrame->p010Buffer.data != nullptr) {
+ mP010Consumer->unlockBuffer(inputFrame->p010Buffer);
+ inputFrame->p010Buffer.data = nullptr;
+ mP010BufferAcquired = false;
+ }
+
+ if (inputFrame->jpegBuffer.data != nullptr) {
+ mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
+ inputFrame->jpegBuffer.data = nullptr;
+ mBlobBufferAcquired = false;
+ }
+
+ if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
+ //TODO: Figure out correct requestId
+ notifyError(inputFrame->frameNumber, -1 /*requestId*/);
+ inputFrame->errorNotified = true;
+ mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/, 0 /*captureLatencyMs*/);
+ }
+}
+
+void JpegRCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
+ auto it = mPendingInputFrames.begin();
+ while (it != mPendingInputFrames.end()) {
+ if (it->first <= currentTs) {
+ releaseInputFrameLocked(&it->second);
+ it = mPendingInputFrames.erase(it);
+ } else {
+ it++;
+ }
+ }
+}
+
+bool JpegRCompositeStream::threadLoop() {
+ int64_t currentTs = INT64_MAX;
+ bool newInputAvailable = false;
+
+ {
+ Mutex::Autolock l(mMutex);
+
+ if (mErrorState) {
+ // In case we landed in error state, return any pending buffers and
+ // halt all further processing.
+ compilePendingInputLocked();
+ releaseInputFramesLocked(currentTs);
+ return false;
+ }
+
+ while (!newInputAvailable) {
+ compilePendingInputLocked();
+ newInputAvailable = getNextReadyInputLocked(&currentTs);
+ if (!newInputAvailable) {
+ auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
+ if (failingFrameNumber >= 0) {
+ // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
+ // possible for two internal stream buffers to fail. In such scenario the
+ // composite stream should notify the client about a stream buffer error only
+ // once and this information is kept within 'errorNotified'.
+ // Any present failed input frames will be removed on a subsequent call to
+ // 'releaseInputFramesLocked()'.
+ releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
+ currentTs = INT64_MAX;
+ }
+
+ auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
+ if (ret == TIMED_OUT) {
+ return true;
+ } else if (ret != OK) {
+ ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ return false;
+ }
+ }
+ }
+ }
+
+ auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
+ Mutex::Autolock l(mMutex);
+ if (res != OK) {
+ ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__,
+ currentTs, strerror(-res), res);
+ mPendingInputFrames[currentTs].error = true;
+ }
+
+ releaseInputFramesLocked(currentTs);
+
+ return true;
+}
+
+bool JpegRCompositeStream::isJpegRCompositeStream(const sp<Surface> &surface) {
+ if (CameraProviderManager::kFrameworkJpegRDisabled) {
+ return false;
+ }
+ ANativeWindow *anw = surface.get();
+ status_t err;
+ int format;
+ if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+ ALOGE("%s: Failed to query Surface format: %s (%d)", __FUNCTION__, strerror(-err),
+ err);
+ return false;
+ }
+
+ int dataspace;
+ if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
+ ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-err),
+ err);
+ return false;
+ }
+
+ if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == static_cast<int>(kJpegRDataSpace))) {
+ return true;
+ }
+
+ return false;
+}
+
+void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
+ int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
+ if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
+ return;
+ }
+
+ switch (dynamicProfile) {
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+ *dynamicRange = dynamicProfile;
+ *dataSpace = HAL_DATASPACE_BT2020_ITU_PQ;
+ break;
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
+ *dynamicRange = dynamicProfile;
+ *dataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+ break;
+ default:
+ *dynamicRange = kP010DefaultDynamicRange;
+ *dataSpace = kP010DefaultDataSpace;
+ }
+
+}
+
+status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
+ camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
+ const std::unordered_set<int32_t> &sensorPixelModesUsed,
+ std::vector<int> *surfaceIds,
+ int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
+ sp<CameraDeviceBase> device = mDevice.promote();
+ if (!device.get()) {
+ ALOGE("%s: Invalid camera device!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ deriveDynamicRangeAndDataspace(dynamicProfile, &mP010DynamicRange, &mP010DataSpace);
+ mSupportInternalJpeg = CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+ mStaticInfo, mP010DynamicRange,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mP010Consumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
+ mP010Consumer->setFrameAvailableListener(this);
+ mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
+ mP010Surface = new Surface(producer);
+
+ auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
+ static_cast<android_dataspace>(mP010DataSpace), rotation,
+ id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+ camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
+ GRALLOC_USAGE_SW_READ_OFTEN, mP010DynamicRange, streamUseCase,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT, OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED, useReadoutTimestamp);
+ if (ret == OK) {
+ mP010StreamId = *id;
+ mP010SurfaceId = (*surfaceIds)[0];
+ mOutputSurface = consumers[0];
+ } else {
+ return ret;
+ }
+
+ if (mSupportInternalJpeg) {
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+ mBlobConsumer->setFrameAvailableListener(this);
+ mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
+ mBlobSurface = new Surface(producer);
+ std::vector<int> blobSurfaceId;
+ ret = device->createStream(mBlobSurface, width, height, format,
+ kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
+ &blobSurfaceId,
+ /*streamSetI*/ camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ /*isShared*/ false,
+ /*isMultiResolution*/ false,
+ /*consumerUsage*/ GRALLOC_USAGE_SW_READ_OFTEN,
+ /*dynamicProfile*/ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ streamUseCase,
+ /*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ /*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
+ /*colorSpace*/ colorSpace, useReadoutTimestamp);
+ if (ret == OK) {
+ mBlobSurfaceId = blobSurfaceId[0];
+ } else {
+ return ret;
+ }
+
+ ret = registerCompositeStreamListener(mBlobStreamId);
+ if (ret != OK) {
+ ALOGE("%s: Failed to register jpeg stream listener!", __FUNCTION__);
+ return ret;
+ }
+ }
+
+ ret = registerCompositeStreamListener(getStreamId());
+ if (ret != OK) {
+ ALOGE("%s: Failed to register P010 stream listener!", __FUNCTION__);
+ return ret;
+ }
+
+ mOutputColorSpace = colorSpace;
+ mOutputStreamUseCase = streamUseCase;
+ mBlobWidth = width;
+ mBlobHeight = height;
+
+ return ret;
+}
+
+status_t JpegRCompositeStream::configureStream() {
+ if (isRunning()) {
+ // Processing thread is already running, nothing more to do.
+ return NO_ERROR;
+ }
+
+ if (mOutputSurface.get() == nullptr) {
+ ALOGE("%s: No valid output surface set!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+ if (res != OK) {
+ ALOGE("%s: Unable to connect to native window for stream %d",
+ __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
+ != OK) {
+ ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
+ mP010StreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_usage(mOutputSurface.get(),
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN)) != OK) {
+ ALOGE("%s: Unable to configure stream buffer usage for stream %d", __FUNCTION__,
+ mP010StreamId);
+ return res;
+ }
+
+ int maxProducerBuffers;
+ ANativeWindow *anw = mP010Surface.get();
+ if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
+ ALOGE("%s: Unable to query consumer undequeued"
+ " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ ANativeWindow *anwConsumer = mOutputSurface.get();
+ int maxConsumerBuffers;
+ if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+ &maxConsumerBuffers)) != OK) {
+ ALOGE("%s: Unable to query consumer undequeued"
+ " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_buffer_count(
+ anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
+ ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ mSessionStatsBuilder.addStream(mP010StreamId);
+
+ run("JpegRCompositeStreamProc");
+
+ return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::deleteInternalStreams() {
+ // The 'CameraDeviceClient' parent will delete the P010 stream
+ requestExit();
+
+ auto ret = join();
+ if (ret != OK) {
+ ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ }
+
+ if (mBlobStreamId >= 0) {
+ // Camera devices may not be valid after switching to offline mode.
+ // In this case, all offline streams including internal composite streams
+ // are managed and released by the offline session.
+ sp<CameraDeviceBase> device = mDevice.promote();
+ if (device.get() != nullptr) {
+ ret = device->deleteStream(mBlobStreamId);
+ }
+
+ mBlobStreamId = -1;
+ }
+
+ if (mOutputSurface != nullptr) {
+ mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
+ mOutputSurface.clear();
+ }
+
+ return ret;
+}
+
+void JpegRCompositeStream::onFrameAvailable(const BufferItem& item) {
+ if (item.mDataSpace == kJpegDataSpace) {
+ ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!",
+ __func__, ns2ms(item.mTimestamp));
+
+ Mutex::Autolock l(mMutex);
+ if (!mErrorState) {
+ mInputJpegBuffers.push_back(item.mTimestamp);
+ mInputReadyCondition.signal();
+ }
+ } else if (item.mDataSpace == static_cast<android_dataspace_t>(mP010DataSpace)) {
+ ALOGV("%s: P010 buffer with ts: %" PRIu64 " ms. arrived!", __func__,
+ ns2ms(item.mTimestamp));
+
+ Mutex::Autolock l(mMutex);
+ if (!mErrorState) {
+ mInputP010Buffers.push_back(item.mTimestamp);
+ mInputReadyCondition.signal();
+ }
+ } else {
+ ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
+ }
+}
+
+status_t JpegRCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
+ Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
+ if (outputStreamIds == nullptr) {
+ return BAD_VALUE;
+ }
+
+ if (outSurfaceMap->find(mP010StreamId) == outSurfaceMap->end()) {
+ outputStreamIds->push_back(mP010StreamId);
+ }
+ (*outSurfaceMap)[mP010StreamId].push_back(mP010SurfaceId);
+
+ if (mSupportInternalJpeg) {
+ if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
+ outputStreamIds->push_back(mBlobStreamId);
+ }
+ (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);
+ }
+
+ if (currentStreamId != nullptr) {
+ *currentStreamId = mP010StreamId;
+ }
+
+ return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::insertCompositeStreamIds(
+ std::vector<int32_t>* compositeStreamIds /*out*/) {
+ if (compositeStreamIds == nullptr) {
+ return BAD_VALUE;
+ }
+
+ compositeStreamIds->push_back(mP010StreamId);
+ if (mSupportInternalJpeg) {
+ compositeStreamIds->push_back(mBlobStreamId);
+ }
+
+ return OK;
+}
+
+void JpegRCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
+ // Processing can continue even in case of result errors.
+ // At the moment Jpeg/R composite stream processing relies mainly on static camera
+ // characteristics data. The actual result data can be used for the jpeg quality but
+ // in case it is absent we can default to maximum.
+ eraseResult(resultExtras.frameNumber);
+ mSessionStatsBuilder.incResultCounter(true /*dropped*/);
+}
+
+bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
+ bool ret = false;
+ // Buffer errors concerning internal composite streams should not be directly visible to
+ // camera clients. They must only receive a single buffer error with the public composite
+ // stream id.
+ if ((resultExtras.errorStreamId == mP010StreamId) ||
+ (resultExtras.errorStreamId == mBlobStreamId)) {
+ flagAnErrorFrameNumber(resultExtras.frameNumber);
+ ret = true;
+ }
+
+ return ret;
+}
+
+status_t JpegRCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+ const CameraMetadata& staticInfo,
+ std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
+ if (compositeOutput == nullptr) {
+ return BAD_VALUE;
+ }
+
+ int64_t dynamicRange, dataSpace;
+ deriveDynamicRangeAndDataspace(streamInfo.dynamicRangeProfile, &dynamicRange, &dataSpace);
+
+ compositeOutput->clear();
+ compositeOutput->push_back({});
+ (*compositeOutput)[0].width = streamInfo.width;
+ (*compositeOutput)[0].height = streamInfo.height;
+ (*compositeOutput)[0].format = kP010PixelFormat;
+ (*compositeOutput)[0].dataSpace = static_cast<android_dataspace_t>(dataSpace);
+ (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ (*compositeOutput)[0].dynamicRangeProfile = dynamicRange;
+ (*compositeOutput)[0].colorSpace =
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+
+ if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
+ streamInfo.dynamicRangeProfile,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
+ compositeOutput->push_back({});
+ (*compositeOutput)[1].width = streamInfo.width;
+ (*compositeOutput)[1].height = streamInfo.height;
+ (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
+ (*compositeOutput)[1].dataSpace = kJpegDataSpace;
+ (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ (*compositeOutput)[1].dynamicRangeProfile =
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ (*compositeOutput)[1].colorSpace = streamInfo.colorSpace;
+ }
+
+ return NO_ERROR;
+}
+
+void JpegRCompositeStream::getStreamStats(hardware::CameraStreamStats* streamStats) {
+ if ((streamStats == nullptr) || (mFirstRequestLatency != -1)) {
+ return;
+ }
+
+ bool deviceError;
+ std::map<int, StreamStats> stats;
+ mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
+ &deviceError, &stats);
+ if (stats.find(mP010StreamId) != stats.end()) {
+ streamStats->mWidth = mBlobWidth;
+ streamStats->mHeight = mBlobHeight;
+ streamStats->mFormat = HAL_PIXEL_FORMAT_BLOB;
+ streamStats->mDataSpace = static_cast<int>(kJpegRDataSpace);
+ streamStats->mDynamicRangeProfile = mP010DynamicRange;
+ streamStats->mColorSpace = mOutputColorSpace;
+ streamStats->mStreamUseCase = mOutputStreamUseCase;
+ streamStats->mStartLatencyMs = mFirstRequestLatency;
+ streamStats->mHistogramType = hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
+ streamStats->mHistogramBins.assign(stats[mP010StreamId].mCaptureLatencyBins.begin(),
+ stats[mP010StreamId].mCaptureLatencyBins.end());
+ streamStats->mHistogramCounts.assign(stats[mP010StreamId].mCaptureLatencyHistogram.begin(),
+ stats[mP010StreamId].mCaptureLatencyHistogram.end());
+ }
+}
+
+}; // namespace camera3
+}; // namespace android
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
new file mode 100644
index 0000000..016d57c
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+#define ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+
+#include <gui/CpuConsumer.h>
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "system/graphics-base-v1.1.h"
+
+#include "api1/client2/JpegProcessor.h"
+#include "utils/SessionStatsBuilder.h"
+
+#include "CompositeStream.h"
+
+namespace android {
+
+class CameraDeviceClient;
+class CameraMetadata;
+class Surface;
+
+namespace camera3 {
+
+class JpegRCompositeStream : public CompositeStream, public Thread,
+ public CpuConsumer::FrameAvailableListener {
+
+public:
+ JpegRCompositeStream(sp<CameraDeviceBase> device,
+ wp<hardware::camera2::ICameraDeviceCallbacks> cb);
+ ~JpegRCompositeStream() override;
+
+ static bool isJpegRCompositeStream(const sp<Surface> &surface);
+
+ // CompositeStream overrides
+ status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
+ camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
+ const std::unordered_set<int32_t> &sensorPixelModesUsed,
+ std::vector<int> *surfaceIds,
+ int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
+ status_t deleteInternalStreams() override;
+ status_t configureStream() override;
+ status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
+ int32_t* /*out*/currentStreamId) override;
+ status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
+ int getStreamId() override { return mP010StreamId; }
+
+ // CpuConsumer listener implementation
+ void onFrameAvailable(const BufferItem& item) override;
+
+ // Return stream information about the internal camera streams
+ static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+ const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats* streamStats) override;
+
+protected:
+
+ bool threadLoop() override;
+ bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
+ void onResultError(const CaptureResultExtras& resultExtras) override;
+
+private:
+ struct InputFrame {
+ CpuConsumer::LockedBuffer p010Buffer;
+ CpuConsumer::LockedBuffer jpegBuffer;
+ CameraMetadata result;
+ bool error;
+ bool errorNotified;
+ int64_t frameNumber;
+ int32_t requestId;
+ nsecs_t requestTimeNs;
+
+ InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1),
+ requestTimeNs(-1) { }
+ };
+
+ status_t processInputFrame(nsecs_t ts, const InputFrame &inputFrame);
+
+ // Buffer/Results handling
+ void compilePendingInputLocked();
+ void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
+ void releaseInputFramesLocked(int64_t currentTs);
+
+ // Find first complete and valid frame with smallest timestamp
+ bool getNextReadyInputLocked(int64_t *currentTs /*inout*/);
+
+ // Find next failing frame number with smallest timestamp and return respective frame number
+ int64_t getNextFailingInputLocked(int64_t *currentTs /*inout*/);
+
+ static void deriveDynamicRangeAndDataspace(int64_t dynamicProfile, int64_t* /*out*/dynamicRange,
+ int64_t* /*out*/dataSpace);
+
+ static const nsecs_t kWaitDuration = 10000000; // 10 ms
+ static const auto kP010PixelFormat = HAL_PIXEL_FORMAT_YCBCR_P010;
+ static const auto kP010DefaultDataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+ static const auto kP010DefaultDynamicRange =
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+ static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
+ static const auto kJpegRDataSpace =
+ aidl::android::hardware::graphics::common::Dataspace::JPEG_R;
+
+ bool mSupportInternalJpeg = false;
+ int64_t mP010DataSpace = HAL_DATASPACE_BT2020_HLG;
+ int64_t mP010DynamicRange =
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+ int mBlobStreamId, mBlobSurfaceId, mP010StreamId, mP010SurfaceId;
+ size_t mBlobWidth, mBlobHeight;
+ sp<CpuConsumer> mBlobConsumer, mP010Consumer;
+ bool mP010BufferAcquired, mBlobBufferAcquired;
+ sp<Surface> mP010Surface, mBlobSurface, mOutputSurface;
+ int32_t mOutputColorSpace;
+ int64_t mOutputStreamUseCase;
+ nsecs_t mFirstRequestLatency;
+ sp<ProducerListener> mProducerListener;
+
+ ssize_t mMaxJpegBufferSize;
+ ssize_t mUHRMaxJpegBufferSize;
+
+ camera3::Size mDefaultMaxJpegSize;
+ camera3::Size mUHRMaxJpegSize;
+
+ // Keep all incoming P010 buffer timestamps pending further processing.
+ std::vector<int64_t> mInputP010Buffers;
+
+ // Keep all incoming Jpeg/Blob buffer timestamps pending further processing.
+ std::vector<int64_t> mInputJpegBuffers;
+
+ // Map of all input frames pending further processing.
+ std::unordered_map<int64_t, InputFrame> mPendingInputFrames;
+
+ const CameraMetadata mStaticInfo;
+
+ SessionStatsBuilder mSessionStatsBuilder;
+};
+
+}; //namespace camera3
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 0fe15a8..a54ba9b 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -38,7 +38,6 @@
#include "device3/aidl/AidlCamera3Device.h"
#include "device3/hidl/HidlCamera3Device.h"
#include "utils/CameraThreadState.h"
-#include "utils/CameraServiceProxyWrapper.h"
namespace android {
@@ -50,6 +49,7 @@
Camera2ClientBase<TClientBase>::Camera2ClientBase(
const sp<CameraService>& cameraService,
const sp<TCamCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -67,6 +67,7 @@
clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
clientUid, servicePid, overrideToPortrait),
mSharedCameraCallbacks(remoteCallback),
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
mDeviceActive(false), mApi1CameraId(api1CameraId)
{
ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.c_str(),
@@ -104,11 +105,6 @@
TClientBase::mCameraIdStr.c_str());
status_t res;
- // Verify ops permissions
- res = TClientBase::startCameraOps();
- if (res != OK) {
- return res;
- }
IPCTransport providerTransport = IPCTransport::INVALID;
res = providerPtr->getCameraIdIPCTransport(TClientBase::mCameraIdStr,
&providerTransport);
@@ -118,12 +114,14 @@
switch (providerTransport) {
case IPCTransport::HIDL:
mDevice =
- new HidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
+ new HidlCamera3Device(mCameraServiceProxyWrapper,
+ TClientBase::mCameraIdStr, mOverrideForPerfClass,
TClientBase::mOverrideToPortrait, mLegacyClient);
break;
case IPCTransport::AIDL:
mDevice =
- new AidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
+ new AidlCamera3Device(mCameraServiceProxyWrapper,
+ TClientBase::mCameraIdStr, mOverrideForPerfClass,
TClientBase::mOverrideToPortrait, mLegacyClient);
break;
default:
@@ -144,12 +142,30 @@
return res;
}
+ // Verify ops permissions
+ res = TClientBase::startCameraOps();
+ if (res != OK) {
+ TClientBase::finishCameraOps();
+ return res;
+ }
+
wp<NotificationListener> weakThis(this);
res = mDevice->setNotifyCallback(weakThis);
+ if (res != OK) {
+ ALOGE("%s: Camera %s: Unable to set notify callback: %s (%d)",
+ __FUNCTION__, TClientBase::mCameraIdStr.c_str(), strerror(-res), res);
+ return res;
+ }
/** Start watchdog thread */
- mCameraServiceWatchdog = new CameraServiceWatchdog();
- mCameraServiceWatchdog->run("Camera2ClientBaseWatchdog");
+ mCameraServiceWatchdog = new CameraServiceWatchdog(TClientBase::mCameraIdStr,
+ mCameraServiceProxyWrapper);
+ res = mCameraServiceWatchdog->run("Camera2ClientBaseWatchdog");
+ if (res != OK) {
+ ALOGE("%s: Unable to start camera service watchdog thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
return OK;
}
@@ -167,8 +183,8 @@
mCameraServiceWatchdog.clear();
}
- ALOGI("Closed Camera %s. Client was: %s (PID %d, UID %u)",
- TClientBase::mCameraIdStr.c_str(),
+ ALOGI("%s: Client object's dtor for Camera Id %s completed. Client was: %s (PID %d, UID %u)",
+ __FUNCTION__, TClientBase::mCameraIdStr.c_str(),
TClientBase::mClientPackageName.c_str(),
mInitialClientPid, TClientBase::mClientUid);
}
@@ -374,7 +390,7 @@
TClientBase::mCameraIdStr.c_str(), res);
return res;
}
- CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr, maxPreviewFps);
+ mCameraServiceProxyWrapper->logActive(TClientBase::mCameraIdStr, maxPreviewFps);
}
mDeviceActive = true;
@@ -393,7 +409,7 @@
ALOGE("%s: Camera %s: Error finishing streaming ops: %d", __FUNCTION__,
TClientBase::mCameraIdStr.c_str(), res);
}
- CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
+ mCameraServiceProxyWrapper->logIdle(TClientBase::mCameraIdStr,
requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
streamStats);
}
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 2ad2367..30c763d 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -19,6 +19,7 @@
#include "common/CameraDeviceBase.h"
#include "camera/CaptureResult.h"
+#include "utils/CameraServiceProxyWrapper.h"
#include "CameraServiceWatchdog.h"
namespace android {
@@ -48,6 +49,7 @@
// TODO: too many params, move into a ClientArgs<T>
Camera2ClientBase(const sp<CameraService>& cameraService,
const sp<TCamCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -142,6 +144,7 @@
pid_t mInitialClientPid;
bool mOverrideForPerfClass = false;
bool mLegacyClient = false;
+ std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
virtual sp<IBinder> asBinderWrapper() {
return IInterface::asBinder(this);
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index be38b9f..017da0f 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -113,6 +113,8 @@
*/
virtual const CameraMetadata& infoPhysical(const std::string& physicalId) const = 0;
+ virtual bool isCompositeJpegRDisabled() const { return false; };
+
struct PhysicalCameraSettings {
std::string cameraId;
CameraMetadata metadata;
@@ -126,6 +128,9 @@
int32_t mOriginalTestPatternMode = 0;
int32_t mOriginalTestPatternData[4] = {};
+ // Original value of SETTINGS_OVERRIDE so that they can be restored if
+ // camera service isn't overwriting the app value.
+ int32_t mOriginalSettingsOverride = ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
};
typedef List<PhysicalCameraSettings> PhysicalCameraSettingsList;
@@ -192,7 +197,10 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
+ = 0;
/**
* Create an output stream of the requested size, format, rotation and
@@ -213,7 +221,10 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
+ = 0;
/**
* Create an input stream of width, height, and format.
@@ -235,11 +246,13 @@
bool dataSpaceOverridden;
android_dataspace originalDataSpace;
int64_t dynamicRangeProfile;
+ int32_t colorSpace;
StreamInfo() : width(0), height(0), format(0), formatOverridden(false), originalFormat(0),
dataSpace(HAL_DATASPACE_UNKNOWN), dataSpaceOverridden(false),
originalDataSpace(HAL_DATASPACE_UNKNOWN),
- dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD){}
+ dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
/**
* Check whether the format matches the current or the original one in case
* it got overridden.
@@ -434,6 +447,14 @@
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) = 0;
/**
+ * Set the current behavior for the AUTOFRAMING control when in AUTO.
+ *
+ * The value must be one of the AUTOFRAMING_* values besides AUTO.
+ */
+ virtual status_t setAutoframingAutoBehavior(
+ camera_metadata_enum_android_control_autoframing_t autoframingValue) = 0;
+
+ /**
* Whether camera muting (producing black-only output) is supported.
*
* Calling setCameraMute(true) when this returns false will return an
@@ -449,6 +470,14 @@
virtual status_t setCameraMute(bool enabled) = 0;
/**
+ * Whether the camera device supports zoom override.
+ */
+ virtual bool supportsZoomOverride() = 0;
+
+ // Set/reset zoom override
+ virtual status_t setZoomOverride(int32_t zoomOverride) = 0;
+
+ /**
* Enable/disable camera service watchdog
*/
virtual status_t setCameraServiceWatchdog(bool enabled) = 0;
@@ -459,6 +488,11 @@
virtual wp<camera3::StatusTracker> getStatusTracker() = 0;
/**
+ * If the device is in error state
+ */
+ virtual bool hasDeviceError() = 0;
+
+ /**
* Set bitmask for image dump flag
*/
void setImageDumpMask(int mask) { mImageDumpMask = mask; }
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 230d5b6..23051ef 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include "system/graphics-base-v1.0.h"
+#include "system/graphics-base-v1.1.h"
#define LOG_TAG "CameraProviderManager"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
@@ -40,6 +42,7 @@
#include <cutils/properties.h>
#include <hwbinder/IPCThreadState.h>
#include <utils/Trace.h>
+#include <ui/PublicFormat.h>
#include <camera/StringUtils.h>
#include "api2/HeicCompositeStream.h"
@@ -60,6 +63,8 @@
} // anonymous namespace
const float CameraProviderManager::kDepthARTolerance = .1f;
+const bool CameraProviderManager::kFrameworkJpegRDisabled =
+ property_get_bool("ro.camera.disableJpegR", false);
CameraProviderManager::HidlServiceInteractionProxyImpl
CameraProviderManager::sHidlServiceInteractionProxy{};
@@ -312,6 +317,18 @@
return deviceInfo->supportNativeZoomRatio();
}
+bool CameraProviderManager::isCompositeJpegRDisabled(const std::string &id) const {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+ return isCompositeJpegRDisabledLocked(id);
+}
+
+bool CameraProviderManager::isCompositeJpegRDisabledLocked(const std::string &id) const {
+ auto deviceInfo = findDeviceInfoLocked(id);
+ if (deviceInfo == nullptr) return false;
+
+ return deviceInfo->isCompositeJpegRDisabled();
+}
+
status_t CameraProviderManager::getResourceCost(const std::string &id,
CameraResourceCost* cost) const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
@@ -1000,19 +1017,21 @@
auto availableDurations = ch.find(tag);
if (availableDurations.count > 0) {
// Duration entry contains 4 elements (format, width, height, duration)
- for (size_t i = 0; i < availableDurations.count; i += 4) {
- for (const auto& size : sizes) {
- int64_t width = std::get<0>(size);
- int64_t height = std::get<1>(size);
+ for (const auto& size : sizes) {
+ int64_t width = std::get<0>(size);
+ int64_t height = std::get<1>(size);
+ for (size_t i = 0; i < availableDurations.count; i += 4) {
if ((availableDurations.data.i64[i] == format) &&
(availableDurations.data.i64[i+1] == width) &&
(availableDurations.data.i64[i+2] == height)) {
durations->push_back(availableDurations.data.i64[i+3]);
+ break;
}
}
}
}
}
+
void CameraProviderManager::ProviderInfo::DeviceInfo3::getSupportedDynamicDepthDurations(
const std::vector<int64_t>& depthDurations, const std::vector<int64_t>& blobDurations,
std::vector<int64_t> *dynamicDepthDurations /*out*/) {
@@ -1072,6 +1091,212 @@
}
}
+bool CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+ const CameraMetadata& deviceInfo, int64_t profile, int64_t concurrentProfile) {
+ auto entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ if (entry.count == 0) {
+ return false;
+ }
+
+ const auto it = std::find(entry.data.u8, entry.data.u8 + entry.count,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
+ if (it == entry.data.u8 + entry.count) {
+ return false;
+ }
+
+ entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
+ if (entry.count == 0 || ((entry.count % 3) != 0)) {
+ return false;
+ }
+
+ for (size_t i = 0; i < entry.count; i += 3) {
+ if (entry.data.i64[i] == profile) {
+ if ((entry.data.i64[i+1] == 0) || (entry.data.i64[i+1] & concurrentProfile)) {
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveJpegRTags(bool maxResolution) {
+ if (kFrameworkJpegRDisabled || mCompositeJpegRDisabled) {
+ return OK;
+ }
+
+ const int32_t scalerSizesTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t scalerMinFrameDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+ const int32_t scalerStallDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+
+ const int32_t jpegRSizesTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t jpegRStallDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS, maxResolution);
+ const int32_t jpegRMinFrameDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS, maxResolution);
+
+ auto& c = mCameraCharacteristics;
+ std::vector<int32_t> supportedChTags;
+ auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (chTags.count == 0) {
+ ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
+ auto capabilities = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ if (capabilities.count == 0) {
+ ALOGE("%s: Supported camera capabilities is empty!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ auto end = capabilities.data.u8 + capabilities.count;
+ bool isTenBitOutputSupported = std::find(capabilities.data.u8, end,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) != end;
+ if (!isTenBitOutputSupported) {
+ // No 10-bit support, nothing more to do.
+ return OK;
+ }
+
+ if (!isConcurrentDynamicRangeCaptureSupported(c,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) &&
+ !property_get_bool("ro.camera.enableCompositeAPI0JpegR", false)) {
+ // API0, P010 only Jpeg/R support is meant to be used only as a reference due to possible
+ // impact on quality and performance.
+ // This data path will be turned off by default and individual device builds must enable
+ // 'ro.camera.enableCompositeAPI0JpegR' in order to experiment using it.
+ mCompositeJpegRDisabled = true;
+ return OK;
+ }
+
+ getSupportedSizes(c, scalerSizesTag,
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_BLOB), &supportedBlobSizes);
+ getSupportedSizes(c, scalerSizesTag,
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010), &supportedP010Sizes);
+ auto it = supportedP010Sizes.begin();
+ while (it != supportedP010Sizes.end()) {
+ if (std::find(supportedBlobSizes.begin(), supportedBlobSizes.end(), *it) ==
+ supportedBlobSizes.end()) {
+ it = supportedP010Sizes.erase(it);
+ } else {
+ it++;
+ }
+ }
+ if (supportedP010Sizes.empty()) {
+ // Nothing to do in this case.
+ return OK;
+ }
+
+ std::vector<int32_t> jpegREntries;
+ for (const auto& it : supportedP010Sizes) {
+ int32_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(it)),
+ static_cast<int32_t> (std::get<1>(it)),
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT };
+ jpegREntries.insert(jpegREntries.end(), entry, entry + 4);
+ }
+
+ std::vector<int64_t> blobMinDurations, blobStallDurations;
+ std::vector<int64_t> jpegRMinDurations, jpegRStallDurations;
+
+ // We use the jpeg stall and min frame durations to approximate the respective jpeg/r
+ // durations.
+ getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+ supportedP010Sizes, &blobMinDurations);
+ getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+ supportedP010Sizes, &blobStallDurations);
+ if (blobStallDurations.empty() || blobMinDurations.empty() ||
+ supportedP010Sizes.size() != blobMinDurations.size() ||
+ blobMinDurations.size() != blobStallDurations.size()) {
+ ALOGE("%s: Unexpected number of available blob durations! %zu vs. %zu with "
+ "supportedP010Sizes size: %zu", __FUNCTION__, blobMinDurations.size(),
+ blobStallDurations.size(), supportedP010Sizes.size());
+ return BAD_VALUE;
+ }
+
+ auto itDuration = blobMinDurations.begin();
+ auto itSize = supportedP010Sizes.begin();
+ while (itDuration != blobMinDurations.end()) {
+ int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
+ static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
+ jpegRMinDurations.insert(jpegRMinDurations.end(), entry, entry + 4);
+ itDuration++; itSize++;
+ }
+
+ itDuration = blobStallDurations.begin();
+ itSize = supportedP010Sizes.begin();
+ while (itDuration != blobStallDurations.end()) {
+ int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
+ static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
+ jpegRStallDurations.insert(jpegRStallDurations.end(), entry, entry + 4);
+ itDuration++; itSize++;
+ }
+
+ supportedChTags.reserve(chTags.count + 3);
+ supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+ chTags.data.i32 + chTags.count);
+ supportedChTags.push_back(jpegRSizesTag);
+ supportedChTags.push_back(jpegRMinFrameDurationsTag);
+ supportedChTags.push_back(jpegRStallDurationsTag);
+ c.update(jpegRSizesTag, jpegREntries.data(), jpegREntries.size());
+ c.update(jpegRMinFrameDurationsTag, jpegRMinDurations.data(), jpegRMinDurations.size());
+ c.update(jpegRStallDurationsTag, jpegRStallDurations.data(), jpegRStallDurations.size());
+ c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+ supportedChTags.size());
+
+ auto colorSpaces = c.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+ if (colorSpaces.count > 0 && !maxResolution) {
+ bool displayP3Support = false;
+ int64_t dynamicRange = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ for (size_t i = 0; i < colorSpaces.count; i += 3) {
+ auto colorSpace = colorSpaces.data.i64[i];
+ auto format = colorSpaces.data.i64[i+1];
+ bool formatMatch = (format == static_cast<int64_t>(PublicFormat::JPEG)) ||
+ (format == static_cast<int64_t>(PublicFormat::UNKNOWN));
+ bool colorSpaceMatch =
+ colorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3;
+ if (formatMatch && colorSpaceMatch) {
+ displayP3Support = true;
+ }
+
+ // Jpeg/R will support the same dynamic range profiles as P010
+ if (format == static_cast<int64_t>(PublicFormat::YCBCR_P010)) {
+ dynamicRange |= colorSpaces.data.i64[i+2];
+ }
+ }
+ if (displayP3Support) {
+ std::vector<int64_t> supportedColorSpaces;
+ // Jpeg/R must support the default system as well as the display P3 color space
+ supportedColorSpaces.reserve(colorSpaces.count + 3*2);
+ supportedColorSpaces.insert(supportedColorSpaces.end(), colorSpaces.data.i64,
+ colorSpaces.data.i64 + colorSpaces.count);
+
+ supportedColorSpaces.push_back(static_cast<int64_t>(
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB));
+ supportedColorSpaces.push_back(static_cast<int64_t>(PublicFormat::JPEG_R));
+ supportedColorSpaces.push_back(dynamicRange);
+
+ supportedColorSpaces.push_back(static_cast<int64_t>(
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3));
+ supportedColorSpaces.push_back(static_cast<int64_t>(PublicFormat::JPEG_R));
+ supportedColorSpaces.push_back(dynamicRange);
+ c.update(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+ supportedColorSpaces.data(), supportedColorSpaces.size());
+ }
+ }
+
+ return OK;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags(
bool maxResolution) {
const int32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
@@ -1356,6 +1581,19 @@
return res;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addAutoframingTags() {
+ status_t res = OK;
+ auto& c = mCameraCharacteristics;
+
+ auto availableAutoframingEntry = c.find(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE);
+ if (availableAutoframingEntry.count == 0) {
+ uint8_t defaultAutoframingEntry = ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE;
+ res = c.update(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
+ &defaultAutoframingEntry, 1);
+ }
+ return res;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addPreCorrectionActiveArraySize() {
status_t res = OK;
auto& c = mCameraCharacteristics;
@@ -2302,6 +2540,10 @@
(mDeviceStateOrientationMap.find(newState) != mDeviceStateOrientationMap.end())) {
mCameraCharacteristics.update(ANDROID_SENSOR_ORIENTATION,
&mDeviceStateOrientationMap[newState], 1);
+ if (mCameraCharNoPCOverride.get() != nullptr) {
+ mCameraCharNoPCOverride->update(ANDROID_SENSOR_ORIENTATION,
+ &mDeviceStateOrientationMap[newState], 1);
+ }
}
}
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 98298ea..a2ec576 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -248,6 +248,11 @@
bool supportNativeZoomRatio(const std::string &id) const;
/**
+ * Return true if the camera device has no composite Jpeg/R support.
+ */
+ bool isCompositeJpegRDisabled(const std::string &id) const;
+
+ /**
* Return the resource cost of this camera device
*/
status_t getResourceCost(const std::string &id,
@@ -407,7 +412,11 @@
status_t notifyUsbDeviceEvent(int32_t eventId, const std::string &usbDeviceId);
+ static bool isConcurrentDynamicRangeCaptureSupported(const CameraMetadata& deviceInfo,
+ int64_t profile, int64_t concurrentProfile);
+
static const float kDepthARTolerance;
+ static const bool kFrameworkJpegRDisabled;
private:
// All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
mutable std::mutex mInterfaceMutex;
@@ -564,6 +573,7 @@
bool hasFlashUnit() const { return mHasFlashUnit; }
bool supportNativeZoomRatio() const { return mSupportNativeZoomRatio; }
+ bool isCompositeJpegRDisabled() const { return mCompositeJpegRDisabled; }
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
@@ -605,13 +615,14 @@
mParentProvider(parentProvider), mTorchStrengthLevel(0),
mTorchMaximumStrengthLevel(0), mTorchDefaultStrengthLevel(0),
mHasFlashUnit(false), mSupportNativeZoomRatio(false),
- mPublicCameraIds(publicCameraIds) {}
+ mPublicCameraIds(publicCameraIds), mCompositeJpegRDisabled(false) {}
virtual ~DeviceInfo() {}
protected:
bool mHasFlashUnit; // const after constructor
bool mSupportNativeZoomRatio; // const after constructor
const std::vector<std::string>& mPublicCameraIds;
+ bool mCompositeJpegRDisabled;
};
std::vector<std::unique_ptr<DeviceInfo>> mDevices;
std::unordered_set<std::string> mUniqueCameraIds;
@@ -673,7 +684,9 @@
status_t fixupTorchStrengthTags();
status_t addDynamicDepthTags(bool maxResolution = false);
status_t deriveHeicTags(bool maxResolution = false);
+ status_t deriveJpegRTags(bool maxResolution = false);
status_t addRotateCropTags();
+ status_t addAutoframingTags();
status_t addPreCorrectionActiveArraySize();
status_t addReadoutTimestampTag(bool readoutTimestampSupported = true);
@@ -798,6 +811,8 @@
// No guarantees on the order of traversal
ProviderInfo::DeviceInfo* findDeviceInfoLocked(const std::string& id) const;
+ bool isCompositeJpegRDisabledLocked(const std::string &id) const;
+
// Map external providers to USB devices in order to handle USB hotplug
// events for lazy HALs
std::pair<std::vector<std::string>, sp<ProviderInfo>>
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 06d97ce..5e79d6b 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -483,6 +483,9 @@
}
}
+ mCompositeJpegRDisabled = mCameraCharacteristics.exists(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS);
+
mSystemCameraKind = getSystemCameraKind();
status_t res = fixupMonochromeTags();
@@ -501,8 +504,13 @@
ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
-
- if (camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+ res = deriveJpegRTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ using camera3::SessionConfigurationUtils::supportsUltraHighResolutionCapture;
+ if (supportsUltraHighResolutionCapture(mCameraCharacteristics)) {
status_t status = addDynamicDepthTags(/*maxResolution*/true);
if (OK != status) {
ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
@@ -514,6 +522,12 @@
ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for"
"maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
}
+
+ status = deriveJpegRTags(/*maxResolution*/true);
+ if (OK != status) {
+ ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities for"
+ "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
+ }
}
res = addRotateCropTags();
@@ -521,6 +535,11 @@
ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
strerror(-res), res);
}
+ res = addAutoframingTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to add default AUTOFRAMING tags: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ }
res = addPreCorrectionActiveArraySize();
if (OK != res) {
ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
@@ -550,6 +569,11 @@
"ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL tags: %s (%d)", __FUNCTION__,
strerror(-res), res);
}
+
+ // b/247038031: In case of system_server crash, camera_server is
+ // restarted as well. If flashlight is turned on before the crash, it
+ // may be stuck to be on. As a workaround, set torch mode to be OFF.
+ interface->setTorchMode(false);
} else {
mHasFlashUnit = false;
}
@@ -711,8 +735,8 @@
camera::device::StreamConfiguration streamConfiguration;
bool earlyExit = false;
auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
- mId, mCameraCharacteristics, getMetadata, mPhysicalIds,
- streamConfiguration, overrideForPerfClass, &earlyExit);
+ mId, mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
+ mPhysicalIds, streamConfiguration, overrideForPerfClass, &earlyExit);
if (!bRes.isOk()) {
return UNKNOWN_ERROR;
@@ -777,7 +801,8 @@
bStatus =
SessionConfigurationUtils::convertToHALStreamCombination(
cameraIdAndSessionConfig.mSessionConfiguration,
- cameraId, deviceInfo, getMetadata,
+ cameraId, deviceInfo,
+ mManager->isCompositeJpegRDisabledLocked(cameraId), getMetadata,
physicalCameraIds, streamConfiguration,
overrideForPerfClass, &shouldExit);
if (!bStatus.isOk()) {
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 3b501dc..bf7a471 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -624,7 +624,7 @@
__FUNCTION__, strerror(-res), res);
}
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(mCameraCharacteristics)) {
status_t status = addDynamicDepthTags(/*maxResolution*/true);
if (OK != status) {
ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
@@ -643,6 +643,11 @@
ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
strerror(-res), res);
}
+ res = addAutoframingTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to add default AUTOFRAMING tags: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ }
res = addPreCorrectionActiveArraySize();
if (OK != res) {
ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 3f380ea..a0e2778 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -54,18 +54,19 @@
#include <android/hardware/camera/device/3.7/ICameraInjectionSession.h>
#include <android/hardware/camera2/ICameraDeviceUser.h>
-#include "utils/CameraTraces.h"
-#include "mediautils/SchedulingPolicyService.h"
-#include "device3/Camera3Device.h"
-#include "device3/Camera3OutputStream.h"
-#include "device3/Camera3InputStream.h"
-#include "device3/Camera3FakeStream.h"
-#include "device3/Camera3SharedOutputStream.h"
#include "CameraService.h"
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "aidl/AidlUtils.h"
+#include "device3/Camera3Device.h"
+#include "device3/Camera3FakeStream.h"
+#include "device3/Camera3InputStream.h"
+#include "device3/Camera3OutputStream.h"
+#include "device3/Camera3SharedOutputStream.h"
+#include "mediautils/SchedulingPolicyService.h"
#include "utils/CameraThreadState.h"
+#include "utils/CameraTraces.h"
#include "utils/SessionConfigurationUtils.h"
#include "utils/TraceHFR.h"
-#include "utils/CameraServiceProxyWrapper.h"
#include <algorithm>
#include <tuple>
@@ -75,12 +76,15 @@
namespace android {
-Camera3Device::Camera3Device(const std::string &id, bool overrideForPerfClass, bool overrideToPortrait,
+Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ const std::string &id, bool overrideForPerfClass, bool overrideToPortrait,
bool legacyClient):
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
mId(id),
mLegacyClient(legacyClient),
mOperatingMode(NO_MODE),
mIsConstrainedHighSpeedConfiguration(false),
+ mIsCompositeJpegRDisabled(false),
mStatus(STATUS_UNINITIALIZED),
mStatusWaiters(0),
mUsePartialResult(false),
@@ -101,6 +105,8 @@
mOverrideToPortrait(overrideToPortrait),
mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
mComposerOutput(false),
+ mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
+ mSettingsOverride(-1),
mActivePhysicalId("")
{
ATRACE_CALL();
@@ -170,10 +176,21 @@
}
}
+ camera_metadata_entry_t availableSettingsOverrides = mDeviceInfo.find(
+ ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES);
+ for (size_t i = 0; i < availableSettingsOverrides.count; i++) {
+ if (availableSettingsOverrides.data.i32[i] ==
+ ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
+ mSupportZoomOverride = true;
+ break;
+ }
+ }
+
/** Start up request queue thread */
mRequestThread = createNewRequestThread(
this, mStatusTracker, mInterface, sessionParamKeys,
- mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait);
+ mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait,
+ mSupportZoomOverride);
res = mRequestThread->run((std::string("C3Dev-") + mId + "-ReqQueue").c_str());
if (res != OK) {
SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -221,7 +238,7 @@
mZoomRatioMappers[mId] = ZoomRatioMapper(&mDeviceInfo,
mSupportNativeZoomRatio, usePrecorrectArray);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(mDeviceInfo)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(mDeviceInfo)) {
mUHRCropAndMeteringRegionMappers[mId] =
UHRCropAndMeteringRegionMapper(mDeviceInfo, usePrecorrectArray);
}
@@ -234,7 +251,7 @@
mInjectionMethods = createCamera3DeviceInjectionMethods(this);
/** Start watchdog thread */
- mCameraServiceWatchdog = new CameraServiceWatchdog();
+ mCameraServiceWatchdog = new CameraServiceWatchdog(mId, mCameraServiceProxyWrapper);
res = mCameraServiceWatchdog->run("CameraServiceWatchdog");
if (res != OK) {
SET_ERR_L("Unable to start camera service watchdog thread: %s (%d)",
@@ -251,113 +268,110 @@
status_t Camera3Device::disconnectImpl() {
ATRACE_CALL();
+ Mutex::Autolock il(mInterfaceLock);
+
ALOGI("%s: E", __FUNCTION__);
status_t res = OK;
std::vector<wp<Camera3StreamInterface>> streams;
+ nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
{
- Mutex::Autolock il(mInterfaceLock);
- nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
- {
- Mutex::Autolock l(mLock);
- if (mStatus == STATUS_UNINITIALIZED) return res;
+ Mutex::Autolock l(mLock);
+ if (mStatus == STATUS_UNINITIALIZED) return res;
- if (mRequestThread != NULL) {
- if (mStatus == STATUS_ACTIVE || mStatus == STATUS_ERROR) {
- res = mRequestThread->clear();
+ if (mRequestThread != NULL) {
+ if (mStatus == STATUS_ACTIVE || mStatus == STATUS_ERROR) {
+ res = mRequestThread->clear();
+ if (res != OK) {
+ SET_ERR_L("Can't stop streaming");
+ // Continue to close device even in case of error
+ } else {
+ res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+ /*requestThreadInvocation*/ false);
if (res != OK) {
- SET_ERR_L("Can't stop streaming");
+ SET_ERR_L("Timeout waiting for HAL to drain (% " PRIi64 " ns)",
+ maxExpectedDuration);
// Continue to close device even in case of error
- } else {
- res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
- if (res != OK) {
- SET_ERR_L("Timeout waiting for HAL to drain (% " PRIi64 " ns)",
- maxExpectedDuration);
- // Continue to close device even in case of error
- }
}
}
- // Signal to request thread that we're not expecting any
- // more requests. This will be true since once we're in
- // disconnect and we've cleared off the request queue, the
- // request thread can't receive any new requests through
- // binder calls - since disconnect holds
- // mBinderSerialization lock.
- mRequestThread->setRequestClearing();
}
+ // Signal to request thread that we're not expecting any
+ // more requests. This will be true since once we're in
+ // disconnect and we've cleared off the request queue, the
+ // request thread can't receive any new requests through
+ // binder calls - since disconnect holds
+ // mBinderSerialization lock.
+ mRequestThread->setRequestClearing();
+ }
- if (mStatus == STATUS_ERROR) {
- CLOGE("Shutting down in an error state");
- }
+ if (mStatus == STATUS_ERROR) {
+ CLOGE("Shutting down in an error state");
+ }
- if (mStatusTracker != NULL) {
- mStatusTracker->requestExit();
- }
+ if (mStatusTracker != NULL) {
+ mStatusTracker->requestExit();
+ }
- if (mRequestThread != NULL) {
- mRequestThread->requestExit();
- }
+ if (mRequestThread != NULL) {
+ mRequestThread->requestExit();
+ }
- streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
- for (size_t i = 0; i < mOutputStreams.size(); i++) {
- streams.push_back(mOutputStreams[i]);
- }
- if (mInputStream != nullptr) {
- streams.push_back(mInputStream);
- }
+ streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
+ for (size_t i = 0; i < mOutputStreams.size(); i++) {
+ streams.push_back(mOutputStreams[i]);
+ }
+ if (mInputStream != nullptr) {
+ streams.push_back(mInputStream);
}
}
- // Joining done without holding mLock and mInterfaceLock, otherwise deadlocks may ensue
- // as the threads try to access parent state (b/143513518)
+
+ // Joining done without holding mLock, otherwise deadlocks may ensue
+ // as the threads try to access parent state
if (mRequestThread != NULL && mStatus != STATUS_ERROR) {
// HAL may be in a bad state, so waiting for request thread
// (which may be stuck in the HAL processCaptureRequest call)
// could be dangerous.
- // give up mInterfaceLock here and then lock it again. Could this lead
- // to other deadlocks
mRequestThread->join();
}
+
+ if (mStatusTracker != NULL) {
+ mStatusTracker->join();
+ }
+
+ if (mInjectionMethods->isInjecting()) {
+ mInjectionMethods->stopInjection();
+ }
+
+ HalInterface* interface;
{
- Mutex::Autolock il(mInterfaceLock);
- if (mStatusTracker != NULL) {
- mStatusTracker->join();
- }
+ Mutex::Autolock l(mLock);
+ mRequestThread.clear();
+ Mutex::Autolock stLock(mTrackerLock);
+ mStatusTracker.clear();
+ interface = mInterface.get();
+ }
- if (mInjectionMethods->isInjecting()) {
- mInjectionMethods->stopInjection();
- }
+ // Call close without internal mutex held, as the HAL close may need to
+ // wait on assorted callbacks, etc., to complete before it can return.
+ mCameraServiceWatchdog->WATCH(interface->close());
- HalInterface* interface;
- {
- Mutex::Autolock l(mLock);
- mRequestThread.clear();
- Mutex::Autolock stLock(mTrackerLock);
- mStatusTracker.clear();
- interface = mInterface.get();
- }
+ flushInflightRequests();
- // Call close without internal mutex held, as the HAL close may need to
- // wait on assorted callbacks,etc, to complete before it can return.
- mCameraServiceWatchdog->WATCH(interface->close());
+ {
+ Mutex::Autolock l(mLock);
+ mInterface->clear();
+ mOutputStreams.clear();
+ mInputStream.clear();
+ mDeletedStreams.clear();
+ mBufferManager.clear();
+ internalUpdateStatusLocked(STATUS_UNINITIALIZED);
+ }
- flushInflightRequests();
-
- {
- Mutex::Autolock l(mLock);
- mInterface->clear();
- mOutputStreams.clear();
- mInputStream.clear();
- mDeletedStreams.clear();
- mBufferManager.clear();
- internalUpdateStatusLocked(STATUS_UNINITIALIZED);
- }
-
- for (auto& weakStream : streams) {
- sp<Camera3StreamInterface> stream = weakStream.promote();
- if (stream != nullptr) {
- ALOGE("%s: Stream %d leaked! strong reference (%d)!",
- __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
- }
+ for (auto& weakStream : streams) {
+ sp<Camera3StreamInterface> stream = weakStream.promote();
+ if (stream != nullptr) {
+ ALOGE("%s: Stream %d leaked! strong reference (%d)!",
+ __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
}
}
ALOGI("%s: X", __FUNCTION__);
@@ -409,7 +423,7 @@
// Get max jpeg size (area-wise) for default sensor pixel mode
camera3::Size maxDefaultJpegResolution =
SessionConfigurationUtils::getMaxJpegResolution(info,
- /*isUltraHighResolutionSensor*/false);
+ /*supportsUltraHighResolutionCapture*/false);
// Get max jpeg size (area-wise) for max resolution sensor pixel mode / 0 if
// not ultra high res sensor
camera3::Size uhrMaxJpegResolution =
@@ -827,7 +841,7 @@
}
if (res == OK) {
- waitUntilStateThenRelock(/*active*/true, kActiveTimeout);
+ waitUntilStateThenRelock(/*active*/true, kActiveTimeout, /*requestThreadInvocation*/false);
if (res != OK) {
SET_ERR_L("Can't transition to active in %f seconds!",
kActiveTimeout/1e9);
@@ -952,7 +966,8 @@
break;
case STATUS_ACTIVE:
ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
- res = internalPauseAndWaitLocked(maxExpectedDuration);
+ res = internalPauseAndWaitLocked(maxExpectedDuration,
+ /*requestThreadInvocation*/ false);
if (res != OK) {
SET_ERR_L("Can't pause captures to reconfigure streams!");
return res;
@@ -1003,7 +1018,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode) {
+ int timestampBase, int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) {
ATRACE_CALL();
if (consumer == nullptr) {
@@ -1017,7 +1032,7 @@
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
}
static bool isRawFormat(int format) {
@@ -1039,7 +1054,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode) {
+ int timestampBase, int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
@@ -1048,10 +1063,11 @@
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
" consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
" dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
- " mirrorMode %d",
+ " mirrorMode %d, colorSpace %d, useReadoutTimestamp %d",
mId.c_str(), mNextStreamId, width, height, format, dataSpace, rotation,
consumerUsage, isShared, physicalCameraId.c_str(), isMultiResolution,
- dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode);
+ dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace,
+ useReadoutTimestamp);
status_t res;
bool wasActive = false;
@@ -1069,7 +1085,8 @@
break;
case STATUS_ACTIVE:
ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
- res = internalPauseAndWaitLocked(maxExpectedDuration);
+ res = internalPauseAndWaitLocked(maxExpectedDuration,
+ /*requestThreadInvocation*/ false);
if (res != OK) {
SET_ERR_L("Can't pause captures to reconfigure streams!");
return res;
@@ -1122,7 +1139,7 @@
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
bool maxResolution =
sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1137,25 +1154,25 @@
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else {
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
}
size_t consumerCount = consumers.size();
@@ -1243,6 +1260,7 @@
streamInfo->dataSpaceOverridden = stream->isDataSpaceOverridden();
streamInfo->originalDataSpace = stream->getOriginalDataSpace();
streamInfo->dynamicRangeProfile = stream->getDynamicRangeProfile();
+ streamInfo->colorSpace = stream->getColorSpace();
return OK;
}
@@ -1364,6 +1382,7 @@
filteredParams.unlock(meta);
if (availableSessionKeys.count > 0) {
bool rotateAndCropSessionKey = false;
+ bool autoframingSessionKey = false;
for (size_t i = 0; i < availableSessionKeys.count; i++) {
camera_metadata_ro_entry entry = params.find(
availableSessionKeys.data.i32[i]);
@@ -1373,23 +1392,37 @@
if (ANDROID_SCALER_ROTATE_AND_CROP == availableSessionKeys.data.i32[i]) {
rotateAndCropSessionKey = true;
}
+ if (ANDROID_CONTROL_AUTOFRAMING == availableSessionKeys.data.i32[i]) {
+ autoframingSessionKey = true;
+ }
}
- if (rotateAndCropSessionKey) {
+ if (rotateAndCropSessionKey || autoframingSessionKey) {
sp<CaptureRequest> request = new CaptureRequest();
PhysicalCameraSettings settingsList;
settingsList.metadata = filteredParams;
request->mSettingsList.push_back(settingsList);
- auto rotateAndCropEntry = filteredParams.find(ANDROID_SCALER_ROTATE_AND_CROP);
- if (rotateAndCropEntry.count > 0 &&
- rotateAndCropEntry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
- request->mRotateAndCropAuto = true;
- } else {
- request->mRotateAndCropAuto = false;
+ if (rotateAndCropSessionKey) {
+ auto rotateAndCropEntry = filteredParams.find(ANDROID_SCALER_ROTATE_AND_CROP);
+ if (rotateAndCropEntry.count > 0 &&
+ rotateAndCropEntry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+ request->mRotateAndCropAuto = true;
+ } else {
+ request->mRotateAndCropAuto = false;
+ }
+
+ overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
}
- overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
+ if (autoframingSessionKey) {
+ auto autoframingEntry = filteredParams.find(ANDROID_CONTROL_AUTOFRAMING);
+ if (autoframingEntry.count > 0 &&
+ autoframingEntry.data.u8[0] == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+ overrideAutoframing(request, mAutoframingOverride);
+ }
+ }
+
filteredParams = request->mSettingsList.begin()->metadata;
}
}
@@ -1486,6 +1519,13 @@
&kDefaultJpegQuality, 1);
}
+ // Fill in AUTOFRAMING if not available
+ if (!mRequestTemplateCache[templateId].exists(ANDROID_CONTROL_AUTOFRAMING)) {
+ static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
+ mRequestTemplateCache[templateId].update(ANDROID_CONTROL_AUTOFRAMING,
+ &kDefaultAutoframingMode, 1);
+ }
+
*request = mRequestTemplateCache[templateId];
mLastTemplateId = templateId;
}
@@ -1519,7 +1559,8 @@
}
ALOGV("%s: Camera %s: Waiting until idle (%" PRIi64 "ns)", __FUNCTION__, mId.c_str(),
maxExpectedDuration);
- status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
+ status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+ /*requestThreadInvocation*/ false);
if (res != OK) {
mStatusTracker->dumpActiveComponents();
SET_ERR_L("Error waiting for HAL to drain: %s (%d)", strerror(-res),
@@ -1530,12 +1571,14 @@
void Camera3Device::internalUpdateStatusLocked(Status status) {
mStatus = status;
- mRecentStatusUpdates.add(mStatus);
+ mStatusIsInternal = mPauseStateNotify ? true : false;
+ mRecentStatusUpdates.add({mStatus, mStatusIsInternal});
mStatusChanged.broadcast();
}
// Pause to reconfigure
-status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration) {
+status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration,
+ bool requestThreadInvocation) {
if (mRequestThread.get() != nullptr) {
mRequestThread->setPaused(true);
} else {
@@ -1544,8 +1587,10 @@
ALOGV("%s: Camera %s: Internal wait until idle (% " PRIi64 " ns)", __FUNCTION__, mId.c_str(),
maxExpectedDuration);
- status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
+ status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+ requestThreadInvocation);
if (res != OK) {
+ mStatusTracker->dumpActiveComponents();
SET_ERR_L("Can't idle device in %f seconds!",
maxExpectedDuration/1e9);
}
@@ -1561,7 +1606,9 @@
ALOGV("%s: Camera %s: Internal wait until active (% " PRIi64 " ns)", __FUNCTION__, mId.c_str(),
kActiveTimeout);
- res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
+ // internalResumeLocked is always called from a binder thread.
+ res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout,
+ /*requestThreadInvocation*/ false);
if (res != OK) {
SET_ERR_L("Can't transition to active in %f seconds!",
kActiveTimeout/1e9);
@@ -1570,7 +1617,8 @@
return OK;
}
-status_t Camera3Device::waitUntilStateThenRelock(bool active, nsecs_t timeout) {
+status_t Camera3Device::waitUntilStateThenRelock(bool active, nsecs_t timeout,
+ bool requestThreadInvocation) {
status_t res = OK;
size_t startIndex = 0;
@@ -1599,7 +1647,12 @@
bool stateSeen = false;
nsecs_t startTime = systemTime();
do {
- if (active == (mStatus == STATUS_ACTIVE)) {
+ if (mStatus == STATUS_ERROR) {
+ // Device in error state. Return right away.
+ break;
+ }
+ if (active == (mStatus == STATUS_ACTIVE) &&
+ (requestThreadInvocation || !mStatusIsInternal)) {
// Desired state is current
break;
}
@@ -1621,9 +1674,19 @@
"%s: Skipping status updates in Camera3Device, may result in deadlock.",
__FUNCTION__);
- // Encountered desired state since we began waiting
+ // Encountered desired state since we began waiting. Internal invocations coming from
+ // request threads (such as reconfigureCamera) should be woken up immediately, whereas
+ // invocations from binder threads (such as createInputStream) should only be woken up if
+ // they are not paused. This avoids intermediate pause signals from reconfigureCamera as it
+ // changes the status to active right after.
for (size_t i = startIndex; i < mRecentStatusUpdates.size(); i++) {
- if (active == (mRecentStatusUpdates[i] == STATUS_ACTIVE) ) {
+ if (mRecentStatusUpdates[i].status == STATUS_ERROR) {
+ // Device in error state. Return right away.
+ stateSeen = true;
+ break;
+ }
+ if (active == (mRecentStatusUpdates[i].status == STATUS_ACTIVE) &&
+ (requestThreadInvocation || !mRecentStatusUpdates[i].isInternal)) {
stateSeen = true;
break;
}
@@ -1855,7 +1918,7 @@
camera_metadata_entry minDurations =
mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
for (size_t i = 0; i < minDurations.count; i += 4) {
- if (minDurations.data.i64[i] == stream->getFormat()
+ if (minDurations.data.i64[i] == stream->getOriginalFormat()
&& minDurations.data.i64[i+1] == stream->getWidth()
&& minDurations.data.i64[i+2] == stream->getHeight()) {
int64_t minFrameDuration = minDurations.data.i64[i+3];
@@ -1911,10 +1974,11 @@
streamUseCase = camera3Stream->getStreamUseCase();
}
streamStats.emplace_back(stream->getWidth(), stream->getHeight(),
- stream->getFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
+ stream->getOriginalFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
stream->getMaxHalBuffers(),
stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
- stream->getDynamicRangeProfile(), streamUseCase);
+ stream->getDynamicRangeProfile(), streamUseCase,
+ stream->getColorSpace());
}
}
}
@@ -2164,6 +2228,15 @@
newRequest->mRotateAndCropAuto = false;
}
+ auto autoframingEntry =
+ newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_AUTOFRAMING);
+ if (autoframingEntry.count > 0 &&
+ autoframingEntry.data.u8[0] == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+ newRequest->mAutoframingAuto = true;
+ } else {
+ newRequest->mAutoframingAuto = false;
+ }
+
auto zoomRatioEntry =
newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
if (zoomRatioEntry.count > 0 &&
@@ -2195,6 +2268,16 @@
}
}
+ if (mSupportZoomOverride) {
+ for (auto& settings : newRequest->mSettingsList) {
+ auto settingsOverrideEntry =
+ settings.metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+ settings.mOriginalSettingsOverride = settingsOverrideEntry.count > 0 ?
+ settingsOverrideEntry.data.i32[0] :
+ ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
+ }
+ }
+
return newRequest;
}
@@ -2251,7 +2334,9 @@
nsecs_t startTime = systemTime();
- Mutex::Autolock il(mInterfaceLock);
+ // We must not hold mInterfaceLock here since this function is called from
+ // RequestThread::threadLoop and holding mInterfaceLock could lead to
+ // deadlocks (http://b/143513518)
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
Mutex::Autolock l(mLock);
@@ -2268,7 +2353,16 @@
mPauseStateNotify = true;
mStatusTracker->markComponentIdle(clientStatusId, Fence::NO_FENCE);
- rc = internalPauseAndWaitLocked(maxExpectedDuration);
+ // This is essentially the same as calling rc = internalPauseAndWaitLocked(..), except that
+ // we don't want to call setPaused(true) to avoid it interfering with setPaused() called
+ // from createInputStream/createStream.
+ rc = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+ /*requestThreadInvocation*/ true);
+ if (rc != OK) {
+ mStatusTracker->dumpActiveComponents();
+ SET_ERR_L("Can't idle device in %f seconds!",
+ maxExpectedDuration/1e9);
+ }
}
if (rc == NO_ERROR) {
@@ -2283,6 +2377,9 @@
//present streams end up with outstanding buffers that will
//not get drained.
internalUpdateStatusLocked(STATUS_ACTIVE);
+
+ mCameraServiceProxyWrapper->logStreamConfigured(mId, mOperatingMode,
+ true /*internalReconfig*/, ns2ms(systemTime() - startTime));
} else if (rc == DEAD_OBJECT) {
// DEAD_OBJECT can be returned if either the consumer surface is
// abandoned, or the HAL has died.
@@ -2298,9 +2395,6 @@
ALOGE("%s: Failed to pause streaming: %d", __FUNCTION__, rc);
}
- CameraServiceProxyWrapper::logStreamConfigured(mId, mOperatingMode, true /*internalReconfig*/,
- ns2ms(systemTime() - startTime));
-
if (markClientActive) {
mStatusTracker->markComponentActive(clientStatusId);
}
@@ -2426,7 +2520,10 @@
if (outputStream->format == HAL_PIXEL_FORMAT_BLOB) {
size_t k = i + ((mInputStream != nullptr) ? 1 : 0); // Input stream if present should
// always occupy the initial entry.
- if (outputStream->data_space == HAL_DATASPACE_V0_JFIF) {
+ if ((outputStream->data_space == HAL_DATASPACE_V0_JFIF) ||
+ (outputStream->data_space ==
+ static_cast<android_dataspace_t>(
+ aidl::android::hardware::graphics::common::Dataspace::JPEG_R))) {
bufferSizes[k] = static_cast<uint32_t>(
getJpegBufferSize(infoPhysical(outputStream->physical_camera_id),
outputStream->width, outputStream->height));
@@ -2456,8 +2553,9 @@
// max_buffers, usage, and priv fields, as well as data_space and format
// fields for IMPLEMENTATION_DEFINED formats.
+ int64_t logId = mCameraServiceProxyWrapper->getCurrentLogIdForCamera(mId);
const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
- res = mInterface->configureStreams(sessionBuffer, &config, bufferSizes);
+ res = mInterface->configureStreams(sessionBuffer, &config, bufferSizes, logId);
sessionParams.unlock(sessionBuffer);
if (res == BAD_VALUE) {
@@ -2720,7 +2818,7 @@
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
- bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
+ bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
const std::set<std::string>& cameraIdsWithZoom,
const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
ATRACE_CALL();
@@ -2729,8 +2827,8 @@
ssize_t res;
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
hasAppCallback, minExpectedDuration, maxExpectedDuration, isFixedFps, physicalCameraIds,
- isStillCapture, isZslCapture, rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs,
- outputSurfaces));
+ isStillCapture, isZslCapture, rotateAndCropAuto, autoframingAuto, cameraIdsWithZoom,
+ requestTimeNs, outputSurfaces));
if (res < 0) return res;
if (mInFlightMap.size() == 1) {
@@ -2920,7 +3018,8 @@
sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) :
+ bool overrideToPortrait,
+ bool supportSettingsOverride) :
Thread(/*canCallJava*/false),
mParent(parent),
mStatusTracker(statusTracker),
@@ -2938,9 +3037,11 @@
mCurrentAfTriggerId(0),
mCurrentPreCaptureTriggerId(0),
mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
+ mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
mComposerOutput(false),
mCameraMute(ANDROID_SENSOR_TEST_PATTERN_MODE_OFF),
mCameraMuteChanged(false),
+ mSettingsOverride(ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF),
mRepeatingLastFrameNumber(
hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
mPrepareVideoStream(false),
@@ -2950,8 +3051,10 @@
mLatestSessionParams(sessionParamKeys.size()),
mUseHalBufManager(useHalBufManager),
mSupportCameraMute(supportCameraMute),
- mOverrideToPortrait(overrideToPortrait) {
+ mOverrideToPortrait(overrideToPortrait),
+ mSupportSettingsOverride(supportSettingsOverride) {
mStatusId = statusTracker->addComponent("RequestThread");
+ mVndkVersion = property_get_int32("ro.vndk.version", __ANDROID_API_FUTURE__);
}
Camera3Device::RequestThread::~RequestThread() {}
@@ -3477,6 +3580,7 @@
// The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
captureRequest->mRotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
overrideAutoRotateAndCrop(captureRequest);
+ captureRequest->mAutoframingChanged = overrideAutoframing(captureRequest);
}
// 'mNextRequests' will at this point contain either a set of HFR batched requests
@@ -3502,7 +3606,6 @@
if (parent != nullptr) {
mReconfigured |= parent->reconfigureCamera(mLatestSessionParams, mStatusId);
}
- setPaused(false);
if (mNextRequests[0].captureRequest->mInputStream != nullptr) {
mNextRequests[0].captureRequest->mInputStream->restoreConfiguredState();
@@ -3625,12 +3728,15 @@
mPrevTriggers = triggerCount;
bool testPatternChanged = overrideTestPattern(captureRequest);
+ bool settingsOverrideChanged = overrideSettingsOverride(captureRequest);
// If the request is the same as last, or we had triggers now or last time or
// changing overrides this time
bool newRequest =
(mPrevRequest != captureRequest || triggersMixedIn ||
- captureRequest->mRotateAndCropChanged || testPatternChanged) &&
+ captureRequest->mRotateAndCropChanged ||
+ captureRequest->mAutoframingChanged ||
+ testPatternChanged || settingsOverrideChanged) &&
// Request settings are all the same within one batch, so only treat the first
// request in a batch as new
!(batchedRequest && i > 0);
@@ -3746,6 +3852,17 @@
}
captureRequest->mRotationAndCropUpdated = true;
}
+
+ for (it = captureRequest->mSettingsList.begin();
+ it != captureRequest->mSettingsList.end(); it++) {
+ res = hardware::cameraservice::utils::conversion::aidl::filterVndkKeys(
+ mVndkVersion, it->metadata, false /*isStatic*/);
+ if (res != OK) {
+ SET_ERR("RequestThread: Failed during VNDK filter of capture requests "
+ "%d: %s (%d)", halRequest->frame_number, strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+ }
}
}
@@ -3959,7 +4076,8 @@
expectedDurationInfo.maxDuration,
expectedDurationInfo.isFixedFps,
requestedPhysicalCameras, isStillCapture, isZslCapture,
- captureRequest->mRotateAndCropAuto, mPrevCameraIdsWithZoom,
+ captureRequest->mRotateAndCropAuto, captureRequest->mAutoframingAuto,
+ mPrevCameraIdsWithZoom,
(mUseHalBufManager) ? uniqueSurfaceIdMap :
SurfaceMap{}, captureRequest->mRequestTimeNs);
ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
@@ -4097,6 +4215,14 @@
return OK;
}
+status_t Camera3Device::RequestThread::setAutoframingAutoBehaviour(
+ camera_metadata_enum_android_control_autoframing_t autoframingValue) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mTriggerMutex);
+ mAutoframingOverride = autoframingValue;
+ return OK;
+}
+
status_t Camera3Device::RequestThread::setComposerSurface(bool composerSurfacePresent) {
ATRACE_CALL();
Mutex::Autolock l(mTriggerMutex);
@@ -4114,6 +4240,13 @@
return OK;
}
+status_t Camera3Device::RequestThread::setZoomOverride(int32_t zoomOverride) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mTriggerMutex);
+ mSettingsOverride = zoomOverride;
+ return OK;
+}
+
nsecs_t Camera3Device::getExpectedInFlightDuration() {
ATRACE_CALL();
std::lock_guard<std::mutex> l(mInFlightLock);
@@ -4169,6 +4302,12 @@
mStreamUseCaseOverrides.clear();
}
+bool Camera3Device::hasDeviceError() {
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+ return mStatus == STATUS_ERROR;
+}
+
void Camera3Device::RequestThread::cleanUpFailedRequests(bool sendRequestError) {
if (mNextRequests.empty()) {
return;
@@ -4715,6 +4854,38 @@
return false;
}
+bool Camera3Device::overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+ camera_metadata_enum_android_control_autoframing_t autoframingOverride) {
+ CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+ auto autoframingEntry = metadata.find(ANDROID_CONTROL_AUTOFRAMING);
+ if (autoframingEntry.count > 0) {
+ if (autoframingEntry.data.u8[0] == autoframingOverride) {
+ return false;
+ } else {
+ autoframingEntry.data.u8[0] = autoframingOverride;
+ return true;
+ }
+ } else {
+ uint8_t autoframing_u8 = autoframingOverride;
+ metadata.update(ANDROID_CONTROL_AUTOFRAMING,
+ &autoframing_u8, 1);
+ return true;
+ }
+
+ return false;
+}
+
+bool Camera3Device::RequestThread::overrideAutoframing(const sp<CaptureRequest> &request) {
+ ATRACE_CALL();
+
+ if (request->mAutoframingAuto) {
+ Mutex::Autolock l(mTriggerMutex);
+ return Camera3Device::overrideAutoframing(request, mAutoframingOverride);
+ }
+
+ return false;
+}
+
bool Camera3Device::RequestThread::overrideTestPattern(
const sp<CaptureRequest> &request) {
ATRACE_CALL();
@@ -4777,6 +4948,33 @@
return changed;
}
+bool Camera3Device::RequestThread::overrideSettingsOverride(
+ const sp<CaptureRequest> &request) {
+ ATRACE_CALL();
+
+ if (!mSupportSettingsOverride) return false;
+
+ Mutex::Autolock l(mTriggerMutex);
+
+ // For a multi-camera, only override the logical camera's metadata.
+ CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+ camera_metadata_entry entry = metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+ int32_t originalValue = request->mSettingsList.begin()->mOriginalSettingsOverride;
+ if (mSettingsOverride != -1 &&
+ (entry.count == 0 || entry.data.i32[0] != mSettingsOverride)) {
+ metadata.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
+ &mSettingsOverride, 1);
+ return true;
+ } else if (mSettingsOverride == -1 &&
+ (entry.count == 0 || entry.data.i32[0] != originalValue)) {
+ metadata.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
+ &originalValue, 1);
+ return true;
+ }
+
+ return false;
+}
+
status_t Camera3Device::RequestThread::setHalInterface(
sp<HalInterface> newHalInterface) {
if (newHalInterface.get() == nullptr) {
@@ -4846,7 +5044,8 @@
}
// queue up the work
- mPendingStreams.emplace(maxCount, stream);
+ mPendingStreams.push_back(
+ std::tuple<int, sp<camera3::Camera3StreamInterface>>(maxCount, stream));
ALOGV("%s: Stream %d queued for preparing", __FUNCTION__, stream->getId());
return OK;
@@ -4857,8 +5056,8 @@
Mutex::Autolock l(mLock);
- std::unordered_map<int, sp<camera3::Camera3StreamInterface> > pendingStreams;
- pendingStreams.insert(mPendingStreams.begin(), mPendingStreams.end());
+ std::list<std::tuple<int, sp<camera3::Camera3StreamInterface>>> pendingStreams;
+ pendingStreams.insert(pendingStreams.begin(), mPendingStreams.begin(), mPendingStreams.end());
sp<camera3::Camera3StreamInterface> currentStream = mCurrentStream;
int currentMaxCount = mCurrentMaxCount;
mPendingStreams.clear();
@@ -4879,18 +5078,19 @@
//of the streams in the pending list.
if (currentStream != nullptr) {
if (!mCurrentPrepareComplete) {
- pendingStreams.emplace(currentMaxCount, currentStream);
+ pendingStreams.push_back(std::tuple(currentMaxCount, currentStream));
}
}
- mPendingStreams.insert(pendingStreams.begin(), pendingStreams.end());
+ mPendingStreams.insert(mPendingStreams.begin(), pendingStreams.begin(), pendingStreams.end());
for (const auto& it : mPendingStreams) {
- it.second->cancelPrepare();
+ std::get<1>(it)->cancelPrepare();
}
}
status_t Camera3Device::PreparerThread::resume() {
ATRACE_CALL();
+ ALOGV("%s: PreparerThread", __FUNCTION__);
status_t res;
Mutex::Autolock l(mLock);
@@ -4903,10 +5103,10 @@
auto it = mPendingStreams.begin();
for (; it != mPendingStreams.end();) {
- res = it->second->startPrepare(it->first, true /*blockRequest*/);
+ res = std::get<1>(*it)->startPrepare(std::get<0>(*it), true /*blockRequest*/);
if (res == OK) {
if (listener != NULL) {
- listener->notifyPrepared(it->second->getId());
+ listener->notifyPrepared(std::get<1>(*it)->getId());
}
it = mPendingStreams.erase(it);
} else if (res != NOT_ENOUGH_DATA) {
@@ -4940,7 +5140,7 @@
Mutex::Autolock l(mLock);
for (const auto& it : mPendingStreams) {
- it.second->cancelPrepare();
+ std::get<1>(it)->cancelPrepare();
}
mPendingStreams.clear();
mCancelNow = true;
@@ -4971,8 +5171,8 @@
// Get next stream to prepare
auto it = mPendingStreams.begin();
- mCurrentStream = it->second;
- mCurrentMaxCount = it->first;
+ mCurrentMaxCount = std::get<0>(*it);
+ mCurrentStream = std::get<1>(*it);
mCurrentPrepareComplete = false;
mPendingStreams.erase(it);
ATRACE_ASYNC_BEGIN("stream prepare", mCurrentStream->getId());
@@ -5204,6 +5404,21 @@
return mRequestThread->setRotateAndCropAutoBehavior(rotateAndCropValue);
}
+status_t Camera3Device::setAutoframingAutoBehavior(
+ camera_metadata_enum_android_control_autoframing_t autoframingValue) {
+ ATRACE_CALL();
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+ if (mRequestThread == nullptr) {
+ return INVALID_OPERATION;
+ }
+ if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+ return BAD_VALUE;
+ }
+ mAutoframingOverride = autoframingValue;
+ return mRequestThread->setAutoframingAutoBehaviour(autoframingValue);
+}
+
bool Camera3Device::supportsCameraMute() {
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
@@ -5226,6 +5441,25 @@
return mRequestThread->setCameraMute(muteMode);
}
+bool Camera3Device::supportsZoomOverride() {
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+
+ return mSupportZoomOverride;
+}
+
+status_t Camera3Device::setZoomOverride(int32_t zoomOverride) {
+ ATRACE_CALL();
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+
+ if (mRequestThread == nullptr || !mSupportZoomOverride) {
+ return INVALID_OPERATION;
+ }
+
+ return mRequestThread->setZoomOverride(zoomOverride);
+}
+
status_t Camera3Device::injectCamera(const std::string& injectedCamId,
sp<CameraProviderManager> manager) {
ALOGI("%s Injection camera: injectedCamId = %s", __FUNCTION__, injectedCamId.c_str());
@@ -5304,6 +5538,7 @@
// Start from an array of indexes in mStreamUseCaseOverrides, and sort them
// based first on size, and second on formats of [JPEG, RAW, YUV, PRIV].
+ // Refer to CameraService::printHelp for details.
std::vector<int> outputStreamsIndices(mOutputStreams.size());
for (size_t i = 0; i < outputStreamsIndices.size(); i++) {
outputStreamsIndices[i] = i;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 7b89f9f..0c1bbcb 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -20,6 +20,7 @@
#include <utility>
#include <unordered_map>
#include <set>
+#include <tuple>
#include <utils/Condition.h>
#include <utils/Errors.h>
@@ -49,6 +50,7 @@
#include "utils/TagMonitor.h"
#include "utils/IPCTransport.h"
#include "utils/LatencyHistogram.h"
+#include "utils/CameraServiceProxyWrapper.h"
#include <camera_metadata_hidden.h>
using android::camera3::camera_capture_request_t;
@@ -82,7 +84,8 @@
friend class AidlCamera3Device;
public:
- explicit Camera3Device(const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
bool legacyClient = false);
virtual ~Camera3Device();
@@ -116,6 +119,7 @@
status_t dumpWatchedEventsToVector(std::vector<std::string> &out) override;
const CameraMetadata& info() const override;
const CameraMetadata& infoPhysical(const std::string& physicalId) const override;
+ bool isCompositeJpegRDisabled() const override { return mIsCompositeJpegRDisabled; };
// Capture and setStreamingRequest will configure streams if currently in
// idle state
@@ -150,7 +154,10 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
+ override;
status_t createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -165,7 +172,10 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
+ override;
status_t createInputStream(
uint32_t width, uint32_t height, int format, bool isMultiResolution,
@@ -268,6 +278,14 @@
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
/**
+ * Set the current behavior for the AUTOFRAMING control when in AUTO.
+ *
+ * The value must be one of the AUTOFRAMING_* values besides AUTO.
+ */
+ status_t setAutoframingAutoBehavior(
+ camera_metadata_enum_android_control_autoframing_t autoframingValue);
+
+ /**
* Whether camera muting (producing black-only output) is supported.
*
* Calling setCameraMute(true) when this returns false will return an
@@ -294,9 +312,20 @@
// Clear stream use case overrides
void clearStreamUseCaseOverrides();
+ /**
+ * Whether the camera device supports zoom override.
+ */
+ bool supportsZoomOverride();
+
+ // Set/reset zoom override
+ status_t setZoomOverride(int32_t zoomOverride);
+
// Get the status trackeer for the camera device
wp<camera3::StatusTracker> getStatusTracker() { return mStatusTracker; }
+ // Whether the device is in error state
+ bool hasDeviceError();
+
/**
* The injection camera session to replace the internal camera
* session.
@@ -333,8 +362,11 @@
// Constant to use for stream ID when one doesn't exist
static const int NO_STREAM = -1;
+ std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
// A lock to enforce serialization on the input/configure side
// of the public interface.
+ // Only locked by public methods inherited from CameraDeviceBase.
// Not locked by methods guarded by mOutputLock, since they may act
// concurrently to the input/configure side of the interface.
// Must be locked before mLock if both will be locked by a method
@@ -389,7 +421,7 @@
virtual status_t configureStreams(const camera_metadata_t * sessionParams,
/*inout*/ camera_stream_configuration_t * config,
- const std::vector<uint32_t>& bufferSizes) = 0;
+ const std::vector<uint32_t>& bufferSizes, int64_t logId) = 0;
// The injection camera configures the streams to hal.
virtual status_t configureInjectedStreams(
@@ -521,6 +553,7 @@
CameraMetadata mDeviceInfo;
bool mSupportNativeZoomRatio;
+ bool mIsCompositeJpegRDisabled;
std::unordered_map<std::string, CameraMetadata> mPhysicalDeviceInfoMap;
CameraMetadata mRequestTemplateCache[CAMERA_TEMPLATE_COUNT];
@@ -539,8 +572,15 @@
STATUS_ACTIVE
} mStatus;
+ struct StatusInfo {
+ Status status;
+ bool isInternal; // status triggered by internal reconfigureCamera.
+ };
+
+ bool mStatusIsInternal;
+
// Only clear mRecentStatusUpdates, mStatusWaiters from waitUntilStateThenRelock
- Vector<Status> mRecentStatusUpdates;
+ Vector<StatusInfo> mRecentStatusUpdates;
int mStatusWaiters;
Condition mStatusChanged;
@@ -606,6 +646,11 @@
// Indicates that the ROTATE_AND_CROP value within 'mSettingsList' was modified
// irrespective of the original value.
bool mRotateAndCropChanged = false;
+ // Whether this request has AUTOFRAMING_AUTO set, so need to override the AUTOFRAMING value
+ // in the capture request.
+ bool mAutoframingAuto;
+ // Indicates that the auto framing value within 'mSettingsList' was modified
+ bool mAutoframingChanged = false;
// Whether this capture request has its zoom ratio set to 1.0x before
// the framework overrides it for camera HAL consumption.
@@ -619,6 +664,8 @@
// Whether this capture request's rotation and crop update has been
// done.
bool mRotationAndCropUpdated = false;
+ // Whether this capture request's autoframing has been done.
+ bool mAutoframingUpdated = false;
// Whether this capture request's zoom ratio update has been done.
bool mZoomRatioUpdated = false;
// Whether this max resolution capture request's crop / metering region update has been
@@ -678,7 +725,8 @@
* CameraDeviceBase interface we shouldn't need to.
* Must be called with mLock and mInterfaceLock both held.
*/
- status_t internalPauseAndWaitLocked(nsecs_t maxExpectedDuration);
+ status_t internalPauseAndWaitLocked(nsecs_t maxExpectedDuration,
+ bool requestThreadInvocation);
/**
* Resume work after internalPauseAndWaitLocked()
@@ -697,7 +745,8 @@
* During the wait mLock is released.
*
*/
- status_t waitUntilStateThenRelock(bool active, nsecs_t timeout);
+ status_t waitUntilStateThenRelock(bool active, nsecs_t timeout,
+ bool requestThreadInvocation);
/**
* Implementation of waitUntilDrained. On success, will transition to IDLE state.
@@ -797,6 +846,10 @@
bool overrideToPortrait,
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride);
+ // Override auto framing control if needed
+ static bool overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+ camera_metadata_enum_android_control_autoframing_t autoframingOverride);
+
struct RequestTrigger {
// Metadata tag number, e.g. android.control.aePrecaptureTrigger
uint32_t metadataTag;
@@ -827,7 +880,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait);
+ bool overrideToPortrait,
+ bool supportSettingsOverride);
~RequestThread();
void setNotificationListener(wp<NotificationListener> listener);
@@ -926,10 +980,16 @@
status_t setRotateAndCropAutoBehavior(
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
+
+ status_t setAutoframingAutoBehaviour(
+ camera_metadata_enum_android_control_autoframing_t autoframingValue);
+
status_t setComposerSurface(bool composerSurfacePresent);
status_t setCameraMute(int32_t muteMode);
+ status_t setZoomOverride(int32_t zoomOverride);
+
status_t setHalInterface(sp<HalInterface> newHalInterface);
protected:
@@ -952,10 +1012,17 @@
// Override rotate_and_crop control if needed; returns true if the current value was changed
bool overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/);
+ // Override autoframing control if needed; returns true if the current value was changed
+ bool overrideAutoframing(const sp<CaptureRequest> &request);
+
// Override test_pattern control if needed for camera mute; returns true
// if the current value was changed
bool overrideTestPattern(const sp<CaptureRequest> &request);
+ // Override settings override if needed for lower zoom latency; return
+ // true if the current value was changed
+ bool overrideSettingsOverride(const sp<CaptureRequest> &request);
+
static const nsecs_t kRequestTimeout = 50e6; // 50 ms
// TODO: does this need to be adjusted for long exposure requests?
@@ -1086,9 +1153,12 @@
uint32_t mCurrentAfTriggerId;
uint32_t mCurrentPreCaptureTriggerId;
camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
+ camera_metadata_enum_android_control_autoframing_t mAutoframingOverride;
bool mComposerOutput;
int32_t mCameraMute; // 0 = no mute, otherwise the TEST_PATTERN_MODE to use
bool mCameraMuteChanged;
+ int32_t mSettingsOverride; // -1 = use original, otherwise
+ // the settings override to use.
int64_t mRepeatingLastFrameNumber;
@@ -1108,6 +1178,8 @@
const bool mUseHalBufManager;
const bool mSupportCameraMute;
const bool mOverrideToPortrait;
+ const bool mSupportSettingsOverride;
+ int32_t mVndkVersion = -1;
};
virtual sp<RequestThread> createNewRequestThread(wp<Camera3Device> /*parent*/,
@@ -1116,7 +1188,8 @@
const Vector<int32_t>& /*sessionParamKeys*/,
bool /*useHalBufManager*/,
bool /*supportCameraMute*/,
- bool /*overrideToPortrait*/) = 0;
+ bool /*overrideToPortrait*/,
+ bool /*supportSettingsOverride*/) = 0;
sp<RequestThread> mRequestThread;
@@ -1137,7 +1210,7 @@
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
- bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
+ bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
nsecs_t requestTimeNs);
@@ -1194,7 +1267,7 @@
// Guarded by mLock
wp<NotificationListener> mListener;
- std::unordered_map<int, sp<camera3::Camera3StreamInterface> > mPendingStreams;
+ std::list<std::tuple<int, sp<camera3::Camera3StreamInterface>>> mPendingStreams;
bool mActive;
bool mCancelNow;
@@ -1381,6 +1454,8 @@
bool mSupportCameraMute = false;
// Whether the HAL supports SOLID_COLOR or BLACK if mSupportCameraMute is true
bool mSupportTestPatternSolidColor = false;
+ // Whether the HAL supports zoom settings override
+ bool mSupportZoomOverride = false;
// Whether the camera framework overrides the device characteristics for
// performance class.
@@ -1392,6 +1467,13 @@
camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
bool mComposerOutput;
+ // Auto framing override value
+ camera_metadata_enum_android_control_autoframing mAutoframingOverride;
+
+ // Settings override value
+ int32_t mSettingsOverride; // -1 = use original, otherwise
+ // the settings override to use.
+
// Current active physical id of the logical multi-camera, if any
std::string mActivePhysicalId;
diff --git a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
index 4640f2d..b0e4ca3 100644
--- a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
@@ -58,7 +58,8 @@
if (parent->mStatus == STATUS_ACTIVE) {
ALOGV("%s: Let the device be IDLE and the request thread is paused",
__FUNCTION__);
- res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
+ res = parent->internalPauseAndWaitLocked(maxExpectedDuration,
+ /*requestThreadInvocation*/false);
if (res != OK) {
ALOGE("%s: Can't pause captures to inject camera!", __FUNCTION__);
return res;
@@ -117,7 +118,8 @@
if (parent->mStatus == STATUS_ACTIVE) {
ALOGV("%s: Let the device be IDLE and the request thread is paused",
__FUNCTION__);
- res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
+ res = parent->internalPauseAndWaitLocked(maxExpectedDuration,
+ /*requestThreadInvocation*/false);
if (res != OK) {
ALOGE("%s: Can't pause captures to stop injection!", __FUNCTION__);
return res;
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 7d08089..c59138c 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -38,11 +38,12 @@
const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int64_t streamUseCase,
- bool deviceTimeBaseIsRealtime, int timestampBase) :
+ bool deviceTimeBaseIsRealtime, int timestampBase, int32_t colorSpace) :
Camera3Stream(id, type,
width, height, maxSize, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase),
+ dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase,
+ colorSpace),
mTotalBufferCount(0),
mMaxCachedBufferCount(0),
mHandoutTotalBufferCount(0),
@@ -95,6 +96,7 @@
}
lines << fmt::sprintf(" Dynamic Range Profile: 0x%" PRIx64 "\n",
camera_stream::dynamic_range_profile);
+ lines << fmt::sprintf(" Color Space: %d\n", camera_stream::color_space);
lines << fmt::sprintf(" Stream use case: %" PRId64 "\n", camera_stream::use_case);
lines << fmt::sprintf(" Timestamp base: %d\n", getTimestampBase());
lines << fmt::sprintf(" Frames produced: %d, last timestamp: %" PRId64 " ns\n",
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 1086955..239fc71 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -41,7 +41,8 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
- int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
public:
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 290836c..7185895 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -24,6 +24,7 @@
#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
#include <android-base/unique_fd.h>
#include <cutils/properties.h>
@@ -57,18 +58,18 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(0),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -92,17 +93,17 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase),
+ deviceTimeBaseIsRealtime, timestampBase, colorSpace),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(0),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -132,18 +133,18 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mConsumer(nullptr),
mTransform(0),
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(consumerUsage),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -181,18 +182,19 @@
int setId, bool isMultiResolution,
int64_t dynamicRangeProfile, int64_t streamUseCase,
bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace,
+ bool useReadoutTimestamp) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mTransform(0),
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(consumerUsage),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -332,7 +334,7 @@
status_t res =
gbLocker.lockAsync(
GraphicBuffer::USAGE_SW_READ_OFTEN | GraphicBuffer::USAGE_SW_WRITE_RARELY,
- &mapped, fenceFd.get());
+ &mapped, fenceFd.release());
if (res != OK) {
ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
return res;
@@ -456,7 +458,10 @@
mTraceFirstBuffer = false;
}
// Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
- if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF) {
+ if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
+ (getDataSpace() ==
+ static_cast<android_dataspace_t>(
+ aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
if (mIPCTransport == IPCTransport::HIDL) {
fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
}
@@ -467,7 +472,7 @@
}
}
- nsecs_t captureTime = (mUseReadoutTime && readoutTimestamp != 0 ?
+ nsecs_t captureTime = ((mUseReadoutTime || mSyncToDisplay) && readoutTimestamp != 0 ?
readoutTimestamp : timestamp) - mTimestampOffset;
if (mPreviewFrameSpacer != nullptr) {
nsecs_t readoutTime = (readoutTimestamp != 0 ? readoutTimestamp : timestamp)
@@ -482,7 +487,7 @@
bufferDeferred = true;
} else {
nsecs_t presentTime = mSyncToDisplay ?
- syncTimestampToDisplayLocked(captureTime) : captureTime;
+ syncTimestampToDisplayLocked(captureTime, releaseFence->dup()) : captureTime;
setTransform(transform, true/*mayChangeMirror*/);
res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
@@ -513,10 +518,6 @@
mStreamUnpreparable = true;
}
- if (res != OK) {
- close(anwReleaseFence);
- }
-
*releaseFenceOut = releaseFence;
return res;
@@ -713,7 +714,8 @@
res = mPreviewFrameSpacer->run((std::string("PreviewSpacer-")
+ std::to_string(mId)).c_str());
if (res != OK) {
- ALOGE("%s: Unable to start preview spacer", __FUNCTION__);
+ ALOGE("%s: Unable to start preview spacer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
return res;
}
}
@@ -722,16 +724,12 @@
mFrameCount = 0;
mLastTimestamp = 0;
- mUseReadoutTime =
- (timestampBase == OutputConfiguration::TIMESTAMP_BASE_READOUT_SENSOR || mSyncToDisplay);
-
if (isDeviceTimeBaseRealtime()) {
if (isDefaultTimeBase && !isConsumedByHWComposer() && !isVideoStream()) {
// Default time base, but not hardware composer or video encoder
mTimestampOffset = 0;
} else if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME ||
- timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR ||
- timestampBase == OutputConfiguration::TIMESTAMP_BASE_READOUT_SENSOR) {
+ timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR) {
mTimestampOffset = 0;
}
// If timestampBase is CHOREOGRAPHER SYNCED or MONOTONIC, leave
@@ -741,7 +739,7 @@
// Reverse offset for monotonicTime -> bootTime
mTimestampOffset = -mTimestampOffset;
} else {
- // If timestampBase is DEFAULT, MONOTONIC, SENSOR, READOUT_SENSOR or
+ // If timestampBase is DEFAULT, MONOTONIC, SENSOR or
// CHOREOGRAPHER_SYNCED, timestamp offset is 0.
mTimestampOffset = 0;
}
@@ -1327,7 +1325,7 @@
void* mapped = nullptr;
base::unique_fd fenceFd(dup(fence));
status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
- fenceFd.get());
+ fenceFd.release());
if (res != OK) {
ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
return;
@@ -1414,7 +1412,7 @@
}
}
-nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t) {
+nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t, int releaseFence) {
nsecs_t currentTime = systemTime();
if (!mFixedFps) {
mLastCaptureTime = t;
@@ -1448,7 +1446,7 @@
//
nsecs_t captureInterval = t - mLastCaptureTime;
if (captureInterval > kSpacingResetIntervalNs) {
- for (size_t i = 0; i < VsyncEventData::kFrameTimelinesLength; i++) {
+ for (size_t i = 0; i < vsyncEventData.frameTimelinesLength; i++) {
const auto& timeline = vsyncEventData.frameTimelines[i];
if (timeline.deadlineTimestamp >= currentTime &&
timeline.expectedPresentationTime > minPresentT) {
@@ -1457,6 +1455,17 @@
mLastCaptureTime = t;
mLastPresentTime = presentT;
+ // If releaseFence is available, store the fence to check signal
+ // time later.
+ mRefVsyncData = vsyncEventData;
+ mReferenceCaptureTime = t;
+ mReferenceArrivalTime = currentTime;
+ if (releaseFence != -1) {
+ mReferenceFrameFence = new Fence(releaseFence);
+ } else {
+ mFenceSignalOffset = 0;
+ }
+
// Move the expected presentation time back by 1/3 of frame interval to
// mitigate the time drift. Due to time drift, if we directly use the
// expected presentation time, often times 2 expected presentation time
@@ -1466,6 +1475,36 @@
}
}
+ // If there is a reference frame release fence, get the signal time and
+ // update the captureToPresentOffset.
+ if (mReferenceFrameFence != nullptr) {
+ mFenceSignalOffset = 0;
+ nsecs_t signalTime = mReferenceFrameFence->getSignalTime();
+ // Now that the fence has signaled, recalculate the offsets based on
+ // the timeline which was actually latched
+ if (signalTime != INT64_MAX) {
+ for (size_t i = 0; i < mRefVsyncData.frameTimelinesLength; i++) {
+ const auto& timeline = mRefVsyncData.frameTimelines[i];
+ if (timeline.deadlineTimestamp >= signalTime) {
+ nsecs_t originalOffset = mCaptureToPresentOffset;
+ mCaptureToPresentOffset = timeline.expectedPresentationTime
+ - mReferenceCaptureTime;
+ mLastPresentTime = timeline.expectedPresentationTime;
+ mFenceSignalOffset = signalTime > mReferenceArrivalTime ?
+ signalTime - mReferenceArrivalTime : 0;
+
+ ALOGV("%s: Last deadline %" PRId64 " signalTime %" PRId64
+ " original offset %" PRId64 " new offset %" PRId64
+ " fencesignal offset %" PRId64, __FUNCTION__,
+ timeline.deadlineTimestamp, signalTime, originalOffset,
+ mCaptureToPresentOffset, mFenceSignalOffset);
+ break;
+ }
+ }
+ mReferenceFrameFence.clear();
+ }
+ }
+
nsecs_t idealPresentT = t + mCaptureToPresentOffset;
nsecs_t expectedPresentT = mLastPresentTime;
nsecs_t minDiff = INT64_MAX;
@@ -1509,6 +1548,7 @@
// Find best timestamp in the vsync timelines:
// - Only use at most kMaxTimelines timelines to avoid long latency
+ // - Add an extra timeline if display fence is used
// - closest to the ideal presentation time,
// - deadline timestamp is greater than the current time, and
// - For fixed FPS, if the capture interval doesn't deviate too much from refresh interval,
@@ -1517,7 +1557,9 @@
// - For variable FPS, or if the capture interval deviates from refresh
// interval for more than 5%, find a presentation time closest to the
// (lastPresentationTime + captureToPresentOffset) instead.
- int maxTimelines = std::min(kMaxTimelines, (int)VsyncEventData::kFrameTimelinesLength);
+ int fenceAdjustment = (mFenceSignalOffset > 0) ? 1 : 0;
+ int maxTimelines = std::min(kMaxTimelines + fenceAdjustment,
+ (int)vsyncEventData.frameTimelinesLength);
float biasForShortDelay = 1.0f;
for (int i = 0; i < maxTimelines; i ++) {
const auto& vsyncTime = vsyncEventData.frameTimelines[i];
@@ -1528,7 +1570,7 @@
biasForShortDelay = 1.0 - 2.0 * i / (maxTimelines - 1);
}
if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
- vsyncTime.deadlineTimestamp >= currentTime &&
+ vsyncTime.deadlineTimestamp >= currentTime + mFenceSignalOffset &&
((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
(cameraDisplayInSync && vsyncTime.expectedPresentationTime >
mLastPresentTime + minInterval +
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index c44b842..0b456c0 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -96,7 +96,9 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
/**
* Set up a stream for formats that have a variable buffer size for the same
* dimensions, such as compressed JPEG.
@@ -113,7 +115,9 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
/**
* Set up a stream with deferred consumer for formats that have 2 dimensions, such as
* RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -129,7 +133,9 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
virtual ~Camera3OutputStream();
@@ -278,7 +284,9 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
/**
* Note that we release the lock briefly in this function
@@ -438,7 +446,14 @@
static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
static constexpr float kMaxIntervalRatioDeviation = 0.05f;
static constexpr int kMaxTimelines = 2;
- nsecs_t syncTimestampToDisplayLocked(nsecs_t t);
+ nsecs_t syncTimestampToDisplayLocked(nsecs_t t, int releaseFence);
+
+ // In case of fence being used
+ sp<Fence> mReferenceFrameFence;
+ nsecs_t mReferenceCaptureTime = 0;
+ nsecs_t mReferenceArrivalTime = 0;
+ nsecs_t mFenceSignalOffset = 0;
+ VsyncEventData mRefVsyncData;
// Re-space frames by delaying queueBuffer so that frame delivery has
// the same cadence as capture. Default is on for SurfaceTexture bound
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index f25137a..1e7e337 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -129,12 +129,71 @@
return res;
}
+status_t fixupAutoframingTags(CameraMetadata& resultMetadata) {
+ status_t res = OK;
+ camera_metadata_entry autoframingEntry =
+ resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING);
+ if (autoframingEntry.count == 0) {
+ const uint8_t defaultAutoframingEntry = ANDROID_CONTROL_AUTOFRAMING_OFF;
+ res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING, &defaultAutoframingEntry, 1);
+ if (res != OK) {
+ ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ camera_metadata_entry autoframingStateEntry =
+ resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING_STATE);
+ if (autoframingStateEntry.count == 0) {
+ const uint8_t defaultAutoframingStateEntry = ANDROID_CONTROL_AUTOFRAMING_STATE_INACTIVE;
+ res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING_STATE,
+ &defaultAutoframingStateEntry, 1);
+ if (res != OK) {
+ ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING_STATE: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ return res;
+}
+
+void correctMeteringRegions(camera_metadata_t *meta) {
+ if (meta == nullptr) return;
+
+ uint32_t meteringRegionKeys[] = {
+ ANDROID_CONTROL_AE_REGIONS,
+ ANDROID_CONTROL_AWB_REGIONS,
+ ANDROID_CONTROL_AF_REGIONS };
+
+ for (uint32_t key : meteringRegionKeys) {
+ camera_metadata_entry_t entry;
+ int res = find_camera_metadata_entry(meta, key, &entry);
+ if (res != OK) continue;
+
+ for (size_t i = 0; i < entry.count; i += 5) {
+ if (entry.data.i32[0] > entry.data.i32[2]) {
+ ALOGW("%s: Invalid metering region (%d): left: %d, right: %d",
+ __FUNCTION__, key, entry.data.i32[0], entry.data.i32[2]);
+ entry.data.i32[2] = entry.data.i32[0];
+ }
+ if (entry.data.i32[1] > entry.data.i32[3]) {
+ ALOGW("%s: Invalid metering region (%d): top: %d, bottom: %d",
+ __FUNCTION__, key, entry.data.i32[1], entry.data.i32[3]);
+ entry.data.i32[3] = entry.data.i32[1];
+ }
+ }
+ }
+}
+
void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
if (result == nullptr) return;
camera_metadata_t *meta = const_cast<camera_metadata_t *>(
result->mMetadata.getAndLock());
set_camera_metadata_vendor_id(meta, states.vendorTagId);
+ correctMeteringRegions(meta);
result->mMetadata.unlock(meta);
if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
@@ -153,6 +212,7 @@
camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
physicalMetadata.mPhysicalCameraMetadata.getAndLock());
set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
+ correctMeteringRegions(pmeta);
physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
}
@@ -325,6 +385,22 @@
}
}
+ // Fix up autoframing metadata
+ res = fixupAutoframingTags(captureResult.mMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to set autoframing defaults in result metadata: %s (%d)",
+ strerror(-res), res);
+ return;
+ }
+ for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+ res = fixupAutoframingTags(physicalMetadata.mPhysicalCameraMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to set autoframing defaults in physical result metadata: %s (%d)",
+ strerror(-res), res);
+ return;
+ }
+ }
+
for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
const std::string cameraId = physicalMetadata.mPhysicalCameraId;
auto mapper = states.distortionMappers.find(cameraId);
@@ -465,6 +541,32 @@
return found;
}
+const std::set<std::string>& getCameraIdsWithZoomLocked(
+ const InFlightRequestMap& inflightMap, const CameraMetadata& metadata,
+ const std::set<std::string>& cameraIdsWithZoom) {
+ camera_metadata_ro_entry overrideEntry =
+ metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+ camera_metadata_ro_entry frameNumberEntry =
+ metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER);
+ if (overrideEntry.count != 1
+ || overrideEntry.data.i32[0] != ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM
+ || frameNumberEntry.count != 1) {
+ // No valid overriding frame number, skip
+ return cameraIdsWithZoom;
+ }
+
+ uint32_t overridingFrameNumber = frameNumberEntry.data.i32[0];
+ ssize_t idx = inflightMap.indexOfKey(overridingFrameNumber);
+ if (idx < 0) {
+ ALOGE("%s: Failed to find pending request #%d in inflight map",
+ __FUNCTION__, overridingFrameNumber);
+ return cameraIdsWithZoom;
+ }
+
+ const InFlightRequest &r = inflightMap.valueFor(overridingFrameNumber);
+ return r.cameraIdsWithZoom;
+}
+
void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
ATRACE_CALL();
@@ -676,10 +778,12 @@
} else if (request.hasCallback) {
CameraMetadata metadata;
metadata = result->result;
+ auto cameraIdsWithZoom = getCameraIdsWithZoomLocked(
+ states.inflightMap, metadata, request.cameraIdsWithZoom);
sendCaptureResult(states, metadata, request.resultExtras,
collectedPartialResult, frameNumber,
hasInputBufferInRequest, request.zslCapture && request.stillCapture,
- request.rotateAndCropAuto, request.cameraIdsWithZoom,
+ request.rotateAndCropAuto, cameraIdsWithZoom,
request.physicalMetadatas);
}
}
@@ -906,11 +1010,13 @@
states.listener->notifyShutter(r.resultExtras, msg.timestamp);
}
// send pending result and buffers
+ const auto& cameraIdsWithZoom = getCameraIdsWithZoomLocked(
+ inflightMap, r.pendingMetadata, r.cameraIdsWithZoom);
sendCaptureResult(states,
r.pendingMetadata, r.resultExtras,
r.collectedPartialResult, msg.frame_number,
r.hasInputBuffer, r.zslCapture && r.stillCapture,
- r.rotateAndCropAuto, r.cameraIdsWithZoom, r.physicalMetadatas);
+ r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
}
returnAndRemovePendingOutputBuffers(
states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 64810d4..1191f05 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -35,12 +35,13 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool useHalBufManager, int64_t dynamicProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
transport, consumerUsage, timestampOffset, setId,
/*isMultiResolution*/false, dynamicProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase, mirrorMode),
+ deviceTimeBaseIsRealtime, timestampBase, mirrorMode, colorSpace,
+ useReadoutTimestamp),
mUseHalBufManager(useHalBufManager) {
size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
if (surfaces.size() > consumerCount) {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 0caa90b..c2ff20e 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -45,7 +45,9 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
virtual ~Camera3SharedOutputStream();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 27269a6..23afa6e 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -56,7 +56,8 @@
const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace) :
camera_stream(),
mId(id),
mSetId(setId),
@@ -96,6 +97,7 @@
camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
camera_stream::dynamic_range_profile = dynamicRangeProfile;
camera_stream::use_case = streamUseCase;
+ camera_stream::color_space = colorSpace;
if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
maxSize == 0) {
@@ -136,6 +138,10 @@
return camera_stream::data_space;
}
+int32_t Camera3Stream::getColorSpace() const {
+ return camera_stream::color_space;
+}
+
uint64_t Camera3Stream::getUsage() const {
return mUsage;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index e451fa4..2bfaaab 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -166,6 +166,7 @@
uint32_t getHeight() const;
int getFormat() const;
android_dataspace getDataSpace() const;
+ int32_t getColorSpace() const;
uint64_t getUsage() const;
void setUsage(uint64_t usage);
void setFormatOverride(bool formatOverridden);
@@ -508,7 +509,8 @@
const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace);
wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index d715306..7fa6273 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -67,6 +67,7 @@
std::unordered_set<int32_t> sensor_pixel_modes_used;
int64_t dynamic_range_profile;
int64_t use_case;
+ int32_t color_space;
} camera_stream_t;
typedef struct camera_stream_buffer {
@@ -114,20 +115,24 @@
int64_t streamUseCase;
int timestampBase;
int mirrorMode;
+ int32_t colorSpace;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
consumerUsage(0),
dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
timestampBase(OutputConfiguration::TIMESTAMP_BASE_DEFAULT),
- mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO) {}
+ mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
+ colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
- int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode) :
+ int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode,
+ int32_t _colorSpace) :
width(_width), height(_height), format(_format),
dataSpace(_dataSpace), consumerUsage(_consumerUsage),
sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
- streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode) {}
+ streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode),
+ colorSpace(_colorSpace) {}
};
// Utility class to lock and unlock a GraphicBuffer
@@ -206,6 +211,7 @@
virtual int getFormat() const = 0;
virtual int64_t getDynamicRangeProfile() const = 0;
virtual android_dataspace getDataSpace() const = 0;
+ virtual int32_t getColorSpace() const = 0;
virtual void setFormatOverride(bool formatOverriden) = 0;
virtual bool isFormatOverridden() const = 0;
virtual int getOriginalFormat() const = 0;
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index 15807bf..f0764b4 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -67,7 +67,7 @@
return res;
}
- bool mMaxResolution = SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+ bool mMaxResolution = SessionConfigurationUtils::supportsUltraHighResolutionCapture(deviceInfo);
if (mMaxResolution) {
res = setupStaticInfoLocked(deviceInfo, /*maxResolution*/true);
}
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 4c19349..665ac73 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -181,6 +181,9 @@
// Indicates that ROTATE_AND_CROP was set to AUTO
bool rotateAndCropAuto;
+ // Indicates that AUTOFRAMING was set to AUTO
+ bool autoframingAuto;
+
// Requested camera ids (both logical and physical) with zoomRatio != 1.0f
std::set<std::string> cameraIdsWithZoom;
@@ -213,6 +216,7 @@
stillCapture(false),
zslCapture(false),
rotateAndCropAuto(false),
+ autoframingAuto(false),
requestTimeNs(0),
transform(-1) {
}
@@ -220,8 +224,9 @@
InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration, bool fixedFps,
const std::set<std::set<std::string>>& physicalCameraIdSet, bool isStillCapture,
- bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
- nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
+ bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
+ const std::set<std::string>& idsWithZoom, nsecs_t requestNs,
+ const SurfaceMap& outSurfaces = SurfaceMap{}) :
shutterTimestamp(0),
sensorTimestamp(0),
requestStatus(OK),
@@ -239,6 +244,7 @@
stillCapture(isStillCapture),
zslCapture(isZslCapture),
rotateAndCropAuto(rotateAndCropAuto),
+ autoframingAuto(autoframingAuto),
cameraIdsWithZoom(idsWithZoom),
requestTimeNs(requestNs),
outputSurfaces(outSurfaces),
diff --git a/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
index c558d91..ce7097a 100644
--- a/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
+++ b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
@@ -91,6 +91,8 @@
if (meteringRegionsSetEntry.count == 1 &&
meteringRegionsSetEntry.data.u8[0] == entry.second.second) {
// metering region set by client, doesn't need to be fixed.
+ ALOGV("%s: Metering region %u set by client, they don't need to be fixed",
+ __FUNCTION__, entry.first);
continue;
}
camera_metadata_entry meteringRegionEntry = request->find(entry.first);
@@ -121,6 +123,7 @@
if (cropRegionSetEntry.count == 1 &&
cropRegionSetEntry.data.u8[0] == ANDROID_SCALER_CROP_REGION_SET_TRUE) {
// crop regions set by client, doesn't need to be fixed.
+ ALOGV("%s: crop region set by client, doesn't need to be fixed", __FUNCTION__);
return;
}
camera_metadata_entry_t cropRegionEntry = request->find(ANDROID_SCALER_CROP_REGION);
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 27b00c9..aaa1b70 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -153,9 +153,9 @@
return;
}
- bool isUltraHighResolutionSensor =
- camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(*deviceInfo);
- if (isUltraHighResolutionSensor) {
+ bool supportsUltraHighResolutionCapture =
+ camera3::SessionConfigurationUtils::supportsUltraHighResolutionCapture(*deviceInfo);
+ if (supportsUltraHighResolutionCapture) {
if (!SessionConfigurationUtils::getArrayWidthAndHeight(deviceInfo,
ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
&arrayMaximumResolutionW, &arrayMaximumResolutionH)) {
@@ -354,17 +354,8 @@
if (weight == 0) {
continue;
}
- // Top left (inclusive)
- scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, true /*clamp*/, arrayWidth,
+ scaleRegion(entry.data.i32 + j, zoomRatio, arrayWidth,
arrayHeight);
- // Bottom right (exclusive): Use adjacent inclusive pixel to
- // calculate.
- entry.data.i32[j+2] -= 1;
- entry.data.i32[j+3] -= 1;
- scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, true /*clamp*/, arrayWidth,
- arrayHeight);
- entry.data.i32[j+2] += 1;
- entry.data.i32[j+3] += 1;
}
}
@@ -401,17 +392,8 @@
if (weight == 0) {
continue;
}
- // Top-left (inclusive)
- scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
+ scaleRegion(entry.data.i32 + j, 1.0 / zoomRatio, arrayWidth,
arrayHeight);
- // Bottom-right (exclusive): Use adjacent inclusive pixel to
- // calculate.
- entry.data.i32[j+2] -= 1;
- entry.data.i32[j+3] -= 1;
- scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
- arrayHeight);
- entry.data.i32[j+2] += 1;
- entry.data.i32[j+3] += 1;
}
}
for (auto rect : kRectsToCorrect) {
@@ -470,6 +452,24 @@
}
}
+void ZoomRatioMapper::scaleRegion(int32_t* region, float scaleRatio,
+ int32_t arrayWidth, int32_t arrayHeight) {
+ // Top-left (inclusive)
+ scaleCoordinates(region, 1, scaleRatio, true /*clamp*/, arrayWidth,
+ arrayHeight);
+ // Bottom-right (exclusive): Use adjacent inclusive pixel to
+ // calculate.
+ region[2] -= 1;
+ region[3] -= 1;
+ scaleCoordinates(region + 2, 1, scaleRatio, true /*clamp*/, arrayWidth,
+ arrayHeight);
+ region[2] += 1;
+ region[3] += 1;
+ // Make sure bottom-right >= top-left
+ region[2] = std::max(region[0], region[2]);
+ region[3] = std::max(region[1], region[3]);
+}
+
void ZoomRatioMapper::scaleRects(int32_t* rects, int rectCount,
float scaleRatio, int32_t arrayWidth, int32_t arrayHeight) {
for (int i = 0; i < rectCount * 4; i += 4) {
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index b7a9e41..1aa8e78 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -69,6 +69,8 @@
public: // Visible for testing. Do not use concurently.
void scaleCoordinates(int32_t* coordPairs, int coordCount,
float scaleRatio, bool clamp, int32_t arrayWidth, int32_t arrayHeight);
+ void scaleRegion(int32_t* region, float scaleRatio,
+ int32_t arrayWidth, int32_t arrayHeight);
bool isValid() { return mIsValid; }
private:
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 1f9313e..af48dd6 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -163,9 +163,12 @@
return (uint64_t)usage;
}
-AidlCamera3Device::AidlCamera3Device(const std::string& id, bool overrideForPerfClass,
- bool overrideToPortrait, bool legacyClient) :
- Camera3Device(id, overrideForPerfClass, overrideToPortrait, legacyClient) {
+AidlCamera3Device::AidlCamera3Device(
+ std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ bool legacyClient) :
+ Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
+ legacyClient) {
mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
}
@@ -203,6 +206,7 @@
return res;
}
mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId);
+ mIsCompositeJpegRDisabled = manager->isCompositeJpegRDisabled(mId);
std::vector<std::string> physicalCameraIds;
bool isLogical = manager->isLogicalCamera(mId, &physicalCameraIds);
@@ -236,7 +240,7 @@
&mPhysicalDeviceInfoMap[physicalId],
mSupportNativeZoomRatio, usePrecorrectArray);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(
mPhysicalDeviceInfoMap[physicalId])) {
mUHRCropAndMeteringRegionMappers[physicalId] =
UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
@@ -872,8 +876,9 @@
}
status_t AidlCamera3Device::AidlHalInterface::configureStreams(
- const camera_metadata_t *sessionParams,
- camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
+ const camera_metadata_t *sessionParams,
+ camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes,
+ int64_t logId) {
using camera::device::StreamType;
using camera::device::StreamConfigurationMode;
@@ -915,6 +920,7 @@
cam3stream->getOriginalFormat() : src->format);
dst.dataSpace = mapToAidlDataspace(cam3stream->isDataSpaceOverridden() ?
cam3stream->getOriginalDataSpace() : src->data_space);
+ dst.colorSpace = src->color_space;
dst.bufferSize = bufferSizes[i];
if (!src->physical_camera_id.empty()) {
@@ -957,6 +963,7 @@
requestedConfiguration.streamConfigCounter = mNextStreamConfigCounter++;
requestedConfiguration.multiResolutionInputImage = config->input_is_multi_resolution;
+ requestedConfiguration.logId = logId;
auto err = mAidlSession->configureStreams(requestedConfiguration, &finalConfiguration);
if (!err.isOk()) {
ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
@@ -1413,9 +1420,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) :
+ bool overrideToPortrait,
+ bool supportSettingsOverride) :
RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
- supportCameraMute, overrideToPortrait) {}
+ supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -1585,9 +1593,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) {
+ bool overrideToPortrait,
+ bool supportSettingsOverride) {
return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
- useHalBufManager, supportCameraMute, overrideToPortrait);
+ useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
};
sp<Camera3Device::Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index f4554d4..e0be367 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -39,7 +39,9 @@
using AidlRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
class AidlCameraDeviceCallbacks;
friend class AidlCameraDeviceCallbacks;
- explicit AidlCamera3Device(const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ explicit AidlCamera3Device(
+ std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
bool legacyClient = false);
virtual ~AidlCamera3Device() { }
@@ -99,7 +101,9 @@
virtual status_t configureStreams(const camera_metadata_t *sessionParams,
/*inout*/ camera_stream_configuration_t *config,
- const std::vector<uint32_t>& bufferSizes) override;
+ const std::vector<uint32_t>& bufferSizes,
+ int64_t logId) override;
+
// The injection camera configures the streams to hal.
virtual status_t configureInjectedStreams(
const camera_metadata_t* sessionParams,
@@ -175,7 +179,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait);
+ bool overrideToPortrait,
+ bool supportSettingsOverride);
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -261,7 +266,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) override;
+ bool overrideToPortrait,
+ bool supportSettingsOverride) override;
virtual sp<Camera3DeviceInjectionMethods>
createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index c22aad6..06af5ff 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -204,7 +204,7 @@
&mPhysicalDeviceInfoMap[physicalId],
mSupportNativeZoomRatio, usePrecorrectArray);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(
mPhysicalDeviceInfoMap[physicalId])) {
mUHRCropAndMeteringRegionMappers[physicalId] =
UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
@@ -705,9 +705,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) {
+ bool overrideToPortrait,
+ bool supportSettingsOverride) {
return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
- useHalBufManager, supportCameraMute, overrideToPortrait);
+ useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
};
sp<Camera3Device::Camera3DeviceInjectionMethods>
@@ -881,7 +882,8 @@
status_t HidlCamera3Device::HidlHalInterface::configureStreams(
const camera_metadata_t *sessionParams,
- camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
+ camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes,
+ int64_t /*logId*/) {
ATRACE_NAME("CameraHal::configureStreams");
if (!valid()) return INVALID_OPERATION;
status_t res = OK;
@@ -1701,9 +1703,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) :
+ bool overrideToPortrait,
+ bool supportSettingsOverride) :
RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
- supportCameraMute, overrideToPortrait) {}
+ supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index e64bcf0..2cfdf9d 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -31,9 +31,12 @@
public Camera3Device {
public:
- explicit HidlCamera3Device(const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
- bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, overrideToPortrait,
- legacyClient) { }
+ explicit HidlCamera3Device(
+ std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ bool legacyClient = false) :
+ Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
+ legacyClient) { }
virtual ~HidlCamera3Device() {}
@@ -108,7 +111,8 @@
virtual status_t configureStreams(const camera_metadata_t *sessionParams,
/*inout*/ camera_stream_configuration_t *config,
- const std::vector<uint32_t>& bufferSizes) override;
+ const std::vector<uint32_t>& bufferSizes,
+ int64_t logId) override;
// The injection camera configures the streams to hal.
virtual status_t configureInjectedStreams(
@@ -174,7 +178,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait);
+ bool overrideToPortrait,
+ bool supportSettingsOverride);
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -222,7 +227,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) override;
+ bool overrideToPortrait,
+ bool supportSettingsOverride) override;
virtual sp<Camera3DeviceInjectionMethods>
createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
index 3392db1..de51ffa 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
@@ -17,6 +17,7 @@
#include <hidl/AidlCameraDeviceCallbacks.h>
#include <hidl/Utils.h>
+#include <aidl/AidlUtils.h>
namespace android {
namespace frameworks {
@@ -144,7 +145,7 @@
// Convert Metadata into HCameraMetadata;
FmqSizeOrMetadata hResult;
- using hardware::cameraservice::utils::conversion::filterVndkKeys;
+ using hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
if (filterVndkKeys(mVndkVersion, result, /*isStatic*/false) != OK) {
ALOGE("%s: filtering vndk keys from result failed, not sending onResultReceived callback",
__FUNCTION__);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index beedba8..59fc1cd 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -19,10 +19,12 @@
#include <gui/Surface.h>
#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <aidl/AidlUtils.h>
#include <hidl/AidlCameraDeviceCallbacks.h>
#include <hidl/HidlCameraDeviceUser.h>
#include <hidl/Utils.h>
#include <android/hardware/camera/device/3.2/types.h>
+#include <android-base/properties.h>
namespace android {
namespace frameworks {
@@ -31,6 +33,7 @@
namespace V2_1 {
namespace implementation {
+using hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
using hardware::cameraservice::utils::conversion::convertToHidl;
using hardware::cameraservice::utils::conversion::convertFromHidl;
using hardware::cameraservice::utils::conversion::B2HStatus;
@@ -55,6 +58,7 @@
const sp<hardware::camera2::ICameraDeviceUser> &deviceRemote)
: mDeviceRemote(deviceRemote) {
mInitSuccess = initDevice();
+ mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
}
bool HidlCameraDeviceUser::initDevice() {
@@ -235,8 +239,16 @@
android::CameraMetadata cameraMetadata;
binder::Status ret = mDeviceRemote->createDefaultRequest(convertFromHidl(templateId),
&cameraMetadata);
- HStatus hStatus = B2HStatus(ret);
+
HCameraMetadata hidlMetadata;
+ if (filterVndkKeys(mVndkVersion, cameraMetadata, /*isStatic*/false) != OK) {
+ ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+ __FUNCTION__, mVndkVersion);
+ _hidl_cb(HStatus::UNKNOWN_ERROR, hidlMetadata);
+ return Void();
+ }
+
+ HStatus hStatus = B2HStatus(ret);
const camera_metadata_t *rawMetadata = cameraMetadata.getAndLock();
convertToHidl(rawMetadata, &hidlMetadata);
_hidl_cb(hStatus, hidlMetadata);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
index 0e2ab3d..a653ca2 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
@@ -127,6 +127,7 @@
std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
bool mInitSuccess = false;
int32_t mRequestId = REQUEST_ID_NONE;
+ int mVndkVersion = -1;
};
} // implementation
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 90ba294..94bf653 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -21,6 +21,7 @@
#include <hidl/HidlCameraService.h>
#include <hidl/HidlCameraDeviceUser.h>
#include <hidl/Utils.h>
+#include <aidl/AidlUtils.h>
#include <hidl/HidlTransportSupport.h>
@@ -34,9 +35,9 @@
using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
using hardware::hidl_vec;
using hardware::cameraservice::utils::conversion::convertToHidl;
-using hardware::cameraservice::utils::conversion::filterVndkKeys;
using hardware::cameraservice::utils::conversion::B2HStatus;
using hardware::Void;
+using hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
using device::V2_0::implementation::H2BCameraDeviceCallbacks;
using device::V2_1::implementation::HidlCameraDeviceUser;
diff --git a/services/camera/libcameraservice/hidl/Utils.cpp b/services/camera/libcameraservice/hidl/Utils.cpp
index ea05636..b5dddf7 100644
--- a/services/camera/libcameraservice/hidl/Utils.cpp
+++ b/services/camera/libcameraservice/hidl/Utils.cpp
@@ -15,7 +15,6 @@
*/
#include <hidl/Utils.h>
-#include <hidl/VndkVersionMetadataTags.h>
#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
#include <cutils/native_handle.h>
#include <mediautils/AImageReaderUtils.h>
@@ -299,31 +298,6 @@
return hPhysicalCaptureResultInfos;
}
-status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic) {
- if (vndkVersion == __ANDROID_API_FUTURE__) {
- // VNDK version in ro.vndk.version is a version code-name that
- // corresponds to the current version.
- return OK;
- }
- const auto &apiLevelToKeys =
- isStatic ? static_api_level_to_keys : dynamic_api_level_to_keys;
- // Find the vndk versions above the given vndk version. All the vndk
- // versions above the given one, need to have their keys filtered from the
- // metadata in order to avoid metadata invalidation.
- auto it = apiLevelToKeys.upper_bound(vndkVersion);
- while (it != apiLevelToKeys.end()) {
- for (const auto &key : it->second) {
- status_t res = metadata.erase(key);
- if (res != OK) {
- ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
- return res;
- }
- }
- it++;
- }
- return OK;
-}
-
} //conversion
} // utils
} //cameraservice
diff --git a/services/camera/libcameraservice/hidl/Utils.h b/services/camera/libcameraservice/hidl/Utils.h
index e6d4393..ec06571 100644
--- a/services/camera/libcameraservice/hidl/Utils.h
+++ b/services/camera/libcameraservice/hidl/Utils.h
@@ -97,8 +97,6 @@
HStatus B2HStatus(const binder::Status &bStatus);
-status_t filterVndkKeys(int vndk_version, CameraMetadata &metadata, bool isStatic = true);
-
} // conversion
} // utils
} // cameraservice
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index 4986199..921ad7d 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -49,7 +49,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
@@ -61,6 +61,7 @@
fuzz_config: {
cc: [
"android-media-fuzzing-reports@google.com",
+ "android-camera-fwk-eng@google.com",
],
componentid: 155276,
libfuzzer_options: [
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 11a2d09..854c342 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -111,12 +111,15 @@
size_t mPreviewBufferCount = 0;
bool mAutoFocusMessage = false;
bool mSnapshotNotification = false;
+ bool mRecordingNotification = false;
mutable Mutex mPreviewLock;
mutable Condition mPreviewCondition;
mutable Mutex mAutoFocusLock;
mutable Condition mAutoFocusCondition;
mutable Mutex mSnapshotLock;
mutable Condition mSnapshotCondition;
+ mutable Mutex mRecordingLock;
+ mutable Condition mRecordingCondition;
void getNumCameras();
void getCameraInformation(int32_t cameraId);
@@ -125,6 +128,7 @@
void invokeDump();
void invokeShellCommand();
void invokeNotifyCalls();
+ void invokeTorchAPIs(int32_t cameraId);
// CameraClient interface
void notifyCallback(int32_t msgType, int32_t, int32_t) override;
@@ -152,6 +156,8 @@
Mutex::Autolock l(mPreviewLock);
++mPreviewBufferCount;
mPreviewCondition.broadcast();
+ mRecordingNotification = true;
+ mRecordingCondition.broadcast();
break;
}
case CAMERA_MSG_COMPRESSED_IMAGE: {
@@ -311,116 +317,155 @@
mCameraService->notifySystemEvent(eventId, args);
}
+void CameraFuzzer::invokeTorchAPIs(int32_t cameraId) {
+ std::string cameraIdStr = std::to_string(cameraId);
+ sp<IBinder> binder = new BBinder;
+
+ mCameraService->setTorchMode(cameraIdStr, true, binder);
+ ALOGV("Turned torch on.");
+ int32_t torchStrength = rand() % 5 + 1;
+ ALOGV("Changing torch strength level to %d", torchStrength);
+ mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder);
+ mCameraService->setTorchMode(cameraIdStr, false, binder);
+ ALOGV("Turned torch off.");
+}
+
void CameraFuzzer::invokeCameraAPIs() {
- for (int32_t cameraId = 0; cameraId < mNumCameras; ++cameraId) {
- getCameraInformation(cameraId);
+ /** In order to avoid the timeout issue caused due to multiple iteration of loops, the 'for'
+ * loops are removed and the 'cameraId', 'pictureSize' and 'videoSize' are derived using the
+ * FuzzedDataProvider from the available cameras and vectors of 'pictureSizes' and 'videoSizes'
+ */
+ int32_t cameraId = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(0, mNumCameras - 1);
+ getCameraInformation(cameraId);
+ invokeTorchAPIs(cameraId);
- ::android::binder::Status rc;
- sp<ICamera> cameraDevice;
+ ::android::binder::Status rc;
+ sp<ICamera> cameraDevice;
- rc = mCameraService->connect(this, cameraId, std::string(),
- android::CameraService::USE_CALLING_UID, android::CameraService::USE_CALLING_PID,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
- /*forceSlowJpegMode*/false,
- &cameraDevice);
- if (!rc.isOk()) {
- // camera not connected
- return;
+ rc = mCameraService->connect(this, cameraId, std::string(),
+ android::CameraService::USE_CALLING_UID,
+ android::CameraService::USE_CALLING_PID,
+ /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+ /*overrideToPortrait*/true, /*forceSlowJpegMode*/false,
+ &cameraDevice);
+ if (!rc.isOk()) {
+ // camera not connected
+ return;
+ }
+ if (cameraDevice) {
+ sp<Surface> previewSurface;
+ sp<SurfaceControl> surfaceControl;
+ CameraParameters params(cameraDevice->getParameters());
+ String8 focusModes(params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
+ bool isAFSupported = false;
+ const char* focusMode = nullptr;
+
+ if (focusModes.contains(CameraParameters::FOCUS_MODE_AUTO)) {
+ isAFSupported = true;
+ } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
+ isAFSupported = true;
+ focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+ } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ isAFSupported = true;
+ focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
+ } else if (focusModes.contains(CameraParameters::FOCUS_MODE_MACRO)) {
+ isAFSupported = true;
+ focusMode = CameraParameters::FOCUS_MODE_MACRO;
}
- if (cameraDevice) {
- sp<Surface> previewSurface;
- sp<SurfaceControl> surfaceControl;
- CameraParameters params(cameraDevice->getParameters());
- String8 focusModes(params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
- bool isAFSupported = false;
- const char *focusMode = nullptr;
+ if (nullptr != focusMode) {
+ params.set(CameraParameters::KEY_FOCUS_MODE, focusMode);
+ cameraDevice->setParameters(params.flatten());
+ }
+ int previewWidth, previewHeight;
+ params.getPreviewSize(&previewWidth, &previewHeight);
- if (focusModes.contains(CameraParameters::FOCUS_MODE_AUTO)) {
- isAFSupported = true;
- } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
- isAFSupported = true;
- focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
- } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
- isAFSupported = true;
- focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
- } else if (focusModes.contains(CameraParameters::FOCUS_MODE_MACRO)) {
- isAFSupported = true;
- focusMode = CameraParameters::FOCUS_MODE_MACRO;
- }
- if (nullptr != focusMode) {
- params.set(CameraParameters::KEY_FOCUS_MODE, focusMode);
- cameraDevice->setParameters(params.flatten());
- }
- int previewWidth, previewHeight;
- params.getPreviewSize(&previewWidth, &previewHeight);
+ mComposerClient = new SurfaceComposerClient;
+ mComposerClient->initCheck();
- mComposerClient = new SurfaceComposerClient;
- mComposerClient->initCheck();
-
- bool shouldPassInvalidLayerMetaData = mFuzzedDataProvider->ConsumeBool();
- int layerMetaData;
- if (shouldPassInvalidLayerMetaData) {
- layerMetaData = mFuzzedDataProvider->ConsumeIntegral<int>();
- } else {
- layerMetaData = kLayerMetadata[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+ bool shouldPassInvalidLayerMetaData = mFuzzedDataProvider->ConsumeBool();
+ int layerMetaData;
+ if (shouldPassInvalidLayerMetaData) {
+ layerMetaData = mFuzzedDataProvider->ConsumeIntegral<int>();
+ } else {
+ layerMetaData = kLayerMetadata[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
0, kNumLayerMetaData - 1)];
- }
- surfaceControl = mComposerClient->createSurface(
+ }
+ surfaceControl = mComposerClient->createSurface(
String8("Test Surface"), previewWidth, previewHeight,
CameraParameters::previewFormatToEnum(params.getPreviewFormat()), layerMetaData);
- if (surfaceControl.get() != nullptr) {
- SurfaceComposerClient::Transaction{}
+ if (surfaceControl.get()) {
+ SurfaceComposerClient::Transaction{}
.setLayer(surfaceControl, 0x7fffffff)
.show(surfaceControl)
.apply();
- previewSurface = surfaceControl->getSurface();
+ previewSurface = surfaceControl->getSurface();
+ if (previewSurface.get()) {
cameraDevice->setPreviewTarget(previewSurface->getIGraphicBufferProducer());
}
- cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
+ }
+ cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
- Vector<Size> pictureSizes;
- params.getSupportedPictureSizes(pictureSizes);
+ Vector<Size> pictureSizes;
+ params.getSupportedPictureSizes(pictureSizes);
- for (size_t i = 0; i < pictureSizes.size(); ++i) {
- params.setPictureSize(pictureSizes[i].width, pictureSizes[i].height);
- cameraDevice->setParameters(params.flatten());
- cameraDevice->startPreview();
- waitForPreviewStart();
- cameraDevice->autoFocus();
- waitForEvent(mAutoFocusLock, mAutoFocusCondition, mAutoFocusMessage);
- bool shouldPassInvalidCameraMsg = mFuzzedDataProvider->ConsumeBool();
- int msgType;
- if (shouldPassInvalidCameraMsg) {
- msgType = mFuzzedDataProvider->ConsumeIntegral<int>();
- } else {
- msgType = kCameraMsg[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+ if (pictureSizes.size()) {
+ Size pictureSize = pictureSizes[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+ 0, pictureSizes.size() - 1)];
+ params.setPictureSize(pictureSize.width, pictureSize.height);
+ cameraDevice->setParameters(params.flatten());
+ cameraDevice->startPreview();
+ waitForPreviewStart();
+ cameraDevice->autoFocus();
+ waitForEvent(mAutoFocusLock, mAutoFocusCondition, mAutoFocusMessage);
+ bool shouldPassInvalidCameraMsg = mFuzzedDataProvider->ConsumeBool();
+ int msgType;
+ if (shouldPassInvalidCameraMsg) {
+ msgType = mFuzzedDataProvider->ConsumeIntegral<int>();
+ } else {
+ msgType = kCameraMsg[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
0, kNumCameraMsg - 1)];
- }
- cameraDevice->takePicture(msgType);
-
- waitForEvent(mSnapshotLock, mSnapshotCondition, mSnapshotNotification);
}
+ cameraDevice->takePicture(msgType);
- Vector<Size> videoSizes;
- params.getSupportedVideoSizes(videoSizes);
+ waitForEvent(mSnapshotLock, mSnapshotCondition, mSnapshotNotification);
+ cameraDevice->stopPreview();
+ }
- for (size_t i = 0; i < videoSizes.size(); ++i) {
- params.setVideoSize(videoSizes[i].width, videoSizes[i].height);
+ Vector<Size> videoSizes;
+ params.getSupportedVideoSizes(videoSizes);
- cameraDevice->setParameters(params.flatten());
- cameraDevice->startPreview();
- waitForPreviewStart();
- cameraDevice->setVideoBufferMode(
+ if (videoSizes.size()) {
+ Size videoSize = videoSizes[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+ 0, videoSizes.size() - 1)];
+ params.setVideoSize(videoSize.width, videoSize.height);
+
+ cameraDevice->setParameters(params.flatten());
+ cameraDevice->startPreview();
+ waitForPreviewStart();
+ cameraDevice->setVideoBufferMode(
android::hardware::BnCamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
- cameraDevice->setVideoTarget(previewSurface->getIGraphicBufferProducer());
- cameraDevice->startRecording();
- cameraDevice->stopRecording();
+ sp<SurfaceControl> surfaceControlVideo = mComposerClient->createSurface(
+ String8("Test Surface Video"), previewWidth, previewHeight,
+ CameraParameters::previewFormatToEnum(params.getPreviewFormat()),
+ layerMetaData);
+ if (surfaceControlVideo.get()) {
+ SurfaceComposerClient::Transaction{}
+ .setLayer(surfaceControlVideo, 0x7fffffff)
+ .show(surfaceControlVideo)
+ .apply();
+ sp<Surface> previewSurfaceVideo = surfaceControlVideo->getSurface();
+ if (previewSurfaceVideo.get()) {
+ cameraDevice->setVideoTarget(previewSurfaceVideo->getIGraphicBufferProducer());
+ }
}
cameraDevice->stopPreview();
- cameraDevice->disconnect();
+ cameraDevice->startRecording();
+ waitForEvent(mRecordingLock, mRecordingCondition, mRecordingNotification);
+ cameraDevice->stopRecording();
}
+ cameraDevice->disconnect();
}
}
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 3616572..5e2a3fb 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -27,8 +27,13 @@
"external/dynamic_depth/internal",
],
+ header_libs: [
+ "libmedia_headers",
+ ],
+
shared_libs: [
"libbase",
+ "libbinder",
"libcutils",
"libcameraservice",
"libhidlbase",
@@ -44,7 +49,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
@@ -57,6 +62,7 @@
],
srcs: [
+ "CameraPermissionsTest.cpp",
"CameraProviderManagerTest.cpp",
"ClientManagerTest.cpp",
"DepthProcessorTest.cpp",
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
new file mode 100644
index 0000000..db43a02
--- /dev/null
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -0,0 +1,303 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/hardware/BnCameraServiceListener.h>
+#include <android/hardware/BnCameraServiceProxy.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
+#include <android/hardware/ICameraService.h>
+
+#include <private/android_filesystem_config.h>
+
+#include "../CameraService.h"
+#include "../utils/CameraServiceProxyWrapper.h"
+
+#include <gtest/gtest.h>
+
+#include <memory>
+#include <vector>
+
+using namespace android;
+using namespace android::hardware::camera;
+
+// Empty service listener.
+class TestCameraServiceListener : public hardware::BnCameraServiceListener {
+public:
+ virtual ~TestCameraServiceListener() {};
+
+ virtual binder::Status onStatusChanged(int32_t , const std::string&) {
+ return binder::Status::ok();
+ };
+
+ virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
+ const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) {
+ // No op
+ return binder::Status::ok();
+ };
+
+ virtual binder::Status onTorchStatusChanged(int32_t /*status*/,
+ const std::string& /*cameraId*/) {
+ return binder::Status::ok();
+ };
+
+ virtual binder::Status onCameraAccessPrioritiesChanged() {
+ // No op
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
+ const std::string& /*clientPackageName*/) {
+ // No op
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
+ // No op
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
+ int32_t /*torchStrength*/) {
+ // No op
+ return binder::Status::ok();
+ }
+};
+
+// Empty device callback.
+class TestCameraDeviceCallbacks : public hardware::camera2::BnCameraDeviceCallbacks {
+public:
+ TestCameraDeviceCallbacks() {}
+
+ virtual ~TestCameraDeviceCallbacks() {}
+
+ virtual binder::Status onDeviceError(int /*errorCode*/,
+ const CaptureResultExtras& /*resultExtras*/) {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onDeviceIdle() {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onCaptureStarted(const CaptureResultExtras& /*resultExtras*/,
+ int64_t /*timestamp*/) {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onResultReceived(const CameraMetadata& /*metadata*/,
+ const CaptureResultExtras& /*resultExtras*/,
+ const std::vector<PhysicalCaptureResultInfo>& /*physicalResultInfos*/) {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onPrepared(int /*streamId*/) {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onRepeatingRequestError(
+ int64_t /*lastFrameNumber*/, int32_t /*stoppedSequenceId*/) {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onRequestQueueEmpty() {
+ return binder::Status::ok();
+ }
+};
+
+// Override isCameraDisabled from the CameraServiceProxy with a flag.
+class CameraServiceProxyOverride : public ::android::hardware::BnCameraServiceProxy {
+public:
+ CameraServiceProxyOverride() :
+ mCameraServiceProxy(CameraServiceProxyWrapper::getDefaultCameraServiceProxy()),
+ mCameraDisabled(false), mOverrideCameraDisabled(false)
+ { }
+
+ virtual binder::Status getRotateAndCropOverride(const std::string& packageName, int lensFacing,
+ int userId, int *ret) override {
+ return mCameraServiceProxy->getRotateAndCropOverride(packageName, lensFacing,
+ userId, ret);
+ }
+
+ virtual binder::Status getAutoframingOverride(const std::string& packageName, int *ret) override {
+ return mCameraServiceProxy->getAutoframingOverride(packageName, ret);
+ }
+
+ virtual binder::Status pingForUserUpdate() override {
+ return mCameraServiceProxy->pingForUserUpdate();
+ }
+
+ virtual binder::Status notifyCameraState(
+ const hardware::CameraSessionStats& cameraSessionStats) override {
+ return mCameraServiceProxy->notifyCameraState(cameraSessionStats);
+ }
+
+ virtual binder::Status isCameraDisabled(int userId, bool *ret) override {
+ if (mOverrideCameraDisabled) {
+ *ret = mCameraDisabled;
+ return binder::Status::ok();
+ }
+ return mCameraServiceProxy->isCameraDisabled(userId, ret);
+ }
+
+ void setCameraDisabled(bool cameraDisabled) {
+ mCameraDisabled = cameraDisabled;
+ }
+
+ void setOverrideCameraDisabled(bool overrideCameraDisabled) {
+ mOverrideCameraDisabled = overrideCameraDisabled;
+ }
+
+protected:
+ sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
+ bool mCameraDisabled;
+ bool mOverrideCameraDisabled;
+};
+
+class AutoDisconnectDevice {
+public:
+ AutoDisconnectDevice(sp<hardware::camera2::ICameraDeviceUser> device) :
+ mDevice(device)
+ { }
+
+ ~AutoDisconnectDevice() {
+ if (mDevice != nullptr) {
+ mDevice->disconnect();
+ }
+ }
+
+private:
+ sp<hardware::camera2::ICameraDeviceUser> mDevice;
+};
+
+class CameraPermissionsTest : public ::testing::Test {
+protected:
+ static sp<CameraService> sCameraService;
+ static sp<CameraServiceProxyOverride> sCameraServiceProxy;
+ static std::shared_ptr<CameraServiceProxyWrapper> sCameraServiceProxyWrapper;
+ static uid_t sOldUid;
+
+ static void SetUpTestSuite() {
+ sOldUid = getuid();
+ setuid(AID_CAMERASERVER);
+ sCameraServiceProxy = new CameraServiceProxyOverride();
+ sCameraServiceProxyWrapper =
+ std::make_shared<CameraServiceProxyWrapper>(sCameraServiceProxy);
+ sCameraService = new CameraService(sCameraServiceProxyWrapper);
+ sCameraService->clearCachedVariables();
+ }
+
+ static void TearDownTestSuite() {
+ sCameraServiceProxyWrapper = nullptr;
+ sCameraServiceProxy = nullptr;
+ sCameraService = nullptr;
+ setuid(sOldUid);
+ }
+};
+
+sp<CameraService> CameraPermissionsTest::sCameraService = nullptr;
+sp<CameraServiceProxyOverride> CameraPermissionsTest::sCameraServiceProxy = nullptr;
+std::shared_ptr<CameraServiceProxyWrapper>
+CameraPermissionsTest::sCameraServiceProxyWrapper = nullptr;
+uid_t CameraPermissionsTest::sOldUid = 0;
+
+// Test that camera connections fail with ERROR_DISABLED when the camera is disabled via device
+// policy, and succeed when it isn't.
+TEST_F(CameraPermissionsTest, TestCameraDisabled) {
+ std::vector<hardware::CameraStatus> statuses;
+ sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+ sCameraService->addListenerTest(serviceListener, &statuses);
+ sCameraServiceProxy->setOverrideCameraDisabled(true);
+
+ sCameraServiceProxy->setCameraDisabled(true);
+ for (auto s : statuses) {
+ sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+ sp<hardware::camera2::ICameraDeviceUser> device;
+ binder::Status status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+ AutoDisconnectDevice autoDisconnect(device);
+ ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
+ ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
+ << "connectDevice returned exception code " << status.exceptionCode();
+ }
+
+ sCameraServiceProxy->setCameraDisabled(false);
+ for (auto s : statuses) {
+ sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+ sp<hardware::camera2::ICameraDeviceUser> device;
+ binder::Status status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+ AutoDisconnectDevice autoDisconnect(device);
+ ASSERT_TRUE(status.isOk());
+ }
+}
+
+// Test that consecutive camera connections succeed.
+TEST_F(CameraPermissionsTest, TestConsecutiveConnections) {
+ std::vector<hardware::CameraStatus> statuses;
+ sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+ sCameraService->addListenerTest(serviceListener, &statuses);
+ sCameraServiceProxy->setOverrideCameraDisabled(false);
+
+ for (auto s : statuses) {
+ sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+ sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
+ binder::Status status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+ AutoDisconnectDevice autoDisconnectA(deviceA);
+ ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+ " service specific error code " << status.serviceSpecificErrorCode();
+ status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+ AutoDisconnectDevice autoDisconnectB(deviceB);
+ ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+ " service specific error code " << status.serviceSpecificErrorCode();
+ }
+}
+
+// Test that consecutive camera connections succeed even when a nonzero oomScoreOffset is provided
+// in the second call.
+TEST_F(CameraPermissionsTest, TestConflictingOomScoreOffset) {
+ std::vector<hardware::CameraStatus> statuses;
+ sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+ sCameraService->addListenerTest(serviceListener, &statuses);
+ sCameraServiceProxy->setOverrideCameraDisabled(false);
+
+ for (auto s : statuses) {
+ sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+ sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
+ binder::Status status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+ AutoDisconnectDevice autoDisconnectA(deviceA);
+ ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+ " service specific error code " << status.serviceSpecificErrorCode();
+ status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+ AutoDisconnectDevice autoDisconnectB(deviceB);
+ ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+ " service specific error code " << status.serviceSpecificErrorCode();
+ }
+}
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index b3a1d18..badd47a 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -252,6 +252,19 @@
for (size_t i = 0; i < coords.size(); i++) {
EXPECT_LE(std::abs(coords[i] - expectedZoomOutCoords[i]), kMaxAllowedPixelError);
}
+
+ // Verify region zoom scaling doesn't generate invalid metering region
+ // (width < 0, or height < 0)
+ std::array<float, 3> scaleRatios = {10.0f, 1.0f, 0.1f};
+ for (float scaleRatio : scaleRatios) {
+ for (size_t i = 0; i < originalCoords.size(); i+= 2) {
+ int32_t coordinates[] = {originalCoords[i], originalCoords[i+1],
+ originalCoords[i], originalCoords[i+1]};
+ mapper.scaleRegion(coordinates, scaleRatio, width, height);
+ EXPECT_LE(coordinates[0], coordinates[2]);
+ EXPECT_LE(coordinates[1], coordinates[3]);
+ }
+ }
}
TEST(ZoomRatioTest, scaleCoordinatesTest) {
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index f6ad2fe..d07bf6d 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -28,32 +28,41 @@
namespace android {
-using hardware::ICameraServiceProxy;
+using hardware::CameraExtensionSessionStats;
using hardware::CameraSessionStats;
+using hardware::ICameraServiceProxy;
-Mutex CameraServiceProxyWrapper::sProxyMutex;
-sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::sCameraServiceProxy;
-
-Mutex CameraServiceProxyWrapper::mLock;
-std::map<std::string, std::shared_ptr<CameraServiceProxyWrapper::CameraSessionStatsWrapper>>
- CameraServiceProxyWrapper::mSessionStatsMap;
+namespace {
+// Sentinel value to be returned when extension session with a stale or invalid key is reported.
+const std::string POISON_EXT_STATS_KEY("poisoned_stats");
+} // anonymous namespace
/**
* CameraSessionStatsWrapper functions
*/
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen() {
- Mutex::Autolock l(mLock);
-
- updateProxyDeviceState(mSessionStats);
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateProxyDeviceState(
+ sp<hardware::ICameraServiceProxy>& proxyBinder) {
+ if (proxyBinder == nullptr) return;
+ proxyBinder->notifyCameraState(mSessionStats);
}
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(int32_t latencyMs) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen(
+ sp<hardware::ICameraServiceProxy>& proxyBinder) {
+ Mutex::Autolock l(mLock);
+ updateProxyDeviceState(proxyBinder);
+}
+
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(
+ sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs,
+ bool deviceError) {
Mutex::Autolock l(mLock);
mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_CLOSED;
mSessionStats.mLatencyMs = latencyMs;
- updateProxyDeviceState(mSessionStats);
+ mSessionStats.mDeviceError = deviceError;
+ mSessionStats.mSessionIndex = 0;
+ updateProxyDeviceState(proxyBinder);
}
void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onStreamConfigured(
@@ -68,12 +77,14 @@
}
}
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(float maxPreviewFps) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(
+ sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps) {
Mutex::Autolock l(mLock);
mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_ACTIVE;
mSessionStats.mMaxPreviewFps = maxPreviewFps;
- updateProxyDeviceState(mSessionStats);
+ mSessionStats.mSessionIndex++;
+ updateProxyDeviceState(proxyBinder);
// Reset mCreationDuration to -1 to distinguish between 1st session
// after configuration, and all other sessions after configuration.
@@ -81,6 +92,7 @@
}
void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
+ sp<hardware::ICameraServiceProxy>& proxyBinder,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats) {
@@ -93,10 +105,76 @@
mSessionStats.mUserTag = userTag;
mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
mSessionStats.mStreamStats = streamStats;
- updateProxyDeviceState(mSessionStats);
+
+ updateProxyDeviceState(proxyBinder);
mSessionStats.mInternalReconfigure = 0;
mSessionStats.mStreamStats.clear();
+ mSessionStats.mCameraExtensionSessionStats = {};
+}
+
+int64_t CameraServiceProxyWrapper::CameraSessionStatsWrapper::getLogId() {
+ Mutex::Autolock l(mLock);
+ return mSessionStats.mLogId;
+}
+
+std::string CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& extStats) {
+ Mutex::Autolock l(mLock);
+ CameraExtensionSessionStats& currStats = mSessionStats.mCameraExtensionSessionStats;
+ if (currStats.key != extStats.key) {
+ // Mismatched keys. Extensions stats likely reported for a closed session
+ ALOGW("%s: mismatched extensions stats key: current='%s' reported='%s'. Dropping stats.",
+ __FUNCTION__, toStdString(currStats.key).c_str(), toStdString(extStats.key).c_str());
+ return POISON_EXT_STATS_KEY; // return poisoned key to so future calls are
+ // definitely dropped.
+ }
+
+ // Matching keys...
+ if (currStats.key.size()) {
+ // non-empty matching keys. overwrite.
+ ALOGV("%s: Overwriting extension session stats: %s", __FUNCTION__,
+ extStats.toString().c_str());
+ currStats = extStats;
+ return toStdString(currStats.key);
+ }
+
+ // Matching empty keys...
+ if (mSessionStats.mClientName != toStdString(extStats.clientName)) {
+ ALOGW("%s: extension stats reported for unexpected package: current='%s' reported='%s'. "
+ "Dropping stats.", __FUNCTION__,
+ mSessionStats.mClientName.c_str(),
+ toStdString(extStats.clientName).c_str());
+ return POISON_EXT_STATS_KEY;
+ }
+
+ // Matching empty keys for the current client...
+ if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_OPEN ||
+ mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_IDLE) {
+ // Camera is open, but not active. It is possible that the active callback hasn't
+ // occurred yet. Keep the stats, but don't associate it with any session.
+ ALOGV("%s: extension stat reported for an open, but not active camera. "
+ "Saving stats, but not generating key.", __FUNCTION__);
+ currStats = extStats;
+ return {}; // Subsequent calls will handle setting the correct key.
+ }
+
+ if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_ACTIVE) {
+ // camera is active. First call for the session!
+ currStats = extStats;
+
+ // Generate a new key from logId and sessionIndex.
+ std::ostringstream key;
+ key << mSessionStats.mSessionIndex << '/' << mSessionStats.mLogId;
+ currStats.key = String16(key.str().c_str());
+ ALOGV("%s: New extension session stats: %s", __FUNCTION__, currStats.toString().c_str());
+ return toStdString(currStats.key);
+ }
+
+ // Camera is closed. Probably a stale call.
+ ALOGW("%s: extension stats reported for closed camera id '%s'. Dropping stats.",
+ __FUNCTION__, mSessionStats.mCameraId.c_str());
+ return {};
}
/**
@@ -105,19 +183,26 @@
sp<ICameraServiceProxy> CameraServiceProxyWrapper::getCameraServiceProxy() {
#ifndef __BRILLO__
- Mutex::Autolock al(sProxyMutex);
- if (sCameraServiceProxy == nullptr) {
- sp<IServiceManager> sm = defaultServiceManager();
- // Use checkService because cameraserver normally starts before the
- // system server and the proxy service. So the long timeout that getService
- // has before giving up is inappropriate.
- sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
- if (binder != nullptr) {
- sCameraServiceProxy = interface_cast<ICameraServiceProxy>(binder);
- }
+ Mutex::Autolock al(mProxyMutex);
+ if (mCameraServiceProxy == nullptr) {
+ mCameraServiceProxy = getDefaultCameraServiceProxy();
}
#endif
- return sCameraServiceProxy;
+ return mCameraServiceProxy;
+}
+
+sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::getDefaultCameraServiceProxy() {
+#ifndef __BRILLO__
+ sp<IServiceManager> sm = defaultServiceManager();
+ // Use checkService because cameraserver normally starts before the
+ // system server and the proxy service. So the long timeout that getService
+ // has before giving up is inappropriate.
+ sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
+ if (binder != nullptr) {
+ return interface_cast<ICameraServiceProxy>(binder);
+ }
+#endif
+ return nullptr;
}
void CameraServiceProxyWrapper::pingCameraServiceProxy() {
@@ -141,10 +226,19 @@
return ret;
}
-void CameraServiceProxyWrapper::updateProxyDeviceState(const CameraSessionStats& sessionStats) {
+int CameraServiceProxyWrapper::getAutoframingOverride(const std::string& packageName) {
sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
- if (proxyBinder == nullptr) return;
- proxyBinder->notifyCameraState(sessionStats);
+ if (proxyBinder == nullptr) {
+ return ANDROID_CONTROL_AUTOFRAMING_OFF;
+ }
+ int ret = 0;
+ auto status = proxyBinder->getAutoframingOverride(packageName, &ret);
+ if (!status.isOk()) {
+ ALOGE("%s: Failed during autoframing override query: %s", __FUNCTION__,
+ status.exceptionMessage().c_str());
+ }
+
+ return ret;
}
void CameraServiceProxyWrapper::logStreamConfigured(const std::string& id,
@@ -152,12 +246,12 @@
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
- sessionStats = mSessionStatsMap[id];
- if (sessionStats == nullptr) {
+ if (mSessionStatsMap.count(id) == 0) {
ALOGE("%s: SessionStatsMap should contain camera %s",
__FUNCTION__, id.c_str());
return;
}
+ sessionStats = mSessionStatsMap[id];
}
ALOGV("%s: id %s, operatingMode %d, internalConfig %d, latencyMs %d",
@@ -169,16 +263,17 @@
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
- sessionStats = mSessionStatsMap[id];
- if (sessionStats == nullptr) {
+ if (mSessionStatsMap.count(id) == 0) {
ALOGE("%s: SessionStatsMap should contain camera %s when logActive is called",
__FUNCTION__, id.c_str());
return;
}
+ sessionStats = mSessionStatsMap[id];
}
ALOGV("%s: id %s", __FUNCTION__, id.c_str());
- sessionStats->onActive(maxPreviewFps);
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onActive(proxyBinder, maxPreviewFps);
}
void CameraServiceProxyWrapper::logIdle(const std::string& id,
@@ -188,13 +283,12 @@
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
- sessionStats = mSessionStatsMap[id];
- }
-
- if (sessionStats == nullptr) {
- ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
+ if (mSessionStatsMap.count(id) == 0) {
+ ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
__FUNCTION__, id.c_str());
- return;
+ return;
+ }
+ sessionStats = mSessionStatsMap[id];
}
ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
@@ -208,7 +302,8 @@
streamStats[i].mStartLatencyMs);
}
- sessionStats->onIdle(requestCount, resultErrorCount, deviceError, userTag,
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onIdle(proxyBinder, requestCount, resultErrorCount, deviceError, userTag,
videoStabilizationMode, streamStats);
}
@@ -229,19 +324,24 @@
apiLevel = CameraSessionStats::CAMERA_API_LEVEL_2;
}
- sessionStats = std::make_shared<CameraSessionStatsWrapper>(id, facing,
- CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
- apiLevel, isNdk, latencyMs);
+ // Generate a new log ID for open events
+ int64_t logId = generateLogId(mRandomDevice);
+
+ sessionStats = std::make_shared<CameraSessionStatsWrapper>(
+ id, facing, CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
+ apiLevel, isNdk, latencyMs, logId);
mSessionStatsMap.emplace(id, sessionStats);
ALOGV("%s: Adding id %s", __FUNCTION__, id.c_str());
}
ALOGV("%s: id %s, facing %d, effectiveApiLevel %d, isNdk %d, latencyMs %d",
__FUNCTION__, id.c_str(), facing, effectiveApiLevel, isNdk, latencyMs);
- sessionStats->onOpen();
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onOpen(proxyBinder);
}
-void CameraServiceProxyWrapper::logClose(const std::string& id, int32_t latencyMs) {
+void CameraServiceProxyWrapper::logClose(const std::string& id, int32_t latencyMs,
+ bool deviceError) {
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
@@ -257,12 +357,15 @@
__FUNCTION__, id.c_str());
return;
}
+
mSessionStatsMap.erase(id);
- ALOGV("%s: Erasing id %s", __FUNCTION__, id.c_str());
+ ALOGV("%s: Erasing id %s, deviceError %d", __FUNCTION__, id.c_str(), deviceError);
}
- ALOGV("%s: id %s, latencyMs %d", __FUNCTION__, id.c_str(), latencyMs);
- sessionStats->onClose(latencyMs);
+ ALOGV("%s: id %s, latencyMs %d, deviceError %d", __FUNCTION__,
+ id.c_str(), latencyMs, deviceError);
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onClose(proxyBinder, latencyMs, deviceError);
}
bool CameraServiceProxyWrapper::isCameraDisabled(int userId) {
@@ -277,4 +380,48 @@
return ret;
}
-}; // namespace android
+int64_t CameraServiceProxyWrapper::getCurrentLogIdForCamera(const std::string& cameraId) {
+ std::shared_ptr<CameraSessionStatsWrapper> stats;
+ {
+ Mutex::Autolock _l(mLock);
+ if (mSessionStatsMap.count(cameraId) == 0) {
+ ALOGE("%s: SessionStatsMap should contain camera %s before asking for its logging ID.",
+ __FUNCTION__, cameraId.c_str());
+ return 0;
+ }
+
+ stats = mSessionStatsMap[cameraId];
+ }
+ return stats->getLogId();
+}
+
+int64_t CameraServiceProxyWrapper::generateLogId(std::random_device& randomDevice) {
+ int64_t ret = 0;
+ do {
+ // std::random_device generates 32 bits per call, so we call it twice
+ ret = randomDevice();
+ ret = ret << 32;
+ ret = ret | randomDevice();
+ } while (ret == 0); // 0 is not a valid identifier
+
+ return ret;
+}
+
+std::string CameraServiceProxyWrapper::updateExtensionStats(
+ const hardware::CameraExtensionSessionStats& extStats) {
+ std::shared_ptr<CameraSessionStatsWrapper> stats;
+ std::string cameraId = toStdString(extStats.cameraId);
+ {
+ Mutex::Autolock _l(mLock);
+ if (mSessionStatsMap.count(cameraId) == 0) {
+ ALOGE("%s CameraExtensionSessionStats reported for camera id that isn't open: %s",
+ __FUNCTION__, cameraId.c_str());
+ return {};
+ }
+
+ stats = mSessionStatsMap[cameraId];
+ return stats->updateExtensionSessionStats(extStats);
+ }
+}
+
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index aee875f..1afe5b3 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -22,6 +22,7 @@
#include <utils/Mutex.h>
#include <utils/StrongPointer.h>
#include <utils/Timers.h>
+#include <random>
#include <string>
#include <camera/CameraSessionStats.h>
@@ -31,72 +32,106 @@
class CameraServiceProxyWrapper {
private:
// Guard mCameraServiceProxy
- static Mutex sProxyMutex;
+ Mutex mProxyMutex;
// Cached interface to the camera service proxy in system service
- static sp<hardware::ICameraServiceProxy> sCameraServiceProxy;
+ sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
- struct CameraSessionStatsWrapper {
+ class CameraSessionStatsWrapper {
+ private:
hardware::CameraSessionStats mSessionStats;
Mutex mLock; // lock for per camera session stats
- CameraSessionStatsWrapper(const std::string& cameraId, int facing, int newCameraState,
- const std::string& clientName, int apiLevel, bool isNdk, int32_t latencyMs) :
- mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk, latencyMs)
- {}
+ /**
+ * Update the session stats of a given camera device (open/close/active/idle) with
+ * the camera proxy service in the system service
+ */
+ void updateProxyDeviceState(sp<hardware::ICameraServiceProxy>& proxyBinder);
- void onOpen();
- void onClose(int32_t latencyMs);
+ public:
+ CameraSessionStatsWrapper(const std::string& cameraId, int facing, int newCameraState,
+ const std::string& clientName, int apiLevel, bool isNdk,
+ int32_t latencyMs, int64_t logId)
+ : mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk,
+ latencyMs, logId) {}
+
+ void onOpen(sp<hardware::ICameraServiceProxy>& proxyBinder);
+ void onClose(sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs,
+ bool deviceError);
void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
- void onActive(float maxPreviewFps);
- void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ void onActive(sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps);
+ void onIdle(sp<hardware::ICameraServiceProxy>& proxyBinder,
+ int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats);
+
+ std::string updateExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& extStats);
+
+ // Returns the logId associated with this event.
+ int64_t getLogId();
};
// Lock for camera session stats map
- static Mutex mLock;
+ Mutex mLock;
// Map from camera id to the camera's session statistics
- static std::map<std::string, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
+ std::map<std::string, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
- /**
- * Update the session stats of a given camera device (open/close/active/idle) with
- * the camera proxy service in the system service
- */
- static void updateProxyDeviceState(
- const hardware::CameraSessionStats& sessionStats);
+ std::random_device mRandomDevice; // pulls 32-bit random numbers from /dev/urandom
- static sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+ sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+
+ // Returns a randomly generated ID that is suitable for logging the event. A new identifier
+ // should only be generated for an open event. All other events for the cameraId should use the
+ // ID generated for the open event associated with them.
+ static int64_t generateLogId(std::random_device& randomDevice);
public:
+ CameraServiceProxyWrapper(sp<hardware::ICameraServiceProxy> serviceProxy = nullptr) :
+ mCameraServiceProxy(serviceProxy)
+ { }
+
+ static sp<hardware::ICameraServiceProxy> getDefaultCameraServiceProxy();
+
// Open
- static void logOpen(const std::string& id, int facing,
+ void logOpen(const std::string& id, int facing,
const std::string& clientPackageName, int apiLevel, bool isNdk,
int32_t latencyMs);
// Close
- static void logClose(const std::string& id, int32_t latencyMs);
+ void logClose(const std::string& id, int32_t latencyMs, bool deviceError);
// Stream configuration
- static void logStreamConfigured(const std::string& id, int operatingMode, bool internalReconfig,
+ void logStreamConfigured(const std::string& id, int operatingMode, bool internalReconfig,
int32_t latencyMs);
// Session state becomes active
- static void logActive(const std::string& id, float maxPreviewFps);
+ void logActive(const std::string& id, float maxPreviewFps);
// Session state becomes idle
- static void logIdle(const std::string& id,
+ void logIdle(const std::string& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats);
// Ping camera service proxy for user update
- static void pingCameraServiceProxy();
+ void pingCameraServiceProxy();
// Return the current top activity rotate and crop override.
- static int getRotateAndCropOverride(const std::string &packageName, int lensFacing, int userId);
+ int getRotateAndCropOverride(const std::string &packageName, int lensFacing, int userId);
+
+ // Return the current top activity autoframing.
+ int getAutoframingOverride(const std::string& packageName);
// Detect if the camera is disabled by device policy.
- static bool isCameraDisabled(int userId);
+ bool isCameraDisabled(int userId);
+
+ // Returns the logId currently associated with the given cameraId. See 'mLogId' in
+ // frameworks/av/camera/include/camera/CameraSessionStats.h for more details about this
+ // identifier. Returns a non-0 value on success.
+ int64_t getCurrentLogIdForCamera(const std::string& cameraId);
+
+ // Update the stored extension stats to the latest values
+ std::string updateExtensionStats(const hardware::CameraExtensionSessionStats& extStats);
};
} // android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index c9520d5..f7257e3 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -19,6 +19,8 @@
#include "SessionConfigurationUtils.h"
#include "../api2/DepthCompositeStream.h"
#include "../api2/HeicCompositeStream.h"
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "api2/JpegRCompositeStream.h"
#include "common/CameraDeviceBase.h"
#include "common/HalConversionsTemplated.h"
#include "../CameraService.h"
@@ -27,6 +29,7 @@
#include "device3/Camera3OutputStream.h"
#include "system/graphics-base-v1.1.h"
#include <camera/StringUtils.h>
+#include <ui/PublicFormat.h>
using android::camera3::OutputStreamInfo;
using android::camera3::OutputStreamInfo;
@@ -71,11 +74,11 @@
int32_t dynamicDepthKey =
SessionConfigurationUtils::getAppropriateModeTag(
- ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
+ ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);
int32_t heicKey =
SessionConfigurationUtils::getAppropriateModeTag(
- ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
+ ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);
getStreamConfigurations(staticInfo, scalerKey, scm);
getStreamConfigurations(staticInfo, depthKey, scm);
@@ -128,7 +131,7 @@
size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
- return ((float)uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
+ return ((float)(uhrMaxJpegSize.width * uhrMaxJpegSize.height)) /
(defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
}
@@ -159,8 +162,13 @@
getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
const int32_t heicSizesTag =
getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t jpegRSizesTag = getAppropriateModeTag(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+ bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
+ ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
camera_metadata_ro_entry streamConfigs =
+ (isJpegRDataSpace) ? info.find(jpegRSizesTag) :
(dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
(dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
info.find(heicSizesTag) :
@@ -194,6 +202,8 @@
if (bestWidth == -1) {
// Return false if no configurations for this format were listed
+ ALOGE("%s: No configurations for format %d width %d, height %d, maxResolution ? %s",
+ __FUNCTION__, format, width, height, maxResolution ? "true" : "false");
return false;
}
@@ -210,11 +220,18 @@
}
//check if format is 10-bit compatible
-bool is10bitCompatibleFormat(int32_t format) {
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace) {
switch(format) {
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
case HAL_PIXEL_FORMAT_YCBCR_P010:
return true;
+ case HAL_PIXEL_FORMAT_BLOB:
+ if (dataSpace == static_cast<android_dataspace_t>(
+ ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+ return true;
+ }
+
+ return false;
default:
return false;
}
@@ -283,6 +300,65 @@
}
}
+bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
+ camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ for (size_t i = 0; i < entry.count; ++i) {
+ uint8_t capability = entry.data.u8[i];
+ if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
+ int64_t colorSpace64 = colorSpace;
+ int64_t format64 = format;
+
+ // Translate HAL format + data space to public format
+ if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
+ format64 = 0x100; // JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
+ format64 = 0x48454946; // HEIC
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
+ format64 = 0x69656963; // DEPTH_JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH_POINT_CLOUD, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH16, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH10, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
+ static_cast<android_dataspace>(
+ ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+ format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
+ }
+
+ camera_metadata_ro_entry_t entry =
+ staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+ for (size_t i = 0; i < entry.count; i += 3) {
+ bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
+ bool isDynamicProfileCompatible =
+ (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;
+
+ if (colorSpace64 == entry.data.i64[i]
+ && isFormatCompatible
+ && isDynamicProfileCompatible) {
+ return true;
+ }
+ }
+
+ ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
+ " combination not found", colorSpace, format64, dynamicRangeProfile);
+ return false;
+}
+
bool isPublicFormat(int32_t format)
{
switch(format) {
@@ -310,6 +386,23 @@
}
}
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace) {
+ switch (colorSpace) {
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB:
+ *dataSpace = HAL_DATASPACE_V0_SRGB;
+ return true;
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3:
+ *dataSpace = HAL_DATASPACE_DISPLAY_P3;
+ return true;
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG:
+ *(reinterpret_cast<int32_t*>(dataSpace)) = HAL_DATASPACE_BT2020_HLG;
+ return true;
+ default:
+ ALOGE("%s: Unsupported color space %d", __FUNCTION__, colorSpace);
+ return false;
+ }
+}
+
bool isStreamUseCaseSupported(int64_t streamUseCase,
const CameraMetadata &deviceInfo) {
camera_metadata_ro_entry_t availableStreamUseCases =
@@ -337,7 +430,8 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode) {
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace) {
// bufferProducer must be non-null
if (gbp == nullptr) {
std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
@@ -401,6 +495,16 @@
return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
}
+ if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+ format != HAL_PIXEL_FORMAT_BLOB) {
+ if (!dataSpaceFromColorSpace(&dataSpace, colorSpace)) {
+ std::string msg = fmt::sprintf("Camera %s: color space %d not supported, failed to "
+ "convert to data space", logicalCameraId.c_str(), colorSpace);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ }
+
// FIXME: remove this override since the default format should be
// IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
@@ -412,7 +516,7 @@
}
std::unordered_set<int32_t> overriddenSensorPixelModes;
if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
- physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
+ physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
std::string msg = fmt::sprintf("Camera %s: sensor pixel modes for stream with "
"format %#x are not valid",logicalCameraId.c_str(), format);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
@@ -444,13 +548,23 @@
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
- !SessionConfigurationUtils::is10bitCompatibleFormat(format)) {
+ !SessionConfigurationUtils::is10bitCompatibleFormat(format, dataSpace)) {
std::string msg = fmt::sprintf("Camera %s: No 10-bit supported stream configurations with "
"format %#x defined and profile %" PRIx64 ", failed to create output stream",
logicalCameraId.c_str(), format, dynamicRangeProfile);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
+ if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+ SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
+ !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
+ dynamicRangeProfile, physicalCameraMetadata)) {
+ std::string msg = fmt::sprintf("Camera %s: Color space %d not supported, failed to "
+ "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
+ logicalCameraId.c_str(), colorSpace, format, dynamicRangeProfile);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
physicalCameraMetadata)) {
std::string msg = fmt::sprintf("Camera %s: stream use case %" PRId64 " not supported,"
@@ -484,6 +598,7 @@
streamInfo.streamUseCase = streamUseCase;
streamInfo.timestampBase = timestampBase;
streamInfo.mirrorMode = mirrorMode;
+ streamInfo.colorSpace = colorSpace;
return binder::Status::ok();
}
if (width != streamInfo.width) {
@@ -539,6 +654,7 @@
camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
+ stream->colorSpace = streamInfo.colorSpace;
stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
stream->id = -1; // Invalid stream id
stream->physicalCameraId = physicalId;
@@ -563,6 +679,7 @@
convertToHALStreamCombination(
const SessionConfiguration& sessionConfiguration,
const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
+ bool isCompositeJpegRDisabled,
metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
bool overrideForPerfClass, bool *earlyExit) {
@@ -637,6 +754,7 @@
const std::string &physicalCameraId = it.getPhysicalCameraId();
int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
+ int32_t colorSpace = it.getColorSpace();
std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
overrideForPerfClass);
@@ -674,7 +792,7 @@
streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
streamInfo.format, streamInfo.width,
- streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
+ streamInfo.height, metadataChosen,
&streamInfo.sensorPixelModesUsed) != OK) {
ALOGE("%s: Deferred surface sensor pixel modes not valid",
__FUNCTION__);
@@ -695,7 +813,7 @@
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -705,7 +823,10 @@
camera3::DepthCompositeStream::isDepthCompositeStream(surface);
bool isHeicCompositeStream =
camera3::HeicCompositeStream::isHeicCompositeStream(surface);
- if (isDepthCompositeStream || isHeicCompositeStream) {
+ bool isJpegRCompositeStream =
+ camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
+ !isCompositeJpegRDisabled;
+ if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
// We need to take in to account that composite streams can have
// additional internal camera streams.
std::vector<OutputStreamInfo> compositeStreams;
@@ -713,10 +834,14 @@
// TODO: Take care of composite streams.
ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
deviceInfo, &compositeStreams);
- } else {
+ } else if (isHeicCompositeStream) {
ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
deviceInfo, &compositeStreams);
+ } else {
+ ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
+ deviceInfo, &compositeStreams);
}
+
if (ret != OK) {
std::string msg = fmt::sprintf(
"Camera %s: Failed adding composite streams: %s (%d)",
@@ -845,15 +970,17 @@
status_t checkAndOverrideSensorPixelModesUsed(
const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
- const CameraMetadata &staticInfo, bool flexibleConsumer,
+ const CameraMetadata &staticInfo,
std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
convertToSet(sensorPixelModesUsed);
- if (!isUltraHighResolutionSensor(staticInfo)) {
+ if (!supportsUltraHighResolutionCapture(staticInfo)) {
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
sensorPixelModesUsedSet.end()) {
// invalid value for non ultra high res sensors
+ ALOGE("%s ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION used on a device which doesn't "
+ "support ultra high resolution capture", __FUNCTION__);
return BAD_VALUE;
}
overriddenSensorPixelModesUsed->clear();
@@ -874,35 +1001,40 @@
// Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
// size + format of the OutputConfiguration is found exclusively in 1.
// If yes, add that sensorPixelMode to overriddenSensorPixelModes.
- // If no, add 'DEFAULT' to sensorPixelMode. This maintains backwards
- // compatibility.
+ // If no, add 'DEFAULT' and MAXIMUM_RESOLUTION to overriddenSensorPixelModes.
+ // This maintains backwards compatibility and also tells the framework the stream
+ // might be used in either sensor pixel mode.
if (sensorPixelModesUsedSet.size() == 0) {
- // Ambiguous case, default to only 'DEFAULT' mode.
+ // Ambiguous case, override to include both cases.
if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
- return OK;
- }
- // We don't allow flexible consumer for max resolution mode.
- if (isInMaximumResolutionStreamConfigurationMap) {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
return OK;
}
- if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
+ if (isInMaximumResolutionStreamConfigurationMap) {
+ overriddenSensorPixelModesUsed->insert(
+ ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+ } else {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
- return OK;
}
- return BAD_VALUE;
+ return OK;
}
// Case2: The app has set sensorPixelModesUsed, we need to verify that they
// are valid / err out.
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
+ ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_DEFAULT set by client, but stream f: %d size %d x %d"
+ " isn't present in default stream configuration map", __FUNCTION__, format, width,
+ height);
return BAD_VALUE;
}
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
+ ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION set by client, but stream f: "
+ "%d size %d x %d isn't present in default stream configuration map", __FUNCTION__,
+ format, width, height);
return BAD_VALUE;
}
*overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 220d1f8..79d80ea 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -100,10 +100,11 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode);
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace);
//check if format is 10-bit output compatible
-bool is10bitCompatibleFormat(int32_t format);
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
// check if the dynamic range requires 10-bit output
bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile);
@@ -111,6 +112,13 @@
// Check if the device supports a given dynamicRangeProfile
bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+bool deviceReportsColorSpaces(const CameraMetadata& staticMeta);
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace);
+
bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
void mapStreamInfo(const OutputStreamInfo &streamInfo,
@@ -133,7 +141,8 @@
convertToHALStreamCombination(
const SessionConfiguration& sessionConfiguration,
const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
- metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
+ bool isCompositeJpegRDisabled, metadataGetter getMetadata,
+ const std::vector<std::string> &physicalCameraIds,
aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
bool overrideForPerfClass, bool *earlyExit);
@@ -141,7 +150,7 @@
status_t checkAndOverrideSensorPixelModesUsed(
const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
- const CameraMetadata &staticInfo, bool flexibleConsumer,
+ const CameraMetadata &staticInfo,
std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
bool targetPerfClassPrimaryCamera(
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index f63eea1..cf93d3b 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -111,8 +111,8 @@
bool overrideForPerfClass, bool *earlyExit) {
aidl::android::hardware::camera::device::StreamConfiguration aidlStreamConfiguration;
auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
- getMetadata, physicalCameraIds, aidlStreamConfiguration, overrideForPerfClass,
- earlyExit);
+ false /*isCompositeJpegRDisabled*/, getMetadata, physicalCameraIds,
+ aidlStreamConfiguration, overrideForPerfClass, earlyExit);
if (!ret.isOk()) {
return ret;
}
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
index 1efdc60..7d344f8 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -49,12 +49,22 @@
return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS:
+ return ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS:
+ return ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS:
+ return ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
case ANDROID_LENS_INTRINSIC_CALIBRATION:
return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
case ANDROID_LENS_DISTORTION:
return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
+ case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE:
+ return ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
+ case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE:
+ return ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
default:
ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
defaultTag);
@@ -63,7 +73,62 @@
return -1;
}
-bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo) {
+static bool isKeyPresentWithCount(const CameraMetadata &deviceInfo, uint32_t tag, uint32_t count) {
+ auto countFound = deviceInfo.find(tag).count;
+ return (countFound != 0) && (countFound % count == 0);
+}
+
+static bool supportsKeysForBasicUltraHighResolutionCapture(const CameraMetadata &deviceInfo) {
+ // Check whether the following conditions are satisfied for reduced ultra high
+ // resolution support :
+ // 1) SENSOR_PIXEL_MODE is advertised in ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS
+ // 2) The following keys are present in CameraCharacteristics for basic functionality
+ // a) ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+ // b) ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION
+ // c) ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION
+ // d) ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ // e) ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ // f) ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ camera_metadata_ro_entry_t entryChar;
+ entryChar = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ bool supportsSensorPixelMode = false;
+ for (size_t i = 0; i < entryChar.count; i++) {
+ int32_t key = entryChar.data.i32[i];
+ if (key == ANDROID_SENSOR_PIXEL_MODE) {
+ supportsSensorPixelMode = true;
+ break;
+ }
+ }
+ if (!supportsSensorPixelMode) {
+ return false;
+ }
+
+ // Basic sensor array size information tags are present
+ if (!isKeyPresentWithCount(deviceInfo, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ /*count*/2) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo, ANDROID_SENSOR_INFO_BINNING_FACTOR, /*count*/2)) {
+ return false;
+ }
+
+ // Basic stream configuration tags are present
+ if (!isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION, /*count*/ 4)) {
+ return false;
+ }
+
+ return true;
+}
+
+bool supportsUltraHighResolutionCapture(const CameraMetadata &deviceInfo) {
camera_metadata_ro_entry_t entryCap;
entryCap = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
// Go through the capabilities and check if it has
@@ -74,7 +139,10 @@
return true;
}
}
- return false;
+
+ // If not, then check that the keys which guarantee basic supports for
+ // ultra high resolution capture are supported.
+ return supportsKeysForBasicUltraHighResolutionCapture(deviceInfo);
}
bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
@@ -93,4 +161,4 @@
} // namespace SessionConfigurationUtils
} // namespace camera3
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
index 45b1e91..dac1824 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
@@ -22,7 +22,7 @@
namespace camera3 {
namespace SessionConfigurationUtils {
-bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo);
+bool supportsUltraHighResolutionCapture(const CameraMetadata &deviceInfo);
int32_t getAppropriateModeTag(int32_t defaultTag, bool maxResolution = false);
@@ -33,4 +33,4 @@
} // camera3
} // android
-#endif
\ No newline at end of file
+#endif
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 119bb6c..59d1ae4 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -238,6 +238,10 @@
"sample_rate",
"content_type",
"sharing_requested",
+ "format_hardware",
+ "channel_count_hardware",
+ "sample_rate_hardware",
+ "uid",
};
static constexpr const char * HeadTrackerDeviceEnabledFields[] {
@@ -269,6 +273,24 @@
"enabled",
};
+static constexpr const char * const MidiDeviceCloseFields[] {
+ "mediametrics_midi_device_close_reported",
+ "uid",
+ "midi_device_id",
+ "input_port_count",
+ "output_port_count",
+ "device_type",
+ "is_shared",
+ "supports_ump",
+ "using_alsa",
+ "duration_ns",
+ "opened_count",
+ "closed_count",
+ "device_disconnected",
+ "total_input_bytes",
+ "total_output_bytes",
+};
+
/**
* printFields is a helper method that prints the fields and corresponding values
* in a human readable style.
@@ -497,6 +519,15 @@
[this](const std::shared_ptr<const android::mediametrics::Item> &item){
mSpatializer.onEvent(item);
}));
+
+ // Handle MIDI
+ mActions.addAction(
+ AMEDIAMETRICS_KEY_AUDIO_MIDI "." AMEDIAMETRICS_PROP_EVENT,
+ std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED),
+ std::make_shared<AnalyticsActions::Function>(
+ [this](const std::shared_ptr<const android::mediametrics::Item> &item) {
+ mMidiLogging.onEvent(item);
+ }));
}
AudioAnalytics::~AudioAnalytics()
@@ -1333,6 +1364,21 @@
const auto sharingModeRequested =
types::lookup<types::AAUDIO_SHARING_MODE, int32_t>(sharingModeRequestedStr);
+ std::string formatHardwareStr;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_ENCODINGHARDWARE, &formatHardwareStr);
+ const auto formatHardware = types::lookup<types::ENCODING, int32_t>(formatHardwareStr);
+
+ int32_t channelCountHardware = -1;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_CHANNELCOUNTHARDWARE, &channelCountHardware);
+
+ int32_t sampleRateHardware = 0;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE, &sampleRateHardware);
+
+ const auto uid = item->getUid();
+
LOG(LOG_LEVEL) << "key:" << key
<< " path:" << path
<< " direction:" << direction << "(" << directionStr << ")"
@@ -1352,7 +1398,11 @@
<< " sample_rate: " << sampleRate
<< " content_type: " << contentType << "(" << contentTypeStr << ")"
<< " sharing_requested:" << sharingModeRequested
- << "(" << sharingModeRequestedStr << ")";
+ << "(" << sharingModeRequestedStr << ")"
+ << " format_hardware:" << formatHardware << "(" << formatHardwareStr << ")"
+ << " channel_count_hardware:" << channelCountHardware
+ << " sample_rate_hardware: " << sampleRateHardware
+ << " uid: " << uid;
if (mAudioAnalytics.mDeliverStatistics) {
const stats::media_metrics::BytesField bf_serialized(
@@ -1377,6 +1427,10 @@
, sampleRate
, contentType
, sharingModeRequested
+ , formatHardware
+ , channelCountHardware
+ , sampleRateHardware
+ , uid
);
std::stringstream ss;
ss << "result:" << result;
@@ -1400,6 +1454,10 @@
, sampleRate
, contentType
, sharingModeRequested
+ , formatHardware
+ , channelCountHardware
+ , sampleRateHardware
+ , uid
);
ss << " " << fieldsStr;
std::string str = ss.str();
@@ -1710,6 +1768,127 @@
return { s, n };
}
+void AudioAnalytics::MidiLogging::onEvent(
+ const std::shared_ptr<const android::mediametrics::Item> &item) const {
+ const std::string& key = item->getKey();
+
+ const auto uid = item->getUid();
+
+ int32_t deviceId = -1;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_DEVICEID, &deviceId);
+
+ int32_t inputPortCount = -1;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_INPUTPORTCOUNT, &inputPortCount);
+
+ int32_t outputPortCount = -1;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_OUTPUTPORTCOUNT, &outputPortCount);
+
+ int32_t hardwareType = -1;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_HARDWARETYPE, &hardwareType);
+
+ std::string isSharedString;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_ISSHARED, &isSharedString);
+ const bool isShared = (isSharedString == "true");
+
+ std::string supportsMidiUmpString;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_SUPPORTSMIDIUMP, &supportsMidiUmpString);
+ const bool supportsMidiUmp = (supportsMidiUmpString == "true");
+
+ std::string usingAlsaString;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_USINGALSA, &usingAlsaString);
+ const bool usingAlsa = (usingAlsaString == "true");
+
+ int64_t durationNs = -1;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_DURATIONNS, &durationNs);
+
+ int32_t openedCount = -1;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_OPENEDCOUNT, &openedCount);
+
+ int32_t closedCount = -1;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_CLOSEDCOUNT, &closedCount);
+
+ std::string deviceDisconnectedString;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_DEVICEDISCONNECTED, &deviceDisconnectedString);
+ const bool deviceDisconnected = (deviceDisconnectedString == "true");
+
+ int32_t totalInputBytes = -1;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_TOTALINPUTBYTES, &totalInputBytes);
+
+ int32_t totalOutputBytes = -1;
+ mAudioAnalytics.mAnalyticsState->timeMachine().get(
+ key, AMEDIAMETRICS_PROP_TOTALOUTPUTBYTES, &totalOutputBytes);
+
+ LOG(LOG_LEVEL) << "key:" << key
+ << " uid:" << uid
+ << " id:" << deviceId
+ << " input_port_count:" << inputPortCount
+ << " output_port_count:" << outputPortCount
+ << " device_type:" << hardwareType
+ << " is_shared:" << isSharedString
+ << " supports_ump:" << supportsMidiUmpString
+ << " using_alsa:" << usingAlsaString
+ << " duration_opened_ms:" << durationNs
+ << " opened_count:" << openedCount
+ << " closed_count:" << closedCount
+ << " device_disconnected:" << deviceDisconnectedString
+ << " total_input_bytes:" << totalInputBytes
+ << " total_output_bytes:" << totalOutputBytes;
+
+ if (mAudioAnalytics.mDeliverStatistics) {
+ const auto result = sendToStatsd(
+ CONDITION(stats::media_metrics::MEDIAMETRICS_MIDI_DEVICE_CLOSE_REPORTED)
+ , uid
+ , deviceId
+ , inputPortCount
+ , outputPortCount
+ , hardwareType
+ , isShared
+ , supportsMidiUmp
+ , usingAlsa
+ , durationNs
+ , openedCount
+ , closedCount
+ , deviceDisconnected
+ , totalInputBytes
+ , totalOutputBytes);
+ std::stringstream ss;
+ ss << "result:" << result;
+ const auto fieldsStr = printFields(MidiDeviceCloseFields,
+ CONDITION(stats::media_metrics::MEDIAMETRICS_MIDI_DEVICE_CLOSE_REPORTED)
+ , uid
+ , deviceId
+ , inputPortCount
+ , outputPortCount
+ , hardwareType
+ , isShared
+ , supportsMidiUmp
+ , usingAlsa
+ , durationNs
+ , openedCount
+ , closedCount
+ , deviceDisconnected
+ , totalInputBytes
+ , totalOutputBytes);
+ ss << " " << fieldsStr;
+ std::string str = ss.str();
+ ALOGV("%s: statsd %s", __func__, str.c_str());
+ mAudioAnalytics.mStatsdLog->log(
+ stats::media_metrics::MEDIAMETRICS_MIDI_DEVICE_CLOSE_REPORTED, str);
+ }
+}
+
// This method currently suppresses the name.
std::string AudioAnalytics::getDeviceNamesFromOutputDevices(std::string_view devices) const {
std::string deviceNames;
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index baca57d..1b5255a 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -524,6 +524,8 @@
"audiotrack",
// other media
"codec",
+ "freeze",
+ "judder",
"extractor",
"mediadrm",
"mediaparser",
diff --git a/services/mediametrics/StringUtils.cpp b/services/mediametrics/StringUtils.cpp
index d1c7a18..5766f1c 100644
--- a/services/mediametrics/StringUtils.cpp
+++ b/services/mediametrics/StringUtils.cpp
@@ -15,11 +15,13 @@
*/
//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaMetricsService::stringutils"
+#define LOG_TAG "mediametrics::stringutils"
#include <utils/Log.h>
#include "StringUtils.h"
+#include <charconv>
+
#include "AudioTypes.h"
namespace android::mediametrics::stringutils {
@@ -54,6 +56,26 @@
}
}
+bool parseVector(const std::string &str, std::vector<int32_t> *vector) {
+ std::vector<int32_t> values;
+ const char *p = str.c_str();
+ const char *last = p + str.size();
+ while (p != last) {
+ if (*p == ',' || *p == '{' || *p == '}') {
+ p++;
+ }
+ int32_t value = -1;
+ auto [ptr, error] = std::from_chars(p, last, value);
+ if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
+ return false;
+ }
+ p = ptr;
+ values.push_back(value);
+ }
+ *vector = std::move(values);
+ return true;
+}
+
std::vector<std::pair<std::string, std::string>> getDeviceAddressPairs(const std::string& devices)
{
std::vector<std::pair<std::string, std::string>> result;
diff --git a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
index 82e928e..f0a4ac8 100644
--- a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
+++ b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
@@ -363,6 +363,20 @@
SimpleLog mSimpleLog GUARDED_BY(mLock) {64};
} mSpatializer{*this};
+ // MidiLogging collects info whenever a MIDI device is closed.
+ class MidiLogging {
+ public:
+ explicit MidiLogging(AudioAnalytics &audioAnalytics)
+ : mAudioAnalytics(audioAnalytics) {}
+
+ void onEvent(
+ const std::shared_ptr<const android::mediametrics::Item> &item) const;
+
+ private:
+
+ AudioAnalytics &mAudioAnalytics;
+ } mMidiLogging{*this};
+
AudioPowerUsage mAudioPowerUsage;
};
diff --git a/services/mediametrics/include/mediametricsservice/StatsdLog.h b/services/mediametrics/include/mediametricsservice/StatsdLog.h
index e207bac..5d5009e 100644
--- a/services/mediametrics/include/mediametricsservice/StatsdLog.h
+++ b/services/mediametrics/include/mediametricsservice/StatsdLog.h
@@ -16,11 +16,13 @@
#pragma once
-#include <audio_utils/SimpleLog.h>
#include <map>
#include <mutex>
#include <sstream>
+#include <android-base/thread_annotations.h>
+#include <audio_utils/SimpleLog.h>
+
namespace android::mediametrics {
class StatsdLog {
@@ -61,9 +63,9 @@
}
private:
+ mutable std::mutex mLock;
SimpleLog mSimpleLog; // internally locked
std::map<int /* atom */, size_t /* count */> mCountMap GUARDED_BY(mLock); // sorted
- mutable std::mutex mLock;
};
} // namespace android::mediametrics
diff --git a/services/mediametrics/include/mediametricsservice/StringUtils.h b/services/mediametrics/include/mediametricsservice/StringUtils.h
index 78c25ff..ed2cf2e 100644
--- a/services/mediametrics/include/mediametricsservice/StringUtils.h
+++ b/services/mediametrics/include/mediametricsservice/StringUtils.h
@@ -72,6 +72,12 @@
std::vector<std::string> split(const std::string& flags, const char *delim);
/**
+ * Parses a vector of integers using ',' '{' and '}' as delimeters. Leaves
+ * vector unmodified if the parsing fails.
+ */
+bool parseVector(const std::string &str, std::vector<int32_t> *vector);
+
+/**
* Parse the devices string and return a vector of device address pairs.
*
* A failure to parse returns early with the contents that were able to be parsed.
diff --git a/services/mediametrics/include/mediametricsservice/TimeMachine.h b/services/mediametrics/include/mediametricsservice/TimeMachine.h
index ce579b3..1445c7c 100644
--- a/services/mediametrics/include/mediametricsservice/TimeMachine.h
+++ b/services/mediametrics/include/mediametricsservice/TimeMachine.h
@@ -143,6 +143,7 @@
if (mPropertyMap.size() >= kKeyMaxProperties &&
!mPropertyMap.count(property)) {
ALOGV("%s: too many properties, rejecting %s", __func__, property.c_str());
+ mRejectedPropertiesCount++;
return;
}
auto& timeSequence = mPropertyMap[property];
@@ -172,6 +173,10 @@
ss << s;
}
}
+ if (ll > 0 && mRejectedPropertiesCount > 0) {
+ ss << "Rejected properties: " << mRejectedPropertiesCount << "\n";
+ ll--;
+ }
return { ss.str(), lines - ll };
}
@@ -214,6 +219,7 @@
const uid_t mAllowUid;
const int64_t mCreationTime;
+ unsigned int mRejectedPropertiesCount = 0;
int64_t mLastModificationTime;
std::map<std::string /* property */, PropertyHistory> mPropertyMap;
};
@@ -221,7 +227,7 @@
using History = std::map<std::string /* key */, std::shared_ptr<KeyHistory>>;
static inline constexpr size_t kTimeSequenceMaxElements = 50;
- static inline constexpr size_t kKeyMaxProperties = 50;
+ static inline constexpr size_t kKeyMaxProperties = 128;
static inline constexpr size_t kKeyLowWaterMark = 400;
static inline constexpr size_t kKeyHighWaterMark = 500;
diff --git a/services/mediametrics/include/mediametricsservice/iface_statsd.h b/services/mediametrics/include/mediametricsservice/iface_statsd.h
index 5bc293b..34d71ba 100644
--- a/services/mediametrics/include/mediametricsservice/iface_statsd.h
+++ b/services/mediametrics/include/mediametricsservice/iface_statsd.h
@@ -15,7 +15,9 @@
*/
#include <memory>
+
#include <stats_event.h>
+#include <StatsdLog.h>
namespace android {
namespace mediametrics {
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index cb5e783..ea76bcd 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -23,6 +23,7 @@
#include <pthread.h>
#include <pwd.h>
#include <stdint.h>
+#include <string>
#include <string.h>
#include <sys/stat.h>
#include <sys/time.h>
@@ -32,14 +33,149 @@
#include <stats_media_metrics.h>
#include <stats_event.h>
-#include "cleaner.h"
-#include "MediaMetricsService.h"
-#include "ValidateId.h"
-#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
-#include "iface_statsd.h"
+#include <frameworks/proto_logging/stats/message/mediametrics_message.pb.h>
+#include <mediametricsservice/cleaner.h>
+#include <mediametricsservice/iface_statsd.h>
+#include <mediametricsservice/MediaMetricsService.h>
+#include <mediametricsservice/StringUtils.h>
+#include <mediametricsservice/ValidateId.h>
namespace android {
+using stats::media_metrics::stats_write;
+using stats::media_metrics::MEDIA_CODEC_RENDERED;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_UNKNOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_INVALID;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNKNOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNDETERMINED;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24_3_2_PULLDOWN;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_NONE;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HLG;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10_PLUS;
+using stats::media_metrics::MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_DOLBY_VISION;
+
+static const int BITRATE_UNKNOWN =
+ stats::media_metrics::MEDIA_CODEC_RENDERED__BITRATE__BITRATE_UNKNOWN;
+
+static const std::pair<char const *, int> CODEC_LOOKUP[] = {
+ { "avc", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AVC },
+ { "h264", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AVC },
+ { "hevc", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+ { "h265", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+ { "vp8", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_VP8 },
+ { "vp9", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_VP9 },
+ { "av1", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AV1 },
+ { "av01", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_AV1 },
+ { "dolby-vision", stats::media_metrics::MEDIA_CODEC_RENDERED__CODEC__CODEC_HEVC },
+};
+
+static const int32_t RESOLUTION_LOOKUP[] = {
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_MAX_SIZE,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_32K,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_16K,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_8K_UHD,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_8K_UHD_ALMOST,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_4K_UHD_ALMOST,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1440X2560,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080X2400,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080X2340,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080P_FHD,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_1080P_FHD_ALMOST,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_720P_HD,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_720P_HD_ALMOST,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_576X1024,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_540X960,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_480X854,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_480X640,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_360X640,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_352X640,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_VERY_LOW,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_SMALLEST,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO,
+};
+
+static const int32_t FRAMERATE_LOOKUP[] = {
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_25,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_30,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_50,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_60,
+ stats::media_metrics::MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_120,
+};
+
+static int32_t getMetricsCodecEnum(const std::string &mime, const std::string &componentName) {
+ for (const auto & codecStrAndEnum : CODEC_LOOKUP) {
+ if (strcasestr(mime.c_str(), codecStrAndEnum.first) != nullptr ||
+ strcasestr(componentName.c_str(), codecStrAndEnum.first) != nullptr) {
+ return codecStrAndEnum.second;
+ }
+ }
+ return MEDIA_CODEC_RENDERED__CODEC__CODEC_UNKNOWN;
+}
+
+static int32_t getMetricsResolutionEnum(int32_t width, int32_t height) {
+ if (width == 0 || height == 0) {
+ return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+ }
+ int64_t pixels = int64_t(width) * height / 1000;
+ if (width < 0 || height < 0 || pixels > RESOLUTION_LOOKUP[0]) {
+ return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_INVALID;
+ }
+ for (int32_t resolutionEnum : RESOLUTION_LOOKUP) {
+ if (pixels > resolutionEnum) {
+ return resolutionEnum;
+ }
+ }
+ return MEDIA_CODEC_RENDERED__RESOLUTION__RESOLUTION_ZERO;
+}
+
+static int32_t getMetricsFramerateEnum(float inFramerate) {
+ if (inFramerate == -1.0f) {
+ return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNDETERMINED;
+ }
+ if (inFramerate == -2.0f) {
+ return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_24_3_2_PULLDOWN;
+ }
+ int framerate = int(inFramerate * 100); // Table is in hundredths of frames per second
+ static const int framerateTolerance = 40; // Tolerance is 0.4 frames per second - table is 100s
+ for (int32_t framerateEnum : FRAMERATE_LOOKUP) {
+ if (abs(framerate - framerateEnum) < framerateTolerance) {
+ return framerateEnum;
+ }
+ }
+ return MEDIA_CODEC_RENDERED__CONTENT_FRAMERATE__FRAMERATE_UNKNOWN;
+}
+
+static int32_t getMetricsHdrFormatEnum(std::string &mime, std::string &componentName,
+ int32_t configColorTransfer, int32_t parsedColorTransfer,
+ int32_t hdr10StaticInfo, int32_t hdr10PlusInfo) {
+ if (hdr10PlusInfo) {
+ return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10_PLUS;
+ }
+ if (hdr10StaticInfo) {
+ return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HDR10;
+ }
+ // 7 = COLOR_TRANSFER_HLG in MediaCodecConstants.h
+ if (configColorTransfer == 7 || parsedColorTransfer == 7) {
+ return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_HLG;
+ }
+ if (strcasestr(mime.c_str(), "dolby-vision") != nullptr ||
+ strcasestr(componentName.c_str(), "dvhe") != nullptr ||
+ strcasestr(componentName.c_str(), "dvav") != nullptr ||
+ strcasestr(componentName.c_str(), "dav1") != nullptr) {
+ return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_DOLBY_VISION;
+ }
+ return MEDIA_CODEC_RENDERED__HDR_FORMAT__HDR_FORMAT_NONE;
+}
+
+static void parseVector(const std::string &str, std::vector<int32_t> *vector) {
+ if (!mediametrics::stringutils::parseVector(str, vector)) {
+ ALOGE("failed to parse integer vector from '%s'", str.c_str());
+ }
+}
+
bool statsd_codec(const std::shared_ptr<const mediametrics::Item>& item,
const std::shared_ptr<mediametrics::StatsdLog>& statsdLog)
{
@@ -48,17 +184,17 @@
AStatsEvent* event = AStatsEvent_obtain();
AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_CODEC_REPORTED);
- const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
- AStatsEvent_writeInt64(event, timestamp_nanos);
+ const nsecs_t timestampNanos = MediaMetricsService::roundTime(item->getTimestamp());
+ AStatsEvent_writeInt64(event, timestampNanos);
- std::string package_name = item->getPkgName();
- AStatsEvent_writeString(event, package_name.c_str());
+ std::string packageName = item->getPkgName();
+ AStatsEvent_writeString(event, packageName.c_str());
- int64_t package_version_code = item->getPkgVersionCode();
- AStatsEvent_writeInt64(event, package_version_code);
+ int64_t packageVersionCode = item->getPkgVersionCode();
+ AStatsEvent_writeInt64(event, packageVersionCode);
- int64_t media_apex_version = 0;
- AStatsEvent_writeInt64(event, media_apex_version);
+ int64_t mediaApexVersion = 0;
+ AStatsEvent_writeInt64(event, mediaApexVersion);
// the rest into our own proto
//
@@ -84,17 +220,25 @@
}
AStatsEvent_writeString(event, mode.c_str());
- int32_t encoder = -1;
- if (item->getInt32("android.media.mediacodec.encoder", &encoder)) {
- metrics_proto.set_encoder(encoder);
+ int32_t isEncoder = -1;
+ if (item->getInt32("android.media.mediacodec.encoder", &isEncoder)) {
+ metrics_proto.set_encoder(isEncoder);
}
- AStatsEvent_writeInt32(event, encoder);
+ AStatsEvent_writeInt32(event, isEncoder);
- int32_t secure = -1;
- if (item->getInt32("android.media.mediacodec.secure", &secure)) {
- metrics_proto.set_secure(secure);
+ int32_t isSecure = -1;
+ if (item->getInt32("android.media.mediacodec.secure", &isSecure)) {
+ metrics_proto.set_secure(isSecure);
}
- AStatsEvent_writeInt32(event, secure);
+ AStatsEvent_writeInt32(event, isSecure);
+
+ int32_t isHardware = -1;
+ item->getInt32("android.media.mediacodec.hardware", &isHardware);
+ // not logged to MediaCodecReported or MediametricsCodecReported
+
+ int32_t isTunneled = -1;
+ item->getInt32("android.media.mediacodec.tunneled", &isTunneled);
+ // not logged to MediaCodecReported or MediametricsCodecReported
int32_t width = -1;
if (item->getInt32("android.media.mediacodec.width", &width)) {
@@ -133,79 +277,78 @@
AStatsEvent_writeInt32(event, level);
- int32_t max_width = -1;
- if ( item->getInt32("android.media.mediacodec.maxwidth", &max_width)) {
- metrics_proto.set_max_width(max_width);
+ int32_t maxWidth = -1;
+ if ( item->getInt32("android.media.mediacodec.maxwidth", &maxWidth)) {
+ metrics_proto.set_max_width(maxWidth);
}
- AStatsEvent_writeInt32(event, max_width);
+ AStatsEvent_writeInt32(event, maxWidth);
- int32_t max_height = -1;
- if ( item->getInt32("android.media.mediacodec.maxheight", &max_height)) {
- metrics_proto.set_max_height(max_height);
+ int32_t maxHeight = -1;
+ if ( item->getInt32("android.media.mediacodec.maxheight", &maxHeight)) {
+ metrics_proto.set_max_height(maxHeight);
}
- AStatsEvent_writeInt32(event, max_height);
+ AStatsEvent_writeInt32(event, maxHeight);
- int32_t error_code = -1;
- if ( item->getInt32("android.media.mediacodec.errcode", &error_code)) {
- metrics_proto.set_error_code(error_code);
+ int32_t errorCode = -1;
+ if ( item->getInt32("android.media.mediacodec.errcode", &errorCode)) {
+ metrics_proto.set_error_code(errorCode);
}
- AStatsEvent_writeInt32(event, error_code);
+ AStatsEvent_writeInt32(event, errorCode);
- std::string error_state;
- if ( item->getString("android.media.mediacodec.errstate", &error_state)) {
- metrics_proto.set_error_state(error_state);
+ std::string errorState;
+ if ( item->getString("android.media.mediacodec.errstate", &errorState)) {
+ metrics_proto.set_error_state(errorState);
}
- AStatsEvent_writeString(event, error_state.c_str());
+ AStatsEvent_writeString(event, errorState.c_str());
- int64_t latency_max = -1;
- if (item->getInt64("android.media.mediacodec.latency.max", &latency_max)) {
- metrics_proto.set_latency_max(latency_max);
+ int64_t latencyMax = -1;
+ if (item->getInt64("android.media.mediacodec.latency.max", &latencyMax)) {
+ metrics_proto.set_latency_max(latencyMax);
}
- AStatsEvent_writeInt64(event, latency_max);
+ AStatsEvent_writeInt64(event, latencyMax);
- int64_t latency_min = -1;
- if (item->getInt64("android.media.mediacodec.latency.min", &latency_min)) {
- metrics_proto.set_latency_min(latency_min);
+ int64_t latencyMin = -1;
+ if (item->getInt64("android.media.mediacodec.latency.min", &latencyMin)) {
+ metrics_proto.set_latency_min(latencyMin);
}
- AStatsEvent_writeInt64(event, latency_min);
+ AStatsEvent_writeInt64(event, latencyMin);
- int64_t latency_avg = -1;
- if (item->getInt64("android.media.mediacodec.latency.avg", &latency_avg)) {
- metrics_proto.set_latency_avg(latency_avg);
+ int64_t latencyAvg = -1;
+ if (item->getInt64("android.media.mediacodec.latency.avg", &latencyAvg)) {
+ metrics_proto.set_latency_avg(latencyAvg);
}
- AStatsEvent_writeInt64(event, latency_avg);
+ AStatsEvent_writeInt64(event, latencyAvg);
- int64_t latency_count = -1;
- if (item->getInt64("android.media.mediacodec.latency.n", &latency_count)) {
- metrics_proto.set_latency_count(latency_count);
+ int64_t latencyCount = -1;
+ if (item->getInt64("android.media.mediacodec.latency.n", &latencyCount)) {
+ metrics_proto.set_latency_count(latencyCount);
}
- AStatsEvent_writeInt64(event, latency_count);
+ AStatsEvent_writeInt64(event, latencyCount);
- int64_t latency_unknown = -1;
- if (item->getInt64("android.media.mediacodec.latency.unknown", &latency_unknown)) {
- metrics_proto.set_latency_unknown(latency_unknown);
+ int64_t latencyUnknown = -1;
+ if (item->getInt64("android.media.mediacodec.latency.unknown", &latencyUnknown)) {
+ metrics_proto.set_latency_unknown(latencyUnknown);
}
- AStatsEvent_writeInt64(event, latency_unknown);
+ AStatsEvent_writeInt64(event, latencyUnknown);
- int32_t queue_secure_input_buffer_error = -1;
+ int32_t queueSecureInputBufferError = -1;
if (item->getInt32("android.media.mediacodec.queueSecureInputBufferError",
- &queue_secure_input_buffer_error)) {
- metrics_proto.set_queue_secure_input_buffer_error(queue_secure_input_buffer_error);
+ &queueSecureInputBufferError)) {
+ metrics_proto.set_queue_secure_input_buffer_error(queueSecureInputBufferError);
}
- AStatsEvent_writeInt32(event, queue_secure_input_buffer_error);
+ AStatsEvent_writeInt32(event, queueSecureInputBufferError);
- int32_t queue_input_buffer_error = -1;
- if (item->getInt32("android.media.mediacodec.queueInputBufferError",
- &queue_input_buffer_error)) {
- metrics_proto.set_queue_input_buffer_error(queue_input_buffer_error);
+ int32_t queueInputBufferError = -1;
+ if (item->getInt32("android.media.mediacodec.queueInputBufferError", &queueInputBufferError)) {
+ metrics_proto.set_queue_input_buffer_error(queueInputBufferError);
}
- AStatsEvent_writeInt32(event, queue_input_buffer_error);
+ AStatsEvent_writeInt32(event, queueInputBufferError);
- std::string bitrate_mode;
- if (item->getString("android.media.mediacodec.bitrate_mode", &bitrate_mode)) {
- metrics_proto.set_bitrate_mode(bitrate_mode);
+ std::string bitrateMode;
+ if (item->getString("android.media.mediacodec.bitrate_mode", &bitrateMode)) {
+ metrics_proto.set_bitrate_mode(bitrateMode);
}
- AStatsEvent_writeString(event, bitrate_mode.c_str());
+ AStatsEvent_writeString(event, bitrateMode.c_str());
int32_t bitrate = -1;
if (item->getInt32("android.media.mediacodec.bitrate", &bitrate)) {
@@ -213,18 +356,18 @@
}
AStatsEvent_writeInt32(event, bitrate);
- int64_t lifetime_millis = -1;
- if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetime_millis)) {
- lifetime_millis = mediametrics::bucket_time_minutes(lifetime_millis);
- metrics_proto.set_lifetime_millis(lifetime_millis);
+ int64_t lifetimeMillis = -1;
+ if (item->getInt64("android.media.mediacodec.lifetimeMs", &lifetimeMillis)) {
+ lifetimeMillis = mediametrics::bucket_time_minutes(lifetimeMillis);
+ metrics_proto.set_lifetime_millis(lifetimeMillis);
}
- AStatsEvent_writeInt64(event, lifetime_millis);
+ AStatsEvent_writeInt64(event, lifetimeMillis);
- int64_t playback_duration_sec = -1;
- item->getInt64("android.media.mediacodec.playback-duration-sec", &playback_duration_sec);
+ int64_t playbackDurationSec = -1;
+ item->getInt64("android.media.mediacodec.playback-duration-sec", &playbackDurationSec);
// DO NOT record playback-duration in the metrics_proto - it should only
// exist in the flattened atom
- AStatsEvent_writeInt64(event, playback_duration_sec);
+ AStatsEvent_writeInt64(event, playbackDurationSec);
std::string sessionId;
if (item->getString("android.media.mediacodec.log-session-id", &sessionId)) {
@@ -438,7 +581,7 @@
}
AStatsEvent_writeInt32(event, hdr10PlusInfo);
- int32_t hdrFormat= -1;
+ int32_t hdrFormat = -1;
if (item->getInt32("android.media.mediacodec.hdr-format", &hdrFormat)) {
metrics_proto.set_hdr_format(hdrFormat);
}
@@ -450,61 +593,243 @@
}
AStatsEvent_writeInt64(event, codecId);
+ int32_t arrayMode = -1;
+ if (item->getInt32("android.media.mediacodec.array-mode", &arrayMode)) {
+ metrics_proto.set_array_mode(arrayMode);
+ }
+ AStatsEvent_writeInt32(event, arrayMode);
+
+ int32_t operationMode = -1;
+ if (item->getInt32("android.media.mediacodec.operation-mode", &operationMode)) {
+ metrics_proto.set_operation_mode(operationMode);
+ }
+ AStatsEvent_writeInt32(event, operationMode);
+
+ int32_t outputSurface = -1;
+ if (item->getInt32("android.media.mediacodec.output-surface", &outputSurface)) {
+ metrics_proto.set_output_surface(outputSurface);
+ }
+ AStatsEvent_writeInt32(event, outputSurface);
+
+ int32_t appMaxInputSize = -1;
+ if (item->getInt32("android.media.mediacodec.app-max-input-size", &appMaxInputSize)) {
+ metrics_proto.set_app_max_input_size(appMaxInputSize);
+ }
+ AStatsEvent_writeInt32(event, appMaxInputSize);
+
+ int32_t usedMaxInputSize = -1;
+ if (item->getInt32("android.media.mediacodec.used-max-input-size", &usedMaxInputSize)) {
+ metrics_proto.set_used_max_input_size(usedMaxInputSize);
+ }
+ AStatsEvent_writeInt32(event, usedMaxInputSize);
+
+ int32_t codecMaxInputSize = -1;
+ if (item->getInt32("android.media.mediacodec.codec-max-input-size", &codecMaxInputSize)) {
+ metrics_proto.set_codec_max_input_size(codecMaxInputSize);
+ }
+ AStatsEvent_writeInt32(event, codecMaxInputSize);
+
+ int32_t flushCount = -1;
+ if (item->getInt32("android.media.mediacodec.flush-count", &flushCount)) {
+ metrics_proto.set_flush_count(flushCount);
+ }
+ AStatsEvent_writeInt32(event, flushCount);
+
+ int32_t setSurfaceCount = -1;
+ if (item->getInt32("android.media.mediacodec.set-surface-count", &setSurfaceCount)) {
+ metrics_proto.set_set_surface_count(setSurfaceCount);
+ }
+ AStatsEvent_writeInt32(event, setSurfaceCount);
+
+ int32_t resolutionChangeCount = -1;
+ if (item->getInt32("android.media.mediacodec.resolution-change-count",
+ &resolutionChangeCount)) {
+ metrics_proto.set_resolution_change_count(resolutionChangeCount);
+ }
+ AStatsEvent_writeInt32(event, resolutionChangeCount);
+
+ int32_t componentColorFormat = -1;
+ if (item->getInt32("android.media.mediacodec.component-color-format", &componentColorFormat)) {
+ metrics_proto.set_component_color_format(componentColorFormat);
+ }
+ AStatsEvent_writeInt32(event, componentColorFormat);
+
+ int64_t firstRenderTimeUs = -1;
+ item->getInt64("android.media.mediacodec.first-render-time-us", &firstRenderTimeUs);
+ int64_t framesReleased = -1;
+ item->getInt64("android.media.mediacodec.frames-released", &framesReleased);
+ int64_t framesRendered = -1;
+ item->getInt64("android.media.mediacodec.frames-rendered", &framesRendered);
+ int64_t framesDropped = -1;
+ item->getInt64("android.media.mediacodec.frames-dropped", &framesDropped);
+ int64_t framesSkipped = -1;
+ item->getInt64("android.media.mediacodec.frames-skipped", &framesSkipped);
+ double framerateContent = -1;
+ item->getDouble("android.media.mediacodec.framerate-content", &framerateContent);
+ double framerateActual = -1;
+ item->getDouble("android.media.mediacodec.framerate-actual", &framerateActual);
+ int64_t freezeScore = -1;
+ item->getInt64("android.media.mediacodec.freeze-score", &freezeScore);
+ double freezeRate = -1;
+ item->getDouble("android.media.mediacodec.freeze-rate", &freezeRate);
+ std::string freezeScoreHistogramStr;
+ item->getString("android.media.mediacodec.freeze-score-histogram", &freezeScoreHistogramStr);
+ std::string freezeScoreHistogramBucketsStr;
+ item->getString("android.media.mediacodec.freeze-score-histogram-buckets",
+ &freezeScoreHistogramBucketsStr);
+ std::string freezeDurationMsHistogramStr;
+ item->getString("android.media.mediacodec.freeze-duration-ms-histogram",
+ &freezeDurationMsHistogramStr);
+ std::string freezeDurationMsHistogramBucketsStr;
+ item->getString("android.media.mediacodec.freeze-duration-ms-histogram-buckets",
+ &freezeDurationMsHistogramBucketsStr);
+ std::string freezeDistanceMsHistogramStr;
+ item->getString("android.media.mediacodec.freeze-distance-ms-histogram",
+ &freezeDistanceMsHistogramStr);
+ std::string freezeDistanceMsHistogramBucketsStr;
+ item->getString("android.media.mediacodec.freeze-distance-ms-histogram-buckets",
+ &freezeDistanceMsHistogramBucketsStr);
+ int64_t judderScore = -1;
+ item->getInt64("android.media.mediacodec.judder-score", &judderScore);
+ double judderRate = -1;
+ item->getDouble("android.media.mediacodec.judder-rate", &judderRate);
+ std::string judderScoreHistogramStr;
+ item->getString("android.media.mediacodec.judder-score-histogram", &judderScoreHistogramStr);
+ std::string judderScoreHistogramBucketsStr;
+ item->getString("android.media.mediacodec.judder-score-histogram-buckets",
+ &judderScoreHistogramBucketsStr);
+
int err = AStatsEvent_write(event);
if (err < 0) {
ALOGE("Failed to write codec metrics to statsd (%d)", err);
}
AStatsEvent_release(event);
+ if (framesRendered > 0) {
+ int32_t statsUid = item->getUid();
+ int64_t statsCodecId = codecId;
+ char const *statsLogSessionId = sessionId.c_str();
+ int32_t statsIsHardware = isHardware;
+ int32_t statsIsSecure = isSecure;
+ int32_t statsIsTunneled = isTunneled;
+ int32_t statsCodec = getMetricsCodecEnum(mime, codec);
+ int32_t statsResolution = getMetricsResolutionEnum(width, height);
+ int32_t statsBitrate = BITRATE_UNKNOWN;
+ int32_t statsContentFramerate = getMetricsFramerateEnum(framerateContent);
+ int32_t statsActualFramerate = getMetricsFramerateEnum(framerateActual);
+ int32_t statsHdrFormat = getMetricsHdrFormatEnum(mime, codec, configColorTransfer,
+ parsedColorTransfer, hdrStaticInfo,
+ hdr10PlusInfo);
+ int64_t statsFirstRenderTimeUs = firstRenderTimeUs;
+ int64_t statsPlaybackDurationSeconds = playbackDurationSec;
+ int64_t statsFramesTotal = framesReleased + framesSkipped;
+ int64_t statsFramesReleased = framesReleased;
+ int64_t statsFramesRendered = framesRendered;
+ int64_t statsFramesDropped = framesDropped;
+ int64_t statsFramesSkipped = framesSkipped;
+ float statsFrameDropRate = float(double(framesDropped) / statsFramesTotal);
+ float statsFrameSkipRate = float(double(framesSkipped) / statsFramesTotal);
+ float statsFrameSkipDropRate = float(double(framesSkipped + framesDropped) /
+ statsFramesTotal);
+ int64_t statsFreezeScore = freezeScore;
+ float statsFreezeRate = freezeRate;
+ std::vector<int32_t> statsFreezeDurationMsHistogram;
+ parseVector(freezeDurationMsHistogramStr, &statsFreezeDurationMsHistogram);
+ std::vector<int32_t> statsFreezeDurationMsHistogramBuckets;
+ parseVector(freezeDurationMsHistogramBucketsStr, &statsFreezeDurationMsHistogramBuckets);
+ std::vector<int32_t> statsFreezeDistanceMsHistogram;
+ parseVector(freezeDistanceMsHistogramStr, &statsFreezeDistanceMsHistogram);
+ std::vector<int32_t> statsFreezeDistanceMsHistogramBuckets;
+ parseVector(freezeDistanceMsHistogramBucketsStr, &statsFreezeDistanceMsHistogramBuckets);
+ int64_t statsJudderScore = judderScore;
+ float statsJudderRate = judderRate;
+ std::vector<int32_t> statsJudderScoreHistogram;
+ parseVector(judderScoreHistogramStr, &statsJudderScoreHistogram);
+ std::vector<int32_t> statsJudderScoreHistogramBuckets;
+ parseVector(judderScoreHistogramBucketsStr, &statsJudderScoreHistogramBuckets);
+ int result = stats_write(
+ MEDIA_CODEC_RENDERED,
+ statsUid,
+ statsCodecId,
+ statsLogSessionId,
+ statsIsHardware,
+ statsIsSecure,
+ statsIsTunneled,
+ statsCodec,
+ statsResolution,
+ statsBitrate,
+ statsContentFramerate,
+ statsActualFramerate,
+ statsHdrFormat,
+ statsFirstRenderTimeUs,
+ statsPlaybackDurationSeconds,
+ statsFramesTotal,
+ statsFramesReleased,
+ statsFramesRendered,
+ statsFramesDropped,
+ statsFramesSkipped,
+ statsFrameDropRate,
+ statsFrameSkipRate,
+ statsFrameSkipDropRate,
+ statsFreezeScore,
+ statsFreezeRate,
+ statsFreezeDurationMsHistogram,
+ statsFreezeDurationMsHistogramBuckets,
+ statsFreezeDistanceMsHistogram,
+ statsFreezeDistanceMsHistogramBuckets,
+ statsJudderScore,
+ statsJudderRate,
+ statsJudderScoreHistogram,
+ statsJudderScoreHistogramBuckets);
+ ALOGE_IF(result < 0, "Failed to record MEDIA_CODEC_RENDERED atom (%d)", result);
+ }
+
std::string serialized;
if (!metrics_proto.SerializeToString(&serialized)) {
ALOGE("Failed to serialize codec metrics");
return false;
}
- const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const stats::media_metrics::BytesField bf_serialized(serialized.c_str(), serialized.size());
const int result = stats::media_metrics::stats_write(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED,
- timestamp_nanos, package_name.c_str(), package_version_code,
- media_apex_version,
+ timestampNanos, packageName.c_str(), packageVersionCode,
+ mediaApexVersion,
bf_serialized);
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_codec_reported:"
<< stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED
- << " timestamp_nanos:" << timestamp_nanos
- << " package_name:" << package_name
- << " package_version_code:" << package_version_code
- << " media_apex_version:" << media_apex_version
-
+ << " timestamp_nanos:" << timestampNanos
+ << " package_name:" << packageName
+ << " package_version_code:" << packageVersionCode
+ << " media_apex_version:" << mediaApexVersion
<< " codec:" << codec
<< " mime:" << mime
<< " mode:" << mode
- << " encoder:" << encoder
- << " secure:" << secure
+ << " encoder:" << isEncoder
+ << " secure:" << isSecure
<< " width:" << width
<< " height:" << height
<< " rotation:" << rotation
<< " crypto:" << crypto
<< " profile:" << profile
-
<< " level:" << level
- << " max_width:" << max_width
- << " max_height:" << max_height
- << " error_code:" << error_code
- << " error_state:" << error_state
- << " latency_max:" << latency_max
- << " latency_min:" << latency_min
- << " latency_avg:" << latency_avg
- << " latency_count:" << latency_count
- << " latency_unknown:" << latency_unknown
-
- << " queue_input_buffer_error:" << queue_input_buffer_error
- << " queue_secure_input_buffer_error:" << queue_secure_input_buffer_error
- << " bitrate_mode:" << bitrate_mode
+ << " max_width:" << maxWidth
+ << " max_height:" << maxHeight
+ << " error_code:" << errorCode
+ << " error_state:" << errorState
+ << " latency_max:" << latencyMax
+ << " latency_min:" << latencyMin
+ << " latency_avg:" << latencyAvg
+ << " latency_count:" << latencyCount
+ << " latency_unknown:" << latencyUnknown
+ << " queue_input_buffer_error:" << queueInputBufferError
+ << " queue_secure_input_buffer_error:" << queueSecureInputBufferError
+ << " bitrate_mode:" << bitrateMode
<< " bitrate:" << bitrate
<< " original_bitrate:" << originalBitrate
- << " lifetime_millis:" << lifetime_millis
- << " playback_duration_seconds:" << playback_duration_sec
+ << " lifetime_millis:" << lifetimeMillis
+ << " playback_duration_seconds:" << playbackDurationSec
<< " log_session_id:" << sessionId
<< " channel_count:" << channelCount
<< " sample_rate:" << sampleRate
@@ -517,7 +842,6 @@
<< " operating_rate:" << operatingRate
<< " priority:" << priority
<< " shaping_enhanced:" << shapingEnhanced
-
<< " qp_i_min:" << qpIMin
<< " qp_i_max:" << qpIMax
<< " qp_p_min:" << qpPMin
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index bc7b47b..4a6aee4 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -17,9 +17,10 @@
#define LOG_TAG "mediametrics_tests"
#include <utils/Log.h>
-
#include <stdio.h>
+#include <string>
#include <unordered_set>
+#include <vector>
#include <gtest/gtest.h>
#include <media/MediaMetricsItem.h>
@@ -30,6 +31,7 @@
#include <system/audio.h>
using namespace android;
+using android::mediametrics::stringutils::parseVector;
static size_t countNewlines(const char *s) {
size_t count = 0;
@@ -57,6 +59,35 @@
ASSERT_EQ(false, android::mediametrics::startsWith(s, std::string("est")));
}
+TEST(mediametrics_tests, parseVector) {
+ {
+ std::vector<int32_t> values;
+ EXPECT_EQ(true, parseVector("0{4,300,0,-112343,350}9", &values));
+ EXPECT_EQ(values, std::vector<int32_t>({0, 4, 300, 0, -112343, 350, 9}));
+ }
+ {
+ std::vector<int32_t> values;
+ EXPECT_EQ(true, parseVector("53", &values));
+ EXPECT_EQ(values, std::vector<int32_t>({53}));
+ }
+ {
+ std::vector<int32_t> values;
+ EXPECT_EQ(false, parseVector("5{3,6*3}3", &values));
+ EXPECT_EQ(values, std::vector<int32_t>({}));
+ }
+ {
+ std::vector<int32_t> values = {1}; // should still be this when parsing fails
+ std::vector<int32_t> expected = {1};
+ EXPECT_EQ(false, parseVector("51342abcd,1232", &values));
+ EXPECT_EQ(values, std::vector<int32_t>({1}));
+ }
+ {
+ std::vector<int32_t> values = {2}; // should still be this when parsing fails
+ EXPECT_EQ(false, parseVector("12345678901234,12345678901234", &values));
+ EXPECT_EQ(values, std::vector<int32_t>({2}));
+ }
+}
+
TEST(mediametrics_tests, defer) {
bool check = false;
{
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 9c81f89..5d1ba2b 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -390,7 +390,7 @@
std::shared_ptr<ResourceManagerService> service =
::ndk::SharedRefBase::make<ResourceManagerService>();
binder_status_t status =
- AServiceManager_addServiceWithFlags(
+ AServiceManager_addServiceWithFlags(
service->asBinder().get(), getServiceName(),
AServiceManager_AddServiceFlag::ADD_SERVICE_ALLOW_ISOLATED);
if (status != STATUS_OK) {
diff --git a/services/mediaresourcemanager/UidObserver.cpp b/services/mediaresourcemanager/UidObserver.cpp
index f321ebc..4bcd325 100644
--- a/services/mediaresourcemanager/UidObserver.cpp
+++ b/services/mediaresourcemanager/UidObserver.cpp
@@ -170,7 +170,7 @@
int32_t /*capability*/) {
}
-void UidObserver::onUidProcAdjChanged(uid_t /*uid*/) {
+void UidObserver::onUidProcAdjChanged(uid_t /*uid*/, int32_t /*adj*/) {
}
void UidObserver::binderDied(const wp<IBinder>& /*who*/) {
diff --git a/services/mediaresourcemanager/UidObserver.h b/services/mediaresourcemanager/UidObserver.h
index ed76839..b5cc3b9 100644
--- a/services/mediaresourcemanager/UidObserver.h
+++ b/services/mediaresourcemanager/UidObserver.h
@@ -74,7 +74,7 @@
void onUidIdle(uid_t uid, bool disabled) override;
void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
int32_t capability) override;
- void onUidProcAdjChanged(uid_t uid) override;
+ void onUidProcAdjChanged(uid_t uid, int32_t adj) override;
// IServiceManager::LocalRegistrationCallback implementation.
void onServiceRegistration(const String16& name,
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 6695c12..de24e1e 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -18,6 +18,7 @@
"libbinder_ndk",
"liblog",
"libmedia",
+ "libmediautils",
"libutils",
"libmediautils",
"libstats_media_metrics",
diff --git a/services/oboeservice/AAudioClientTracker.cpp b/services/oboeservice/AAudioClientTracker.cpp
index 054a896..c91ead0 100644
--- a/services/oboeservice/AAudioClientTracker.cpp
+++ b/services/oboeservice/AAudioClientTracker.cpp
@@ -73,13 +73,13 @@
return AAUDIO_ERROR_NULL;
}
- std::lock_guard<std::mutex> lock(mLock);
+ const std::lock_guard<std::mutex> lock(mLock);
if (mNotificationClients.count(pid) == 0) {
- sp<IBinder> binder = IInterface::asBinder(client);
- sp<NotificationClient> notificationClient = new NotificationClient(pid, binder);
+ const sp<IBinder> binder = IInterface::asBinder(client);
+ const sp<NotificationClient> notificationClient = new NotificationClient(pid, binder);
mNotificationClients[pid] = notificationClient;
- status_t status = binder->linkToDeath(notificationClient);
+ const status_t status = binder->linkToDeath(notificationClient);
ALOGW_IF(status != NO_ERROR, "registerClient() linkToDeath = %d\n", status);
return AAudioConvert_androidToAAudioResult(status);
} else {
@@ -90,12 +90,12 @@
void AAudioClientTracker::unregisterClient(pid_t pid) {
ALOGV("unregisterClient(), calling pid = %d, getpid() = %d\n", pid, getpid());
- std::lock_guard<std::mutex> lock(mLock);
+ const std::lock_guard<std::mutex> lock(mLock);
mNotificationClients.erase(pid);
}
int32_t AAudioClientTracker::getStreamCount(pid_t pid) {
- std::lock_guard<std::mutex> lock(mLock);
+ const std::lock_guard<std::mutex> lock(mLock);
auto it = mNotificationClients.find(pid);
if (it != mNotificationClients.end()) {
return it->second->getStreamCount();
@@ -105,18 +105,19 @@
}
aaudio_result_t
-AAudioClientTracker::registerClientStream(pid_t pid, sp<AAudioServiceStreamBase> serviceStream) {
+AAudioClientTracker::registerClientStream(
+ pid_t pid, const sp<AAudioServiceStreamBase>& serviceStream) {
ALOGV("registerClientStream(%d,)\n", pid);
- std::lock_guard<std::mutex> lock(mLock);
+ const std::lock_guard<std::mutex> lock(mLock);
return getNotificationClient_l(pid)->registerClientStream(serviceStream);
}
// Find the tracker for this process and remove it.
aaudio_result_t
AAudioClientTracker::unregisterClientStream(pid_t pid,
- sp<AAudioServiceStreamBase> serviceStream) {
+ const sp<AAudioServiceStreamBase>& serviceStream) {
ALOGV("unregisterClientStream(%d,)\n", pid);
- std::lock_guard<std::mutex> lock(mLock);
+ const std::lock_guard<std::mutex> lock(mLock);
auto it = mNotificationClients.find(pid);
if (it != mNotificationClients.end()) {
ALOGV("unregisterClientStream(%d,) found NotificationClient\n", pid);
@@ -129,12 +130,12 @@
void AAudioClientTracker::setExclusiveEnabled(pid_t pid, bool enabled) {
ALOGD("%s(%d, %d)\n", __func__, pid, enabled);
- std::lock_guard<std::mutex> lock(mLock);
+ const std::lock_guard<std::mutex> lock(mLock);
getNotificationClient_l(pid)->setExclusiveEnabled(enabled);
}
bool AAudioClientTracker::isExclusiveEnabled(pid_t pid) {
- std::lock_guard<std::mutex> lock(mLock);
+ const std::lock_guard<std::mutex> lock(mLock);
return getNotificationClient_l(pid)->isExclusiveEnabled();
}
@@ -158,24 +159,21 @@
: mProcessId(pid), mBinder(binder) {
}
-AAudioClientTracker::NotificationClient::~NotificationClient() {
-}
-
int32_t AAudioClientTracker::NotificationClient::getStreamCount() {
- std::lock_guard<std::mutex> lock(mLock);
+ const std::lock_guard<std::mutex> lock(mLock);
return mStreams.size();
}
aaudio_result_t AAudioClientTracker::NotificationClient::registerClientStream(
- sp<AAudioServiceStreamBase> serviceStream) {
- std::lock_guard<std::mutex> lock(mLock);
+ const sp<AAudioServiceStreamBase>& serviceStream) {
+ const std::lock_guard<std::mutex> lock(mLock);
mStreams.insert(serviceStream);
return AAUDIO_OK;
}
aaudio_result_t AAudioClientTracker::NotificationClient::unregisterClientStream(
- sp<AAudioServiceStreamBase> serviceStream) {
- std::lock_guard<std::mutex> lock(mLock);
+ const sp<AAudioServiceStreamBase>& serviceStream) {
+ const std::lock_guard<std::mutex> lock(mLock);
mStreams.erase(serviceStream);
return AAUDIO_OK;
}
@@ -189,20 +187,21 @@
std::set<sp<AAudioServiceStreamBase>> streamsToClose;
{
- std::lock_guard<std::mutex> lock(mLock);
+ const std::lock_guard<std::mutex> lock(mLock);
for (const auto& serviceStream : mStreams) {
streamsToClose.insert(serviceStream);
}
}
for (const auto& serviceStream : streamsToClose) {
- aaudio_handle_t handle = serviceStream->getHandle();
+ const aaudio_handle_t handle = serviceStream->getHandle();
ALOGW("binderDied() close abandoned stream 0x%08X\n", handle);
- aaudioService->asAAudioServiceInterface().closeStream(handle);
+ AAudioHandleInfo handleInfo(DEFAULT_AAUDIO_SERVICE_ID, handle);
+ aaudioService->asAAudioServiceInterface().closeStream(handleInfo);
}
// mStreams should be empty now
}
- sp<NotificationClient> keep(this);
+ const sp<NotificationClient> keep(this);
AAudioClientTracker::getInstance().unregisterClient(mProcessId);
}
diff --git a/services/oboeservice/AAudioClientTracker.h b/services/oboeservice/AAudioClientTracker.h
index 2b38621..cd3b75a 100644
--- a/services/oboeservice/AAudioClientTracker.h
+++ b/services/oboeservice/AAudioClientTracker.h
@@ -54,10 +54,10 @@
int32_t getStreamCount(pid_t pid);
aaudio_result_t registerClientStream(pid_t pid,
- android::sp<AAudioServiceStreamBase> serviceStream);
+ const android::sp<AAudioServiceStreamBase>& serviceStream);
- aaudio_result_t unregisterClientStream(pid_t pid,
- android::sp<AAudioServiceStreamBase> serviceStream);
+ aaudio_result_t unregisterClientStream(
+ pid_t pid, const android::sp<AAudioServiceStreamBase>& serviceStream);
/**
* Specify whether a process is allowed to create an EXCLUSIVE MMAP stream.
@@ -84,15 +84,17 @@
class NotificationClient : public IBinder::DeathRecipient {
public:
NotificationClient(pid_t pid, const android::sp<IBinder>& binder);
- virtual ~NotificationClient();
+ ~NotificationClient() override = default;
int32_t getStreamCount();
std::string dump() const;
- aaudio_result_t registerClientStream(android::sp<AAudioServiceStreamBase> serviceStream);
+ aaudio_result_t registerClientStream(
+ const android::sp<AAudioServiceStreamBase>& serviceStream);
- aaudio_result_t unregisterClientStream(android::sp<AAudioServiceStreamBase> serviceStream);
+ aaudio_result_t unregisterClientStream(
+ const android::sp<AAudioServiceStreamBase>& serviceStream);
void setExclusiveEnabled(bool enabled) {
mExclusiveEnabled = enabled;
@@ -103,7 +105,7 @@
}
// IBinder::DeathRecipient
- virtual void binderDied(const android::wp<IBinder>& who);
+ void binderDied(const android::wp<IBinder>& who) override;
private:
mutable std::mutex mLock;
diff --git a/services/oboeservice/AAudioCommandQueue.cpp b/services/oboeservice/AAudioCommandQueue.cpp
index 9bd18b3..be80b12 100644
--- a/services/oboeservice/AAudioCommandQueue.cpp
+++ b/services/oboeservice/AAudioCommandQueue.cpp
@@ -25,7 +25,7 @@
namespace aaudio {
-aaudio_result_t AAudioCommandQueue::sendCommand(std::shared_ptr<AAudioCommand> command) {
+aaudio_result_t AAudioCommandQueue::sendCommand(const std::shared_ptr<AAudioCommand>& command) {
{
std::scoped_lock<std::mutex> _l(mLock);
if (!mRunning) {
diff --git a/services/oboeservice/AAudioCommandQueue.h b/services/oboeservice/AAudioCommandQueue.h
index 64442a3..ad62d8a 100644
--- a/services/oboeservice/AAudioCommandQueue.h
+++ b/services/oboeservice/AAudioCommandQueue.h
@@ -26,7 +26,7 @@
namespace aaudio {
-typedef int32_t aaudio_command_opcode;
+using aaudio_command_opcode = int32_t;
class AAudioCommandParam {
public:
@@ -39,7 +39,7 @@
explicit AAudioCommand(
aaudio_command_opcode opCode, std::shared_ptr<AAudioCommandParam> param = nullptr,
bool waitForReply = false, int64_t timeoutNanos = 0)
- : operationCode(opCode), parameter(param), isWaitingForReply(waitForReply),
+ : operationCode(opCode), parameter(std::move(param)), isWaitingForReply(waitForReply),
timeoutNanoseconds(timeoutNanos) { }
virtual ~AAudioCommand() = default;
@@ -66,7 +66,7 @@
* @return the result of sending the command or the result of executing the command if command
* need to wait for a reply. If timeout happens, AAUDIO_ERROR_TIMEOUT will be returned.
*/
- aaudio_result_t sendCommand(std::shared_ptr<AAudioCommand> command);
+ aaudio_result_t sendCommand(const std::shared_ptr<AAudioCommand>& command);
/**
* Wait for next available command OR until the timeout is expired.
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index 20e4cc5..b5ee2f2 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -162,7 +162,7 @@
const aaudio::AAudioStreamRequest &request,
sp<AAudioServiceEndpoint> &endpointToSteal) {
- std::lock_guard<std::mutex> lock(mExclusiveLock);
+ const std::lock_guard<std::mutex> lock(mExclusiveLock);
const AAudioStreamConfiguration &configuration = request.getConstantConfiguration();
@@ -183,19 +183,20 @@
// and START calls. This will help preserve app compatibility.
// An app can avoid having this happen by closing their streams when
// the app is paused.
- pid_t pid = VALUE_OR_FATAL(
+ const pid_t pid = VALUE_OR_FATAL(
aidl2legacy_int32_t_pid_t(request.getAttributionSource().pid));
AAudioClientTracker::getInstance().setExclusiveEnabled(pid, false);
endpointToSteal = endpoint; // return it to caller
}
return nullptr;
} else {
- sp<AAudioServiceEndpointMMAP> endpointMMap = new AAudioServiceEndpointMMAP(aaudioService);
+ const sp<AAudioServiceEndpointMMAP> endpointMMap =
+ new AAudioServiceEndpointMMAP(aaudioService);
ALOGV("%s(), no match so try to open MMAP %p for dev %d",
__func__, endpointMMap.get(), configuration.getDeviceId());
endpoint = endpointMMap;
- aaudio_result_t result = endpoint->open(request);
+ const aaudio_result_t result = endpoint->open(request);
if (result != AAUDIO_OK) {
endpoint.clear();
} else {
@@ -217,10 +218,10 @@
AAudioService &aaudioService,
const aaudio::AAudioStreamRequest &request) {
- std::lock_guard<std::mutex> lock(mSharedLock);
+ const std::lock_guard<std::mutex> lock(mSharedLock);
const AAudioStreamConfiguration &configuration = request.getConstantConfiguration();
- aaudio_direction_t direction = configuration.getDirection();
+ const aaudio_direction_t direction = configuration.getDirection();
// Try to find an existing endpoint.
sp<AAudioServiceEndpointShared> endpoint = findSharedEndpoint_l(configuration);
@@ -228,7 +229,7 @@
// If we can't find an existing one then open a new one.
if (endpoint.get() == nullptr) {
// we must call openStream with audioserver identity
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ const int64_t token = IPCThreadState::self()->clearCallingIdentity();
switch (direction) {
case AAUDIO_DIRECTION_INPUT:
endpoint = new AAudioServiceEndpointCapture(aaudioService);
@@ -241,7 +242,7 @@
}
if (endpoint.get() != nullptr) {
- aaudio_result_t result = endpoint->open(request);
+ const aaudio_result_t result = endpoint->open(request);
if (result != AAUDIO_OK) {
endpoint.clear();
} else {
@@ -261,7 +262,7 @@
return endpoint;
}
-void AAudioEndpointManager::closeEndpoint(sp<AAudioServiceEndpoint>serviceEndpoint) {
+void AAudioEndpointManager::closeEndpoint(const sp<AAudioServiceEndpoint>& serviceEndpoint) {
if (serviceEndpoint->getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE) {
return closeExclusiveEndpoint(serviceEndpoint);
} else {
@@ -269,14 +270,15 @@
}
}
-void AAudioEndpointManager::closeExclusiveEndpoint(sp<AAudioServiceEndpoint> serviceEndpoint) {
+void AAudioEndpointManager::closeExclusiveEndpoint(
+ const sp<AAudioServiceEndpoint>& serviceEndpoint) {
if (serviceEndpoint.get() == nullptr) {
return;
}
// Decrement the reference count under this lock.
- std::lock_guard<std::mutex> lock(mExclusiveLock);
- int32_t newRefCount = serviceEndpoint->getOpenCount() - 1;
+ const std::lock_guard<std::mutex> lock(mExclusiveLock);
+ const int32_t newRefCount = serviceEndpoint->getOpenCount() - 1;
serviceEndpoint->setOpenCount(newRefCount);
// If no longer in use then actually close it.
@@ -292,14 +294,14 @@
}
}
-void AAudioEndpointManager::closeSharedEndpoint(sp<AAudioServiceEndpoint> serviceEndpoint) {
+void AAudioEndpointManager::closeSharedEndpoint(const sp<AAudioServiceEndpoint>& serviceEndpoint) {
if (serviceEndpoint.get() == nullptr) {
return;
}
// Decrement the reference count under this lock.
- std::lock_guard<std::mutex> lock(mSharedLock);
- int32_t newRefCount = serviceEndpoint->getOpenCount() - 1;
+ const std::lock_guard<std::mutex> lock(mSharedLock);
+ const int32_t newRefCount = serviceEndpoint->getOpenCount() - 1;
serviceEndpoint->setOpenCount(newRefCount);
// If no longer in use then actually close it.
diff --git a/services/oboeservice/AAudioEndpointManager.h b/services/oboeservice/AAudioEndpointManager.h
index b07bcef..1d38d26 100644
--- a/services/oboeservice/AAudioEndpointManager.h
+++ b/services/oboeservice/AAudioEndpointManager.h
@@ -61,7 +61,7 @@
android::sp<AAudioServiceEndpoint> openEndpoint(android::AAudioService &audioService,
const aaudio::AAudioStreamRequest &request);
- void closeEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
+ void closeEndpoint(const android::sp<AAudioServiceEndpoint>& serviceEndpoint);
private:
android::sp<AAudioServiceEndpoint> openExclusiveEndpoint(android::AAudioService &aaudioService,
@@ -79,8 +79,8 @@
const AAudioStreamConfiguration& configuration)
REQUIRES(mSharedLock);
- void closeExclusiveEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
- void closeSharedEndpoint(android::sp<AAudioServiceEndpoint> serviceEndpoint);
+ void closeExclusiveEndpoint(const android::sp<AAudioServiceEndpoint>& serviceEndpoint);
+ void closeSharedEndpoint(const android::sp<AAudioServiceEndpoint>& serviceEndpoint);
// Use separate locks because opening a Shared endpoint requires opening an Exclusive one.
// That could cause a recursive lock.
diff --git a/services/oboeservice/AAudioMixer.cpp b/services/oboeservice/AAudioMixer.cpp
index ad4b830..6890985 100644
--- a/services/oboeservice/AAudioMixer.cpp
+++ b/services/oboeservice/AAudioMixer.cpp
@@ -45,7 +45,8 @@
memset(mOutputBuffer.get(), 0, mBufferSizeInBytes);
}
-int32_t AAudioMixer::mix(int streamIndex, std::shared_ptr<FifoBuffer> fifo, bool allowUnderflow) {
+int32_t AAudioMixer::mix(
+ int streamIndex, const std::shared_ptr<FifoBuffer>& fifo, bool allowUnderflow) {
WrappingBuffer wrappingBuffer;
float *destination = mOutputBuffer.get();
diff --git a/services/oboeservice/AAudioMixer.h b/services/oboeservice/AAudioMixer.h
index 1a120f2..d773178 100644
--- a/services/oboeservice/AAudioMixer.h
+++ b/services/oboeservice/AAudioMixer.h
@@ -24,7 +24,7 @@
class AAudioMixer {
public:
- AAudioMixer() {}
+ AAudioMixer() = default;
void allocate(int32_t samplesPerFrame, int32_t framesPerBurst);
@@ -37,7 +37,9 @@
* @param allowUnderflow if true then allow mixer to advance read index past the write index
* @return frames read from this stream
*/
- int32_t mix(int streamIndex, std::shared_ptr<android::FifoBuffer> fifo, bool allowUnderflow);
+ int32_t mix(int streamIndex,
+ const std::shared_ptr<android::FifoBuffer>& fifo,
+ bool allowUnderflow);
float *getOutputBuffer();
diff --git a/services/oboeservice/AAudioService.cpp b/services/oboeservice/AAudioService.cpp
index 2679b2e..e9c0884 100644
--- a/services/oboeservice/AAudioService.cpp
+++ b/services/oboeservice/AAudioService.cpp
@@ -62,7 +62,7 @@
AAudioClientTracker::getInstance().setAAudioService(this);
}
-status_t AAudioService::dump(int fd, const Vector<String16>& args) {
+status_t AAudioService::dump(int fd, const Vector<String16>& /*args*/) {
std::string result;
if (!dumpAllowed()) {
@@ -83,7 +83,7 @@
}
Status AAudioService::registerClient(const sp<IAAudioClient> &client) {
- pid_t pid = IPCThreadState::self()->getCallingPid();
+ const pid_t pid = IPCThreadState::self()->getCallingPid();
AAudioClientTracker::getInstance().registerClient(pid, client);
return Status::ok();
}
@@ -106,24 +106,24 @@
// 4) Thread A can then get the lock and also open a shared stream.
// Without the lock. Thread A might sneak in and reallocate an exclusive stream
// before B can open the shared stream.
- std::unique_lock<std::recursive_mutex> lock(mOpenLock);
+ const std::unique_lock<std::recursive_mutex> lock(mOpenLock);
aaudio_result_t result = AAUDIO_OK;
sp<AAudioServiceStreamBase> serviceStream;
const AAudioStreamConfiguration &configurationInput = request.getConstantConfiguration();
- bool sharingModeMatchRequired = request.isSharingModeMatchRequired();
- aaudio_sharing_mode_t sharingMode = configurationInput.getSharingMode();
+ const bool sharingModeMatchRequired = request.isSharingModeMatchRequired();
+ const aaudio_sharing_mode_t sharingMode = configurationInput.getSharingMode();
// Enforce limit on client processes.
AttributionSourceState attributionSource = request.getAttributionSource();
- pid_t pid = IPCThreadState::self()->getCallingPid();
+ const pid_t pid = IPCThreadState::self()->getCallingPid();
attributionSource.pid = VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(
legacy2aidl_pid_t_int32_t(pid));
attributionSource.uid = VALUE_OR_RETURN_ILLEGAL_ARG_STATUS(
legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));
attributionSource.token = sp<BBinder>::make();
if (attributionSource.pid != mAudioClient.attributionSource.pid) {
- int32_t count = AAudioClientTracker::getInstance().getStreamCount(pid);
+ const int32_t count = AAudioClientTracker::getInstance().getStreamCount(pid);
if (count >= MAX_STREAMS_PER_PROCESS) {
ALOGE("openStream(): exceeded max streams per process %d >= %d",
count, MAX_STREAMS_PER_PROCESS);
@@ -168,7 +168,7 @@
serviceStream.clear();
AIDL_RETURN(result);
} else {
- aaudio_handle_t handle = mStreamTracker.addStreamForHandle(serviceStream.get());
+ const aaudio_handle_t handle = mStreamTracker.addStreamForHandle(serviceStream.get());
serviceStream->setHandle(handle);
AAudioClientTracker::getInstance().registerClientStream(pid, serviceStream);
paramsOut.copyFrom(*serviceStream);
@@ -185,7 +185,7 @@
static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
// Check permission and ownership first.
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGE("closeStream(0x%0x), illegal stream handle", streamHandle);
AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -197,13 +197,13 @@
int32_t *_aidl_return) {
static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
}
AudioEndpointParcelable endpointParcelable;
- aaudio_result_t result = serviceStream->getDescription(endpointParcelable);
+ const aaudio_result_t result = serviceStream->getDescription(endpointParcelable);
if (result == AAUDIO_OK) {
*endpoint = std::move(endpointParcelable).parcelable();
}
@@ -213,7 +213,7 @@
Status AAudioService::startStream(int32_t streamHandle, int32_t *_aidl_return) {
static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -224,7 +224,7 @@
Status AAudioService::pauseStream(int32_t streamHandle, int32_t *_aidl_return) {
static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -235,7 +235,7 @@
Status AAudioService::stopStream(int32_t streamHandle, int32_t *_aidl_return) {
static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -246,7 +246,7 @@
Status AAudioService::flushStream(int32_t streamHandle, int32_t *_aidl_return) {
static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -254,16 +254,16 @@
AIDL_RETURN(serviceStream->flush());
}
-Status AAudioService::registerAudioThread(int32_t streamHandle, int32_t clientThreadId, int64_t periodNanoseconds,
- int32_t *_aidl_return) {
+Status AAudioService::registerAudioThread(int32_t streamHandle, int32_t clientThreadId,
+ int64_t /*periodNanoseconds*/, int32_t *_aidl_return) {
static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
}
- int32_t priority = isCallerInService()
+ const int32_t priority = isCallerInService()
? kRealTimeAudioPriorityService : kRealTimeAudioPriorityClient;
AIDL_RETURN(serviceStream->registerAudioThread(clientThreadId, priority));
}
@@ -272,7 +272,7 @@
int32_t *_aidl_return) {
static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
@@ -283,13 +283,13 @@
Status AAudioService::exitStandby(int32_t streamHandle, Endpoint* endpoint, int32_t *_aidl_return) {
static_assert(std::is_same_v<aaudio_result_t, std::decay_t<typeof(*_aidl_return)>>);
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGE("getStreamDescription(), illegal stream handle = 0x%0x", streamHandle);
AIDL_RETURN(AAUDIO_ERROR_INVALID_HANDLE);
}
AudioEndpointParcelable endpointParcelable;
- aaudio_result_t result = serviceStream->exitStandby(&endpointParcelable);
+ const aaudio_result_t result = serviceStream->exitStandby(&endpointParcelable);
if (result == AAUDIO_OK) {
*endpoint = std::move(endpointParcelable).parcelable();
}
@@ -297,16 +297,18 @@
}
bool AAudioService::isCallerInService() {
- pid_t clientPid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mAudioClient.attributionSource.pid));
- uid_t clientUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mAudioClient.attributionSource.uid));
+ const pid_t clientPid = VALUE_OR_FATAL(
+ aidl2legacy_int32_t_pid_t(mAudioClient.attributionSource.pid));
+ const uid_t clientUid = VALUE_OR_FATAL(
+ aidl2legacy_int32_t_uid_t(mAudioClient.attributionSource.uid));
return clientPid == IPCThreadState::self()->getCallingPid() &&
clientUid == IPCThreadState::self()->getCallingUid();
}
-aaudio_result_t AAudioService::closeStream(sp<AAudioServiceStreamBase> serviceStream) {
+aaudio_result_t AAudioService::closeStream(const sp<AAudioServiceStreamBase>& serviceStream) {
// This is protected by a lock in AAudioClientTracker.
// It is safe to unregister the same stream twice.
- pid_t pid = serviceStream->getOwnerProcessId();
+ const pid_t pid = serviceStream->getOwnerProcessId();
AAudioClientTracker::getInstance().unregisterClientStream(pid, serviceStream);
// This is protected by a lock in mStreamTracker.
// It is safe to remove the same stream twice.
@@ -325,10 +327,10 @@
const uid_t ownerUserId = serviceStream->getOwnerUserId();
const uid_t clientUid = VALUE_OR_FATAL(
aidl2legacy_int32_t_uid_t(mAudioClient.attributionSource.uid));
- bool callerOwnsIt = callingUserId == ownerUserId;
- bool serverCalling = callingUserId == clientUid;
- bool serverOwnsIt = ownerUserId == clientUid;
- bool allowed = callerOwnsIt || serverCalling || serverOwnsIt;
+ const bool callerOwnsIt = callingUserId == ownerUserId;
+ const bool serverCalling = callingUserId == clientUid;
+ const bool serverOwnsIt = ownerUserId == clientUid;
+ const bool allowed = callerOwnsIt || serverCalling || serverOwnsIt;
if (!allowed) {
ALOGE("AAudioService: calling uid %d cannot access stream 0x%08X owned by %d",
callingUserId, streamHandle, ownerUserId);
@@ -342,7 +344,7 @@
const android::AudioClient& client,
const audio_attributes_t *attr,
audio_port_handle_t *clientHandle) {
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
return AAUDIO_ERROR_INVALID_HANDLE;
@@ -352,7 +354,7 @@
aaudio_result_t AAudioService::stopClient(aaudio_handle_t streamHandle,
audio_port_handle_t portHandle) {
- sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
+ const sp<AAudioServiceStreamBase> serviceStream = convertHandleToServiceStream(streamHandle);
if (serviceStream.get() == nullptr) {
ALOGW("%s(), invalid streamHandle = 0x%0x", __func__, streamHandle);
return AAUDIO_ERROR_INVALID_HANDLE;
@@ -364,14 +366,14 @@
// So we do not have to check permissions.
aaudio_result_t AAudioService::disconnectStreamByPortHandle(audio_port_handle_t portHandle) {
ALOGD("%s(%d) called", __func__, portHandle);
- sp<AAudioServiceStreamBase> serviceStream =
+ const sp<AAudioServiceStreamBase> serviceStream =
mStreamTracker.findStreamByPortHandle(portHandle);
if (serviceStream.get() == nullptr) {
ALOGE("%s(), could not find stream with portHandle = %d", __func__, portHandle);
return AAUDIO_ERROR_INVALID_HANDLE;
}
// This is protected by a lock and will just return if already stopped.
- aaudio_result_t result = serviceStream->stop();
+ const aaudio_result_t result = serviceStream->stop();
serviceStream->disconnect();
return result;
}
diff --git a/services/oboeservice/AAudioService.h b/services/oboeservice/AAudioService.h
index 0a111fb..ada3d53 100644
--- a/services/oboeservice/AAudioService.h
+++ b/services/oboeservice/AAudioService.h
@@ -36,6 +36,7 @@
namespace android {
#define AAUDIO_SERVICE_NAME "media.aaudio"
+#define DEFAULT_AAUDIO_SERVICE_ID 0
class AAudioService :
public BinderService<AAudioService>,
@@ -45,7 +46,7 @@
public:
AAudioService();
- virtual ~AAudioService() = default;
+ ~AAudioService() override = default;
aaudio::AAudioServiceInterface& asAAudioServiceInterface() {
return mAdapter;
@@ -53,7 +54,7 @@
static const char* getServiceName() { return AAUDIO_SERVICE_NAME; }
- virtual status_t dump(int fd, const Vector<String16>& args) override;
+ status_t dump(int fd, const Vector<String16>& args) override;
binder::Status registerClient(const ::android::sp<::aaudio::IAAudioClient>& client) override;
@@ -103,25 +104,27 @@
* This is only called from within the Service.
* It bypasses the permission checks in closeStream(handle).
*/
- aaudio_result_t closeStream(sp<aaudio::AAudioServiceStreamBase> serviceStream);
+ aaudio_result_t closeStream(const sp<aaudio::AAudioServiceStreamBase>& serviceStream);
private:
class Adapter : public aaudio::AAudioBinderAdapter {
public:
+ // Always use default service id in server side since when crash happens,
+ // the aaudio service will restart.
explicit Adapter(AAudioService *service)
- : aaudio::AAudioBinderAdapter(service),
+ : aaudio::AAudioBinderAdapter(service, DEFAULT_AAUDIO_SERVICE_ID),
mService(service) {}
- aaudio_result_t startClient(aaudio::aaudio_handle_t streamHandle,
+ aaudio_result_t startClient(const aaudio::AAudioHandleInfo& streamHandleInfo,
const android::AudioClient &client,
const audio_attributes_t *attr,
audio_port_handle_t *clientHandle) override {
- return mService->startClient(streamHandle, client, attr, clientHandle);
+ return mService->startClient(streamHandleInfo.getHandle(), client, attr, clientHandle);
}
- aaudio_result_t stopClient(aaudio::aaudio_handle_t streamHandle,
+ aaudio_result_t stopClient(const aaudio::AAudioHandleInfo& streamHandleInfo,
audio_port_handle_t clientHandle) override {
- return mService->stopClient(streamHandle, clientHandle);
+ return mService->stopClient(streamHandleInfo.getHandle(), clientHandle);
}
private:
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index b55b601..e7d14a0 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -69,6 +69,10 @@
result << " Reference Count: " << mOpenCount << "\n";
result << " Session Id: " << getSessionId() << "\n";
result << " Privacy Sensitive: " << isPrivacySensitive() << "\n";
+ result << " Hardware Channel Count:" << getHardwareSamplesPerFrame() << "\n";
+ result << " Hardware Format: " << getHardwareFormat() << " ("
+ << audio_format_to_string(getHardwareFormat()) << ")\n";
+ result << " Hardware Sample Rate: " << getHardwareSampleRate() << "\n";
result << " Connected: " << mConnected.load() << "\n";
result << " Registered Streams:" << "\n";
result << AAudioServiceStreamShared::dumpHeader() << "\n";
@@ -84,7 +88,7 @@
// @return true if stream found
bool AAudioServiceEndpoint::isStreamRegistered(audio_port_handle_t portHandle) {
- std::lock_guard<std::mutex> lock(mLockStreams);
+ const std::lock_guard<std::mutex> lock(mLockStreams);
for (const auto& stream : mRegisteredStreams) {
if (stream->getPortHandle() == portHandle) {
return true;
@@ -97,7 +101,7 @@
AAudioServiceEndpoint::disconnectRegisteredStreams() {
std::vector<android::sp<AAudioServiceStreamBase>> streamsDisconnected;
{
- std::lock_guard<std::mutex> lock(mLockStreams);
+ const std::lock_guard<std::mutex> lock(mLockStreams);
mRegisteredStreams.swap(streamsDisconnected);
}
mConnected.store(false);
@@ -118,7 +122,7 @@
void AAudioServiceEndpoint::releaseRegisteredStreams() {
// List of streams to be closed after we disconnect everything.
- std::vector<android::sp<AAudioServiceStreamBase>> streamsToClose
+ const std::vector<android::sp<AAudioServiceStreamBase>> streamsToClose
= disconnectRegisteredStreams();
// Close outside the lock to avoid recursive locks.
@@ -129,14 +133,14 @@
}
}
-aaudio_result_t AAudioServiceEndpoint::registerStream(sp<AAudioServiceStreamBase>stream) {
- std::lock_guard<std::mutex> lock(mLockStreams);
+aaudio_result_t AAudioServiceEndpoint::registerStream(const sp<AAudioServiceStreamBase>& stream) {
+ const std::lock_guard<std::mutex> lock(mLockStreams);
mRegisteredStreams.push_back(stream);
return AAUDIO_OK;
}
-aaudio_result_t AAudioServiceEndpoint::unregisterStream(sp<AAudioServiceStreamBase>stream) {
- std::lock_guard<std::mutex> lock(mLockStreams);
+aaudio_result_t AAudioServiceEndpoint::unregisterStream(const sp<AAudioServiceStreamBase>& stream) {
+ const std::lock_guard<std::mutex> lock(mLockStreams);
mRegisteredStreams.erase(std::remove(
mRegisteredStreams.begin(), mRegisteredStreams.end(), stream),
mRegisteredStreams.end());
@@ -192,11 +196,11 @@
: AUDIO_SOURCE_DEFAULT;
audio_flags_mask_t flags;
if (direction == AAUDIO_DIRECTION_OUTPUT) {
- flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
- | AAudioConvert_allowCapturePolicyToAudioFlagsMask(
+ flags = AAudio_computeAudioFlagsMask(
params->getAllowedCapturePolicy(),
params->getSpatializationBehavior(),
- params->isContentSpatialized()));
+ params->isContentSpatialized(),
+ AUDIO_OUTPUT_FLAG_FAST);
} else {
flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
| AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive()));
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index 92004c5..dff571b 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -43,7 +43,7 @@
, public AAudioStreamParameters {
public:
- virtual ~AAudioServiceEndpoint();
+ ~AAudioServiceEndpoint() override;
virtual std::string dump() const;
@@ -55,9 +55,9 @@
*/
virtual void close() = 0;
- aaudio_result_t registerStream(android::sp<AAudioServiceStreamBase> stream);
+ aaudio_result_t registerStream(const android::sp<AAudioServiceStreamBase>& stream);
- aaudio_result_t unregisterStream(android::sp<AAudioServiceStreamBase> stream);
+ aaudio_result_t unregisterStream(const android::sp<AAudioServiceStreamBase>& stream);
virtual aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
audio_port_handle_t *clientHandle) = 0;
@@ -65,14 +65,14 @@
virtual aaudio_result_t stopStream(android::sp<AAudioServiceStreamBase> stream,
audio_port_handle_t clientHandle) = 0;
- virtual aaudio_result_t startClient(const android::AudioClient& client,
- const audio_attributes_t *attr,
- audio_port_handle_t *clientHandle) {
+ virtual aaudio_result_t startClient(const android::AudioClient& /*client*/,
+ const audio_attributes_t* /*attr*/,
+ audio_port_handle_t* /*clientHandle*/) {
ALOGD("AAudioServiceEndpoint::startClient(...) AAUDIO_ERROR_UNAVAILABLE");
return AAUDIO_ERROR_UNAVAILABLE;
}
- virtual aaudio_result_t stopClient(audio_port_handle_t clientHandle) {
+ virtual aaudio_result_t stopClient(audio_port_handle_t /*clientHandle*/) {
ALOGD("AAudioServiceEndpoint::stopClient(...) AAUDIO_ERROR_UNAVAILABLE");
return AAUDIO_ERROR_UNAVAILABLE;
}
@@ -82,7 +82,7 @@
return AAUDIO_ERROR_UNAVAILABLE;
}
- virtual aaudio_result_t exitStandby(AudioEndpointParcelable* parcelable) {
+ virtual aaudio_result_t exitStandby(AudioEndpointParcelable* /*parcelable*/) {
ALOGD("AAudioServiceEndpoint::exitStandby() AAUDIO_ERROR_UNAVAILABLE");
return AAUDIO_ERROR_UNAVAILABLE;
}
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.cpp b/services/oboeservice/AAudioServiceEndpointCapture.cpp
index 95bd4bb..ba8070b 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.cpp
+++ b/services/oboeservice/AAudioServiceEndpointCapture.cpp
@@ -90,5 +90,5 @@
}
ALOGD("callbackLoop() exiting");
- return NULL; // TODO review
+ return nullptr; // TODO review
}
diff --git a/services/oboeservice/AAudioServiceEndpointCapture.h b/services/oboeservice/AAudioServiceEndpointCapture.h
index 2ca43cf..b77100d 100644
--- a/services/oboeservice/AAudioServiceEndpointCapture.h
+++ b/services/oboeservice/AAudioServiceEndpointCapture.h
@@ -30,7 +30,7 @@
class AAudioServiceEndpointCapture : public AAudioServiceEndpointShared {
public:
explicit AAudioServiceEndpointCapture(android::AAudioService &audioService);
- virtual ~AAudioServiceEndpointCapture() = default;
+ ~AAudioServiceEndpointCapture() override = default;
aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index ea817ab..7f228c7 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -22,6 +22,7 @@
#include <assert.h>
#include <map>
#include <mutex>
+#include <set>
#include <sstream>
#include <thread>
#include <utils/Singleton.h>
@@ -36,7 +37,7 @@
#include "AAudioServiceEndpointPlay.h"
#include "AAudioServiceEndpointMMAP.h"
-#define AAUDIO_BUFFER_CAPACITY_MIN 4 * 512
+#define AAUDIO_BUFFER_CAPACITY_MIN (4 * 512)
#define AAUDIO_SAMPLE_RATE_DEFAULT 48000
// This is an estimate of the time difference between the HW and the MMAP time.
@@ -57,7 +58,7 @@
result << " MMAP: framesTransferred = " << mFramesTransferred.get();
result << ", HW nanos = " << mHardwareTimeOffsetNanos;
result << ", port handle = " << mPortHandle;
- result << ", audio data FD = " << mAudioDataFileDescriptor;
+ result << ", audio data FD = " << mAudioDataWrapper->getDataFileDescriptor();
result << "\n";
result << " HW Offset Micros: " <<
@@ -68,8 +69,27 @@
return result.str();
}
+namespace {
+
+const static std::map<audio_format_t, audio_format_t> NEXT_FORMAT_TO_TRY = {
+ {AUDIO_FORMAT_PCM_FLOAT, AUDIO_FORMAT_PCM_32_BIT},
+ {AUDIO_FORMAT_PCM_32_BIT, AUDIO_FORMAT_PCM_24_BIT_PACKED},
+ {AUDIO_FORMAT_PCM_24_BIT_PACKED, AUDIO_FORMAT_PCM_16_BIT}
+};
+
+audio_format_t getNextFormatToTry(audio_format_t curFormat, audio_format_t returnedFromAPM) {
+ if (returnedFromAPM != AUDIO_FORMAT_DEFAULT) {
+ return returnedFromAPM;
+ }
+ const auto it = NEXT_FORMAT_TO_TRY.find(curFormat);
+ return it != NEXT_FORMAT_TO_TRY.end() ? it->second : AUDIO_FORMAT_DEFAULT;
+}
+
+} // namespace
+
aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
aaudio_result_t result = AAUDIO_OK;
+ mAudioDataWrapper = std::make_unique<SharedMemoryWrapper>();
copyFrom(request.getConstantConfiguration());
mRequestedDeviceId = getDeviceId();
@@ -81,36 +101,38 @@
legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
audio_format_t audioFormat = getFormat();
+ std::set<audio_format_t> formatsTried;
+ while (true) {
+ if (formatsTried.find(audioFormat) != formatsTried.end()) {
+ // APM returning something that has already tried.
+ ALOGW("Have already tried to open with format=%#x, but failed before", audioFormat);
+ break;
+ }
+ formatsTried.insert(audioFormat);
- result = openWithFormat(audioFormat);
- if (result == AAUDIO_OK) return result;
+ audio_format_t nextFormatToTry = AUDIO_FORMAT_DEFAULT;
+ result = openWithFormat(audioFormat, &nextFormatToTry);
+ if (result != AAUDIO_ERROR_UNAVAILABLE) {
+ // Return if it is successful or there is an error that is not
+ // AAUDIO_ERROR_UNAVAILABLE happens.
+ ALOGI("Opened format=%#x with result=%d", audioFormat, result);
+ break;
+ }
- if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_FLOAT) {
- ALOGD("%s() FLOAT failed, perhaps due to format. Try again with 32_BIT", __func__);
- audioFormat = AUDIO_FORMAT_PCM_32_BIT;
- result = openWithFormat(audioFormat);
- }
- if (result == AAUDIO_OK) return result;
-
- if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_32_BIT) {
- ALOGD("%s() 32_BIT failed, perhaps due to format. Try again with 24_BIT_PACKED", __func__);
- audioFormat = AUDIO_FORMAT_PCM_24_BIT_PACKED;
- result = openWithFormat(audioFormat);
- }
- if (result == AAUDIO_OK) return result;
-
- // TODO The HAL and AudioFlinger should be recommending a format if the open fails.
- // But that recommendation is not propagating back from the HAL.
- // So for now just try something very likely to work.
- if (result == AAUDIO_ERROR_UNAVAILABLE && audioFormat == AUDIO_FORMAT_PCM_24_BIT_PACKED) {
- ALOGD("%s() 24_BIT failed, perhaps due to format. Try again with 16_BIT", __func__);
- audioFormat = AUDIO_FORMAT_PCM_16_BIT;
- result = openWithFormat(audioFormat);
+ nextFormatToTry = getNextFormatToTry(audioFormat, nextFormatToTry);
+ ALOGD("%s() %#x failed, perhaps due to format. Try again with %#x",
+ __func__, audioFormat, nextFormatToTry);
+ audioFormat = nextFormatToTry;
+ if (audioFormat == AUDIO_FORMAT_DEFAULT) {
+ // Nothing else to try
+ break;
+ }
}
return result;
}
-aaudio_result_t AAudioServiceEndpointMMAP::openWithFormat(audio_format_t audioFormat) {
+aaudio_result_t AAudioServiceEndpointMMAP::openWithFormat(
+ audio_format_t audioFormat, audio_format_t* nextFormatToTry) {
aaudio_result_t result = AAUDIO_OK;
audio_config_base_t config;
audio_port_handle_t deviceId;
@@ -144,12 +166,12 @@
return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
}
- MmapStreamInterface::stream_direction_t streamDirection =
+ const MmapStreamInterface::stream_direction_t streamDirection =
(direction == AAUDIO_DIRECTION_OUTPUT)
? MmapStreamInterface::DIRECTION_OUTPUT
: MmapStreamInterface::DIRECTION_INPUT;
- aaudio_session_id_t requestedSessionId = getSessionId();
+ const aaudio_session_id_t requestedSessionId = getSessionId();
audio_session_t sessionId = AAudioConvert_aaudioToAndroidSessionId(requestedSessionId);
// Open HAL stream. Set mMmapStream
@@ -157,21 +179,25 @@
"sample_rate=%u, channel_mask=%#x, device=%d",
__func__, config.format, config.sample_rate,
config.channel_mask, deviceId);
- status_t status = MmapStreamInterface::openMmapStream(streamDirection,
- &attributes,
- &config,
- mMmapClient,
- &deviceId,
- &sessionId,
- this, // callback
- mMmapStream,
- &mPortHandle);
+ const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
+ &attributes,
+ &config,
+ mMmapClient,
+ &deviceId,
+ &sessionId,
+ this, // callback
+ mMmapStream,
+ &mPortHandle);
ALOGD("%s() mMapClient.attributionSource = %s => portHandle = %d\n",
__func__, mMmapClient.attributionSource.toString().c_str(), mPortHandle);
if (status != OK) {
// This can happen if the resource is busy or the config does
// not match the hardware.
- ALOGD("%s() - openMmapStream() returned status %d", __func__, status);
+ ALOGD("%s() - openMmapStream() returned status=%d, suggested format=%#x, sample_rate=%u, "
+ "channel_mask=%#x",
+ __func__, status, config.format, config.sample_rate, config.channel_mask);
+ *nextFormatToTry = config.format != audioFormat ? config.format
+ : *nextFormatToTry;
return AAUDIO_ERROR_UNAVAILABLE;
}
@@ -184,7 +210,7 @@
ALOGW("%s() - openMmapStream() failed to set sessionId", __func__);
}
- aaudio_session_id_t actualSessionId =
+ const aaudio_session_id_t actualSessionId =
(requestedSessionId == AAUDIO_SESSION_ID_NONE)
? AAUDIO_SESSION_ID_NONE
: (aaudio_session_id_t) sessionId;
@@ -194,7 +220,7 @@
__func__, audioFormat, getDeviceId(), getSessionId());
// Create MMAP/NOIRQ buffer.
- result = createMmapBuffer(&mAudioDataFileDescriptor);
+ result = createMmapBuffer();
if (result != AAUDIO_OK) {
goto error;
}
@@ -206,6 +232,9 @@
setFormat(config.format);
setSampleRate(config.sample_rate);
+ setHardwareSampleRate(getSampleRate());
+ setHardwareFormat(getFormat());
+ setHardwareSamplesPerFrame(AAudioConvert_channelMaskToCount(getChannelMask()));
// If the position is not updated while the timestamp is updated for more than a certain amount,
// the timestamp reported from the HAL may not be accurate. Here, a timestamp grace period is
@@ -215,6 +244,8 @@
mTimestampGracePeriodMs = ((int64_t) kTimestampGraceBurstCount * mFramesPerBurst
* AAUDIO_MILLIS_PER_SECOND) / getSampleRate();
+ mDataReportOffsetNanos = ((int64_t)mTimestampGracePeriodMs) * AAUDIO_NANOS_PER_MILLISECOND;
+
ALOGD("%s() got rate = %d, channels = %d channelMask = %#x, deviceId = %d, capacity = %d\n",
__func__, getSampleRate(), getSamplesPerFrame(), getChannelMask(),
deviceId, getBufferCapacity());
@@ -237,9 +268,6 @@
if (mMmapStream != nullptr) {
// Needs to be explicitly cleared or CTS will fail but it is not clear why.
mMmapStream.clear();
- // Apparently the above close is asynchronous. An attempt to open a new device
- // right after a close can fail. Also some callbacks may still be in flight!
- // FIXME Make closing synchronous.
AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
}
}
@@ -253,7 +281,7 @@
if (stream != nullptr) {
attr = getAudioAttributesFrom(stream.get());
}
- aaudio_result_t result = startClient(
+ const aaudio_result_t result = startClient(
mMmapClient, stream == nullptr ? nullptr : &attr, &tempHandle);
// When AudioFlinger is passed a valid port handle then it should not change it.
LOG_ALWAYS_FATAL_IF(tempHandle != mPortHandle,
@@ -263,8 +291,8 @@
return result;
}
-aaudio_result_t AAudioServiceEndpointMMAP::stopStream(sp<AAudioServiceStreamBase> stream,
- audio_port_handle_t clientHandle __unused) {
+aaudio_result_t AAudioServiceEndpointMMAP::stopStream(sp<AAudioServiceStreamBase> /*stream*/,
+ audio_port_handle_t /*clientHandle*/) {
mFramesTransferred.reset32();
// Round 64-bit counter up to a multiple of the buffer capacity.
@@ -281,39 +309,31 @@
aaudio_result_t AAudioServiceEndpointMMAP::startClient(const android::AudioClient& client,
const audio_attributes_t *attr,
audio_port_handle_t *clientHandle) {
- if (mMmapStream == nullptr) return AAUDIO_ERROR_NULL;
- status_t status = mMmapStream->start(client, attr, clientHandle);
- return AAudioConvert_androidToAAudioResult(status);
+ return mMmapStream == nullptr
+ ? AAUDIO_ERROR_NULL
+ : AAudioConvert_androidToAAudioResult(mMmapStream->start(client, attr, clientHandle));
}
aaudio_result_t AAudioServiceEndpointMMAP::stopClient(audio_port_handle_t clientHandle) {
- if (mMmapStream == nullptr) return AAUDIO_ERROR_NULL;
- aaudio_result_t result = AAudioConvert_androidToAAudioResult(mMmapStream->stop(clientHandle));
- return result;
+ return mMmapStream == nullptr
+ ? AAUDIO_ERROR_NULL
+ : AAudioConvert_androidToAAudioResult(mMmapStream->stop(clientHandle));
}
aaudio_result_t AAudioServiceEndpointMMAP::standby() {
- if (mMmapStream == nullptr) {
- return AAUDIO_ERROR_NULL;
- }
- aaudio_result_t result = AAudioConvert_androidToAAudioResult(mMmapStream->standby());
- return result;
+ return mMmapStream == nullptr
+ ? AAUDIO_ERROR_NULL
+ : AAudioConvert_androidToAAudioResult(mMmapStream->standby());
}
aaudio_result_t AAudioServiceEndpointMMAP::exitStandby(AudioEndpointParcelable* parcelable) {
if (mMmapStream == nullptr) {
return AAUDIO_ERROR_NULL;
}
- mAudioDataFileDescriptor.reset();
- aaudio_result_t result = createMmapBuffer(&mAudioDataFileDescriptor);
+ mAudioDataWrapper->reset();
+ const aaudio_result_t result = createMmapBuffer();
if (result == AAUDIO_OK) {
- int32_t bytesPerFrame = calculateBytesPerFrame();
- int32_t capacityInBytes = getBufferCapacity() * bytesPerFrame;
- int fdIndex = parcelable->addFileDescriptor(mAudioDataFileDescriptor, capacityInBytes);
- parcelable->mDownDataQueueParcelable.setupMemory(fdIndex, 0, capacityInBytes);
- parcelable->mDownDataQueueParcelable.setBytesPerFrame(bytesPerFrame);
- parcelable->mDownDataQueueParcelable.setFramesPerBurst(mFramesPerBurst);
- parcelable->mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
+ getDownDataDescription(parcelable);
}
return result;
}
@@ -325,10 +345,10 @@
if (mMmapStream == nullptr) {
return AAUDIO_ERROR_NULL;
}
- status_t status = mMmapStream->getMmapPosition(&position);
+ const status_t status = mMmapStream->getMmapPosition(&position);
ALOGV("%s() status= %d, pos = %d, nanos = %lld\n",
__func__, status, position.position_frames, (long long) position.time_nanoseconds);
- aaudio_result_t result = AAudioConvert_androidToAAudioResult(status);
+ const aaudio_result_t result = AAudioConvert_androidToAAudioResult(status);
if (result == AAUDIO_ERROR_UNAVAILABLE) {
ALOGW("%s(): getMmapPosition() has no position data available", __func__);
} else if (result != AAUDIO_OK) {
@@ -342,8 +362,8 @@
return result;
}
-aaudio_result_t AAudioServiceEndpointMMAP::getTimestamp(int64_t *positionFrames,
- int64_t *timeNanos) {
+aaudio_result_t AAudioServiceEndpointMMAP::getTimestamp(int64_t* /*positionFrames*/,
+ int64_t* /*timeNanos*/) {
return 0; // TODO
}
@@ -356,7 +376,7 @@
} else {
// Must be a SHARED stream?
ALOGD("%s(%d) disconnect a specific stream", __func__, portHandle);
- aaudio_result_t result = mAAudioService.disconnectStreamByPortHandle(portHandle);
+ const aaudio_result_t result = mAAudioService.disconnectStreamByPortHandle(portHandle);
ALOGD("%s(%d) disconnectStreamByPortHandle returned %d", __func__, portHandle, result);
}
};
@@ -364,31 +384,27 @@
// This is called by AudioFlinger when it wants to destroy a stream.
void AAudioServiceEndpointMMAP::onTearDown(audio_port_handle_t portHandle) {
ALOGD("%s(portHandle = %d) called", __func__, portHandle);
- android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+ const android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
std::thread asyncTask([holdEndpoint, portHandle]() {
holdEndpoint->handleTearDownAsync(portHandle);
});
asyncTask.detach();
}
-void AAudioServiceEndpointMMAP::onVolumeChanged(audio_channel_mask_t channels,
- android::Vector<float> values) {
- // TODO Do we really need a different volume for each channel?
- // We get called with an array filled with a single value!
- float volume = values[0];
- ALOGD("%s() volume[0] = %f", __func__, volume);
- std::lock_guard<std::mutex> lock(mLockStreams);
+void AAudioServiceEndpointMMAP::onVolumeChanged(float volume) {
+ ALOGD("%s() volume = %f", __func__, volume);
+ const std::lock_guard<std::mutex> lock(mLockStreams);
for(const auto& stream : mRegisteredStreams) {
stream->onVolumeChanged(volume);
}
};
void AAudioServiceEndpointMMAP::onRoutingChanged(audio_port_handle_t portHandle) {
- const int32_t deviceId = static_cast<int32_t>(portHandle);
+ const auto deviceId = static_cast<int32_t>(portHandle);
ALOGD("%s() called with dev %d, old = %d", __func__, deviceId, getDeviceId());
if (getDeviceId() != deviceId) {
if (getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
- android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
+ const android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
std::thread asyncTask([holdEndpoint, deviceId]() {
ALOGD("onRoutingChanged() asyncTask launched");
holdEndpoint->disconnectRegisteredStreams();
@@ -407,14 +423,19 @@
aaudio_result_t AAudioServiceEndpointMMAP::getDownDataDescription(
AudioEndpointParcelable* parcelable)
{
+ if (mAudioDataWrapper->setupFifoBuffer(calculateBytesPerFrame(), getBufferCapacity())
+ != AAUDIO_OK) {
+ ALOGE("Failed to setup audio data wrapper, will not be able to "
+ "set data for sound dose computation");
+ // This will not affect the audio processing capability
+ }
// Gather information on the data queue based on HAL info.
- int32_t bytesPerFrame = calculateBytesPerFrame();
- int32_t capacityInBytes = getBufferCapacity() * bytesPerFrame;
- int fdIndex = parcelable->addFileDescriptor(mAudioDataFileDescriptor, capacityInBytes);
- parcelable->mDownDataQueueParcelable.setupMemory(fdIndex, 0, capacityInBytes);
- parcelable->mDownDataQueueParcelable.setBytesPerFrame(bytesPerFrame);
- parcelable->mDownDataQueueParcelable.setFramesPerBurst(mFramesPerBurst);
- parcelable->mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
+ mAudioDataWrapper->fillParcelable(parcelable, parcelable->mDownDataQueueParcelable,
+ calculateBytesPerFrame(), mFramesPerBurst,
+ getBufferCapacity(),
+ getDirection() == AAUDIO_DIRECTION_OUTPUT
+ ? SharedMemoryWrapper::WRITE
+ : SharedMemoryWrapper::NONE);
return AAUDIO_OK;
}
@@ -426,7 +447,7 @@
}
uint64_t tempPositionFrames;
int64_t tempTimeNanos;
- status_t status = mMmapStream->getExternalPosition(&tempPositionFrames, &tempTimeNanos);
+ const status_t status = mMmapStream->getExternalPosition(&tempPositionFrames, &tempTimeNanos);
if (status != OK) {
// getExternalPosition reports error. The HAL may not support the API. Cache the result
// so that the call will not go to the HAL next time.
@@ -498,16 +519,15 @@
return mHalExternalPositionStatus;
}
-aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer(
- android::base::unique_fd* fileDescriptor)
+aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer()
{
memset(&mMmapBufferinfo, 0, sizeof(struct audio_mmap_buffer_info));
int32_t minSizeFrames = getBufferCapacity();
if (minSizeFrames <= 0) { // zero will get rejected
minSizeFrames = AAUDIO_BUFFER_CAPACITY_MIN;
}
- status_t status = mMmapStream->createMmapBuffer(minSizeFrames, &mMmapBufferinfo);
- bool isBufferShareable = mMmapBufferinfo.flags & AUDIO_MMAP_APPLICATION_SHAREABLE;
+ const status_t status = mMmapStream->createMmapBuffer(minSizeFrames, &mMmapBufferinfo);
+ const bool isBufferShareable = mMmapBufferinfo.flags & AUDIO_MMAP_APPLICATION_SHAREABLE;
if (status != OK) {
ALOGE("%s() - createMmapBuffer() failed with status %d %s",
__func__, status, strerror(-status));
@@ -524,7 +544,7 @@
setBufferCapacity(mMmapBufferinfo.buffer_size_frames);
if (!isBufferShareable) {
// Exclusive mode can only be used by the service because the FD cannot be shared.
- int32_t audioServiceUid =
+ const int32_t audioServiceUid =
VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
if ((mMmapClient.attributionSource.uid != audioServiceUid) &&
getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE) {
@@ -535,8 +555,9 @@
// AAudio creates a copy of this FD and retains ownership of the copy.
// Assume that AudioFlinger will close the original shared_memory_fd.
- fileDescriptor->reset(dup(mMmapBufferinfo.shared_memory_fd));
- if (fileDescriptor->get() == -1) {
+
+ mAudioDataWrapper->getDataFileDescriptor().reset(dup(mMmapBufferinfo.shared_memory_fd));
+ if (mAudioDataWrapper->getDataFileDescriptor().get() == -1) {
ALOGE("%s() - could not dup shared_memory_fd", __func__);
return AAUDIO_ERROR_INTERNAL;
}
@@ -551,3 +572,31 @@
return AAUDIO_OK;
}
+
+int64_t AAudioServiceEndpointMMAP::nextDataReportTime() {
+ return getDirection() == AAUDIO_DIRECTION_OUTPUT
+ ? AudioClock::getNanoseconds() + mDataReportOffsetNanos
+ : std::numeric_limits<int64_t>::max();
+}
+
+void AAudioServiceEndpointMMAP::reportData() {
+ if (mMmapStream == nullptr) {
+ // This must not happen
+ ALOGE("%s() invalid state, mmap stream is not initialized", __func__);
+ return;
+ }
+ auto fifo = mAudioDataWrapper->getFifoBuffer();
+ if (fifo == nullptr) {
+ ALOGE("%s() fifo buffer is not initialized, cannot report data", __func__);
+ return;
+ }
+
+ WrappingBuffer wrappingBuffer;
+ fifo_frames_t framesAvailable = fifo->getFullDataAvailable(&wrappingBuffer);
+ for (size_t i = 0; i < WrappingBuffer::SIZE; ++i) {
+ if (wrappingBuffer.numFrames[i] > 0) {
+ mMmapStream->reportData(wrappingBuffer.data[i], wrappingBuffer.numFrames[i]);
+ }
+ }
+ fifo->advanceReadIndex(framesAvailable);
+}
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 3e7f2c7..38cf0ba 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -30,6 +30,7 @@
#include "AAudioServiceStreamMMAP.h"
#include "AAudioMixer.h"
#include "AAudioService.h"
+#include "SharedMemoryWrapper.h"
namespace aaudio {
@@ -44,7 +45,7 @@
public:
explicit AAudioServiceEndpointMMAP(android::AAudioService &audioService);
- virtual ~AAudioServiceEndpointMMAP() = default;
+ ~AAudioServiceEndpointMMAP() override = default;
std::string dump() const override;
@@ -77,8 +78,7 @@
// -------------- Callback functions for MmapStreamCallback ---------------------
void onTearDown(audio_port_handle_t portHandle) override;
- void onVolumeChanged(audio_channel_mask_t channels,
- android::Vector<float> values) override;
+ void onVolumeChanged(float volume) override;
void onRoutingChanged(audio_port_handle_t portHandle) override;
// ------------------------------------------------------------------------------
@@ -91,11 +91,15 @@
aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos);
+ int64_t nextDataReportTime();
+
+ void reportData();
+
private:
- aaudio_result_t openWithFormat(audio_format_t audioFormat);
+ aaudio_result_t openWithFormat(audio_format_t audioFormat, audio_format_t* nextFormatToTry);
- aaudio_result_t createMmapBuffer(android::base::unique_fd* fileDescriptor);
+ aaudio_result_t createMmapBuffer();
MonotonicCounter mFramesTransferred;
@@ -108,7 +112,7 @@
android::AAudioService &mAAudioService;
- android::base::unique_fd mAudioDataFileDescriptor;
+ std::unique_ptr<SharedMemoryWrapper> mAudioDataWrapper;
int64_t mHardwareTimeOffsetNanos = 0; // TODO get from HAL
@@ -118,6 +122,7 @@
int32_t mTimestampGracePeriodMs;
int32_t mFrozenPositionCount = 0;
int32_t mFrozenTimestampCount = 0;
+ int64_t mDataReportOffsetNanos = 0;
};
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index 2a5939f..637405d 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -158,5 +158,5 @@
ALOGD("%s() exiting, enabled = %d, state = %d, result = %d <<<<<<<<<<<<< MIXER",
__func__, mCallbackEnabled.load(), getStreamInternal()->getState(), result);
- return NULL; // TODO review
+ return nullptr; // TODO review
}
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index dd421fe..1dd0c3a 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -82,6 +82,9 @@
setDeviceId(mStreamInternal->getDeviceId());
setSessionId(mStreamInternal->getSessionId());
setFormat(AUDIO_FORMAT_PCM_FLOAT); // force for mixer
+ setHardwareSampleRate(mStreamInternal->getHardwareSampleRate());
+ setHardwareFormat(mStreamInternal->getHardwareFormat());
+ setHardwareSamplesPerFrame(mStreamInternal->getHardwareSamplesPerFrame());
mFramesPerBurst = mStreamInternal->getFramesPerBurst();
return result;
@@ -100,8 +103,7 @@
// Prevent the stream from being deleted while being used.
// This is just for extra safety. It is probably not needed because
// this callback should be joined before the stream is closed.
- AAudioServiceEndpointShared *endpointPtr =
- static_cast<AAudioServiceEndpointShared *>(arg);
+ auto endpointPtr = static_cast<AAudioServiceEndpointShared *>(arg);
android::sp<AAudioServiceEndpointShared> endpoint(endpointPtr);
// Balance the incStrong() in startSharingThread_l().
endpoint->decStrong(nullptr);
@@ -137,7 +139,7 @@
aaudio_result_t aaudio::AAudioServiceEndpointShared::stopSharingThread() {
mCallbackEnabled.store(false);
- return getStreamInternal()->joinThread(NULL);
+ return getStreamInternal()->joinThread(nullptr);
}
aaudio_result_t AAudioServiceEndpointShared::startStream(
@@ -177,8 +179,8 @@
return result;
}
-aaudio_result_t AAudioServiceEndpointShared::stopStream(sp<AAudioServiceStreamBase> sharedStream,
- audio_port_handle_t clientHandle) {
+aaudio_result_t AAudioServiceEndpointShared::stopStream(
+ sp<AAudioServiceStreamBase> /*sharedStream*/, audio_port_handle_t clientHandle) {
// Ignore result.
(void) getStreamInternal()->stopClient(clientHandle);
diff --git a/services/oboeservice/AAudioServiceEndpointShared.h b/services/oboeservice/AAudioServiceEndpointShared.h
index 3e760c4..0efb227 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.h
+++ b/services/oboeservice/AAudioServiceEndpointShared.h
@@ -39,7 +39,7 @@
public:
explicit AAudioServiceEndpointShared(AudioStreamInternal *streamInternal);
- virtual ~AAudioServiceEndpointShared() = default;
+ ~AAudioServiceEndpointShared() override = default;
std::string dump() const override;
@@ -79,6 +79,6 @@
std::atomic<int> mRunningStreamCount{0};
};
-}
+} // namespace aaudio
#endif //AAUDIO_SERVICE_ENDPOINT_SHARED_H
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index f4ee84f..65854c8 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -83,8 +83,8 @@
}
std::string AAudioServiceStreamBase::dumpHeader() {
- return std::string(
- " T Handle UId Port Run State Format Burst Chan Mask Capacity");
+ return {" T Handle UId Port Run State Format Burst Chan Mask Capacity"
+ " HwFormat HwChan HwRate"};
}
std::string AAudioServiceStreamBase::dump() const {
@@ -101,6 +101,9 @@
result << std::setw(5) << getSamplesPerFrame();
result << std::setw(8) << std::hex << getChannelMask() << std::dec;
result << std::setw(9) << getBufferCapacity();
+ result << std::setw(9) << getHardwareFormat();
+ result << std::setw(7) << getHardwareSamplesPerFrame();
+ result << std::setw(7) << getHardwareSampleRate();
return result.str();
}
@@ -278,7 +281,7 @@
if (result != AAUDIO_OK) goto error;
// This should happen at the end of the start.
- sendServiceEvent(AAUDIO_SERVICE_EVENT_STARTED);
+ sendServiceEvent(AAUDIO_SERVICE_EVENT_STARTED, static_cast<int64_t>(mClientHandle));
setState(AAUDIO_STREAM_STATE_STARTED);
return result;
@@ -401,7 +404,8 @@
// Hold onto the ref counted stream until the end.
android::sp<AAudioServiceStreamBase> holdStream(this);
TimestampScheduler timestampScheduler;
- int64_t nextTime;
+ int64_t nextTimestampReportTime;
+ int64_t nextDataReportTime;
int64_t standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
// Balance the incStrong from when the thread was launched.
holdStream->decStrong(nullptr);
@@ -414,8 +418,18 @@
while (mThreadEnabled.load()) {
loopCount++;
int64_t timeoutNanos = -1;
- if (isRunning() || (isIdle_l() && !isStandby_l())) {
- timeoutNanos = (isRunning() ? nextTime : standbyTime) - AudioClock::getNanoseconds();
+ if (isDisconnected_l()) {
+ if (!isStandby_l()) {
+ // If the stream is disconnected but not in standby mode, wait until standby time.
+ timeoutNanos = standbyTime - AudioClock::getNanoseconds();
+ timeoutNanos = std::max<int64_t>(0, timeoutNanos);
+ } // else {
+ // If the stream is disconnected and in standby mode, keep `timeoutNanos` as
+ // -1 to wait forever until next command as the stream can only be closed.
+ // }
+ } else if (isRunning() || (isIdle_l() && !isStandby_l())) {
+ timeoutNanos = (isRunning() ? std::min(nextTimestampReportTime, nextDataReportTime)
+ : standbyTime) - AudioClock::getNanoseconds();
timeoutNanos = std::max<int64_t>(0, timeoutNanos);
}
@@ -425,16 +439,22 @@
break;
}
- if (isRunning() && AudioClock::getNanoseconds() >= nextTime) {
- // It is time to update timestamp.
- if (sendCurrentTimestamp_l() != AAUDIO_OK) {
- ALOGE("Failed to send current timestamp, stop updating timestamp");
- disconnect_l();
- } else {
- nextTime = timestampScheduler.nextAbsoluteTime();
+ if (isRunning() && !isDisconnected_l()) {
+ auto currentTimestamp = AudioClock::getNanoseconds();
+ if (currentTimestamp >= nextDataReportTime) {
+ reportData_l();
+ nextDataReportTime = nextDataReportTime_l();
+ }
+ if (currentTimestamp >= nextTimestampReportTime) {
+ // It is time to update timestamp.
+ if (sendCurrentTimestamp_l() != AAUDIO_OK) {
+ ALOGE("Failed to send current timestamp, stop updating timestamp");
+ disconnect_l();
+ }
+ nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
}
}
- if (isIdle_l() && AudioClock::getNanoseconds() >= standbyTime) {
+ if ((isIdle_l() || isDisconnected_l()) && AudioClock::getNanoseconds() >= standbyTime) {
aaudio_result_t result = standby_l();
if (result != AAUDIO_OK) {
// If standby failed because of the function is not implemented, there is no
@@ -453,7 +473,8 @@
command->result = start_l();
timestampScheduler.setBurstPeriod(mFramesPerBurst, getSampleRate());
timestampScheduler.start(AudioClock::getNanoseconds());
- nextTime = timestampScheduler.nextAbsoluteTime();
+ nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
+ nextDataReportTime = nextDataReportTime_l();
break;
case PAUSE:
command->result = pause_l();
@@ -473,8 +494,7 @@
disconnect_l();
break;
case REGISTER_AUDIO_THREAD: {
- RegisterAudioThreadParam *param =
- (RegisterAudioThreadParam *) command->parameter.get();
+ auto param = (RegisterAudioThreadParam *) command->parameter.get();
command->result =
param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
: registerAudioThread_l(param->mOwnerPid,
@@ -483,21 +503,20 @@
}
break;
case UNREGISTER_AUDIO_THREAD: {
- UnregisterAudioThreadParam *param =
- (UnregisterAudioThreadParam *) command->parameter.get();
+ auto param = (UnregisterAudioThreadParam *) command->parameter.get();
command->result =
param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
: unregisterAudioThread_l(param->mClientThreadId);
}
break;
case GET_DESCRIPTION: {
- GetDescriptionParam *param = (GetDescriptionParam *) command->parameter.get();
+ auto param = (GetDescriptionParam *) command->parameter.get();
command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
: getDescription_l(param->mParcelable);
}
break;
case EXIT_STANDBY: {
- ExitStandbyParam *param = (ExitStandbyParam *) command->parameter.get();
+ auto param = (ExitStandbyParam *) command->parameter.get();
command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
: exitStandby_l(param->mParcelable);
standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index b5f8b90..bc7ccde 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -62,7 +62,7 @@
public:
explicit AAudioServiceStreamBase(android::AAudioService &aAudioService);
- virtual ~AAudioServiceStreamBase();
+ ~AAudioServiceStreamBase() override;
enum {
ILLEGAL_THREAD_ID = 0
@@ -254,7 +254,7 @@
RegisterAudioThreadParam(pid_t ownerPid, pid_t clientThreadId, int priority)
: AAudioCommandParam(), mOwnerPid(ownerPid),
mClientThreadId(clientThreadId), mPriority(priority) { }
- ~RegisterAudioThreadParam() = default;
+ ~RegisterAudioThreadParam() override = default;
pid_t mOwnerPid;
pid_t mClientThreadId;
@@ -265,9 +265,9 @@
class UnregisterAudioThreadParam : public AAudioCommandParam {
public:
- UnregisterAudioThreadParam(pid_t clientThreadId)
+ explicit UnregisterAudioThreadParam(pid_t clientThreadId)
: AAudioCommandParam(), mClientThreadId(clientThreadId) { }
- ~UnregisterAudioThreadParam() = default;
+ ~UnregisterAudioThreadParam() override = default;
pid_t mClientThreadId;
};
@@ -275,9 +275,9 @@
class GetDescriptionParam : public AAudioCommandParam {
public:
- GetDescriptionParam(AudioEndpointParcelable* parcelable)
+ explicit GetDescriptionParam(AudioEndpointParcelable* parcelable)
: AAudioCommandParam(), mParcelable(parcelable) { }
- ~GetDescriptionParam() = default;
+ ~GetDescriptionParam() override = default;
AudioEndpointParcelable* mParcelable;
};
@@ -324,9 +324,9 @@
}
class ExitStandbyParam : public AAudioCommandParam {
public:
- ExitStandbyParam(AudioEndpointParcelable* parcelable)
+ explicit ExitStandbyParam(AudioEndpointParcelable* parcelable)
: AAudioCommandParam(), mParcelable(parcelable) { }
- ~ExitStandbyParam() = default;
+ ~ExitStandbyParam() override = default;
AudioEndpointParcelable* mParcelable;
};
@@ -346,6 +346,11 @@
|| mState == AAUDIO_STREAM_STATE_STOPPED;
}
+ virtual int64_t nextDataReportTime_l() REQUIRES(mLock) {
+ return std::numeric_limits<int64_t>::max();
+ }
+ virtual void reportData_l() REQUIRES(mLock) { return; }
+
pid_t mRegisteredClientThread = ILLEGAL_THREAD_ID;
std::mutex mUpMessageQueueLock;
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index ec9b2e2..89f6e33 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -238,3 +238,25 @@
static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
return serviceEndpointMMAP->getDownDataDescription(parcelable);
}
+
+int64_t AAudioServiceStreamMMAP::nextDataReportTime_l() {
+ sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+ if (endpoint == nullptr) {
+ ALOGE("%s() has no endpoint", __func__);
+ return std::numeric_limits<int64_t>::max();
+ }
+ sp<AAudioServiceEndpointMMAP> serviceEndpointMMAP =
+ static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
+ return serviceEndpointMMAP->nextDataReportTime();
+}
+
+void AAudioServiceStreamMMAP::reportData_l() {
+ sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+ if (endpoint == nullptr) {
+ ALOGE("%s() has no endpoint", __func__);
+ return;
+ }
+ sp<AAudioServiceEndpointMMAP> serviceEndpointMMAP =
+ static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
+ return serviceEndpointMMAP->reportData();
+}
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index cd8c91e..db3c8d0 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -47,7 +47,7 @@
public:
AAudioServiceStreamMMAP(android::AAudioService &aAudioService,
bool inService);
- virtual ~AAudioServiceStreamMMAP() = default;
+ ~AAudioServiceStreamMMAP() override = default;
aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
@@ -84,6 +84,10 @@
aaudio_result_t getHardwareTimestamp_l(
int64_t *positionFrames, int64_t *timeNanos) REQUIRES(mLock) override;
+ int64_t nextDataReportTime_l() REQUIRES(mLock) override;
+
+ void reportData_l() REQUIRES(mLock) override;
+
/**
* Device specific startup.
* @return AAUDIO_OK or negative error.
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index 78f9787..0b2513a 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -44,7 +44,7 @@
public:
explicit AAudioServiceStreamShared(android::AAudioService &aAudioService);
- virtual ~AAudioServiceStreamShared() = default;
+ ~AAudioServiceStreamShared() override = default;
static std::string dumpHeader();
diff --git a/services/oboeservice/AAudioStreamTracker.cpp b/services/oboeservice/AAudioStreamTracker.cpp
index 9bbbc73..c86a7a2 100644
--- a/services/oboeservice/AAudioStreamTracker.cpp
+++ b/services/oboeservice/AAudioStreamTracker.cpp
@@ -78,7 +78,8 @@
return handle;
}
-aaudio_handle_t AAudioStreamTracker::addStreamForHandle(sp<AAudioServiceStreamBase> serviceStream) {
+aaudio_handle_t AAudioStreamTracker::addStreamForHandle(
+ const sp<AAudioServiceStreamBase>& serviceStream) {
std::lock_guard<std::mutex> lock(mHandleLock);
aaudio_handle_t handle = mPreviousHandle;
// Assign a unique handle.
diff --git a/services/oboeservice/AAudioStreamTracker.h b/services/oboeservice/AAudioStreamTracker.h
index 43870fc..99f4b6c 100644
--- a/services/oboeservice/AAudioStreamTracker.h
+++ b/services/oboeservice/AAudioStreamTracker.h
@@ -64,7 +64,7 @@
* @param serviceStream
* @return handle for identifying the stream
*/
- aaudio_handle_t addStreamForHandle(android::sp<AAudioServiceStreamBase> serviceStream);
+ aaudio_handle_t addStreamForHandle(const android::sp<AAudioServiceStreamBase>& serviceStream);
/**
* @return string that can be added to dumpsys
diff --git a/services/oboeservice/AAudioThread.h b/services/oboeservice/AAudioThread.h
index b2774e0..91ad715 100644
--- a/services/oboeservice/AAudioThread.h
+++ b/services/oboeservice/AAudioThread.h
@@ -29,7 +29,7 @@
*/
class Runnable {
public:
- Runnable() {};
+ Runnable() = default;
virtual ~Runnable() = default;
virtual void run() = 0;
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 5076239..c5080a4 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -21,6 +21,64 @@
default_applicable_licenses: ["frameworks_av_license"],
}
+tidy_errors = [
+ // https://clang.llvm.org/extra/clang-tidy/checks/list.html
+ // For many categories, the checks are too many to specify individually.
+ // Feel free to disable as needed - as warnings are generally ignored,
+ // we treat warnings as errors.
+ "android-*",
+ "bugprone-*",
+ "cert-*",
+ "clang-analyzer-security*",
+ "google-*",
+ "misc-*",
+ //"modernize-*", // explicitly list the modernize as they can be subjective.
+ "modernize-avoid-bind",
+ //"modernize-avoid-c-arrays", // std::array<> can be verbose
+ "modernize-concat-nested-namespaces",
+ //"modernize-deprecated-headers", // C headers still ok even if there is C++ equivalent.
+ "modernize-deprecated-ios-base-aliases",
+ "modernize-loop-convert",
+ "modernize-make-shared",
+ "modernize-make-unique",
+ "modernize-pass-by-value",
+ "modernize-raw-string-literal",
+ "modernize-redundant-void-arg",
+ "modernize-replace-auto-ptr",
+ "modernize-replace-random-shuffle",
+ "modernize-return-braced-init-list",
+ "modernize-shrink-to-fit",
+ "modernize-unary-static-assert",
+ "modernize-use-auto",
+ "modernize-use-bool-literals",
+ "modernize-use-default-member-init",
+ "modernize-use-emplace",
+ "modernize-use-equals-default",
+ "modernize-use-equals-delete",
+ // "modernize-use-nodiscard", // Maybe add this in the future
+ "modernize-use-noexcept",
+ "modernize-use-nullptr",
+ "modernize-use-override",
+ // "modernize-use-trailing-return-type", // not necessarily more readable
+ "modernize-use-transparent-functors",
+ "modernize-use-uncaught-exceptions",
+ "modernize-use-using",
+ "performance-*",
+
+ // Remove some pedantic stylistic requirements.
+ "-android-cloexec-dup", // found in AAudioServiceEndpointMMAP.cpp
+ "-bugprone-narrowing-conversions", // found in several interface from size_t to int32_t
+
+ "-google-readability-casting", // C++ casts not always necessary and may be verbose
+ "-google-readability-todo", // do not require TODO(info)
+ "-google-build-using-namespace", // Reenable and fix later.
+ "-google-global-names-in-headers", // found in several files
+
+ "-misc-non-private-member-variables-in-classes", // found in aidl generated files
+
+]
+
+
cc_library {
name: "libaaudioservice",
@@ -46,6 +104,7 @@
"AAudioStreamTracker.cpp",
"AAudioThread.cpp",
"SharedMemoryProxy.cpp",
+ "SharedMemoryWrapper.cpp",
"SharedRingBuffer.cpp",
"TimestampScheduler.cpp",
],
@@ -89,4 +148,11 @@
"frameworks/av/media/libnbaio/include_mono",
"frameworks/av/media/libnbaio/include",
],
+
+ tidy: true,
+ tidy_checks: tidy_errors,
+ tidy_checks_as_errors: tidy_errors,
+ tidy_flags: [
+ "-format-style=file",
+ ]
}
diff --git a/services/oboeservice/SharedMemoryProxy.cpp b/services/oboeservice/SharedMemoryProxy.cpp
index 78d4884..549a36e 100644
--- a/services/oboeservice/SharedMemoryProxy.cpp
+++ b/services/oboeservice/SharedMemoryProxy.cpp
@@ -58,7 +58,7 @@
}
// Get original memory address.
- mOriginalSharedMemory = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
+ mOriginalSharedMemory = (uint8_t *) mmap(nullptr, mSharedMemorySizeInBytes,
PROT_READ|PROT_WRITE,
MAP_SHARED,
mOriginalFileDescriptor, 0);
diff --git a/services/oboeservice/SharedMemoryProxy.h b/services/oboeservice/SharedMemoryProxy.h
index 89eeb4b..81a0753 100644
--- a/services/oboeservice/SharedMemoryProxy.h
+++ b/services/oboeservice/SharedMemoryProxy.h
@@ -30,7 +30,7 @@
*/
class SharedMemoryProxy {
public:
- SharedMemoryProxy() {}
+ SharedMemoryProxy() = default;
~SharedMemoryProxy();
diff --git a/services/oboeservice/SharedMemoryWrapper.cpp b/services/oboeservice/SharedMemoryWrapper.cpp
new file mode 100644
index 0000000..c0dcccb
--- /dev/null
+++ b/services/oboeservice/SharedMemoryWrapper.cpp
@@ -0,0 +1,132 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SharedMemoryWrapper"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <iomanip>
+#include <iostream>
+#include <sys/mman.h>
+
+#include "SharedMemoryWrapper.h"
+
+namespace aaudio {
+
+constexpr int COUNTER_SIZE_IN_BYTES = sizeof(android::fifo_counter_t);
+constexpr int WRAPPER_SIZE_IN_BYTES = 2 * COUNTER_SIZE_IN_BYTES;
+
+SharedMemoryWrapper::SharedMemoryWrapper() {
+ mCounterFd.reset(ashmem_create_region("AAudioSharedMemoryWrapper", WRAPPER_SIZE_IN_BYTES));
+ if (mCounterFd.get() == -1) {
+ ALOGE("allocate() ashmem_create_region() failed %d", errno);
+ return;
+ }
+ int err = ashmem_set_prot_region(mCounterFd.get(), PROT_READ|PROT_WRITE);
+ if (err < 0) {
+ ALOGE("allocate() ashmem_set_prot_region() failed %d", errno);
+ mCounterFd.reset();
+ return;
+ }
+ auto tmpPtr = (uint8_t *) mmap(nullptr, WRAPPER_SIZE_IN_BYTES,
+ PROT_READ|PROT_WRITE,
+ MAP_SHARED,
+ mCounterFd.get(), 0);
+ if (tmpPtr == MAP_FAILED) {
+ ALOGE("allocate() mmap() failed %d", errno);
+ mCounterFd.reset();
+ return;
+ }
+ mCounterMemoryAddress = tmpPtr;
+
+ mReadCounterAddress = (android::fifo_counter_t*) mCounterMemoryAddress;
+ mWriteCounterAddress = (android::fifo_counter_t*) &mCounterMemoryAddress[COUNTER_SIZE_IN_BYTES];
+}
+
+SharedMemoryWrapper::~SharedMemoryWrapper()
+{
+ reset();
+ if (mCounterMemoryAddress != nullptr) {
+ munmap(mCounterMemoryAddress, COUNTER_SIZE_IN_BYTES);
+ mCounterMemoryAddress = nullptr;
+ }
+}
+
+aaudio_result_t SharedMemoryWrapper::setupFifoBuffer(android::fifo_frames_t bytesPerFrame,
+ android::fifo_frames_t capacityInFrames) {
+ if (mDataFd.get() == -1) {
+ ALOGE("%s data file descriptor is not initialized", __func__);
+ return AAUDIO_ERROR_INTERNAL;
+ }
+ if (mCounterMemoryAddress == nullptr) {
+ ALOGE("%s the counter memory is not allocated correctly", __func__);
+ return AAUDIO_ERROR_INTERNAL;
+ }
+ mSharedMemorySizeInBytes = bytesPerFrame * capacityInFrames;
+ auto tmpPtr = (uint8_t *) mmap(nullptr, mSharedMemorySizeInBytes,
+ PROT_READ|PROT_WRITE,
+ MAP_SHARED,
+ mDataFd.get(), 0);
+ if (tmpPtr == MAP_FAILED) {
+ ALOGE("allocate() mmap() failed %d", errno);
+ return AAUDIO_ERROR_INTERNAL;
+ }
+ mSharedMemory = tmpPtr;
+
+ mFifoBuffer = std::make_shared<android::FifoBufferIndirect>(
+ bytesPerFrame, capacityInFrames, mReadCounterAddress,
+ mWriteCounterAddress, mSharedMemory);
+ return AAUDIO_OK;
+}
+
+void SharedMemoryWrapper::reset() {
+ mFifoBuffer.reset();
+ if (mSharedMemory != nullptr) {
+ munmap(mSharedMemory, mSharedMemorySizeInBytes);
+ mSharedMemory = nullptr;
+ }
+ mDataFd.reset();
+}
+
+void SharedMemoryWrapper::fillParcelable(
+ AudioEndpointParcelable* endpointParcelable, RingBufferParcelable &ringBufferParcelable,
+ int32_t bytesPerFrame, int32_t framesPerBurst, int32_t capacityInFrames,
+ CounterFilling counterFilling) {
+ const int capacityInBytes = bytesPerFrame * capacityInFrames;
+ const int dataFdIndex =
+ endpointParcelable->addFileDescriptor(mDataFd, mSharedMemorySizeInBytes);
+ ringBufferParcelable.setBytesPerFrame(bytesPerFrame);
+ ringBufferParcelable.setFramesPerBurst(framesPerBurst);
+ ringBufferParcelable.setCapacityInFrames(capacityInFrames);
+ if (mCounterFd.get() == -1 || counterFilling == NONE) {
+ // Failed to create shared memory for read/write counter or requesting no filling counters.
+ ALOGD("%s no counter is filled, counterFd=%d", __func__, mCounterFd.get());
+ ringBufferParcelable.setupMemory(dataFdIndex, 0, capacityInBytes);
+ } else {
+ int counterFdIndex =
+ endpointParcelable->addFileDescriptor(mCounterFd, WRAPPER_SIZE_IN_BYTES);
+ const int readCounterSize = (counterFilling & READ) == NONE ? 0 : COUNTER_SIZE_IN_BYTES;
+ const int writeCounterSize = (counterFilling & WRITE) == NONE ? 0 : COUNTER_SIZE_IN_BYTES;
+ ALOGD("%s counterFdIndex=%d readCounterSize=%d, writeCounterSize=%d",
+ __func__, counterFdIndex, readCounterSize, writeCounterSize);
+ ringBufferParcelable.setupMemory(
+ {dataFdIndex, 0 /*offset*/, capacityInBytes},
+ {counterFdIndex, 0 /*offset*/, readCounterSize},
+ {counterFdIndex, COUNTER_SIZE_IN_BYTES, writeCounterSize});
+ }
+}
+
+} // namespace aaudio
diff --git a/services/oboeservice/SharedMemoryWrapper.h b/services/oboeservice/SharedMemoryWrapper.h
new file mode 100644
index 0000000..323c7f1
--- /dev/null
+++ b/services/oboeservice/SharedMemoryWrapper.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/unique_fd.h>
+#include <cutils/ashmem.h>
+#include <stdint.h>
+#include <string>
+#include <sys/mman.h>
+
+#include "fifo/FifoBuffer.h"
+#include "binding/RingBufferParcelable.h"
+#include "binding/AudioEndpointParcelable.h"
+
+namespace aaudio {
+
+/**
+ * Wrap the shared memory with read and write counters. Provide a fifo buffer to access the
+ * wrapped shared memory.
+ */
+class SharedMemoryWrapper {
+public:
+ explicit SharedMemoryWrapper();
+
+ virtual ~SharedMemoryWrapper();
+
+ android::base::unique_fd& getDataFileDescriptor() { return mDataFd; }
+
+ aaudio_result_t setupFifoBuffer(android::fifo_frames_t bytesPerFrame,
+ android::fifo_frames_t capacityInFrames);
+
+ void reset();
+
+ enum CounterFilling {
+ NONE = 0,
+ READ = 1,
+ WRITE = 2,
+ };
+ /**
+ * Fill shared memory into parcelable.
+ *
+ * @param endpointParcelable container for ring buffers and shared memories
+ * @param ringBufferParcelable the ring buffer
+ * @param bytesPerFrame the bytes per frame of the data memory
+ * @param framesPerBurst the frame per burst of the data memory
+ * @param capacityInFrames the capacity in frames of the data memory
+ * @param counterFilling a bit mask to control if the counter from the wrapper should be filled
+ * or not.
+ */
+ void fillParcelable(AudioEndpointParcelable* endpointParcelable,
+ RingBufferParcelable &ringBufferParcelable,
+ int32_t bytesPerFrame,
+ int32_t framesPerBurst,
+ int32_t capacityInFrames,
+ CounterFilling counterFilling = NONE);
+
+ std::shared_ptr<android::FifoBuffer> getFifoBuffer() {
+ return mFifoBuffer;
+ }
+
+private:
+ android::base::unique_fd mDataFd;
+ android::base::unique_fd mCounterFd;
+ uint8_t* mCounterMemoryAddress = nullptr;
+ android::fifo_counter_t* mReadCounterAddress = nullptr;
+ android::fifo_counter_t* mWriteCounterAddress = nullptr;
+ std::shared_ptr<android::FifoBufferIndirect> mFifoBuffer;
+ uint8_t* mSharedMemory = nullptr;
+ int32_t mSharedMemorySizeInBytes = 0;
+};
+
+} /* namespace aaudio */
diff --git a/services/oboeservice/SharedRingBuffer.cpp b/services/oboeservice/SharedRingBuffer.cpp
index fd2a454..eebff51 100644
--- a/services/oboeservice/SharedRingBuffer.cpp
+++ b/services/oboeservice/SharedRingBuffer.cpp
@@ -62,7 +62,7 @@
// Map the fd to memory addresses. Use a temporary pointer to keep the mmap result and update
// it to `mSharedMemory` only when mmap operate successfully.
- uint8_t* tmpPtr = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
+ auto tmpPtr = (uint8_t *) mmap(nullptr, mSharedMemorySizeInBytes,
PROT_READ|PROT_WRITE,
MAP_SHARED,
mFileDescriptor.get(), 0);
@@ -74,10 +74,8 @@
mSharedMemory = tmpPtr;
// Get addresses for our counters and data from the shared memory.
- fifo_counter_t *readCounterAddress =
- (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_READ_OFFSET];
- fifo_counter_t *writeCounterAddress =
- (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_WRITE_OFFSET];
+ auto readCounterAddress = (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_READ_OFFSET];
+ auto writeCounterAddress = (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_WRITE_OFFSET];
uint8_t *dataAddress = &mSharedMemory[SHARED_RINGBUFFER_DATA_OFFSET];
mFifoBuffer = std::make_shared<FifoBufferIndirect>(bytesPerFrame, capacityInFrames,
diff --git a/services/oboeservice/SharedRingBuffer.h b/services/oboeservice/SharedRingBuffer.h
index cff1261..463bf11 100644
--- a/services/oboeservice/SharedRingBuffer.h
+++ b/services/oboeservice/SharedRingBuffer.h
@@ -39,7 +39,7 @@
*/
class SharedRingBuffer {
public:
- SharedRingBuffer() {}
+ SharedRingBuffer() = default;
virtual ~SharedRingBuffer();
diff --git a/services/oboeservice/TimestampScheduler.h b/services/oboeservice/TimestampScheduler.h
index baa5c41..6b5f4b1 100644
--- a/services/oboeservice/TimestampScheduler.h
+++ b/services/oboeservice/TimestampScheduler.h
@@ -31,7 +31,7 @@
class TimestampScheduler
{
public:
- TimestampScheduler() {};
+ TimestampScheduler() = default;
virtual ~TimestampScheduler() = default;
/**
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 605ac01..91ae511 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -29,6 +29,9 @@
cc_fuzz {
name: "oboeservice_fuzzer",
+ defaults: [
+ "latest_android_media_audio_common_types_cpp_shared",
+ ],
srcs: [
"oboeservice_fuzzer.cpp",
],
diff --git a/services/oboeservice/fuzzer/README.md b/services/oboeservice/fuzzer/README.md
index ae7af3eb..617822f 100644
--- a/services/oboeservice/fuzzer/README.md
+++ b/services/oboeservice/fuzzer/README.md
@@ -23,21 +23,24 @@
12. InputPreset
13. BufferCapacity
-| Parameter| Valid Input Values| Configured Value|
-|------------- |-------------| ----- |
-| `AAudioFormat` | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `UserId` | `INT32_MIN` to `INT32_MAX` | Value obtained from getuid() |
-| `ProcessId` | `INT32_MIN` to `INT32_MAX` | Value obtained from getpid() |
-| `InService` | `bool` | Value obtained from FuzzedDataProvider |
-| `DeviceId` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
-| `SampleRate` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
-| `ChannelMask` | `AAUDIO_UNSPECIFIED`, `AAUDIO_CHANNEL_INDEX_MASK_1`, `AAUDIO_CHANNEL_INDEX_MASK_2`, `AAUDIO_CHANNEL_INDEX_MASK_3`, `AAUDIO_CHANNEL_INDEX_MASK_4`, `AAUDIO_CHANNEL_INDEX_MASK_5`, `AAUDIO_CHANNEL_INDEX_MASK_6`, `AAUDIO_CHANNEL_INDEX_MASK_7`, `AAUDIO_CHANNEL_INDEX_MASK_8`, `AAUDIO_CHANNEL_INDEX_MASK_9`, `AAUDIO_CHANNEL_INDEX_MASK_10`, `AAUDIO_CHANNEL_INDEX_MASK_11`, `AAUDIO_CHANNEL_INDEX_MASK_12`, `AAUDIO_CHANNEL_INDEX_MASK_13`, `AAUDIO_CHANNEL_INDEX_MASK_14`, `AAUDIO_CHANNEL_INDEX_MASK_15`, `AAUDIO_CHANNEL_INDEX_MASK_16`, `AAUDIO_CHANNEL_INDEX_MASK_17`, `AAUDIO_CHANNEL_INDEX_MASK_18`, `AAUDIO_CHANNEL_INDEX_MASK_19`, `AAUDIO_CHANNEL_INDEX_MASK_20`, `AAUDIO_CHANNEL_INDEX_MASK_21`, `AAUDIO_CHANNEL_INDEX_MASK_22`, `AAUDIO_CHANNEL_INDEX_MASK_23`, `AAUDIO_CHANNEL_INDEX_MASK_24`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_FRONT_BACK`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_2POINT1`, `AAUDIO_CHANNEL_TRI`, `AAUDIO_CHANNEL_TRI_BACK`, `AAUDIO_CHANNEL_3POINT1`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_QUAD`, `AAUDIO_CHANNEL_QUAD_SIDE`, `AAUDIO_CHANNEL_SURROUND`, `AAUDIO_CHANNEL_PENTA`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_5POINT1_SIDE`, `AAUDIO_CHANNEL_5POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1POINT4`, `AAUDIO_CHANNEL_6POINT1`, `AAUDIO_CHANNEL_7POINT1`, `AAUDIO_CHANNEL_7POINT1POINT2`, `AAUDIO_CHANNEL_7POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT6` | Value obtained from FuzzedDataProvider |
-| `Direction` | `AAUDIO_DIRECTION_OUTPUT`, `AAUDIO_DIRECTION_INPUT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `SharingMode` | `AAUDIO_SHARING_MODE_EXCLUSIVE`, `AAUDIO_SHARING_MODE_SHARED` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `Usage` | `AAUDIO_USAGE_MEDIA`, `AAUDIO_USAGE_VOICE_COMMUNICATION`, `AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING`, `AAUDIO_USAGE_ALARM`, `AAUDIO_USAGE_NOTIFICATION`, `AAUDIO_USAGE_NOTIFICATION_RINGTONE`, `AAUDIO_USAGE_NOTIFICATION_EVENT`, `AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY`, `AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE`, `AAUDIO_USAGE_ASSISTANCE_SONIFICATION`, `AAUDIO_USAGE_GAME`, `AAUDIO_USAGE_ASSISTANT`, `AAUDIO_SYSTEM_USAGE_EMERGENCY`, `AAUDIO_SYSTEM_USAGE_SAFETY`, `AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS`, `AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `ContentType` | `AAUDIO_CONTENT_TYPE_SPEECH`, `AAUDIO_CONTENT_TYPE_MUSIC`, `AAUDIO_CONTENT_TYPE_MOVIE`, `AAUDIO_CONTENT_TYPE_SONIFICATION` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `InputPreset` | `AAUDIO_INPUT_PRESET_GENERIC`, `AAUDIO_INPUT_PRESET_CAMCORDER`, `AAUDIO_INPUT_PRESET_VOICE_RECOGNITION`, `AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION`, `AAUDIO_INPUT_PRESET_UNPROCESSED`, `AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
-| `BufferCapacity` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| Parameter | Valid Input Values| Configured Value|
+|---------------------------|-------------| ----- |
+| `Format` | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT`, `AAUDIO_FORMAT_IEC61937`, `AAUDIO_FORMAT_PCM_I24_PACKED`, `AAUDIO_FORMAT_PCM_I32` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `UserId` | `INT32_MIN` to `INT32_MAX` | Value obtained from getuid() |
+| `ProcessId` | `INT32_MIN` to `INT32_MAX` | Value obtained from getpid() |
+| `InService` | `bool` | Value obtained from FuzzedDataProvider |
+| `DeviceId` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `SampleRate` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `ChannelMask` | `AAUDIO_UNSPECIFIED`, `AAUDIO_CHANNEL_INDEX_MASK_1`, `AAUDIO_CHANNEL_INDEX_MASK_2`, `AAUDIO_CHANNEL_INDEX_MASK_3`, `AAUDIO_CHANNEL_INDEX_MASK_4`, `AAUDIO_CHANNEL_INDEX_MASK_5`, `AAUDIO_CHANNEL_INDEX_MASK_6`, `AAUDIO_CHANNEL_INDEX_MASK_7`, `AAUDIO_CHANNEL_INDEX_MASK_8`, `AAUDIO_CHANNEL_INDEX_MASK_9`, `AAUDIO_CHANNEL_INDEX_MASK_10`, `AAUDIO_CHANNEL_INDEX_MASK_11`, `AAUDIO_CHANNEL_INDEX_MASK_12`, `AAUDIO_CHANNEL_INDEX_MASK_13`, `AAUDIO_CHANNEL_INDEX_MASK_14`, `AAUDIO_CHANNEL_INDEX_MASK_15`, `AAUDIO_CHANNEL_INDEX_MASK_16`, `AAUDIO_CHANNEL_INDEX_MASK_17`, `AAUDIO_CHANNEL_INDEX_MASK_18`, `AAUDIO_CHANNEL_INDEX_MASK_19`, `AAUDIO_CHANNEL_INDEX_MASK_20`, `AAUDIO_CHANNEL_INDEX_MASK_21`, `AAUDIO_CHANNEL_INDEX_MASK_22`, `AAUDIO_CHANNEL_INDEX_MASK_23`, `AAUDIO_CHANNEL_INDEX_MASK_24`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_FRONT_BACK`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_MONO`, `AAUDIO_CHANNEL_STEREO`, `AAUDIO_CHANNEL_2POINT1`, `AAUDIO_CHANNEL_TRI`, `AAUDIO_CHANNEL_TRI_BACK`, `AAUDIO_CHANNEL_3POINT1`, `AAUDIO_CHANNEL_2POINT0POINT2`, `AAUDIO_CHANNEL_2POINT1POINT2`, `AAUDIO_CHANNEL_3POINT0POINT2`, `AAUDIO_CHANNEL_3POINT1POINT2`, `AAUDIO_CHANNEL_QUAD`, `AAUDIO_CHANNEL_QUAD_SIDE`, `AAUDIO_CHANNEL_SURROUND`, `AAUDIO_CHANNEL_PENTA`, `AAUDIO_CHANNEL_5POINT1`, `AAUDIO_CHANNEL_5POINT1_SIDE`, `AAUDIO_CHANNEL_5POINT1POINT2`, `AAUDIO_CHANNEL_5POINT1POINT4`, `AAUDIO_CHANNEL_6POINT1`, `AAUDIO_CHANNEL_7POINT1`, `AAUDIO_CHANNEL_7POINT1POINT2`, `AAUDIO_CHANNEL_7POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT4`, `AAUDIO_CHANNEL_9POINT1POINT6` | Value obtained from FuzzedDataProvider |
+| `Direction` | `AAUDIO_DIRECTION_OUTPUT`, `AAUDIO_DIRECTION_INPUT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `SharingMode` | `AAUDIO_SHARING_MODE_EXCLUSIVE`, `AAUDIO_SHARING_MODE_SHARED` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `Usage` | `AAUDIO_USAGE_MEDIA`, `AAUDIO_USAGE_VOICE_COMMUNICATION`, `AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING`, `AAUDIO_USAGE_ALARM`, `AAUDIO_USAGE_NOTIFICATION`, `AAUDIO_USAGE_NOTIFICATION_RINGTONE`, `AAUDIO_USAGE_NOTIFICATION_EVENT`, `AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY`, `AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE`, `AAUDIO_USAGE_ASSISTANCE_SONIFICATION`, `AAUDIO_USAGE_GAME`, `AAUDIO_USAGE_ASSISTANT`, `AAUDIO_SYSTEM_USAGE_EMERGENCY`, `AAUDIO_SYSTEM_USAGE_SAFETY`, `AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS`, `AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `ContentType` | `AAUDIO_CONTENT_TYPE_SPEECH`, `AAUDIO_CONTENT_TYPE_MUSIC`, `AAUDIO_CONTENT_TYPE_MOVIE`, `AAUDIO_CONTENT_TYPE_SONIFICATION` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `InputPreset` | `AAUDIO_INPUT_PRESET_GENERIC`, `AAUDIO_INPUT_PRESET_CAMCORDER`, `AAUDIO_INPUT_PRESET_VOICE_RECOGNITION`, `AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION`, `AAUDIO_INPUT_PRESET_UNPROCESSED`, `AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `BufferCapacity` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `HardwareSampleRate` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `HardwareSamplesPerFrame` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `HardwareFormat` | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT`, `AAUDIO_FORMAT_IEC61937`, `AAUDIO_FORMAT_PCM_I24_PACKED`, `AAUDIO_FORMAT_PCM_I32` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
This also ensures that the plugin is always deterministic for any given input.
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index 5e48955..f047065 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -34,6 +34,9 @@
AAUDIO_FORMAT_UNSPECIFIED,
AAUDIO_FORMAT_PCM_I16,
AAUDIO_FORMAT_PCM_FLOAT,
+ AAUDIO_FORMAT_PCM_I24_PACKED,
+ AAUDIO_FORMAT_PCM_I32,
+ AAUDIO_FORMAT_IEC61937
};
aaudio_usage_t kAAudioUsages[] = {
@@ -146,41 +149,42 @@
void registerClient(const sp<IAAudioClient> &client UNUSED_PARAM) override {}
- aaudio_handle_t openStream(const AAudioStreamRequest &request,
- AAudioStreamConfiguration &configurationOutput) override;
+ AAudioHandleInfo openStream(const AAudioStreamRequest &request,
+ AAudioStreamConfiguration &configurationOutput) override;
- aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t closeStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+ aaudio_result_t getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable &parcelable) override;
- aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t startStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t pauseStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t stopStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t flushStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle, pid_t clientThreadId,
+ aaudio_result_t registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
+ pid_t clientThreadId,
int64_t periodNanoseconds) override;
- aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+ aaudio_result_t unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId) override;
- aaudio_result_t startClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+ aaudio_result_t startClient(const AAudioHandleInfo& streamHandleInfo UNUSED_PARAM,
const AudioClient &client UNUSED_PARAM,
const audio_attributes_t *attr UNUSED_PARAM,
audio_port_handle_t *clientHandle UNUSED_PARAM) override {
return AAUDIO_ERROR_UNAVAILABLE;
}
- aaudio_result_t stopClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+ aaudio_result_t stopClient(const AAudioHandleInfo& streamHandleInfo UNUSED_PARAM,
audio_port_handle_t clientHandle UNUSED_PARAM) override {
return AAUDIO_ERROR_UNAVAILABLE;
}
- aaudio_result_t exitStandby(aaudio_handle_t streamHandle UNUSED_PARAM,
+ aaudio_result_t exitStandby(const AAudioHandleInfo& streamHandleInfo UNUSED_PARAM,
AudioEndpointParcelable &parcelable UNUSED_PARAM) override {
return AAUDIO_ERROR_UNAVAILABLE;
}
@@ -247,92 +251,91 @@
mAAudioService.clear();
}
-aaudio_handle_t FuzzAAudioClient::openStream(const AAudioStreamRequest &request,
- AAudioStreamConfiguration &configurationOutput) {
- aaudio_handle_t stream;
+AAudioHandleInfo FuzzAAudioClient::openStream(const AAudioStreamRequest &request,
+ AAudioStreamConfiguration &configurationOutput) {
for (int i = 0; i < 2; ++i) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
- return AAUDIO_ERROR_NO_SERVICE;
+ return {-1, AAUDIO_ERROR_NO_SERVICE};
}
- stream = service->openStream(request, configurationOutput);
+ auto streamHandleInfo = service->openStream(request, configurationOutput);
- if (stream == AAUDIO_ERROR_NO_SERVICE) {
+ if (streamHandleInfo.getHandle() == AAUDIO_ERROR_NO_SERVICE) {
dropAAudioService();
} else {
- break;
+ return streamHandleInfo;
}
}
- return stream;
+ return {-1, AAUDIO_ERROR_NO_SERVICE};
}
-aaudio_result_t FuzzAAudioClient::closeStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::closeStream(const AAudioHandleInfo& streamHandleInfo) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->closeStream(streamHandle);
+ return service->closeStream(streamHandleInfo);
}
-aaudio_result_t FuzzAAudioClient::getStreamDescription(aaudio_handle_t streamHandle,
+aaudio_result_t FuzzAAudioClient::getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable &parcelable) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->getStreamDescription(streamHandle, parcelable);
+ return service->getStreamDescription(streamHandleInfo, parcelable);
}
-aaudio_result_t FuzzAAudioClient::startStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::startStream(const AAudioHandleInfo& streamHandleInfo) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->startStream(streamHandle);
+ return service->startStream(streamHandleInfo);
}
-aaudio_result_t FuzzAAudioClient::pauseStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::pauseStream(const AAudioHandleInfo& streamHandleInfo) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->pauseStream(streamHandle);
+ return service->pauseStream(streamHandleInfo);
}
-aaudio_result_t FuzzAAudioClient::stopStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::stopStream(const AAudioHandleInfo& streamHandleInfo) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->stopStream(streamHandle);
+ return service->stopStream(streamHandleInfo);
}
-aaudio_result_t FuzzAAudioClient::flushStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::flushStream(const AAudioHandleInfo& streamHandleInfo) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->flushStream(streamHandle);
+ return service->flushStream(streamHandleInfo);
}
-aaudio_result_t FuzzAAudioClient::registerAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t FuzzAAudioClient::registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId,
int64_t periodNanoseconds) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds);
+ return service->registerAudioThread(streamHandleInfo, clientThreadId, periodNanoseconds);
}
-aaudio_result_t FuzzAAudioClient::unregisterAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t FuzzAAudioClient::unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->unregisterAudioThread(streamHandle, clientThreadId);
+ return service->unregisterAudioThread(streamHandleInfo, clientThreadId);
}
class OboeserviceFuzzer {
@@ -400,8 +403,15 @@
request.getConfiguration().setBufferCapacity(fdp.ConsumeIntegral<int32_t>());
- aaudio_handle_t stream = mClient->openStream(request, configurationOutput);
- if (stream < 0) {
+ request.getConfiguration().setHardwareSampleRate(fdp.ConsumeIntegral<int32_t>());
+ request.getConfiguration().setHardwareSamplesPerFrame(fdp.ConsumeIntegral<int32_t>());
+ request.getConfiguration().setHardwareFormat((audio_format_t)(
+ fdp.ConsumeBool()
+ ? fdp.ConsumeIntegral<int32_t>()
+ : kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
+
+ auto streamHandleInfo = mClient->openStream(request, configurationOutput);
+ if (streamHandleInfo.getHandle() < 0) {
// invalid request, stream not opened.
return;
}
@@ -410,23 +420,23 @@
int action = fdp.ConsumeIntegralInRange<int32_t>(0, 4);
switch (action) {
case 0:
- mClient->getStreamDescription(stream, audioEndpointParcelable);
+ mClient->getStreamDescription(streamHandleInfo, audioEndpointParcelable);
break;
case 1:
- mClient->startStream(stream);
+ mClient->startStream(streamHandleInfo);
break;
case 2:
- mClient->pauseStream(stream);
+ mClient->pauseStream(streamHandleInfo);
break;
case 3:
- mClient->stopStream(stream);
+ mClient->stopStream(streamHandleInfo);
break;
case 4:
- mClient->flushStream(stream);
+ mClient->flushStream(streamHandleInfo);
break;
}
}
- mClient->closeStream(stream);
+ mClient->closeStream(streamHandleInfo);
assert(mClient->getDeathCount() == 0);
}
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index 5c1dda1..ea5139d 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -15,9 +15,8 @@
imports: [
"android.hardware.common-V2",
"android.hardware.common.fmq-V1",
- "android.hardware.tv.tuner-V1",
+ "android.hardware.tv.tuner-V2",
],
-
backend: {
java: {
enabled: false,
@@ -42,7 +41,7 @@
shared_libs: [
"android.hardware.tv.tuner@1.0",
"android.hardware.tv.tuner@1.1",
- "android.hardware.tv.tuner-V1-ndk",
+ "android.hardware.tv.tuner-V2-ndk",
"libbase",
"libbinder",
"libbinder_ndk",
@@ -85,10 +84,11 @@
shared_libs: [
"android.hardware.tv.tuner@1.0",
"android.hardware.tv.tuner@1.1",
- "android.hardware.tv.tuner-V1-ndk",
+ "android.hardware.tv.tuner-V2-ndk",
"libbase",
"libbinder",
"libfmq",
+ "libhidlbase",
"liblog",
"libtunerservice",
"libutils",
diff --git a/services/tuner/TunerDemux.cpp b/services/tuner/TunerDemux.cpp
index a6f3a2c..92fa970 100644
--- a/services/tuner/TunerDemux.cpp
+++ b/services/tuner/TunerDemux.cpp
@@ -26,6 +26,7 @@
#include <aidl/android/hardware/tv/tuner/Result.h>
#include "TunerDvr.h"
+#include "TunerService.h"
#include "TunerTimeFilter.h"
using ::aidl::android::hardware::tv::tuner::IDvr;
@@ -41,23 +42,21 @@
namespace tv {
namespace tuner {
-TunerDemux::TunerDemux(shared_ptr<IDemux> demux, int id) {
+TunerDemux::TunerDemux(const shared_ptr<IDemux> demux, const int id,
+ const shared_ptr<TunerService> tuner) {
mDemux = demux;
mDemuxId = id;
+ mTunerService = tuner;
}
TunerDemux::~TunerDemux() {
+ close();
mDemux = nullptr;
+ mTunerService = nullptr;
}
::ndk::ScopedAStatus TunerDemux::setFrontendDataSource(
const shared_ptr<ITunerFrontend>& in_frontend) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
int frontendId;
in_frontend->getFrontendId(&frontendId);
@@ -65,43 +64,26 @@
}
::ndk::ScopedAStatus TunerDemux::setFrontendDataSourceById(int frontendId) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mDemux->setFrontendDataSource(frontendId);
}
::ndk::ScopedAStatus TunerDemux::openFilter(const DemuxFilterType& in_type, int32_t in_bufferSize,
const shared_ptr<ITunerFilterCallback>& in_cb,
shared_ptr<ITunerFilter>* _aidl_return) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
shared_ptr<IFilter> filter;
shared_ptr<TunerFilter::FilterCallback> filterCb =
::ndk::SharedRefBase::make<TunerFilter::FilterCallback>(in_cb);
shared_ptr<IFilterCallback> cb = filterCb;
auto status = mDemux->openFilter(in_type, in_bufferSize, cb, &filter);
if (status.isOk()) {
- *_aidl_return = ::ndk::SharedRefBase::make<TunerFilter>(filter, filterCb, in_type);
+ *_aidl_return =
+ ::ndk::SharedRefBase::make<TunerFilter>(filter, filterCb, in_type, mTunerService);
}
return status;
}
::ndk::ScopedAStatus TunerDemux::openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
shared_ptr<ITimeFilter> filter;
auto status = mDemux->openTimeFilter(&filter);
if (status.isOk()) {
@@ -113,35 +95,17 @@
::ndk::ScopedAStatus TunerDemux::getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter,
int32_t* _aidl_return) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
shared_ptr<IFilter> halFilter = (static_cast<TunerFilter*>(tunerFilter.get()))->getHalFilter();
return mDemux->getAvSyncHwId(halFilter, _aidl_return);
}
::ndk::ScopedAStatus TunerDemux::getAvSyncTime(int32_t avSyncHwId, int64_t* _aidl_return) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mDemux->getAvSyncTime(avSyncHwId, _aidl_return);
}
::ndk::ScopedAStatus TunerDemux::openDvr(DvrType in_dvbType, int32_t in_bufferSize,
const shared_ptr<ITunerDvrCallback>& in_cb,
shared_ptr<ITunerDvr>* _aidl_return) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
shared_ptr<IDvrCallback> callback = ::ndk::SharedRefBase::make<TunerDvr::DvrCallback>(in_cb);
shared_ptr<IDvr> halDvr;
auto res = mDemux->openDvr(in_dvbType, in_bufferSize, callback, &halDvr);
@@ -153,36 +117,15 @@
}
::ndk::ScopedAStatus TunerDemux::connectCiCam(int32_t ciCamId) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mDemux->connectCiCam(ciCamId);
}
::ndk::ScopedAStatus TunerDemux::disconnectCiCam() {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mDemux->disconnectCiCam();
}
::ndk::ScopedAStatus TunerDemux::close() {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
- auto res = mDemux->close();
- mDemux = nullptr;
-
- return res;
+ return mDemux->close();
}
} // namespace tuner
diff --git a/services/tuner/TunerDemux.h b/services/tuner/TunerDemux.h
index cdb3aa0..0c71987 100644
--- a/services/tuner/TunerDemux.h
+++ b/services/tuner/TunerDemux.h
@@ -32,10 +32,13 @@
namespace tv {
namespace tuner {
+class TunerService;
+
class TunerDemux : public BnTunerDemux {
public:
- TunerDemux(shared_ptr<IDemux> demux, int demuxId);
+ TunerDemux(const shared_ptr<IDemux> demux, const int demuxId,
+ const shared_ptr<TunerService> tuner);
virtual ~TunerDemux();
::ndk::ScopedAStatus setFrontendDataSource(
@@ -60,6 +63,7 @@
private:
shared_ptr<IDemux> mDemux;
int mDemuxId;
+ shared_ptr<TunerService> mTunerService;
};
} // namespace tuner
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
index 70aee20..ffe0be9 100644
--- a/services/tuner/TunerDescrambler.cpp
+++ b/services/tuner/TunerDescrambler.cpp
@@ -41,38 +41,21 @@
}
TunerDescrambler::~TunerDescrambler() {
+ close();
mDescrambler = nullptr;
}
::ndk::ScopedAStatus TunerDescrambler::setDemuxSource(
const shared_ptr<ITunerDemux>& in_tunerDemux) {
- if (mDescrambler == nullptr) {
- ALOGE("IDescrambler is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mDescrambler->setDemuxSource((static_cast<TunerDemux*>(in_tunerDemux.get()))->getId());
}
::ndk::ScopedAStatus TunerDescrambler::setKeyToken(const vector<uint8_t>& in_keyToken) {
- if (mDescrambler == nullptr) {
- ALOGE("IDescrambler is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mDescrambler->setKeyToken(in_keyToken);
}
::ndk::ScopedAStatus TunerDescrambler::addPid(
const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
- if (mDescrambler == nullptr) {
- ALOGE("IDescrambler is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
shared_ptr<IFilter> halFilter =
(in_optionalSourceFilter == nullptr)
? nullptr
@@ -83,12 +66,6 @@
::ndk::ScopedAStatus TunerDescrambler::removePid(
const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
- if (mDescrambler == nullptr) {
- ALOGE("IDescrambler is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
shared_ptr<IFilter> halFilter =
(in_optionalSourceFilter == nullptr)
? nullptr
@@ -98,16 +75,7 @@
}
::ndk::ScopedAStatus TunerDescrambler::close() {
- if (mDescrambler == nullptr) {
- ALOGE("IDescrambler is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
- auto res = mDescrambler->close();
- mDescrambler = nullptr;
-
- return res;
+ return mDescrambler->close();
}
} // namespace tuner
diff --git a/services/tuner/TunerDvr.cpp b/services/tuner/TunerDvr.cpp
index 8776f7e..fcee966 100644
--- a/services/tuner/TunerDvr.cpp
+++ b/services/tuner/TunerDvr.cpp
@@ -37,36 +37,19 @@
}
TunerDvr::~TunerDvr() {
+ close();
mDvr = nullptr;
}
::ndk::ScopedAStatus TunerDvr::getQueueDesc(AidlMQDesc* _aidl_return) {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mDvr->getQueueDesc(_aidl_return);
}
::ndk::ScopedAStatus TunerDvr::configure(const DvrSettings& in_settings) {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mDvr->configure(in_settings);
}
::ndk::ScopedAStatus TunerDvr::attachFilter(const shared_ptr<ITunerFilter>& in_filter) {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (in_filter == nullptr) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -82,12 +65,6 @@
}
::ndk::ScopedAStatus TunerDvr::detachFilter(const shared_ptr<ITunerFilter>& in_filter) {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (in_filter == nullptr) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -103,46 +80,34 @@
}
::ndk::ScopedAStatus TunerDvr::start() {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mDvr->start();
}
::ndk::ScopedAStatus TunerDvr::stop() {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mDvr->stop();
}
::ndk::ScopedAStatus TunerDvr::flush() {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mDvr->flush();
}
::ndk::ScopedAStatus TunerDvr::close() {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
+ return mDvr->close();
+}
+
+::ndk::ScopedAStatus TunerDvr::setStatusCheckIntervalHint(const int64_t milliseconds) {
+ if (milliseconds < 0L) {
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(Result::INVALID_ARGUMENT));
+ }
+
+ ::ndk::ScopedAStatus s = mDvr->setStatusCheckIntervalHint(milliseconds);
+ if (s.getStatus() == STATUS_UNKNOWN_TRANSACTION) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::UNAVAILABLE));
}
- auto status = mDvr->close();
- mDvr = nullptr;
-
- return status;
+ return s;
}
/////////////// IDvrCallback ///////////////////////
diff --git a/services/tuner/TunerDvr.h b/services/tuner/TunerDvr.h
index 1854d08..2330e7b 100644
--- a/services/tuner/TunerDvr.h
+++ b/services/tuner/TunerDvr.h
@@ -61,6 +61,7 @@
::ndk::ScopedAStatus stop() override;
::ndk::ScopedAStatus flush() override;
::ndk::ScopedAStatus close() override;
+ ::ndk::ScopedAStatus setStatusCheckIntervalHint(int64_t in_milliseconds) override;
struct DvrCallback : public BnDvrCallback {
DvrCallback(const shared_ptr<ITunerDvrCallback> tunerDvrCallback)
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index e8c7767..478e7ea 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -36,28 +36,28 @@
using namespace std;
-TunerFilter::TunerFilter(shared_ptr<IFilter> filter, shared_ptr<FilterCallback> cb,
- DemuxFilterType type)
+TunerFilter::TunerFilter(const shared_ptr<IFilter> filter, const shared_ptr<FilterCallback> cb,
+ const DemuxFilterType type, const shared_ptr<TunerService> tuner)
: mFilter(filter),
mType(type),
mStarted(false),
mShared(false),
mClientPid(-1),
- mFilterCallback(cb) {}
+ mFilterCallback(cb),
+ mTunerService(tuner) {}
TunerFilter::~TunerFilter() {
- Mutex::Autolock _l(mLock);
- mFilter = nullptr;
+ close();
+ freeSharedFilterToken("");
+ {
+ Mutex::Autolock _l(mLock);
+ mFilter = nullptr;
+ mTunerService = nullptr;
+ }
}
::ndk::ScopedAStatus TunerFilter::getQueueDesc(AidlMQDesc* _aidl_return) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
IPCThreadState* ipc = IPCThreadState::self();
int32_t callingPid = ipc->getCallingPid();
@@ -73,12 +73,6 @@
::ndk::ScopedAStatus TunerFilter::getId(int32_t* _aidl_return) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
ALOGD("%s is called on a shared filter", __FUNCTION__);
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -94,12 +88,6 @@
::ndk::ScopedAStatus TunerFilter::getId64Bit(int64_t* _aidl_return) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
ALOGD("%s is called on a shared filter", __FUNCTION__);
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -115,12 +103,6 @@
::ndk::ScopedAStatus TunerFilter::configure(const DemuxFilterSettings& in_settings) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
ALOGD("%s is called on a shared filter", __FUNCTION__);
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -132,12 +114,6 @@
::ndk::ScopedAStatus TunerFilter::configureMonitorEvent(int32_t monitorEventType) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
ALOGD("%s is called on a shared filter", __FUNCTION__);
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -149,12 +125,6 @@
::ndk::ScopedAStatus TunerFilter::configureIpFilterContextId(int32_t cid) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
ALOGD("%s is called on a shared filter", __FUNCTION__);
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -166,12 +136,6 @@
::ndk::ScopedAStatus TunerFilter::configureAvStreamType(const AvStreamType& in_avStreamType) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
ALOGD("%s is called on a shared filter", __FUNCTION__);
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -183,12 +147,6 @@
::ndk::ScopedAStatus TunerFilter::setDataSource(const shared_ptr<ITunerFilter>& filter) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (filter == nullptr) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -207,12 +165,6 @@
::ndk::ScopedAStatus TunerFilter::getAvSharedHandle(NativeHandle* out_avMemory,
int64_t* _aidl_return) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
ALOGD("%s is called on a shared filter", __FUNCTION__);
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -225,12 +177,6 @@
::ndk::ScopedAStatus TunerFilter::releaseAvHandle(const NativeHandle& in_handle,
int64_t in_avDataId) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
ALOGD("%s is called on a shared filter", __FUNCTION__);
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -242,12 +188,6 @@
::ndk::ScopedAStatus TunerFilter::start() {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
IPCThreadState* ipc = IPCThreadState::self();
int32_t callingPid = ipc->getCallingPid();
@@ -267,12 +207,6 @@
::ndk::ScopedAStatus TunerFilter::stop() {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
IPCThreadState* ipc = IPCThreadState::self();
int32_t callingPid = ipc->getCallingPid();
@@ -291,12 +225,6 @@
::ndk::ScopedAStatus TunerFilter::flush() {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
IPCThreadState* ipc = IPCThreadState::self();
int32_t callingPid = ipc->getCallingPid();
@@ -312,12 +240,6 @@
::ndk::ScopedAStatus TunerFilter::close() {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
IPCThreadState* ipc = IPCThreadState::self();
int32_t callingPid = ipc->getCallingPid();
@@ -326,7 +248,7 @@
mFilterCallback->sendSharedFilterStatus(STATUS_INACCESSIBLE);
mFilterCallback->detachSharedFilterCallback();
}
- TunerService::getTunerService()->removeSharedFilter(this->ref<TunerFilter>());
+ mTunerService->removeSharedFilter(this->ref<TunerFilter>());
} else {
// Calling from shared process, do not really close this filter.
if (mFilterCallback != nullptr) {
@@ -341,7 +263,6 @@
mFilterCallback->detachCallbacks();
}
auto res = mFilter->close();
- mFilter = nullptr;
mStarted = false;
mShared = false;
mClientPid = -1;
@@ -351,12 +272,6 @@
::ndk::ScopedAStatus TunerFilter::acquireSharedFilterToken(string* _aidl_return) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared || mStarted) {
ALOGD("create SharedFilter in wrong state");
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -365,7 +280,7 @@
IPCThreadState* ipc = IPCThreadState::self();
mClientPid = ipc->getCallingPid();
- string token = TunerService::getTunerService()->addFilterToShared(this->ref<TunerFilter>());
+ string token = mTunerService->addFilterToShared(this->ref<TunerFilter>());
_aidl_return->assign(token);
mShared = true;
@@ -374,12 +289,6 @@
::ndk::ScopedAStatus TunerFilter::freeSharedFilterToken(const string& /* in_filterToken */) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (!mShared) {
// The filter is not shared or the shared filter has been closed.
return ::ndk::ScopedAStatus::ok();
@@ -390,7 +299,7 @@
mFilterCallback->detachSharedFilterCallback();
}
- TunerService::getTunerService()->removeSharedFilter(this->ref<TunerFilter>());
+ mTunerService->removeSharedFilter(this->ref<TunerFilter>());
mShared = false;
return ::ndk::ScopedAStatus::ok();
@@ -398,24 +307,12 @@
::ndk::ScopedAStatus TunerFilter::getFilterType(DemuxFilterType* _aidl_return) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
*_aidl_return = mType;
return ::ndk::ScopedAStatus::ok();
}
::ndk::ScopedAStatus TunerFilter::setDelayHint(const FilterDelayHint& in_hint) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mFilter->setDelayHint(in_hint);
}
diff --git a/services/tuner/TunerFilter.h b/services/tuner/TunerFilter.h
index 93d8898..f6178c4 100644
--- a/services/tuner/TunerFilter.h
+++ b/services/tuner/TunerFilter.h
@@ -53,8 +53,9 @@
using AidlMQDesc = MQDescriptor<int8_t, SynchronizedReadWrite>;
-class TunerFilter : public BnTunerFilter {
+class TunerService;
+class TunerFilter : public BnTunerFilter {
public:
class FilterCallback : public BnFilterCallback {
public:
@@ -75,7 +76,8 @@
Mutex mCallbackLock;
};
- TunerFilter(shared_ptr<IFilter> filter, shared_ptr<FilterCallback> cb, DemuxFilterType type);
+ TunerFilter(const shared_ptr<IFilter> filter, const shared_ptr<FilterCallback> cb,
+ const DemuxFilterType type, const shared_ptr<TunerService> tuner);
virtual ~TunerFilter();
::ndk::ScopedAStatus getId(int32_t* _aidl_return) override;
@@ -113,6 +115,7 @@
int32_t mClientPid;
shared_ptr<FilterCallback> mFilterCallback;
Mutex mLock;
+ shared_ptr<TunerService> mTunerService;
};
} // namespace tuner
diff --git a/services/tuner/TunerFrontend.cpp b/services/tuner/TunerFrontend.cpp
index 5116305..1e93d95 100644
--- a/services/tuner/TunerFrontend.cpp
+++ b/services/tuner/TunerFrontend.cpp
@@ -37,18 +37,13 @@
}
TunerFrontend::~TunerFrontend() {
+ close();
mFrontend = nullptr;
mId = -1;
}
::ndk::ScopedAStatus TunerFrontend::setCallback(
const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) {
- if (mFrontend == nullptr) {
- ALOGE("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (tunerFrontendCallback == nullptr) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -60,53 +55,23 @@
}
::ndk::ScopedAStatus TunerFrontend::tune(const FrontendSettings& settings) {
- if (mFrontend == nullptr) {
- ALOGE("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mFrontend->tune(settings);
}
::ndk::ScopedAStatus TunerFrontend::stopTune() {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mFrontend->stopTune();
}
::ndk::ScopedAStatus TunerFrontend::scan(const FrontendSettings& settings,
FrontendScanType frontendScanType) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mFrontend->scan(settings, frontendScanType);
}
::ndk::ScopedAStatus TunerFrontend::stopScan() {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mFrontend->stopScan();
}
::ndk::ScopedAStatus TunerFrontend::setLnb(const shared_ptr<ITunerLnb>& lnb) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (lnb == nullptr) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -116,46 +81,19 @@
}
::ndk::ScopedAStatus TunerFrontend::linkCiCamToFrontend(int32_t ciCamId, int32_t* _aidl_return) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mFrontend->linkCiCam(ciCamId, _aidl_return);
}
::ndk::ScopedAStatus TunerFrontend::unlinkCiCamToFrontend(int32_t ciCamId) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mFrontend->unlinkCiCam(ciCamId);
}
::ndk::ScopedAStatus TunerFrontend::close() {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
- auto res = mFrontend->close();
- mFrontend = nullptr;
-
- return res;
+ return mFrontend->close();
}
::ndk::ScopedAStatus TunerFrontend::getStatus(const vector<FrontendStatusType>& in_statusTypes,
vector<FrontendStatus>* _aidl_return) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mFrontend->getStatus(in_statusTypes, _aidl_return);
}
@@ -165,34 +103,16 @@
}
::ndk::ScopedAStatus TunerFrontend::getHardwareInfo(std::string* _aidl_return) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mFrontend->getHardwareInfo(_aidl_return);
}
::ndk::ScopedAStatus TunerFrontend::removeOutputPid(int32_t in_pid) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mFrontend->removeOutputPid(in_pid);
}
::ndk::ScopedAStatus TunerFrontend::getFrontendStatusReadiness(
const std::vector<FrontendStatusType>& in_statusTypes,
std::vector<FrontendStatusReadiness>* _aidl_return) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mFrontend->getFrontendStatusReadiness(in_statusTypes, _aidl_return);
}
diff --git a/services/tuner/TunerHelper.cpp b/services/tuner/TunerHelper.cpp
index dc67110..a03386f 100644
--- a/services/tuner/TunerHelper.cpp
+++ b/services/tuner/TunerHelper.cpp
@@ -83,6 +83,22 @@
tunerRM->setFrontendInfoList(feInfos);
tunerRM->setLnbInfoList(lnbHandles);
}
+void TunerHelper::updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
+ const vector<TunerDemuxInfo>& demuxInfos,
+ const vector<int32_t>& lnbHandles) {
+ ::ndk::SpAIBinder binder(AServiceManager_waitForService("tv_tuner_resource_mgr"));
+ shared_ptr<ITunerResourceManager> tunerRM = ITunerResourceManager::fromBinder(binder);
+ if (tunerRM == nullptr) {
+ return;
+ }
+
+ updateTunerResources(feInfos, lnbHandles);
+
+ // for Tuner 2.0 and below, Demux resource is not really managed under TRM
+ if (demuxInfos.size() > 0) {
+ tunerRM->setDemuxInfoList(demuxInfos);
+ }
+}
// TODO: create a map between resource id and handles.
int TunerHelper::getResourceIdFromHandle(int resourceHandle, int /*type*/) {
diff --git a/services/tuner/TunerHelper.h b/services/tuner/TunerHelper.h
index 755df57..65a9b0b 100644
--- a/services/tuner/TunerHelper.h
+++ b/services/tuner/TunerHelper.h
@@ -17,9 +17,11 @@
#ifndef ANDROID_MEDIA_TUNERDVRHELPER_H
#define ANDROID_MEDIA_TUNERDVRHELPER_H
+#include <aidl/android/media/tv/tunerresourcemanager/TunerDemuxInfo.h>
#include <aidl/android/media/tv/tunerresourcemanager/TunerFrontendInfo.h>
#include <utils/String16.h>
+using ::aidl::android::media::tv::tunerresourcemanager::TunerDemuxInfo;
using ::aidl::android::media::tv::tunerresourcemanager::TunerFrontendInfo;
using ::android::String16;
@@ -55,6 +57,10 @@
// TODO: update Demux, Descrambler.
static void updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
const vector<int32_t>& lnbHandles);
+
+ static void updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
+ const vector<TunerDemuxInfo>& demuxInfos,
+ const vector<int32_t>& lnbHandles);
// TODO: create a map between resource id and handles.
static int getResourceIdFromHandle(int resourceHandle, int type);
static int getResourceHandleFromId(int id, int resourceType);
diff --git a/services/tuner/TunerLnb.cpp b/services/tuner/TunerLnb.cpp
index 1e143c3..2fb6135 100644
--- a/services/tuner/TunerLnb.cpp
+++ b/services/tuner/TunerLnb.cpp
@@ -36,18 +36,13 @@
}
TunerLnb::~TunerLnb() {
+ close();
mLnb = nullptr;
mId = -1;
}
::ndk::ScopedAStatus TunerLnb::setCallback(
const shared_ptr<ITunerLnbCallback>& in_tunerLnbCallback) {
- if (mLnb == nullptr) {
- ALOGE("ILnb is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (in_tunerLnbCallback == nullptr) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -59,56 +54,23 @@
}
::ndk::ScopedAStatus TunerLnb::setVoltage(LnbVoltage in_voltage) {
- if (mLnb == nullptr) {
- ALOGE("ILnb is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mLnb->setVoltage(in_voltage);
}
::ndk::ScopedAStatus TunerLnb::setTone(LnbTone in_tone) {
- if (mLnb == nullptr) {
- ALOGE("ILnb is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mLnb->setTone(in_tone);
}
::ndk::ScopedAStatus TunerLnb::setSatellitePosition(LnbPosition in_position) {
- if (mLnb == nullptr) {
- ALOGE("ILnb is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mLnb->setSatellitePosition(in_position);
}
::ndk::ScopedAStatus TunerLnb::sendDiseqcMessage(const vector<uint8_t>& in_diseqcMessage) {
- if (mLnb == nullptr) {
- ALOGE("ILnb is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mLnb->sendDiseqcMessage(in_diseqcMessage);
}
::ndk::ScopedAStatus TunerLnb::close() {
- if (mLnb == nullptr) {
- ALOGE("ILnb is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
- auto res = mLnb->close();
- mLnb = nullptr;
-
- return res;
+ return mLnb->close();
}
/////////////// ILnbCallback ///////////////////////
diff --git a/services/tuner/TunerService.cpp b/services/tuner/TunerService.cpp
index 4833aaf..9a1e8bb 100644
--- a/services/tuner/TunerService.cpp
+++ b/services/tuner/TunerService.cpp
@@ -27,6 +27,7 @@
#include <android/binder_manager.h>
#include <binder/IPCThreadState.h>
#include <binder/PermissionCache.h>
+#include <cutils/properties.h>
#include <utils/Log.h>
#include <string>
@@ -51,107 +52,125 @@
namespace tv {
namespace tuner {
-shared_ptr<TunerService> TunerService::sTunerService = nullptr;
-
TunerService::TunerService() {
- if (!TunerHelper::checkTunerFeature()) {
- ALOGD("Device doesn't have tuner hardware.");
- return;
+ const string statsServiceName = string() + ITuner::descriptor + "/default";
+ ::ndk::SpAIBinder binder(AServiceManager_waitForService(statsServiceName.c_str()));
+ mTuner = ITuner::fromBinder(binder);
+ ALOGE_IF(mTuner == nullptr, "Failed to get Tuner HAL Service");
+
+ mTunerVersion = TUNER_HAL_VERSION_2_0;
+ if (mTuner->getInterfaceVersion(&mTunerVersion).isOk()) {
+ // Tuner AIDL HAL version 1 will be Tuner HAL 2.0
+ mTunerVersion = (mTunerVersion + 1) << 16;
}
+ // Register the tuner resources to TRM.
updateTunerResources();
}
-TunerService::~TunerService() {}
+TunerService::~TunerService() {
+ mTuner = nullptr;
+}
binder_status_t TunerService::instantiate() {
- sTunerService = ::ndk::SharedRefBase::make<TunerService>();
- return AServiceManager_addService(sTunerService->asBinder().get(), getServiceName());
-}
-
-shared_ptr<TunerService> TunerService::getTunerService() {
- return sTunerService;
-}
-
-bool TunerService::hasITuner() {
- ALOGV("hasITuner");
- if (mTuner != nullptr) {
- return true;
+ shared_ptr<TunerService> tunerService = ::ndk::SharedRefBase::make<TunerService>();
+ bool lazyHal = property_get_bool("ro.tuner.lazyhal", false);
+ if (lazyHal) {
+ return AServiceManager_registerLazyService(tunerService->asBinder().get(),
+ getServiceName());
}
- const string statsServiceName = string() + ITuner::descriptor + "/default";
- if (AServiceManager_isDeclared(statsServiceName.c_str())) {
- ::ndk::SpAIBinder binder(AServiceManager_waitForService(statsServiceName.c_str()));
- mTuner = ITuner::fromBinder(binder);
- } else {
- mTuner = nullptr;
- ALOGE("Failed to get Tuner HAL Service");
- return false;
- }
-
- mTunerVersion = TUNER_HAL_VERSION_2_0;
- // TODO: Enable this after Tuner HAL is frozen.
- // if (mTuner->getInterfaceVersion(&mTunerVersion).isOk()) {
- // // Tuner AIDL HAL version 1 will be Tuner HAL 2.0
- // mTunerVersion = (mTunerVersion + 1) << 16;
- //}
-
- return true;
+ return AServiceManager_addService(tunerService->asBinder().get(), getServiceName());
}
-::ndk::ScopedAStatus TunerService::openDemux(int32_t /* in_demuxHandle */,
+::ndk::ScopedAStatus TunerService::openDemux(int32_t in_demuxHandle,
shared_ptr<ITunerDemux>* _aidl_return) {
ALOGV("openDemux");
- if (!hasITuner()) {
+ shared_ptr<IDemux> demux;
+ bool fallBackToOpenDemux = false;
+ vector<int32_t> ids;
+
+ if (mTunerVersion <= TUNER_HAL_VERSION_2_0) {
+ fallBackToOpenDemux = true;
+ } else {
+ mTuner->getDemuxIds(&ids);
+ if (ids.size() == 0) {
+ fallBackToOpenDemux = true;
+ }
+ }
+
+ if (fallBackToOpenDemux) {
+ auto status = mTuner->openDemux(&ids, &demux);
+ if (status.isOk()) {
+ *_aidl_return = ::ndk::SharedRefBase::make<TunerDemux>(demux, ids[0],
+ this->ref<TunerService>());
+ }
+ return status;
+ } else {
+ int id = TunerHelper::getResourceIdFromHandle(in_demuxHandle, DEMUX);
+ auto status = mTuner->openDemuxById(id, &demux);
+ if (status.isOk()) {
+ *_aidl_return =
+ ::ndk::SharedRefBase::make<TunerDemux>(demux, id, this->ref<TunerService>());
+ }
+ return status;
+ }
+}
+
+::ndk::ScopedAStatus TunerService::getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) {
+ if (mTunerVersion <= TUNER_HAL_VERSION_2_0) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::UNAVAILABLE));
}
- vector<int32_t> id;
- shared_ptr<IDemux> demux;
- auto status = mTuner->openDemux(&id, &demux);
- if (status.isOk()) {
- *_aidl_return = ::ndk::SharedRefBase::make<TunerDemux>(demux, id[0]);
+ int id = TunerHelper::getResourceIdFromHandle(in_demuxHandle, DEMUX);
+ return mTuner->getDemuxInfo(id, _aidl_return);
+}
+
+::ndk::ScopedAStatus TunerService::getDemuxInfoList(vector<DemuxInfo>* _aidl_return) {
+ if (mTunerVersion <= TUNER_HAL_VERSION_2_0) {
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(Result::UNAVAILABLE));
+ }
+ vector<DemuxInfo> demuxInfoList;
+ vector<int32_t> ids;
+ auto status = mTuner->getDemuxIds(&ids);
+ if (!status.isOk()) {
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(Result::UNAVAILABLE));
}
- return status;
+ for (int i = 0; i < ids.size(); i++) {
+ DemuxInfo demuxInfo;
+ auto res = mTuner->getDemuxInfo(ids[i], &demuxInfo);
+ if (!res.isOk()) {
+ continue;
+ }
+ demuxInfoList.push_back(demuxInfo);
+ }
+
+ if (demuxInfoList.size() > 0) {
+ *_aidl_return = demuxInfoList;
+ return ::ndk::ScopedAStatus::ok();
+ } else {
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(Result::UNAVAILABLE));
+ }
}
::ndk::ScopedAStatus TunerService::getDemuxCaps(DemuxCapabilities* _aidl_return) {
ALOGV("getDemuxCaps");
- if (!hasITuner()) {
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mTuner->getDemuxCaps(_aidl_return);
}
::ndk::ScopedAStatus TunerService::getFrontendIds(vector<int32_t>* ids) {
- if (!hasITuner()) {
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mTuner->getFrontendIds(ids);
}
::ndk::ScopedAStatus TunerService::getFrontendInfo(int32_t id, FrontendInfo* _aidl_return) {
- if (!hasITuner()) {
- ALOGE("ITuner service is not init.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mTuner->getFrontendInfo(id, _aidl_return);
}
::ndk::ScopedAStatus TunerService::openFrontend(int32_t frontendHandle,
shared_ptr<ITunerFrontend>* _aidl_return) {
- if (!hasITuner()) {
- ALOGE("ITuner service is not init.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
int id = TunerHelper::getResourceIdFromHandle(frontendHandle, FRONTEND);
shared_ptr<IFrontend> frontend;
auto status = mTuner->openFrontendById(id, &frontend);
@@ -163,12 +182,6 @@
}
::ndk::ScopedAStatus TunerService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
- if (!hasITuner()) {
- ALOGD("get ITuner failed");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
shared_ptr<ILnb> lnb;
int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
auto status = mTuner->openLnbById(id, &lnb);
@@ -181,12 +194,6 @@
::ndk::ScopedAStatus TunerService::openLnbByName(const string& lnbName,
shared_ptr<ITunerLnb>* _aidl_return) {
- if (!hasITuner()) {
- ALOGE("get ITuner failed");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
vector<int32_t> id;
shared_ptr<ILnb> lnb;
auto status = mTuner->openLnbByName(lnbName, &id, &lnb);
@@ -199,12 +206,6 @@
::ndk::ScopedAStatus TunerService::openDescrambler(int32_t /*descramblerHandle*/,
shared_ptr<ITunerDescrambler>* _aidl_return) {
- if (!hasITuner()) {
- ALOGD("get ITuner failed");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
shared_ptr<IDescrambler> descrambler;
// int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
auto status = mTuner->openDescrambler(&descrambler);
@@ -216,7 +217,6 @@
}
::ndk::ScopedAStatus TunerService::getTunerHalVersion(int* _aidl_return) {
- hasITuner();
*_aidl_return = mTunerVersion;
return ::ndk::ScopedAStatus::ok();
}
@@ -224,12 +224,6 @@
::ndk::ScopedAStatus TunerService::openSharedFilter(const string& in_filterToken,
const shared_ptr<ITunerFilterCallback>& in_cb,
shared_ptr<ITunerFilter>* _aidl_return) {
- if (!hasITuner()) {
- ALOGE("get ITuner failed");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (!PermissionCache::checkCallingPermission(sSharedFilterPermission)) {
ALOGE("Request requires android.permission.ACCESS_TV_SHARED_FILTER");
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -260,35 +254,22 @@
return ::ndk::ScopedAStatus::ok();
}
-::ndk::ScopedAStatus TunerService::setLna(bool bEnable) {
- if (!hasITuner()) {
- ALOGD("get ITuner failed");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
+::ndk::ScopedAStatus TunerService::isLnaSupported(bool* _aidl_return) {
+ ALOGV("isLnaSupported");
+ return mTuner->isLnaSupported(_aidl_return);
+}
+::ndk::ScopedAStatus TunerService::setLna(bool bEnable) {
return mTuner->setLna(bEnable);
}
::ndk::ScopedAStatus TunerService::setMaxNumberOfFrontends(FrontendType in_frontendType,
int32_t in_maxNumber) {
- if (!hasITuner()) {
- ALOGD("get ITuner failed");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mTuner->setMaxNumberOfFrontends(in_frontendType, in_maxNumber);
}
::ndk::ScopedAStatus TunerService::getMaxNumberOfFrontends(FrontendType in_frontendType,
int32_t* _aidl_return) {
- if (!hasITuner()) {
- ALOGD("get ITuner failed");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mTuner->getMaxNumberOfFrontends(in_frontendType, _aidl_return);
}
@@ -309,12 +290,9 @@
}
void TunerService::updateTunerResources() {
- if (!hasITuner()) {
- ALOGE("Failed to updateTunerResources");
- return;
- }
-
- TunerHelper::updateTunerResources(getTRMFrontendInfos(), getTRMLnbHandles());
+ TunerHelper::updateTunerResources(getTRMFrontendInfos(),
+ getTRMDemuxInfos(),
+ getTRMLnbHandles());
}
vector<TunerFrontendInfo> TunerService::getTRMFrontendInfos() {
@@ -342,6 +320,32 @@
return infos;
}
+vector<TunerDemuxInfo> TunerService::getTRMDemuxInfos() {
+ vector<TunerDemuxInfo> infos;
+ vector<int32_t> ids;
+
+ if (mTunerVersion <= TUNER_HAL_VERSION_2_0) {
+ return infos;
+ }
+
+ auto status = mTuner->getDemuxIds(&ids);
+ if (!status.isOk()) {
+ return infos;
+ }
+
+ for (int i = 0; i < ids.size(); i++) {
+ DemuxInfo demuxInfo;
+ mTuner->getDemuxInfo(ids[i], &demuxInfo);
+ TunerDemuxInfo tunerDemuxInfo{
+ .handle = TunerHelper::getResourceHandleFromId((int)ids[i], DEMUX),
+ .filterTypes = static_cast<int>(demuxInfo.filterTypes)
+ };
+ infos.push_back(tunerDemuxInfo);
+ }
+
+ return infos;
+}
+
vector<int32_t> TunerService::getTRMLnbHandles() {
vector<int32_t> lnbHandles;
if (mTuner != nullptr) {
diff --git a/services/tuner/TunerService.h b/services/tuner/TunerService.h
index 7fc2aa4..190ccd4 100644
--- a/services/tuner/TunerService.h
+++ b/services/tuner/TunerService.h
@@ -32,6 +32,7 @@
using ::aidl::android::hardware::tv::tuner::DemuxCapabilities;
using ::aidl::android::hardware::tv::tuner::DemuxFilterEvent;
using ::aidl::android::hardware::tv::tuner::DemuxFilterStatus;
+using ::aidl::android::hardware::tv::tuner::DemuxInfo;
using ::aidl::android::hardware::tv::tuner::FrontendInfo;
using ::aidl::android::hardware::tv::tuner::FrontendType;
using ::aidl::android::hardware::tv::tuner::ITuner;
@@ -71,12 +72,15 @@
::ndk::ScopedAStatus openDemux(int32_t in_demuxHandle,
shared_ptr<ITunerDemux>* _aidl_return) override;
::ndk::ScopedAStatus getDemuxCaps(DemuxCapabilities* _aidl_return) override;
+ ::ndk::ScopedAStatus getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) override;
+ ::ndk::ScopedAStatus getDemuxInfoList(vector<DemuxInfo>* _aidl_return) override;
::ndk::ScopedAStatus openDescrambler(int32_t in_descramblerHandle,
shared_ptr<ITunerDescrambler>* _aidl_return) override;
::ndk::ScopedAStatus getTunerHalVersion(int32_t* _aidl_return) override;
::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
const shared_ptr<ITunerFilterCallback>& in_cb,
shared_ptr<ITunerFilter>* _aidl_return) override;
+ ::ndk::ScopedAStatus isLnaSupported(bool* _aidl_return) override;
::ndk::ScopedAStatus setLna(bool in_bEnable) override;
::ndk::ScopedAStatus setMaxNumberOfFrontends(FrontendType in_frontendType,
int32_t in_maxNumber) override;
@@ -86,20 +90,16 @@
string addFilterToShared(const shared_ptr<TunerFilter>& sharedFilter);
void removeSharedFilter(const shared_ptr<TunerFilter>& sharedFilter);
- static shared_ptr<TunerService> getTunerService();
-
private:
- bool hasITuner();
void updateTunerResources();
vector<TunerFrontendInfo> getTRMFrontendInfos();
+ vector<TunerDemuxInfo> getTRMDemuxInfos();
vector<int32_t> getTRMLnbHandles();
shared_ptr<ITuner> mTuner;
int mTunerVersion = TUNER_HAL_VERSION_UNKNOWN;
Mutex mSharedFiltersLock;
map<string, shared_ptr<TunerFilter>> mSharedFilters;
-
- static shared_ptr<TunerService> sTunerService;
};
} // namespace tuner
diff --git a/services/tuner/TunerTimeFilter.cpp b/services/tuner/TunerTimeFilter.cpp
index 73cd6b4..385a063 100644
--- a/services/tuner/TunerTimeFilter.cpp
+++ b/services/tuner/TunerTimeFilter.cpp
@@ -35,37 +35,19 @@
}
TunerTimeFilter::~TunerTimeFilter() {
+ close();
mTimeFilter = nullptr;
}
::ndk::ScopedAStatus TunerTimeFilter::setTimeStamp(int64_t timeStamp) {
- if (mTimeFilter == nullptr) {
- ALOGE("ITimeFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mTimeFilter->setTimeStamp(timeStamp);
}
::ndk::ScopedAStatus TunerTimeFilter::clearTimeStamp() {
- if (mTimeFilter == nullptr) {
- ALOGE("ITimeFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
return mTimeFilter->clearTimeStamp();
}
::ndk::ScopedAStatus TunerTimeFilter::getSourceTime(int64_t* _aidl_return) {
- if (mTimeFilter == nullptr) {
- *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
- ALOGE("ITimeFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
auto status = mTimeFilter->getSourceTime(_aidl_return);
if (!status.isOk()) {
*_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
@@ -74,13 +56,6 @@
}
::ndk::ScopedAStatus TunerTimeFilter::getTimeStamp(int64_t* _aidl_return) {
- if (mTimeFilter == nullptr) {
- *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
- ALOGE("ITimeFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
auto status = mTimeFilter->getTimeStamp(_aidl_return);
if (!status.isOk()) {
*_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
@@ -89,16 +64,7 @@
}
::ndk::ScopedAStatus TunerTimeFilter::close() {
- if (mTimeFilter == nullptr) {
- ALOGE("ITimeFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
- auto status = mTimeFilter->close();
- mTimeFilter = nullptr;
-
- return status;
+ return mTimeFilter->close();
}
} // namespace tuner
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
index 2c01c4e..cafe075 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerDvr.aidl
@@ -66,4 +66,9 @@
* close the DVR instance to release resource for DVR.
*/
void close();
+
+ /**
+ * Set status check time interval.
+ */
+ void setStatusCheckIntervalHint(in long milliseconds);
}
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
index b8084ab..932133e 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
@@ -17,6 +17,7 @@
package android.media.tv.tuner;
import android.hardware.tv.tuner.DemuxCapabilities;
+import android.hardware.tv.tuner.DemuxInfo;
import android.hardware.tv.tuner.FrontendInfo;
import android.hardware.tv.tuner.FrontendType;
import android.media.tv.tuner.ITunerDemux;
@@ -77,6 +78,21 @@
ITunerDemux openDemux(in int demuxHandle);
/**
+ * Retrieve the supported filter main types
+ *
+ * @param demuxHandle the handle of the demux to query demux info for
+ * @return the demux info
+ */
+ DemuxInfo getDemuxInfo(in int demuxHandle);
+
+ /**
+ * Retrieve the list of demux info for all the demuxes on the system
+ *
+ * @return the list of DemuxInfo
+ */
+ DemuxInfo[] getDemuxInfoList();
+
+ /**
* Retrieve the Tuner Demux capabilities.
*
* @return the demux’s capabilities.
@@ -107,6 +123,13 @@
ITunerFilter openSharedFilter(in String filterToken, in ITunerFilterCallback cb);
/**
+ * Is Low Noise Amplifier (LNA) supported by the Tuner.
+ *
+ * @return {@code true} if supported, otherwise {@code false}.
+ */
+ boolean isLnaSupported();
+
+ /**
* Enable or Disable Low Noise Amplifier (LNA).
*
* @param bEnable enable Lna or not.
diff --git a/services/tuner/hidl/TunerHidlDemux.cpp b/services/tuner/hidl/TunerHidlDemux.cpp
index a8151d2..bbb7782 100644
--- a/services/tuner/hidl/TunerHidlDemux.cpp
+++ b/services/tuner/hidl/TunerHidlDemux.cpp
@@ -20,6 +20,7 @@
#include "TunerHidlDvr.h"
#include "TunerHidlFilter.h"
+#include "TunerHidlService.h"
#include "TunerHidlTimeFilter.h"
using ::aidl::android::hardware::tv::tuner::DemuxFilterSubType;
@@ -42,23 +43,20 @@
namespace tv {
namespace tuner {
-TunerHidlDemux::TunerHidlDemux(sp<IDemux> demux, int id) {
+TunerHidlDemux::TunerHidlDemux(const sp<IDemux> demux, const int id,
+ const shared_ptr<TunerHidlService> tuner) {
mDemux = demux;
mDemuxId = id;
+ mTunerService = tuner;
}
TunerHidlDemux::~TunerHidlDemux() {
mDemux = nullptr;
+ mTunerService = nullptr;
}
::ndk::ScopedAStatus TunerHidlDemux::setFrontendDataSource(
const shared_ptr<ITunerFrontend>& in_frontend) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(HidlResult::UNAVAILABLE));
- }
-
int frontendId;
in_frontend->getFrontendId(&frontendId);
HidlResult res = mDemux->setFrontendDataSource(frontendId);
@@ -69,12 +67,6 @@
}
::ndk::ScopedAStatus TunerHidlDemux::setFrontendDataSourceById(int frontendId) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(HidlResult::UNAVAILABLE));
- }
-
HidlResult res = mDemux->setFrontendDataSource(frontendId);
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -86,12 +78,6 @@
int32_t in_bufferSize,
const shared_ptr<ITunerFilterCallback>& in_cb,
shared_ptr<ITunerFilter>* _aidl_return) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(HidlResult::UNAVAILABLE));
- }
-
HidlDemuxFilterMainType mainType = static_cast<HidlDemuxFilterMainType>(in_type.mainType);
HidlDemuxFilterType filterType{
.mainType = mainType,
@@ -132,17 +118,12 @@
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
}
- *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlFilter>(filterSp, filterCb, in_type);
+ *_aidl_return =
+ ::ndk::SharedRefBase::make<TunerHidlFilter>(filterSp, filterCb, in_type, mTunerService);
return ::ndk::ScopedAStatus::ok();
}
::ndk::ScopedAStatus TunerHidlDemux::openTimeFilter(shared_ptr<ITunerTimeFilter>* _aidl_return) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(HidlResult::UNAVAILABLE));
- }
-
HidlResult status;
sp<HidlITimeFilter> filterSp;
mDemux->openTimeFilter([&](HidlResult r, const sp<HidlITimeFilter>& filter) {
@@ -159,12 +140,6 @@
::ndk::ScopedAStatus TunerHidlDemux::getAvSyncHwId(const shared_ptr<ITunerFilter>& tunerFilter,
int32_t* _aidl_return) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(HidlResult::UNAVAILABLE));
- }
-
uint32_t avSyncHwId;
HidlResult res;
sp<HidlIFilter> halFilter = static_cast<TunerHidlFilter*>(tunerFilter.get())->getHalFilter();
@@ -181,12 +156,6 @@
}
::ndk::ScopedAStatus TunerHidlDemux::getAvSyncTime(int32_t avSyncHwId, int64_t* _aidl_return) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(HidlResult::UNAVAILABLE));
- }
-
uint64_t time;
HidlResult res;
mDemux->getAvSyncTime(static_cast<uint32_t>(avSyncHwId), [&](HidlResult r, uint64_t ts) {
@@ -204,12 +173,6 @@
::ndk::ScopedAStatus TunerHidlDemux::openDvr(DvrType in_dvbType, int32_t in_bufferSize,
const shared_ptr<ITunerDvrCallback>& in_cb,
shared_ptr<ITunerDvr>* _aidl_return) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(HidlResult::UNAVAILABLE));
- }
-
HidlResult res;
sp<HidlIDvrCallback> callback = new TunerHidlDvr::DvrCallback(in_cb);
sp<HidlIDvr> hidlDvr;
@@ -228,12 +191,6 @@
}
::ndk::ScopedAStatus TunerHidlDemux::connectCiCam(int32_t ciCamId) {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(HidlResult::UNAVAILABLE));
- }
-
HidlResult res = mDemux->connectCiCam(static_cast<uint32_t>(ciCamId));
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -242,12 +199,6 @@
}
::ndk::ScopedAStatus TunerHidlDemux::disconnectCiCam() {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(HidlResult::UNAVAILABLE));
- }
-
HidlResult res = mDemux->disconnectCiCam();
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -256,15 +207,7 @@
}
::ndk::ScopedAStatus TunerHidlDemux::close() {
- if (mDemux == nullptr) {
- ALOGE("IDemux is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(HidlResult::UNAVAILABLE));
- }
-
HidlResult res = mDemux->close();
- mDemux = nullptr;
-
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
}
diff --git a/services/tuner/hidl/TunerHidlDemux.h b/services/tuner/hidl/TunerHidlDemux.h
index d535da6..94a715e 100644
--- a/services/tuner/hidl/TunerHidlDemux.h
+++ b/services/tuner/hidl/TunerHidlDemux.h
@@ -37,9 +37,12 @@
namespace tv {
namespace tuner {
+class TunerHidlService;
+
class TunerHidlDemux : public BnTunerDemux {
public:
- TunerHidlDemux(sp<HidlIDemux> demux, int demuxId);
+ TunerHidlDemux(const sp<HidlIDemux> demux, const int demuxId,
+ const shared_ptr<TunerHidlService> tuner);
virtual ~TunerHidlDemux();
::ndk::ScopedAStatus setFrontendDataSource(
@@ -64,6 +67,7 @@
private:
sp<HidlIDemux> mDemux;
int mDemuxId;
+ shared_ptr<TunerHidlService> mTunerService;
};
} // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlDescrambler.cpp b/services/tuner/hidl/TunerHidlDescrambler.cpp
index dd8cd9c..51b7ede 100644
--- a/services/tuner/hidl/TunerHidlDescrambler.cpp
+++ b/services/tuner/hidl/TunerHidlDescrambler.cpp
@@ -45,12 +45,6 @@
::ndk::ScopedAStatus TunerHidlDescrambler::setDemuxSource(
const shared_ptr<ITunerDemux>& in_tunerDemux) {
- if (mDescrambler == nullptr) {
- ALOGE("IDescrambler is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res = mDescrambler->setDemuxSource(
static_cast<TunerHidlDemux*>(in_tunerDemux.get())->getId());
if (res != HidlResult::SUCCESS) {
@@ -60,12 +54,6 @@
}
::ndk::ScopedAStatus TunerHidlDescrambler::setKeyToken(const vector<uint8_t>& in_keyToken) {
- if (mDescrambler == nullptr) {
- ALOGE("IDescrambler is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res = mDescrambler->setKeyToken(in_keyToken);
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -75,12 +63,6 @@
::ndk::ScopedAStatus TunerHidlDescrambler::addPid(
const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
- if (mDescrambler == nullptr) {
- ALOGE("IDescrambler is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
sp<HidlIFilter> halFilter =
(in_optionalSourceFilter == nullptr)
? nullptr
@@ -94,12 +76,6 @@
::ndk::ScopedAStatus TunerHidlDescrambler::removePid(
const DemuxPid& in_pid, const shared_ptr<ITunerFilter>& in_optionalSourceFilter) {
- if (mDescrambler == nullptr) {
- ALOGE("IDescrambler is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
sp<HidlIFilter> halFilter =
(in_optionalSourceFilter == nullptr)
? nullptr
@@ -112,15 +88,7 @@
}
::ndk::ScopedAStatus TunerHidlDescrambler::close() {
- if (mDescrambler == nullptr) {
- ALOGE("IDescrambler is not initialized.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res = mDescrambler->close();
- mDescrambler = nullptr;
-
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
}
diff --git a/services/tuner/hidl/TunerHidlDvr.cpp b/services/tuner/hidl/TunerHidlDvr.cpp
index 3ea1eb1..285e32b 100644
--- a/services/tuner/hidl/TunerHidlDvr.cpp
+++ b/services/tuner/hidl/TunerHidlDvr.cpp
@@ -54,12 +54,6 @@
}
::ndk::ScopedAStatus TunerHidlDvr::getQueueDesc(AidlMQDesc* _aidl_return) {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
MQDesc dvrMQDesc;
HidlResult res;
mDvr->getQueueDesc([&](HidlResult r, const MQDesc& desc) {
@@ -77,12 +71,6 @@
}
::ndk::ScopedAStatus TunerHidlDvr::configure(const DvrSettings& in_settings) {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res = mDvr->configure(getHidlDvrSettings(in_settings));
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -91,12 +79,6 @@
}
::ndk::ScopedAStatus TunerHidlDvr::attachFilter(const shared_ptr<ITunerFilter>& in_filter) {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (in_filter == nullptr) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -116,12 +98,6 @@
}
::ndk::ScopedAStatus TunerHidlDvr::detachFilter(const shared_ptr<ITunerFilter>& in_filter) {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (in_filter == nullptr) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -141,12 +117,6 @@
}
::ndk::ScopedAStatus TunerHidlDvr::start() {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res = mDvr->start();
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -155,12 +125,6 @@
}
::ndk::ScopedAStatus TunerHidlDvr::stop() {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res = mDvr->stop();
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -169,12 +133,6 @@
}
::ndk::ScopedAStatus TunerHidlDvr::flush() {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res = mDvr->flush();
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -183,21 +141,18 @@
}
::ndk::ScopedAStatus TunerHidlDvr::close() {
- if (mDvr == nullptr) {
- ALOGE("IDvr is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res = mDvr->close();
- mDvr = nullptr;
-
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
}
return ::ndk::ScopedAStatus::ok();
}
+::ndk::ScopedAStatus TunerHidlDvr::setStatusCheckIntervalHint(int64_t /* in_milliseconds */) {
+ HidlResult res = HidlResult::UNAVAILABLE;
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
+}
+
HidlDvrSettings TunerHidlDvr::getHidlDvrSettings(const DvrSettings& settings) {
HidlDvrSettings s;
switch (mType) {
diff --git a/services/tuner/hidl/TunerHidlDvr.h b/services/tuner/hidl/TunerHidlDvr.h
index a280ff7..aa86b14 100644
--- a/services/tuner/hidl/TunerHidlDvr.h
+++ b/services/tuner/hidl/TunerHidlDvr.h
@@ -63,6 +63,7 @@
::ndk::ScopedAStatus stop() override;
::ndk::ScopedAStatus flush() override;
::ndk::ScopedAStatus close() override;
+ ::ndk::ScopedAStatus setStatusCheckIntervalHint(int64_t in_milliseconds) override;
struct DvrCallback : public HidlIDvrCallback {
DvrCallback(const shared_ptr<ITunerDvrCallback> tunerDvrCallback)
diff --git a/services/tuner/hidl/TunerHidlFilter.cpp b/services/tuner/hidl/TunerHidlFilter.cpp
index c82732b..617622d 100644
--- a/services/tuner/hidl/TunerHidlFilter.cpp
+++ b/services/tuner/hidl/TunerHidlFilter.cpp
@@ -92,31 +92,32 @@
namespace tv {
namespace tuner {
-TunerHidlFilter::TunerHidlFilter(sp<HidlIFilter> filter, sp<FilterCallback> cb,
- DemuxFilterType type)
+TunerHidlFilter::TunerHidlFilter(const sp<HidlIFilter> filter, const sp<FilterCallback> cb,
+ const DemuxFilterType type,
+ const shared_ptr<TunerHidlService> tuner)
: mFilter(filter),
mType(type),
mStarted(false),
mShared(false),
mClientPid(-1),
- mFilterCallback(cb) {
+ mFilterCallback(cb),
+ mTunerService(tuner) {
mFilter_1_1 = ::android::hardware::tv::tuner::V1_1::IFilter::castFrom(filter);
}
TunerHidlFilter::~TunerHidlFilter() {
- Mutex::Autolock _l(mLock);
- mFilter = nullptr;
- mFilter_1_1 = nullptr;
+ freeSharedFilterToken("");
+
+ {
+ Mutex::Autolock _l(mLock);
+ mFilter = nullptr;
+ mFilter_1_1 = nullptr;
+ mTunerService = nullptr;
+ }
}
::ndk::ScopedAStatus TunerHidlFilter::getQueueDesc(AidlMQDesc* _aidl_return) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
IPCThreadState* ipc = IPCThreadState::self();
int32_t callingPid = ipc->getCallingPid();
@@ -146,12 +147,6 @@
::ndk::ScopedAStatus TunerHidlFilter::getId(int32_t* _aidl_return) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
ALOGD("%s is called on a shared filter", __FUNCTION__);
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -200,12 +195,6 @@
::ndk::ScopedAStatus TunerHidlFilter::configure(const DemuxFilterSettings& in_settings) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
ALOGD("%s is called on a shared filter", __FUNCTION__);
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -318,12 +307,6 @@
::ndk::ScopedAStatus TunerHidlFilter::setDataSource(const shared_ptr<ITunerFilter>& filter) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (filter == nullptr) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -378,12 +361,6 @@
::ndk::ScopedAStatus TunerHidlFilter::releaseAvHandle(const NativeHandle& in_handle,
int64_t in_avDataId) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
ALOGD("%s is called on a shared filter", __FUNCTION__);
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -391,7 +368,7 @@
}
hidl_handle handle;
- handle.setTo(makeFromAidl(in_handle), true);
+ handle.setTo(makeFromAidl(in_handle));
HidlResult res = mFilter->releaseAvHandle(handle, in_avDataId);
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
@@ -407,12 +384,6 @@
::ndk::ScopedAStatus TunerHidlFilter::start() {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
IPCThreadState* ipc = IPCThreadState::self();
int32_t callingPid = ipc->getCallingPid();
@@ -434,12 +405,6 @@
::ndk::ScopedAStatus TunerHidlFilter::stop() {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
IPCThreadState* ipc = IPCThreadState::self();
int32_t callingPid = ipc->getCallingPid();
@@ -461,12 +426,6 @@
::ndk::ScopedAStatus TunerHidlFilter::flush() {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
IPCThreadState* ipc = IPCThreadState::self();
int32_t callingPid = ipc->getCallingPid();
@@ -487,12 +446,6 @@
::ndk::ScopedAStatus TunerHidlFilter::close() {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared) {
IPCThreadState* ipc = IPCThreadState::self();
int32_t callingPid = ipc->getCallingPid();
@@ -501,7 +454,7 @@
mFilterCallback->sendSharedFilterStatus(STATUS_INACCESSIBLE);
mFilterCallback->detachSharedFilterCallback();
}
- TunerHidlService::getTunerService()->removeSharedFilter(this->ref<TunerHidlFilter>());
+ mTunerService->removeSharedFilter(this->ref<TunerHidlFilter>());
} else {
// Calling from shared process, do not really close this filter.
if (mFilterCallback != nullptr) {
@@ -516,8 +469,6 @@
mFilterCallback->detachCallbacks();
}
HidlResult res = mFilter->close();
- mFilter = nullptr;
- mFilter_1_1 = nullptr;
mStarted = false;
mShared = false;
mClientPid = -1;
@@ -531,12 +482,6 @@
::ndk::ScopedAStatus TunerHidlFilter::acquireSharedFilterToken(string* _aidl_return) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (mShared || mStarted) {
ALOGD("create SharedFilter in wrong state");
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -545,8 +490,7 @@
IPCThreadState* ipc = IPCThreadState::self();
mClientPid = ipc->getCallingPid();
- string token =
- TunerHidlService::getTunerService()->addFilterToShared(this->ref<TunerHidlFilter>());
+ string token = mTunerService->addFilterToShared(this->ref<TunerHidlFilter>());
_aidl_return->assign(token);
mShared = true;
@@ -555,12 +499,6 @@
::ndk::ScopedAStatus TunerHidlFilter::freeSharedFilterToken(const string& /* in_filterToken */) {
Mutex::Autolock _l(mLock);
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (!mShared) {
// The filter is not shared or the shared filter has been closed.
return ::ndk::ScopedAStatus::ok();
@@ -571,19 +509,13 @@
mFilterCallback->detachSharedFilterCallback();
}
- TunerHidlService::getTunerService()->removeSharedFilter(this->ref<TunerHidlFilter>());
+ mTunerService->removeSharedFilter(this->ref<TunerHidlFilter>());
mShared = false;
return ::ndk::ScopedAStatus::ok();
}
::ndk::ScopedAStatus TunerHidlFilter::getFilterType(DemuxFilterType* _aidl_return) {
- if (mFilter == nullptr) {
- ALOGE("IFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
*_aidl_return = mType;
return ::ndk::ScopedAStatus::ok();
}
@@ -908,6 +840,10 @@
static_cast<uint32_t>(settings.scIndexMask.get<DemuxFilterScIndexMask::scHevc>()));
break;
}
+ case DemuxFilterScIndexMask::scVvc: {
+ // Shouldn't be here.
+ break;
+ }
}
return record;
}
diff --git a/services/tuner/hidl/TunerHidlFilter.h b/services/tuner/hidl/TunerHidlFilter.h
index 63c7a1b..a58eeca 100644
--- a/services/tuner/hidl/TunerHidlFilter.h
+++ b/services/tuner/hidl/TunerHidlFilter.h
@@ -114,6 +114,8 @@
const static int IP_V4_LENGTH = 4;
const static int IP_V6_LENGTH = 16;
+class TunerHidlService;
+
class TunerHidlFilter : public BnTunerFilter {
public:
class FilterCallback : public HidlIFilterCallback {
@@ -165,7 +167,8 @@
Mutex mCallbackLock;
};
- TunerHidlFilter(sp<HidlIFilter> filter, sp<FilterCallback> cb, DemuxFilterType type);
+ TunerHidlFilter(const sp<HidlIFilter> filter, const sp<FilterCallback> cb,
+ const DemuxFilterType type, const shared_ptr<TunerHidlService> tuner);
virtual ~TunerHidlFilter();
::ndk::ScopedAStatus getId(int32_t* _aidl_return) override;
@@ -230,6 +233,7 @@
int32_t mClientPid;
sp<FilterCallback> mFilterCallback;
Mutex mLock;
+ shared_ptr<TunerHidlService> mTunerService;
};
} // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlFrontend.cpp b/services/tuner/hidl/TunerHidlFrontend.cpp
index 03957f3..7ffb2a4 100644
--- a/services/tuner/hidl/TunerHidlFrontend.cpp
+++ b/services/tuner/hidl/TunerHidlFrontend.cpp
@@ -176,26 +176,23 @@
namespace tv {
namespace tuner {
-TunerHidlFrontend::TunerHidlFrontend(sp<HidlIFrontend> frontend, int id) {
+TunerHidlFrontend::TunerHidlFrontend(const sp<HidlIFrontend> frontend, const int id,
+ const shared_ptr<TunerHidlService> tuner) {
mFrontend = frontend;
mFrontend_1_1 = ::android::hardware::tv::tuner::V1_1::IFrontend::castFrom(mFrontend);
mId = id;
+ mTunerService = tuner;
}
TunerHidlFrontend::~TunerHidlFrontend() {
mFrontend = nullptr;
mFrontend_1_1 = nullptr;
mId = -1;
+ mTunerService = nullptr;
}
::ndk::ScopedAStatus TunerHidlFrontend::setCallback(
const shared_ptr<ITunerFrontendCallback>& tunerFrontendCallback) {
- if (mFrontend == nullptr) {
- ALOGE("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (tunerFrontendCallback == nullptr) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -211,12 +208,6 @@
}
::ndk::ScopedAStatus TunerHidlFrontend::tune(const FrontendSettings& settings) {
- if (mFrontend == nullptr) {
- ALOGE("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult status;
HidlFrontendSettings frontendSettings;
HidlFrontendSettingsExt1_1 frontendSettingsExt;
@@ -234,12 +225,6 @@
}
::ndk::ScopedAStatus TunerHidlFrontend::stopTune() {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult status = mFrontend->stopTune();
if (status == HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::ok();
@@ -250,12 +235,6 @@
::ndk::ScopedAStatus TunerHidlFrontend::scan(const FrontendSettings& settings,
FrontendScanType frontendScanType) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult status;
HidlFrontendSettings frontendSettings;
HidlFrontendSettingsExt1_1 frontendSettingsExt;
@@ -276,12 +255,6 @@
}
::ndk::ScopedAStatus TunerHidlFrontend::stopScan() {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult status = mFrontend->stopScan();
if (status == HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::ok();
@@ -291,12 +264,6 @@
}
::ndk::ScopedAStatus TunerHidlFrontend::setLnb(const shared_ptr<ITunerLnb>& lnb) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
if (lnb == nullptr) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::INVALID_ARGUMENT));
@@ -349,18 +316,8 @@
}
::ndk::ScopedAStatus TunerHidlFrontend::close() {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
- TunerHidlService::getTunerService()->removeFrontend(this->ref<TunerHidlFrontend>());
-
+ mTunerService->removeFrontend(this->ref<TunerHidlFrontend>());
HidlResult status = mFrontend->close();
- mFrontend = nullptr;
- mFrontend_1_1 = nullptr;
-
if (status != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
}
@@ -370,12 +327,6 @@
::ndk::ScopedAStatus TunerHidlFrontend::getStatus(const vector<FrontendStatusType>& in_statusTypes,
vector<FrontendStatus>* _aidl_return) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res;
vector<HidlFrontendStatus> status;
vector<HidlFrontendStatusExt1_1> statusExt;
@@ -442,11 +393,6 @@
}
void TunerHidlFrontend::setLna(bool bEnable) {
- if (mFrontend == nullptr) {
- ALOGD("IFrontend is not initialized");
- return;
- }
-
mFrontend->setLna(bEnable);
}
diff --git a/services/tuner/hidl/TunerHidlFrontend.h b/services/tuner/hidl/TunerHidlFrontend.h
index f698655..f54127b 100644
--- a/services/tuner/hidl/TunerHidlFrontend.h
+++ b/services/tuner/hidl/TunerHidlFrontend.h
@@ -64,9 +64,12 @@
namespace tv {
namespace tuner {
+class TunerHidlService;
+
class TunerHidlFrontend : public BnTunerFrontend {
public:
- TunerHidlFrontend(sp<HidlIFrontend> frontend, int id);
+ TunerHidlFrontend(const sp<HidlIFrontend> frontend, const int id,
+ const shared_ptr<TunerHidlService> tuner);
virtual ~TunerHidlFrontend();
::ndk::ScopedAStatus setCallback(
@@ -118,6 +121,7 @@
int mId;
sp<HidlIFrontend> mFrontend;
sp<::android::hardware::tv::tuner::V1_1::IFrontend> mFrontend_1_1;
+ shared_ptr<TunerHidlService> mTunerService;
};
} // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlService.cpp b/services/tuner/hidl/TunerHidlService.cpp
index 6f55f1e..6bc36be 100644
--- a/services/tuner/hidl/TunerHidlService.cpp
+++ b/services/tuner/hidl/TunerHidlService.cpp
@@ -24,6 +24,7 @@
#include <android/binder_manager.h>
#include <binder/IPCThreadState.h>
#include <binder/PermissionCache.h>
+#include <cutils/properties.h>
#include <utils/Log.h>
#include "TunerHelper.h"
@@ -46,7 +47,6 @@
using ::aidl::android::hardware::tv::tuner::FrontendIsdbtTimeInterleaveMode;
using ::aidl::android::hardware::tv::tuner::FrontendType;
using ::aidl::android::hardware::tv::tuner::Result;
-using ::aidl::android::media::tv::tunerresourcemanager::TunerFrontendInfo;
using ::android::IPCThreadState;
using ::android::PermissionCache;
using ::android::hardware::hidl_vec;
@@ -63,47 +63,9 @@
namespace tv {
namespace tuner {
-shared_ptr<TunerHidlService> TunerHidlService::sTunerService = nullptr;
-
TunerHidlService::TunerHidlService() {
- if (!TunerHelper::checkTunerFeature()) {
- ALOGD("Device doesn't have tuner hardware.");
- return;
- }
-
- updateTunerResources();
-}
-
-TunerHidlService::~TunerHidlService() {
- mOpenedFrontends.clear();
- mLnaStatus = -1;
-}
-
-binder_status_t TunerHidlService::instantiate() {
- if (HidlITuner::getService() == nullptr) {
- ALOGD("Failed to get ITuner HIDL HAL");
- return STATUS_NAME_NOT_FOUND;
- }
-
- sTunerService = ::ndk::SharedRefBase::make<TunerHidlService>();
- return AServiceManager_addService(sTunerService->asBinder().get(), getServiceName());
-}
-
-shared_ptr<TunerHidlService> TunerHidlService::getTunerService() {
- return sTunerService;
-}
-
-bool TunerHidlService::hasITuner() {
- ALOGV("hasITuner");
- if (mTuner != nullptr) {
- return true;
- }
-
mTuner = HidlITuner::getService();
- if (mTuner == nullptr) {
- ALOGE("Failed to get ITuner service");
- return false;
- }
+ ALOGE_IF(mTuner == nullptr, "Failed to get ITuner service");
mTunerVersion = TUNER_HAL_VERSION_1_0;
mTuner_1_1 = ::android::hardware::tv::tuner::V1_1::ITuner::castFrom(mTuner);
@@ -113,23 +75,35 @@
ALOGD("Failed to get ITuner_1_1 service");
}
- return true;
+ // Register tuner resources to TRM.
+ updateTunerResources();
}
-bool TunerHidlService::hasITuner_1_1() {
- ALOGV("hasITuner_1_1");
- hasITuner();
- return (mTunerVersion == TUNER_HAL_VERSION_1_1);
+TunerHidlService::~TunerHidlService() {
+ mOpenedFrontends.clear();
+ mLnaStatus = -1;
+ mTuner = nullptr;
+ mTuner_1_1 = nullptr;
+}
+
+binder_status_t TunerHidlService::instantiate() {
+ if (HidlITuner::getService() == nullptr) {
+ ALOGD("Failed to get ITuner HIDL HAL");
+ return STATUS_NAME_NOT_FOUND;
+ }
+
+ shared_ptr<TunerHidlService> tunerService = ::ndk::SharedRefBase::make<TunerHidlService>();
+ bool lazyHal = property_get_bool("ro.tuner.lazyhal", false);
+ if (lazyHal) {
+ return AServiceManager_registerLazyService(tunerService->asBinder().get(),
+ getServiceName());
+ }
+ return AServiceManager_addService(tunerService->asBinder().get(), getServiceName());
}
::ndk::ScopedAStatus TunerHidlService::openDemux(int32_t /* in_demuxHandle */,
shared_ptr<ITunerDemux>* _aidl_return) {
ALOGV("openDemux");
- if (!hasITuner()) {
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res;
uint32_t id;
sp<IDemux> demuxSp = nullptr;
@@ -140,7 +114,8 @@
ALOGD("open demux, id = %d", demuxId);
});
if (res == HidlResult::SUCCESS) {
- *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlDemux>(demuxSp, id);
+ *_aidl_return = ::ndk::SharedRefBase::make<TunerHidlDemux>(demuxSp, id,
+ this->ref<TunerHidlService>());
return ::ndk::ScopedAStatus::ok();
}
@@ -148,13 +123,22 @@
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
}
+::ndk::ScopedAStatus TunerHidlService::getDemuxInfo(int32_t /* in_demuxHandle */,
+ DemuxInfo* /* _aidl_return */) {
+ ALOGE("getDemuxInfo is not supported");
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(HidlResult::UNAVAILABLE));
+}
+
+::ndk::ScopedAStatus TunerHidlService::getDemuxInfoList(
+ vector<DemuxInfo>* /* _aidle_return */) {
+ ALOGE("getDemuxInfoList is not supported");
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(HidlResult::UNAVAILABLE));
+}
+
::ndk::ScopedAStatus TunerHidlService::getDemuxCaps(DemuxCapabilities* _aidl_return) {
ALOGV("getDemuxCaps");
- if (!hasITuner()) {
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res;
HidlDemuxCapabilities caps;
mTuner->getDemuxCaps([&](HidlResult r, const HidlDemuxCapabilities& demuxCaps) {
@@ -171,11 +155,6 @@
}
::ndk::ScopedAStatus TunerHidlService::getFrontendIds(vector<int32_t>* ids) {
- if (!hasITuner()) {
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
hidl_vec<HidlFrontendId> feIds;
HidlResult res = getHidlFrontendIds(feIds);
if (res != HidlResult::SUCCESS) {
@@ -188,12 +167,6 @@
}
::ndk::ScopedAStatus TunerHidlService::getFrontendInfo(int32_t id, FrontendInfo* _aidl_return) {
- if (!hasITuner()) {
- ALOGE("ITuner service is not init.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlFrontendInfo info;
HidlResult res = getHidlFrontendInfo(id, info);
if (res != HidlResult::SUCCESS) {
@@ -202,7 +175,7 @@
HidlFrontendDtmbCapabilities dtmbCaps;
if (static_cast<HidlFrontendType>(info.type) == HidlFrontendType::DTMB) {
- if (!hasITuner_1_1()) {
+ if (mTuner_1_1 == nullptr) {
ALOGE("ITuner_1_1 service is not init.");
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::UNAVAILABLE));
@@ -224,12 +197,6 @@
::ndk::ScopedAStatus TunerHidlService::openFrontend(int32_t frontendHandle,
shared_ptr<ITunerFrontend>* _aidl_return) {
- if (!hasITuner()) {
- ALOGE("ITuner service is not init.");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult status;
sp<HidlIFrontend> frontend;
int id = TunerHelper::getResourceIdFromHandle(frontendHandle, FRONTEND);
@@ -241,8 +208,8 @@
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
}
- shared_ptr<TunerHidlFrontend> tunerFrontend =
- ::ndk::SharedRefBase::make<TunerHidlFrontend>(frontend, id);
+ shared_ptr<TunerHidlFrontend> tunerFrontend = ::ndk::SharedRefBase::make<TunerHidlFrontend>(
+ frontend, id, this->ref<TunerHidlService>());
if (mLnaStatus != -1) {
tunerFrontend->setLna(mLnaStatus == 1);
}
@@ -255,12 +222,6 @@
}
::ndk::ScopedAStatus TunerHidlService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
- if (!hasITuner()) {
- ALOGD("get ITuner failed");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult status;
sp<HidlILnb> lnb;
int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
@@ -278,12 +239,6 @@
::ndk::ScopedAStatus TunerHidlService::openLnbByName(const string& lnbName,
shared_ptr<ITunerLnb>* _aidl_return) {
- if (!hasITuner()) {
- ALOGE("get ITuner failed");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
int lnbId;
HidlResult status;
sp<HidlILnb> lnb;
@@ -302,12 +257,6 @@
::ndk::ScopedAStatus TunerHidlService::openDescrambler(
int32_t /*descramblerHandle*/, shared_ptr<ITunerDescrambler>* _aidl_return) {
- if (!hasITuner()) {
- ALOGD("get ITuner failed");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult status;
sp<HidlIDescrambler> descrambler;
//int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
@@ -324,7 +273,6 @@
}
::ndk::ScopedAStatus TunerHidlService::getTunerHalVersion(int* _aidl_return) {
- hasITuner();
*_aidl_return = mTunerVersion;
return ::ndk::ScopedAStatus::ok();
}
@@ -332,7 +280,7 @@
::ndk::ScopedAStatus TunerHidlService::openSharedFilter(
const string& in_filterToken, const shared_ptr<ITunerFilterCallback>& in_cb,
shared_ptr<ITunerFilter>* _aidl_return) {
- if (!hasITuner()) {
+ if (mTuner == nullptr) {
ALOGE("get ITuner failed");
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::UNAVAILABLE));
@@ -368,8 +316,13 @@
return ::ndk::ScopedAStatus::ok();
}
+::ndk::ScopedAStatus TunerHidlService::isLnaSupported(bool* /* _aidl_return */) {
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(Result::UNAVAILABLE));
+}
+
::ndk::ScopedAStatus TunerHidlService::setLna(bool bEnable) {
- if (!hasITuner()) {
+ if (mTuner == nullptr) {
ALOGE("get ITuner failed");
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::UNAVAILABLE));
@@ -428,11 +381,6 @@
}
void TunerHidlService::updateTunerResources() {
- if (!hasITuner()) {
- ALOGE("Failed to updateTunerResources");
- return;
- }
-
TunerHelper::updateTunerResources(getTRMFrontendInfos(), getTRMLnbHandles());
}
diff --git a/services/tuner/hidl/TunerHidlService.h b/services/tuner/hidl/TunerHidlService.h
index 2252d35..526c5e6 100644
--- a/services/tuner/hidl/TunerHidlService.h
+++ b/services/tuner/hidl/TunerHidlService.h
@@ -33,6 +33,7 @@
using ::aidl::android::hardware::tv::tuner::DemuxCapabilities;
using ::aidl::android::hardware::tv::tuner::DemuxFilterEvent;
using ::aidl::android::hardware::tv::tuner::DemuxFilterStatus;
+using ::aidl::android::hardware::tv::tuner::DemuxInfo;
using ::aidl::android::hardware::tv::tuner::FrontendInfo;
using ::aidl::android::hardware::tv::tuner::FrontendType;
using ::aidl::android::media::tv::tuner::ITunerDemux;
@@ -83,12 +84,15 @@
::ndk::ScopedAStatus openDemux(int32_t in_demuxHandle,
shared_ptr<ITunerDemux>* _aidl_return) override;
::ndk::ScopedAStatus getDemuxCaps(DemuxCapabilities* _aidl_return) override;
+ ::ndk::ScopedAStatus getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) override;
+ ::ndk::ScopedAStatus getDemuxInfoList(vector<DemuxInfo>* _aidl_return) override;
::ndk::ScopedAStatus openDescrambler(int32_t in_descramblerHandle,
shared_ptr<ITunerDescrambler>* _aidl_return) override;
::ndk::ScopedAStatus getTunerHalVersion(int32_t* _aidl_return) override;
::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
const shared_ptr<ITunerFilterCallback>& in_cb,
shared_ptr<ITunerFilter>* _aidl_return) override;
+ ::ndk::ScopedAStatus isLnaSupported(bool* _aidl_return) override;
::ndk::ScopedAStatus setLna(bool in_bEnable) override;
::ndk::ScopedAStatus setMaxNumberOfFrontends(FrontendType in_frontendType,
int32_t in_maxNumber) override;
@@ -99,11 +103,7 @@
void removeSharedFilter(const shared_ptr<TunerHidlFilter>& sharedFilter);
void removeFrontend(const shared_ptr<TunerHidlFrontend>& frontend);
- static shared_ptr<TunerHidlService> getTunerService();
-
private:
- bool hasITuner();
- bool hasITuner_1_1();
void updateTunerResources();
vector<TunerFrontendInfo> getTRMFrontendInfos();
vector<int32_t> getTRMLnbHandles();
@@ -121,8 +121,6 @@
Mutex mOpenedFrontendsLock;
unordered_set<shared_ptr<TunerHidlFrontend>> mOpenedFrontends;
int mLnaStatus = -1;
-
- static shared_ptr<TunerHidlService> sTunerService;
};
} // namespace tuner
diff --git a/services/tuner/hidl/TunerHidlTimeFilter.cpp b/services/tuner/hidl/TunerHidlTimeFilter.cpp
index d0606d6..06a71d0 100644
--- a/services/tuner/hidl/TunerHidlTimeFilter.cpp
+++ b/services/tuner/hidl/TunerHidlTimeFilter.cpp
@@ -43,12 +43,6 @@
}
::ndk::ScopedAStatus TunerHidlTimeFilter::setTimeStamp(int64_t timeStamp) {
- if (mTimeFilter == nullptr) {
- ALOGE("ITimeFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult status = mTimeFilter->setTimeStamp(static_cast<uint64_t>(timeStamp));
if (status != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
@@ -57,12 +51,6 @@
}
::ndk::ScopedAStatus TunerHidlTimeFilter::clearTimeStamp() {
- if (mTimeFilter == nullptr) {
- ALOGE("ITimeFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult status = mTimeFilter->clearTimeStamp();
if (status != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
@@ -71,13 +59,6 @@
}
::ndk::ScopedAStatus TunerHidlTimeFilter::getSourceTime(int64_t* _aidl_return) {
- if (mTimeFilter == nullptr) {
- *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
- ALOGE("ITimeFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult status;
mTimeFilter->getSourceTime([&](HidlResult r, uint64_t t) {
status = r;
@@ -91,13 +72,6 @@
}
::ndk::ScopedAStatus TunerHidlTimeFilter::getTimeStamp(int64_t* _aidl_return) {
- if (mTimeFilter == nullptr) {
- *_aidl_return = (int64_t)Constant64Bit::INVALID_PRESENTATION_TIME_STAMP;
- ALOGE("ITimeFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult status;
mTimeFilter->getTimeStamp([&](HidlResult r, uint64_t t) {
status = r;
@@ -111,15 +85,7 @@
}
::ndk::ScopedAStatus TunerHidlTimeFilter::close() {
- if (mTimeFilter == nullptr) {
- ALOGE("ITimeFilter is not initialized");
- return ::ndk::ScopedAStatus::fromServiceSpecificError(
- static_cast<int32_t>(Result::UNAVAILABLE));
- }
-
HidlResult res = mTimeFilter->close();
- mTimeFilter = nullptr;
-
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
}
diff --git a/services/tuner/main_tunerservice.cpp b/services/tuner/main_tunerservice.cpp
index a014dea..90f1731 100644
--- a/services/tuner/main_tunerservice.cpp
+++ b/services/tuner/main_tunerservice.cpp
@@ -18,6 +18,7 @@
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <utils/Log.h>
+#include <hidl/HidlTransportSupport.h>
#include "TunerService.h"
#include "hidl/TunerHidlService.h"
@@ -32,6 +33,8 @@
sp<ProcessState> proc(ProcessState::self());
sp<IServiceManager> sm = defaultServiceManager();
+ hardware::configureRpcThreadpool(16, true);
+ ProcessState::self()->setThreadPoolMaxThreadCount(16);
// Check legacy HIDL HAL first. If it's not existed, use AIDL HAL.
binder_status_t status = TunerHidlService::instantiate();
diff --git a/services/tuner/mediatuner.rc b/services/tuner/mediatuner.rc
index d813b7d..8e5d100 100644
--- a/services/tuner/mediatuner.rc
+++ b/services/tuner/mediatuner.rc
@@ -1,9 +1,15 @@
+# media.tuner service is not started by default unless tuner.server.enable is set
+# as "true" by TRM (Tuner Resource Manager). TRM checks ro.tuner.lazyhal, if it
+# isn't true , TRM sets tuner.server.enable as "true".
service media.tuner /system/bin/mediatuner
class main
group media
user root
ioprio rt 4
- onrestart restart vendor.tuner-hal-1-0
- onrestart restart vendor.tuner-hal-1-1
- onrestart restart vendor.tuner-default
task_profiles ProcessCapacityHigh HighPerformance
+ interface aidl media.tuner
+ oneshot
+ disabled
+
+on property:tuner.server.enable=true
+ enable media.tuner