Merge "Create API for reporting color space support to Camera2 framework consumers."
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index 729182e..7388678 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -209,7 +209,8 @@
* Query the capabilities of a camera device. These capabilities are
* immutable for a given camera.
*
- * <p>See {@link ACameraMetadata} document and {@link NdkCameraMetadataTags.h} for more details.</p>
+ * <p>See {@link ACameraMetadata} document and <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details.</p>
*
* <p>The caller must call {@link ACameraMetadata_free} to free the memory of the output
* characteristics.</p>
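For context, a minimal usage sketch of this API (error handling is abbreviated; the camera id "0" is a placeholder, real code would enumerate ids with ACameraManager_getCameraIdList()):

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>

    // Sketch: query the immutable characteristics of camera "0" and free them afterwards.
    void dumpCameraCharacteristics() {
        ACameraManager* manager = ACameraManager_create();
        ACameraMetadata* characteristics = nullptr;
        const camera_status_t status =
                ACameraManager_getCameraCharacteristics(manager, "0", &characteristics);
        if (status == ACAMERA_OK && characteristics != nullptr) {
            // ... read entries via ACameraMetadata_getConstEntry() ...
            ACameraMetadata_free(characteristics);  // caller owns the returned metadata
        }
        ACameraManager_delete(manager);
    }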
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index b331d50..a9f53dd 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -96,9 +96,12 @@
/**
* The tag identifying the entry.
*
- * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+ * <p> It is one of the values defined in
+ * <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>,
+ * and defines how the
* entry should be interpreted and which parts of the API provide it.
- * See {@link NdkCameraMetadataTags.h} for more details. </p>
+ * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details. </p>
*/
uint32_t tag;
@@ -141,9 +144,11 @@
/**
* The tag identifying the entry.
*
- * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+ * <p> It is one of the values defined in <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>,
+ * and defines how the
* entry should be interpreted and which parts of the API provide it.
- * See {@link NdkCameraMetadataTags.h} for more details. </p>
+ * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details. </p>
*/
uint32_t tag;
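As a reading aid, a small sketch of how the tag field is typically consumed, using ACAMERA_SENSOR_ORIENTATION as an illustrative tag:

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Sketch: fetch one entry and interpret its payload according to the tag.
    void readSensorOrientation(const ACameraMetadata* characteristics) {
        ACameraMetadata_const_entry entry = {};
        if (ACameraMetadata_getConstEntry(characteristics,
                ACAMERA_SENSOR_ORIENTATION, &entry) == ACAMERA_OK && entry.count > 0) {
            const int32_t orientationDegrees = entry.data.i32[0];  // tag defines the payload type
            (void)orientationDegrees;
        }
    }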
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
index 3e7c415..bae8706 100644
--- a/camera/tests/fuzzer/Android.bp
+++ b/camera/tests/fuzzer/Android.bp
@@ -13,6 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_camera_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_camera_license"],
+}
+
cc_defaults {
name: "camera_defaults",
static_libs: [
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index dd101e2..d757cd6 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -419,30 +419,39 @@
/*
* Saves metadata needed by Winscope to synchronize the screen recording playback with other traces.
*
- * The metadata (version 1) is written as a binary array with the following format:
+ * The metadata (version 2) is written as a binary array with the following format:
* - winscope magic string (#VV1NSC0PET1ME2#, 16B).
- * - the metadata version number (4B).
- * - Realtime-to-monotonic time offset in nanoseconds (8B).
- * - the recorded frames count (8B)
+ * - the metadata version number (4B little endian).
+ * - Realtime-to-elapsed time offset in nanoseconds (8B little endian).
+ * - the recorded frames count (4B little endian)
* - for each recorded frame:
- * - System time in monotonic clock timebase in nanoseconds (8B).
+ * - System time in elapsed clock timebase in nanoseconds (8B little endian).
*
- * All numbers are Little Endian encoded.
+ * Metadata version 2 changes:
+ * Use the elapsed-realtime clock for compatibility with most other UI traces:
+ * - Realtime-to-elapsed time offset (instead of realtime-to-monotonic)
+ * - Frame timestamps in the elapsed clock timebase (instead of monotonic)
*/
static status_t writeWinscopeMetadata(const Vector<std::int64_t>& timestampsMonotonicUs,
const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
ALOGV("Writing winscope metadata");
static constexpr auto kWinscopeMagicString = std::string_view {"#VV1NSC0PET1ME2#"};
- static constexpr std::uint32_t metadataVersion = 1;
- const std::int64_t realToMonotonicTimeOffsetNs =
- systemTime(SYSTEM_TIME_REALTIME) - systemTime(SYSTEM_TIME_MONOTONIC);
+ static constexpr std::uint32_t metadataVersion = 2;
+
+ const auto elapsedTimeNs = android::elapsedRealtimeNano();
+ const std::int64_t elapsedToMonotonicTimeOffsetNs =
+ elapsedTimeNs - systemTime(SYSTEM_TIME_MONOTONIC);
+ const std::int64_t realToElapsedTimeOffsetNs =
+ systemTime(SYSTEM_TIME_REALTIME) - elapsedTimeNs;
const std::uint32_t framesCount = static_cast<std::uint32_t>(timestampsMonotonicUs.size());
sp<ABuffer> buffer = new ABuffer(
kWinscopeMagicString.size() +
sizeof(decltype(metadataVersion)) +
- sizeof(decltype(realToMonotonicTimeOffsetNs)) +
+ sizeof(decltype(realToElapsedTimeOffsetNs)) +
sizeof(decltype(framesCount)) +
framesCount * sizeof(std::uint64_t)
);
@@ -454,14 +463,16 @@
writeValueLE(metadataVersion, pos);
pos += sizeof(decltype(metadataVersion));
- writeValueLE(realToMonotonicTimeOffsetNs, pos);
- pos += sizeof(decltype(realToMonotonicTimeOffsetNs));
+ writeValueLE(realToElapsedTimeOffsetNs, pos);
+ pos += sizeof(decltype(realToElapsedTimeOffsetNs));
writeValueLE(framesCount, pos);
pos += sizeof(decltype(framesCount));
for (const auto timestampMonotonicUs : timestampsMonotonicUs) {
- writeValueLE<std::uint64_t>(timestampMonotonicUs * 1000, pos);
+ const auto timestampElapsedNs =
+ elapsedToMonotonicTimeOffsetNs + timestampMonotonicUs * 1000;
+ writeValueLE<std::uint64_t>(timestampElapsedNs, pos);
pos += sizeof(std::uint64_t);
}
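For reference, a minimal sketch of a reader that mirrors the version-2 layout written above (little-endian host assumed; the function name and signature are illustrative, not part of this change):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <string_view>

    // Sketch: fixed offsets of the version-2 metadata fields written by
    // writeWinscopeMetadata(): magic (16B), version (4B), offset (8B), count (4B),
    // then one 8B elapsed-realtime timestamp per frame.
    static bool parseWinscopeMetadataV2(const std::uint8_t* data, std::size_t size,
                                        std::int64_t* realToElapsedTimeOffsetNs,
                                        std::uint32_t* framesCount) {
        static constexpr std::string_view kMagic{"#VV1NSC0PET1ME2#"};
        static constexpr std::size_t kHeaderSize = 16 + 4 + 8 + 4;
        if (size < kHeaderSize || std::memcmp(data, kMagic.data(), kMagic.size()) != 0) {
            return false;
        }
        std::uint32_t version = 0;
        std::memcpy(&version, data + 16, sizeof(version));
        if (version != 2) return false;
        std::memcpy(realToElapsedTimeOffsetNs, data + 20, sizeof(*realToElapsedTimeOffsetNs));
        std::memcpy(framesCount, data + 28, sizeof(*framesCount));
        // Frame timestamps (8B each, elapsed-realtime nanoseconds) follow at offset 32.
        return size >= kHeaderSize + *framesCount * sizeof(std::uint64_t);
    }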
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 953afc5..96a4c4a 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -671,6 +671,9 @@
void C2SoftAvcDec::resetPlugin() {
mSignalledOutputEos = false;
mTimeStart = mTimeEnd = systemTime();
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
}
status_t C2SoftAvcDec::deleteDecoder() {
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index a27c218..15d6dcd 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -664,6 +664,9 @@
void C2SoftHevcDec::resetPlugin() {
mSignalledOutputEos = false;
mTimeStart = mTimeEnd = systemTime();
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
}
status_t C2SoftHevcDec::deleteDecoder() {
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 9a41910..439323c 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -732,6 +732,9 @@
void C2SoftMpeg2Dec::resetPlugin() {
mSignalledOutputEos = false;
mTimeStart = mTimeEnd = systemTime();
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
}
status_t C2SoftMpeg2Dec::deleteDecoder() {
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 54a1d0e..3bf9c48 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -256,7 +256,9 @@
mFramesConfigured = false;
mSignalledOutputEos = false;
mSignalledError = false;
-
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
return C2_OK;
}
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index a727db2..abfd8a7 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -887,6 +887,12 @@
* will be respected if both this function and {@link AAudioStreamBuilder_setChannelMask} are
* called.
*
+ * Note that if the channel count is two, the stream may be mixed down to mono when the device
+ * only supports one channel. If the channel count is greater than two but the device's supported
+ * channel count is less than the requested value, the channels beyond the device's channel count
+ * will be dropped. If higher channels should be mixed or spatialized, use
+ * {@link AAudioStreamBuilder_setChannelMask} instead.
+ *
* Available since API level 26.
*
* @param builder reference provided by AAudio_createStreamBuilder()
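As a usage illustration (not part of this change), a sketch contrasting the two builder calls; error handling is abbreviated and AAUDIO_CHANNEL_5POINT1 is used only as an example mask (API 32+):

    #include <aaudio/AAudio.h>

    // Sketch: request 6 channels. With setChannelCount(), channels beyond what the device
    // supports may simply be dropped; with setChannelMask(), the framework can mix or
    // spatialize them instead.
    AAudioStream* openSixChannelStream(bool wantMixing) {
        AAudioStreamBuilder* builder = nullptr;
        AAudio_createStreamBuilder(&builder);
        if (wantMixing) {
            AAudioStreamBuilder_setChannelMask(builder, AAUDIO_CHANNEL_5POINT1);
        } else {
            AAudioStreamBuilder_setChannelCount(builder, 6);
        }
        AAudioStream* stream = nullptr;
        const aaudio_result_t result = AAudioStreamBuilder_openStream(builder, &stream);
        AAudioStreamBuilder_delete(builder);
        return result == AAUDIO_OK ? stream : nullptr;
    }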
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index 8743c04..3956a6c 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -17,11 +17,16 @@
#define LOG_TAG "EffectHalHidl"
//#define LOG_NDEBUG 0
+#include <android/hidl/manager/1.0/IServiceManager.h>
+#include <android-base/stringprintf.h>
#include <common/all-versions/VersionUtils.h>
#include <cutils/native_handle.h>
+#include <cutils/properties.h>
#include <hwbinder/IPCThreadState.h>
#include <media/EffectsFactoryApi.h>
+#include <mediautils/SchedulingPolicyService.h>
#include <mediautils/TimeCheck.h>
+#include <system/audio_effects/effect_spatializer.h>
#include <utils/Log.h>
#include <util/EffectUtils.h>
@@ -50,6 +55,18 @@
effect_descriptor_t halDescriptor{};
if (EffectHalHidl::getDescriptor(&halDescriptor) == NO_ERROR) {
mIsInput = (halDescriptor.flags & EFFECT_FLAG_TYPE_PRE_PROC) == EFFECT_FLAG_TYPE_PRE_PROC;
+ const bool isSpatializer =
+ memcmp(&halDescriptor.type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0;
+ if (isSpatializer) {
+ constexpr int32_t kRTPriorityMin = 1;
+ constexpr int32_t kRTPriorityMax = 3;
+ const int32_t priorityBoost = property_get_int32("audio.spatializer.priority", 1);
+ if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
+ ALOGD("%s: audio.spatializer.priority %d on effect %lld",
+ __func__, priorityBoost, (long long)effectId);
+ mHalThreadPriority = priorityBoost;
+ }
+ }
}
}
@@ -127,6 +144,8 @@
ALOGE_IF(!mEfGroup, "Event flag creation for effects failed");
return NO_INIT;
}
+
+ (void)checkHalThreadPriority();
mStatusMQ = std::move(tempStatusMQ);
return OK;
}
@@ -317,5 +336,67 @@
return result;
}
+status_t EffectHalHidl::getHalPid(pid_t *pid) const {
+ using ::android::hidl::base::V1_0::DebugInfo;
+ using ::android::hidl::manager::V1_0::IServiceManager;
+ DebugInfo debugInfo;
+ const auto ret = mEffect->getDebugInfo([&] (const auto &info) {
+ debugInfo = info;
+ });
+ if (!ret.isOk()) {
+ ALOGW("%s: cannot get effect debug info", __func__);
+ return INVALID_OPERATION;
+ }
+ if (debugInfo.pid != (int)IServiceManager::PidConstant::NO_PID) {
+ *pid = debugInfo.pid;
+ return NO_ERROR;
+ }
+ ALOGW("%s: effect debug info does not contain pid", __func__);
+ return NAME_NOT_FOUND;
+}
+
+status_t EffectHalHidl::getHalWorkerTid(pid_t *tid) {
+ int32_t reply = -1;
+ uint32_t replySize = sizeof(reply);
+ const status_t status =
+ command('gtid', 0 /* cmdSize */, nullptr /* pCmdData */, &replySize, &reply);
+ if (status == OK) {
+ *tid = (pid_t)reply;
+ } else {
+ ALOGW("%s: failed with status:%d", __func__, status);
+ }
+ return status;
+}
+
+bool EffectHalHidl::requestHalThreadPriority(pid_t threadPid, pid_t threadId) {
+ if (mHalThreadPriority == kRTPriorityDisabled) {
+ return true;
+ }
+ const int err = requestPriority(
+ threadPid, threadId,
+ mHalThreadPriority, false /*isForApp*/, true /*asynchronous*/);
+ ALOGW_IF(err, "%s: failed to set RT priority %d for pid %d tid %d; error %d",
+ __func__, mHalThreadPriority, threadPid, threadId, err);
+ // Audio will still work, but may be more susceptible to glitches.
+ return err == 0;
+}
+
+status_t EffectHalHidl::checkHalThreadPriority() {
+ if (mHalThreadPriority == kRTPriorityDisabled) return OK;
+ if (mHalThreadPriority < kRTPriorityMin
+ || mHalThreadPriority > kRTPriorityMax) return BAD_VALUE;
+
+ pid_t halPid, halWorkerTid;
+ const status_t status = getHalPid(&halPid) ?: getHalWorkerTid(&halWorkerTid);
+ const bool success = status == OK && requestHalThreadPriority(halPid, halWorkerTid);
+ ALOGD("%s: effectId %lld RT priority(%d) request %s%s",
+ __func__, (long long)mEffectId, mHalThreadPriority,
+ success ? "succeeded" : "failed",
+ status == OK
+ ? base::StringPrintf(" for pid:%d tid:%d", halPid, halWorkerTid).c_str()
+ : " (pid / tid cannot be read)");
+ return success ? OK : status != OK ? status : INVALID_OPERATION /* request failed */;
+}
+
} // namespace effect
} // namespace android
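For context, a hedged sketch of the HAL-side counterpart that getHalWorkerTid() relies on: the effect implementation is assumed to answer the vendor 'gtid' command with the tid of its processing thread (handler name and structure are illustrative, not defined by this change):

    #include <cstdint>
    #include <cstring>
    #include <errno.h>
    #include <unistd.h>

    // Sketch: inside an effect HAL command handler, reply to 'gtid' with the tid of
    // the thread that runs the processing loop.
    int handleVendorCommand(uint32_t cmdCode, uint32_t* replySize, void* pReplyData) {
        if (cmdCode == 'gtid' && replySize != nullptr && pReplyData != nullptr &&
                *replySize >= sizeof(int32_t)) {
            const int32_t tid = static_cast<int32_t>(gettid());
            std::memcpy(pReplyData, &tid, sizeof(tid));
            *replySize = sizeof(tid);
            return 0;
        }
        return -EINVAL;
    }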
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index e139768..94dcd7e 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -78,6 +78,11 @@
std::unique_ptr<StatusMQ> mStatusMQ;
EventFlag* mEfGroup;
bool mIsInput = false;
+ static constexpr int32_t kRTPriorityMin = 1;
+ static constexpr int32_t kRTPriorityMax = 3;
+ static constexpr int kRTPriorityDisabled = 0;
+ // Typical RealTime mHalThreadPriority ranges from 1 (low) to 3 (high).
+ int mHalThreadPriority = kRTPriorityDisabled;
// Can not be constructed directly by clients.
EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId);
@@ -93,6 +98,10 @@
uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
uint32_t *replySize, void *pReplyData);
status_t setProcessBuffers();
+ status_t getHalPid(pid_t *pid) const;
+ status_t getHalWorkerTid(pid_t *tid);
+ bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
+ status_t checkHalThreadPriority();
};
} // namespace effect
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index f3cbdb0..bdf1cbc 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -2699,7 +2699,7 @@
// This is a benign busy-wait, with the next data request generated 10 ms or more later;
// nevertheless for power reasons, we don't want to see too many of these.
- ALOGV_IF(actualSize == 0 && buffer->size > 0, "callbackwrapper: empty buffer returned");
+ ALOGV_IF(actualSize == 0 && buffer.size() > 0, "callbackwrapper: empty buffer returned");
unlock();
return actualSize;
}
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index fd30b35..c5b5199 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -796,6 +796,8 @@
{ "valid-samples", kKeyValidSamples },
{ "dvb-component-tag", kKeyDvbComponentTag},
{ "dvb-audio-description", kKeyDvbAudioDescription},
+ { "dvb-teletext-magazine-number", kKeyDvbTeletextMagazineNumber},
+ { "dvb-teletext-page-number", kKeyDvbTeletextPageNumber},
}
};
@@ -1014,6 +1016,16 @@
msg->setInt32("dvb-audio-description", dvbAudioDescription);
}
+ int32_t dvbTeletextMagazineNumber = 0;
+ if (meta->findInt32(kKeyDvbTeletextMagazineNumber, &dvbTeletextMagazineNumber)) {
+ msg->setInt32("dvb-teletext-magazine-number", dvbTeletextMagazineNumber);
+ }
+
+ int32_t dvbTeletextPageNumber = 0;
+ if (meta->findInt32(kKeyDvbTeletextPageNumber, &dvbTeletextPageNumber)) {
+ msg->setInt32("dvb-teletext-page-number", dvbTeletextPageNumber);
+ }
+
const char *lang;
if (meta->findCString(kKeyMediaLanguage, &lang)) {
msg->setString("language", lang);
@@ -1810,6 +1822,16 @@
meta->setInt32(kKeyDvbAudioDescription, dvbAudioDescription);
}
+ int32_t dvbTeletextMagazineNumber = 0;
+ if (msg->findInt32("dvb-teletext-magazine-number", &dvbTeletextMagazineNumber)) {
+ meta->setInt32(kKeyDvbTeletextMagazineNumber, dvbTeletextMagazineNumber);
+ }
+
+ int32_t dvbTeletextPageNumber = 0;
+ if (msg->findInt32("dvb-teletext-page-number", &dvbTeletextPageNumber)) {
+ meta->setInt32(kKeyDvbTeletextPageNumber, dvbTeletextPageNumber);
+ }
+
AString lang;
if (msg->findString("language", &lang)) {
meta->setCString(kKeyMediaLanguage, lang.c_str());
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index cdf8d35..33f224c 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -283,6 +283,12 @@
// DVB audio description
kKeyDvbAudioDescription = 'addt', // bool (int32_t), DVB audio description only defined for
// audio component
+
+ // DVB teletext magazine number
+ kKeyDvbTeletextMagazineNumber = 'ttxm', // int32_t, DVB teletext magazine number
+
+ // DVB teletext page number
+ kKeyDvbTeletextPageNumber = 'ttxp', // int32_t, DVB teletext page number
};
enum {
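As a small illustration (hypothetical producer code, not part of this change), an extractor would attach these keys to a track's metadata so that Utils.cpp forwards them as the corresponding AMessage fields:

    #include <media/stagefright/MetaData.h>
    #include <media/stagefright/MetaDataBase.h>
    #include <utils/RefBase.h>

    // Sketch: publish DVB teletext magazine/page numbers on a track's metadata.
    void setTeletextInfo(const android::sp<android::MetaData>& trackMeta,
                         int32_t magazineNumber, int32_t pageNumber) {
        trackMeta->setInt32(android::kKeyDvbTeletextMagazineNumber, magazineNumber);
        trackMeta->setInt32(android::kKeyDvbTeletextPageNumber, pageNumber);
    }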
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index afaa012..968b8fb 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1078,8 +1078,6 @@
clientPid = callingPid;
adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
}
- adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
- adjAttributionSource);
audio_session_t sessionId = input.sessionId;
if (sessionId == AUDIO_SESSION_ALLOCATE) {
@@ -2298,8 +2296,7 @@
__func__, callingUid, callingPid, currentPid);
adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
}
- adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
- adjAttributionSource);
+
// we don't yet support anything other than linear PCM
if (!audio_is_valid_format(input.config.format) || !audio_is_linear_pcm(input.config.format)) {
ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -3951,7 +3948,6 @@
adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
currentPid = callingPid;
}
- adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(adjAttributionSource);
ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 87eb1f5..17bb83b 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -7361,6 +7361,27 @@
if (status != INVALID_OPERATION) {
updateHalSupportedLatencyModes_l();
}
+
+ // update priority if specified.
+ constexpr int32_t kRTPriorityMin = 1;
+ constexpr int32_t kRTPriorityMax = 3;
+ const int32_t priorityBoost =
+ property_get_int32("audio.spatializer.priority", kRTPriorityMin);
+ if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
+ const pid_t pid = getpid();
+ const pid_t tid = getTid();
+
+ if (tid == -1) {
+ // Unusual: PlaybackThread::onFirstRef() should set the threadLoop running.
+ ALOGW("%s: audio.spatializer.priority %d ignored, thread not running",
+ __func__, priorityBoost);
+ } else {
+ ALOGD("%s: audio.spatializer.priority %d, allowing real time for pid %d tid %d",
+ __func__, priorityBoost, pid, tid);
+ sendPrioConfigEvent_l(pid, tid, priorityBoost, false /*forApp*/);
+ stream()->setHalThreadPriority(priorityBoost);
+ }
+ }
}
status_t AudioFlinger::SpatializerThread::createAudioPatch_l(const struct audio_patch *patch,
@@ -8355,6 +8376,8 @@
audio_input_flags_t inputFlags = mInput->flags;
audio_input_flags_t requestedFlags = *flags;
uint32_t sampleRate;
+ AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+ attributionSource);
lStatus = initCheck();
if (lStatus != NO_ERROR) {
@@ -8369,7 +8392,7 @@
}
if (maxSharedAudioHistoryMs != 0) {
- if (!captureHotwordAllowed(attributionSource)) {
+ if (!captureHotwordAllowed(checkedAttributionSource)) {
lStatus = PERMISSION_DENIED;
goto Exit;
}
@@ -8490,16 +8513,16 @@
Mutex::Autolock _l(mLock);
int32_t startFrames = -1;
if (!mSharedAudioPackageName.empty()
- && mSharedAudioPackageName == attributionSource.packageName
+ && mSharedAudioPackageName == checkedAttributionSource.packageName
&& mSharedAudioSessionId == sessionId
- && captureHotwordAllowed(attributionSource)) {
+ && captureHotwordAllowed(checkedAttributionSource)) {
startFrames = mSharedAudioStartFrames;
}
track = new RecordTrack(this, client, attr, sampleRate,
format, channelMask, frameCount,
nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid,
- attributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
+ checkedAttributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
startFrames);
lStatus = track->initCheck();
@@ -10287,28 +10310,22 @@
void AudioFlinger::MmapThread::checkInvalidTracks_l()
{
- std::vector<audio_port_handle_t> invalidPortIds;
+ sp<MmapStreamCallback> callback;
for (const sp<MmapTrack> &track : mActiveTracks) {
if (track->isInvalid()) {
- invalidPortIds.push_back(track->portId());
+ callback = mCallback.promote();
+ if (callback == nullptr && mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+ ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
+ mNoCallbackWarningCount++;
+ }
+ break;
}
}
- if (invalidPortIds.empty()) {
- return;
+ if (callback != 0) {
+ mLock.unlock();
+ callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+ mLock.lock();
}
- sp<MmapStreamCallback> callback = mCallback.promote();
- if (callback == nullptr) {
- if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
- ALOGW("Could not notify MMAP stream tear down: no onTearDown callback!");
- mNoCallbackWarningCount++;
- }
- return;
- }
- mLock.unlock();
- for (const auto invalidPortId : invalidPortIds) {
- callback->onTearDown(invalidPortId);
- }
- mLock.lock();
}
void AudioFlinger::MmapThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 1f64f93..cfb296c 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -530,7 +530,10 @@
id, attr.flags);
return nullptr;
}
- return new OpPlayAudioMonitor(attributionSource, attr.usage, id);
+
+ AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+ attributionSource);
+ return new OpPlayAudioMonitor(checkedAttributionSource, attr.usage, id);
}
AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index af40d1f..d9b856b 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -741,6 +741,17 @@
msg->post();
}
+void Spatializer::resetEngineHeadPose_l() {
+ ALOGV("%s mEngine %p", __func__, mEngine.get());
+ if (mEngine == nullptr) {
+ return;
+ }
+ const std::vector<float> headToStage(6, 0.0);
+ setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{SpatializerHeadTrackingMode::DISABLED});
+}
+
void Spatializer::onHeadToStagePoseMsg(const std::vector<float>& headToStage) {
ALOGV("%s", __func__);
sp<media::ISpatializerHeadTrackingCallback> callback;
@@ -792,8 +803,12 @@
}
mActualHeadTrackingMode = spatializerMode;
if (mEngine != nullptr) {
- setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+ if (spatializerMode == SpatializerHeadTrackingMode::DISABLED) {
+ resetEngineHeadPose_l();
+ } else {
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+ }
}
callback = mHeadTrackingCallback;
mLocalLog.log("%s: %s, spatializerMode %s", __func__, media::toString(mode).c_str(),
@@ -924,16 +939,25 @@
bool lowLatencySupported = mSupportedLatencyModes.empty()
|| (std::find(mSupportedLatencyModes.begin(), mSupportedLatencyModes.end(),
AUDIO_LATENCY_MODE_LOW) != mSupportedLatencyModes.end());
- if (mSupportsHeadTracking && mPoseController != nullptr) {
- if (lowLatencySupported && mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
- && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
- && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
- mPoseController->setHeadSensor(mHeadSensor);
- mPoseController->setScreenSensor(mScreenSensor);
- requestedLatencyMode = AUDIO_LATENCY_MODE_LOW;
+ if (mSupportsHeadTracking) {
+ if (mPoseController != nullptr) {
+ if (lowLatencySupported && mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
+ && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
+ && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
+ if (mEngine != nullptr) {
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
+ }
+ mPoseController->setHeadSensor(mHeadSensor);
+ mPoseController->setScreenSensor(mScreenSensor);
+ requestedLatencyMode = AUDIO_LATENCY_MODE_LOW;
+ } else {
+ mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+ mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+ resetEngineHeadPose_l();
+ }
} else {
- mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
- mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+ resetEngineHeadPose_l();
}
}
if (mOutput != AUDIO_IO_HANDLE_NONE) {
@@ -947,8 +971,6 @@
mEngine->setEnabled(true);
setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
std::vector<SpatializationLevel>{mLevel});
- setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
} else {
setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
std::vector<SpatializationLevel>{SpatializationLevel::NONE});
@@ -970,6 +992,7 @@
mPoseController->setDisplayOrientation(mDisplayOrientation);
} else if (!isControllerNeeded && mPoseController != nullptr) {
mPoseController.reset();
+ resetEngineHeadPose_l();
}
if (mPoseController != nullptr) {
mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 9eb8c48..ba60cae 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -351,6 +351,12 @@
*/
void checkEngineState_l() REQUIRES(mLock);
+ /**
+ * Reset the head tracking mode and recenter the pose in the engine. Called when head
+ * tracking is disabled.
+ */
+ void resetEngineHeadPose_l() REQUIRES(mLock);
+
/** Effect engine descriptor */
const effect_descriptor_t mEngineDescriptor;
/** Callback interface to parent audio policy service */