Merge "C2BqBuffer: resolve 3 way deadlock" am: e847865272 am: e50a2706f5
Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/2285818
Change-Id: I561f107a69a767f0885cdf7071531fca8b28adb5
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 2e0b678..d866c18 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -13,6 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+#include <algorithm>
+#include <string_view>
+#include <type_traits>
#include <assert.h>
#include <ctype.h>
@@ -100,7 +103,6 @@
static const uint32_t kFallbackHeight = 720;
static const char* kMimeTypeAvc = "video/avc";
static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";
-static const char* kWinscopeMagicString = "#VV1NSC0PET1ME!#";
// Command-line parameters.
static bool gVerbose = false; // chatty on stdout
@@ -354,14 +356,15 @@
}
/*
- * Writes an unsigned integer byte-by-byte in little endian order regardless
+ * Writes a signed or unsigned integer byte-by-byte in little endian order regardless
* of the platform endianness.
*/
-template <typename UINT>
-static void writeValueLE(UINT value, uint8_t* buffer) {
- for (int i = 0; i < sizeof(UINT); ++i) {
- buffer[i] = static_cast<uint8_t>(value);
- value >>= 8;
+template <typename T>
+static void writeValueLE(T value, uint8_t* buffer) {
+ std::remove_const_t<T> temp = value;
+ for (int i = 0; i < sizeof(T); ++i) {
+ buffer[i] = static_cast<std::uint8_t>(temp & 0xff);
+ temp >>= 8;
}
}
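For illustration, a minimal standalone check of the little-endian layout produced by writeValueLE, assuming the template above is visible in the same translation unit (this test is not part of the patch):

    // writeValueLE stores the least significant byte first on any host,
    // so 0x11223344 becomes {0x44, 0x33, 0x22, 0x11}.
    #include <cassert>
    #include <cstdint>

    int main() {
        std::uint8_t out[4] = {};
        writeValueLE<std::uint32_t>(0x11223344u, out);
        assert(out[0] == 0x44 && out[1] == 0x33 && out[2] == 0x22 && out[3] == 0x11);
        return 0;
    }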
@@ -377,16 +380,18 @@
* - for every frame its presentation time relative to the elapsed realtime clock in microseconds
* (as little endian uint64).
*/
-static status_t writeWinscopeMetadata(const Vector<int64_t>& timestamps,
+static status_t writeWinscopeMetadataLegacy(const Vector<int64_t>& timestamps,
const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
- ALOGV("Writing metadata");
+ static constexpr auto kWinscopeMagicStringLegacy = "#VV1NSC0PET1ME!#";
+
+ ALOGV("Writing winscope metadata legacy");
int64_t systemTimeToElapsedTimeOffsetMicros = (android::elapsedRealtimeNano()
- systemTime(SYSTEM_TIME_MONOTONIC)) / 1000;
sp<ABuffer> buffer = new ABuffer(timestamps.size() * sizeof(int64_t)
- + sizeof(uint32_t) + strlen(kWinscopeMagicString));
+ + sizeof(uint32_t) + strlen(kWinscopeMagicStringLegacy));
uint8_t* pos = buffer->data();
- strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicString);
- pos += strlen(kWinscopeMagicString);
+ strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicStringLegacy);
+ pos += strlen(kWinscopeMagicStringLegacy);
writeValueLE<uint32_t>(timestamps.size(), pos);
pos += sizeof(uint32_t);
for (size_t idx = 0; idx < timestamps.size(); ++idx) {
@@ -395,10 +400,79 @@
pos += sizeof(uint64_t);
}
AMediaCodecBufferInfo bufferInfo = {
- 0,
+ 0 /* offset */,
static_cast<int32_t>(buffer->size()),
- timestamps[0],
- 0
+ timestamps[0] /* presentationTimeUs */,
+ 0 /* flags */
+ };
+ return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
+}
+
+/*
+ * Saves metadata needed by Winscope to synchronize the screen recording playback with other traces.
+ *
+ * The metadata (version 2) is written as a binary array with the following format:
+ * - winscope magic string (#VV1NSC0PET1ME2#, 16B).
+ * - the metadata version number (4B little endian).
+ * - Realtime-to-elapsed time offset in nanoseconds (8B little endian).
+ * - the recorded frames count (4B little endian)
+ * - for each recorded frame:
+ * - System time in elapsed clock timebase in nanoseconds (8B little endian).
+ *
+ *
+ * Metadata version 2 changes
+ *
+ * Use elapsed time for compatibility with other UI traces (most of them):
+ * - Realtime-to-elapsed time offset (instead of realtime-to-monotonic)
+ * - Frame timestamps in elapsed clock timebase (instead of monotonic)
+ */
+static status_t writeWinscopeMetadata(const Vector<std::int64_t>& timestampsMonotonicUs,
+ const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
+ ALOGV("Writing winscope metadata");
+
+ static constexpr auto kWinscopeMagicString = std::string_view {"#VV1NSC0PET1ME2#"};
+ static constexpr std::uint32_t metadataVersion = 2;
+
+ const auto elapsedTimeNs = android::elapsedRealtimeNano();
+ const std::int64_t elapsedToMonotonicTimeOffsetNs =
+ elapsedTimeNs - systemTime(SYSTEM_TIME_MONOTONIC);
+ const std::int64_t realToElapsedTimeOffsetNs =
+ systemTime(SYSTEM_TIME_REALTIME) - elapsedTimeNs;
+ const std::uint32_t framesCount = static_cast<std::uint32_t>(timestampsMonotonicUs.size());
+
+ sp<ABuffer> buffer = new ABuffer(
+ kWinscopeMagicString.size() +
+ sizeof(decltype(metadataVersion)) +
+ sizeof(decltype(realToElapsedTimeOffsetNs)) +
+ sizeof(decltype(framesCount)) +
+ framesCount * sizeof(std::uint64_t)
+ );
+ std::uint8_t* pos = buffer->data();
+
+ std::copy(kWinscopeMagicString.cbegin(), kWinscopeMagicString.cend(), pos);
+ pos += kWinscopeMagicString.size();
+
+ writeValueLE(metadataVersion, pos);
+ pos += sizeof(decltype(metadataVersion));
+
+ writeValueLE(realToElapsedTimeOffsetNs, pos);
+ pos += sizeof(decltype(realToElapsedTimeOffsetNs));
+
+ writeValueLE(framesCount, pos);
+ pos += sizeof(decltype(framesCount));
+
+ for (const auto timestampMonotonicUs : timestampsMonotonicUs) {
+ const auto timestampElapsedNs =
+ elapsedToMonotonicTimeOffsetNs + timestampMonotonicUs * 1000;
+ writeValueLE<std::uint64_t>(timestampElapsedNs, pos);
+ pos += sizeof(std::uint64_t);
+ }
+
+ AMediaCodecBufferInfo bufferInfo = {
+ 0 /* offset */,
+ static_cast<std::int32_t>(buffer->size()),
+ timestampsMonotonicUs[0] /* presentationTimeUs */,
+ 0 /* flags */
};
return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
}
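For reference, a minimal sketch of how a consumer could decode the version-2 metadata blob written above; readValueLE and parseWinscopeV2 are hypothetical helpers that simply mirror the layout in the format comment (they are not part of Winscope or of this patch):

    #include <cstddef>
    #include <cstdint>
    #include <type_traits>

    // Hypothetical little-endian reader matching writeValueLE's byte order.
    template <typename T>
    static T readValueLE(const std::uint8_t* p) {
        using U = std::make_unsigned_t<T>;
        U v = 0;
        for (std::size_t i = 0; i < sizeof(T); ++i) {
            v |= static_cast<U>(p[i]) << (8 * i);
        }
        return static_cast<T>(v);
    }

    // Hypothetical parser for the v2 blob: 16B magic, u32 version, i64 offset,
    // u32 frame count, then one u64 elapsed-time timestamp per frame.
    static void parseWinscopeV2(const std::uint8_t* data) {
        const std::uint8_t* pos = data + 16;  // skip "#VV1NSC0PET1ME2#"
        const std::uint32_t version = readValueLE<std::uint32_t>(pos);
        pos += sizeof(std::uint32_t);
        const std::int64_t realToElapsedOffsetNs = readValueLE<std::int64_t>(pos);
        pos += sizeof(std::int64_t);
        const std::uint32_t framesCount = readValueLE<std::uint32_t>(pos);
        pos += sizeof(std::uint32_t);
        for (std::uint32_t i = 0; i < framesCount; ++i) {
            const std::uint64_t frameElapsedNs = readValueLE<std::uint64_t>(pos);
            pos += sizeof(std::uint64_t);
            (void)frameElapsedNs;
        }
        (void)version;
        (void)realToElapsedOffsetNs;
    }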
@@ -418,11 +492,12 @@
static int kTimeout = 250000; // be responsive on signal
status_t err;
ssize_t trackIdx = -1;
+ ssize_t metaLegacyTrackIdx = -1;
ssize_t metaTrackIdx = -1;
uint32_t debugNumFrames = 0;
int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
- Vector<int64_t> timestamps;
+ Vector<int64_t> timestampsMonotonicUs;
bool firstFrame = true;
assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
@@ -520,9 +595,9 @@
sp<ABuffer> buffer = new ABuffer(
buffers[bufIndex]->data(), buffers[bufIndex]->size());
AMediaCodecBufferInfo bufferInfo = {
- 0,
+ 0 /* offset */,
static_cast<int32_t>(buffer->size()),
- ptsUsec,
+ ptsUsec /* presentationTimeUs */,
flags
};
err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
@@ -532,7 +607,7 @@
return err;
}
if (gOutputFormat == FORMAT_MP4) {
- timestamps.add(ptsUsec);
+ timestampsMonotonicUs.add(ptsUsec);
}
}
debugNumFrames++;
@@ -565,6 +640,7 @@
if (gOutputFormat == FORMAT_MP4) {
AMediaFormat *metaFormat = AMediaFormat_new();
AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
+ metaLegacyTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
AMediaFormat_delete(metaFormat);
}
@@ -604,10 +680,16 @@
systemTime(CLOCK_MONOTONIC) - startWhenNsec));
fflush(stdout);
}
- if (metaTrackIdx >= 0 && !timestamps.isEmpty()) {
- err = writeWinscopeMetadata(timestamps, metaTrackIdx, muxer);
+ if (metaLegacyTrackIdx >= 0 && metaTrackIdx >= 0 && !timestampsMonotonicUs.isEmpty()) {
+ err = writeWinscopeMetadataLegacy(timestampsMonotonicUs, metaLegacyTrackIdx, muxer);
if (err != NO_ERROR) {
- fprintf(stderr, "Failed writing metadata to muxer (err=%d)\n", err);
+ fprintf(stderr, "Failed writing legacy winscope metadata to muxer (err=%d)\n", err);
+ return err;
+ }
+
+ err = writeWinscopeMetadata(timestampsMonotonicUs, metaTrackIdx, muxer);
+ if (err != NO_ERROR) {
+ fprintf(stderr, "Failed writing winscope metadata to muxer (err=%d)\n", err);
return err;
}
}
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index 8743c04..3956a6c 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -17,11 +17,16 @@
#define LOG_TAG "EffectHalHidl"
//#define LOG_NDEBUG 0
+#include <android/hidl/manager/1.0/IServiceManager.h>
+#include <android-base/stringprintf.h>
#include <common/all-versions/VersionUtils.h>
#include <cutils/native_handle.h>
+#include <cutils/properties.h>
#include <hwbinder/IPCThreadState.h>
#include <media/EffectsFactoryApi.h>
+#include <mediautils/SchedulingPolicyService.h>
#include <mediautils/TimeCheck.h>
+#include <system/audio_effects/effect_spatializer.h>
#include <utils/Log.h>
#include <util/EffectUtils.h>
@@ -50,6 +55,18 @@
effect_descriptor_t halDescriptor{};
if (EffectHalHidl::getDescriptor(&halDescriptor) == NO_ERROR) {
mIsInput = (halDescriptor.flags & EFFECT_FLAG_TYPE_PRE_PROC) == EFFECT_FLAG_TYPE_PRE_PROC;
+ const bool isSpatializer =
+ memcmp(&halDescriptor.type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0;
+ if (isSpatializer) {
+ constexpr int32_t kRTPriorityMin = 1;
+ constexpr int32_t kRTPriorityMax = 3;
+ const int32_t priorityBoost = property_get_int32("audio.spatializer.priority", 1);
+ if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
+ ALOGD("%s: audio.spatializer.priority %d on effect %lld",
+ __func__, priorityBoost, (long long)effectId);
+ mHalThreadPriority = priorityBoost;
+ }
+ }
}
}
@@ -127,6 +144,8 @@
ALOGE_IF(!mEfGroup, "Event flag creation for effects failed");
return NO_INIT;
}
+
+ (void)checkHalThreadPriority();
mStatusMQ = std::move(tempStatusMQ);
return OK;
}
@@ -317,5 +336,67 @@
return result;
}
+status_t EffectHalHidl::getHalPid(pid_t *pid) const {
+ using ::android::hidl::base::V1_0::DebugInfo;
+ using ::android::hidl::manager::V1_0::IServiceManager;
+ DebugInfo debugInfo;
+ const auto ret = mEffect->getDebugInfo([&] (const auto &info) {
+ debugInfo = info;
+ });
+ if (!ret.isOk()) {
+ ALOGW("%s: cannot get effect debug info", __func__);
+ return INVALID_OPERATION;
+ }
+ if (debugInfo.pid != (int)IServiceManager::PidConstant::NO_PID) {
+ *pid = debugInfo.pid;
+ return NO_ERROR;
+ }
+ ALOGW("%s: effect debug info does not contain pid", __func__);
+ return NAME_NOT_FOUND;
+}
+
+status_t EffectHalHidl::getHalWorkerTid(pid_t *tid) {
+ int32_t reply = -1;
+ uint32_t replySize = sizeof(reply);
+ const status_t status =
+ command('gtid', 0 /* cmdSize */, nullptr /* pCmdData */, &replySize, &reply);
+ if (status == OK) {
+ *tid = (pid_t)reply;
+ } else {
+ ALOGW("%s: failed with status:%d", __func__, status);
+ }
+ return status;
+}
+
+bool EffectHalHidl::requestHalThreadPriority(pid_t threadPid, pid_t threadId) {
+ if (mHalThreadPriority == kRTPriorityDisabled) {
+ return true;
+ }
+ const int err = requestPriority(
+ threadPid, threadId,
+ mHalThreadPriority, false /*isForApp*/, true /*asynchronous*/);
+ ALOGW_IF(err, "%s: failed to set RT priority %d for pid %d tid %d; error %d",
+ __func__, mHalThreadPriority, threadPid, threadId, err);
+ // Audio will still work, but may be more susceptible to glitches.
+ return err == 0;
+}
+
+status_t EffectHalHidl::checkHalThreadPriority() {
+ if (mHalThreadPriority == kRTPriorityDisabled) return OK;
+ if (mHalThreadPriority < kRTPriorityMin
+ || mHalThreadPriority > kRTPriorityMax) return BAD_VALUE;
+
+ pid_t halPid, halWorkerTid;
+ const status_t status = getHalPid(&halPid) ?: getHalWorkerTid(&halWorkerTid);
+ const bool success = status == OK && requestHalThreadPriority(halPid, halWorkerTid);
+ ALOGD("%s: effectId %lld RT priority(%d) request %s%s",
+ __func__, (long long)mEffectId, mHalThreadPriority,
+ success ? "succeeded" : "failed",
+ status == OK
+ ? base::StringPrintf(" for pid:%d tid:%d", halPid, halWorkerTid).c_str()
+ : " (pid / tid cannot be read)");
+ return success ? OK : status != OK ? status : INVALID_OPERATION /* request failed */;
+}
+
} // namespace effect
} // namespace android
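A note on checkHalThreadPriority above: it relies on the GCC/Clang "?:" extension, where a ?: b yields a if a is non-zero and otherwise evaluates b, so getHalWorkerTid only runs when getHalPid returned OK (0). A tiny illustration of that behavior (helpers are illustrative only):

    #include <cassert>

    static int failing()    { return -1; }  // stands in for a call returning an error status
    static int succeeding() { return 0; }   // stands in for a call returning OK

    int main() {
        int s1 = failing() ?: succeeding();    // -1: the second call is skipped
        int s2 = succeeding() ?: failing();    // first call returned 0, so the second one runs
        assert(s1 == -1 && s2 == -1);
        return 0;
    }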
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index e139768..94dcd7e 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -78,6 +78,11 @@
std::unique_ptr<StatusMQ> mStatusMQ;
EventFlag* mEfGroup;
bool mIsInput = false;
+ static constexpr int32_t kRTPriorityMin = 1;
+ static constexpr int32_t kRTPriorityMax = 3;
+ static constexpr int kRTPriorityDisabled = 0;
+ // Typical RealTime mHalThreadPriority ranges from 1 (low) to 3 (high).
+ int mHalThreadPriority = kRTPriorityDisabled;
// Can not be constructed directly by clients.
EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId);
@@ -93,6 +98,10 @@
uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
uint32_t *replySize, void *pReplyData);
status_t setProcessBuffers();
+ status_t getHalPid(pid_t *pid) const;
+ status_t getHalWorkerTid(pid_t *tid);
+ bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
+ status_t checkHalThreadPriority();
};
} // namespace effect
diff --git a/media/libheadtracking/SensorPoseProvider.cpp b/media/libheadtracking/SensorPoseProvider.cpp
index bd8af04..3dee40a 100644
--- a/media/libheadtracking/SensorPoseProvider.cpp
+++ b/media/libheadtracking/SensorPoseProvider.cpp
@@ -122,6 +122,7 @@
~SensorPoseProviderImpl() override {
// Disable all active sensors.
mEnabledSensors.clear();
+ mQuit = true;
mLooper->wake();
mThread.join();
}
@@ -217,6 +218,7 @@
std::optional<int32_t> discontinuityCount;
};
+ bool mQuit = false;
sp<Looper> mLooper;
Listener* const mListener;
SensorManager* const mSensorManager;
@@ -260,13 +262,14 @@
initFinished(true);
- while (true) {
+ while (!mQuit) {
int ret = mLooper->pollOnce(-1 /* no timeout */, nullptr, nullptr, nullptr);
switch (ret) {
case ALOOPER_POLL_WAKE:
- // Normal way to exit.
- return;
+ // Continue to see if mQuit flag is set.
+ // This can be spurious (due to bugreport being taken).
+ continue;
case kIdent:
// Possible events on our queue.
@@ -285,7 +288,8 @@
ssize_t size = mQueue->filterEvents(&event, actual);
if (size < 0 || size > 1) {
- ALOGE("Unexpected return value from SensorEventQueue::filterEvents: %zd", size);
+ ALOGE("%s: Unexpected return value from SensorEventQueue::filterEvents: %zd",
+ __func__, size);
break;
}
if (size == 0) {
@@ -295,6 +299,7 @@
handleEvent(event);
}
+ ALOGD("%s: Exiting sensor event loop", __func__);
}
void handleEvent(const ASensorEvent& event) {
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index a0bc8ca..6497b58 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -129,6 +129,7 @@
mRTPCVOExtMap(-1),
mRTPCVODegrees(0),
mRTPSockDscp(0),
+ mRTPSockOptEcn(0),
mRTPSockNetwork(0),
mLastSeqNo(0),
mStarted(false),
@@ -910,6 +911,13 @@
return OK;
}
+status_t StagefrightRecorder::setParamRtpEcn(int32_t ecn) {
+ ALOGV("setParamRtpEcn: %d", ecn);
+
+ mRTPSockOptEcn = ecn;
+ return OK;
+}
+
status_t StagefrightRecorder::requestIDRFrame() {
status_t ret = BAD_VALUE;
if (mVideoEncoderSource != NULL) {
@@ -1091,6 +1099,11 @@
if (safe_strtoi32(value.string(), &dscp)) {
return setParamRtpDscp(dscp);
}
+ } else if (key == "rtp-param-set-socket-ecn") {
+ int32_t targetEcn;
+ if (safe_strtoi32(value.string(), &targetEcn)) {
+ return setParamRtpEcn(targetEcn);
+ }
} else if (key == "rtp-param-set-socket-network") {
int64_t networkHandle;
if (safe_strtoi64(value.string(), &networkHandle)) {
@@ -1272,6 +1285,9 @@
if (mRTPSockDscp > 0) {
meta->setInt32(kKeyRtpDscp, mRTPSockDscp);
}
+ if (mRTPSockOptEcn > 0) {
+ meta->setInt32(kKeyRtpEcn, mRTPSockOptEcn);
+ }
status = mWriter->start(meta.get());
break;
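The new "rtp-param-set-socket-ecn" key is handled like the other rtp-param-* keys: it is parsed by setParameter() and forwarded to the RTP writer through kKeyRtpEcn when recording starts. A hedged client-side sketch, assuming a MediaRecorder already configured for RTP output (the call below is illustrative, not part of this change):

    #include <media/mediarecorder.h>
    #include <utils/String8.h>

    // Sketch: ask the recorder to mark outgoing RTP packets as ECN-capable.
    // The value uses the 2-bit ECN field encoding of RFC 3168 (e.g. 2 = ECT(0)).
    void enableRtpEcn(const android::sp<android::MediaRecorder>& recorder) {
        (void)recorder->setParameters(android::String8("rtp-param-set-socket-ecn=2"));
    }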
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index d7785da..0801101 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -153,6 +153,7 @@
int32_t mRTPCVOExtMap;
int32_t mRTPCVODegrees;
int32_t mRTPSockDscp;
+ int32_t mRTPSockOptEcn;
int64_t mRTPSockNetwork;
uint32_t mLastSeqNo;
@@ -247,6 +248,7 @@
status_t setRTPCVOExtMap(int32_t extmap);
status_t setRTPCVODegrees(int32_t cvoDegrees);
status_t setParamRtpDscp(int32_t dscp);
+ status_t setParamRtpEcn(int32_t ecn);
status_t setSocketNetwork(int64_t networkHandle);
status_t requestIDRFrame();
void clipVideoBitRate();
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index 6a17972..fd03150 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -115,7 +115,7 @@
int sockRtp, sockRtcp;
ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
- info->mLocalPort, info->mRemotePort, info->mSocketNetwork);
+ info->mLocalPort, info->mRemotePort, info->mSocketNetwork, info->mRtpSockOptEcn);
sp<AMessage> notify = new AMessage('accu', this);
@@ -125,6 +125,8 @@
mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
mRTPConn->setSelfID(info->mSelfID);
mRTPConn->setStaticJitterTimeMs(info->mJbTimeMs);
+ mRTPConn->setRtpSockOptEcn(info->mRtpSockOptEcn);
+ mRTPConn->setIsIPv6(info->mLocalIp);
unsigned long PT;
AString formatDesc, formatParams;
@@ -719,6 +721,8 @@
} else if (key == "rtp-param-set-socket-network") {
int64_t networkHandle = atoll(value);
setSocketNetwork(networkHandle);
+ } else if (key == "rtp-param-set-socket-ecn") {
+ info->mRtpSockOptEcn = atoi(value);
} else if (key == "rtp-param-jitter-buffer-time") {
// clamping min at 40, max at 3000
info->mJbTimeMs = std::min(std::max(40, atoi(value)), 3000);
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
index 7d9bb8f..b2afe86 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
@@ -121,6 +121,8 @@
uint32_t mSelfID;
/* extmap:<value> for CVO will be set to here */
int32_t mCVOExtMap;
+        /* Indicates whether ECN is enabled for the RTP socket */
+ int32_t mRtpSockOptEcn;
/* a copy of TrackInfo in RTSPSource */
sp<AnotherPacketSource> mSource;
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 33f224c..2ca0e33 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -267,6 +267,7 @@
kKeyRtpExtMap = 'extm', // int32_t, rtp extension ID for cvo on RTP protocol.
kKeyRtpCvoDegrees = 'cvod', // int32_t, rtp cvo degrees as per 3GPP 26.114.
kKeyRtpDscp = 'dscp', // int32_t, DSCP(Differentiated services codepoint) of RFC 2474.
+ kKeyRtpEcn = 'sEcn', // int32_t, ECN (Explicit Congestion Notification) of RFC 3168
kKeySocketNetwork = 'sNet', // int64_t, socket will be bound to network handle.
// Slow-motion markers
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index a61f48f..165c336 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -16,6 +16,12 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ARTPConnection"
+#define INET_ECN_NOT_ECT 0x00 /* not ECN-capable transport */
+#define INET_ECN_ECT_1 0x01 /* ECN-capable transport, ECT(1) */
+#define INET_ECN_ECT_0 0x02 /* ECN-capable transport, ECT(0) */
+#define INET_ECN_CE 0x03 /* congestion experienced */
+#define INET_ECN_MASK 0x03 /* mask of ECN bits */
+
#include <utils/Log.h>
#include <media/stagefright/rtsp/ARTPAssembler.h>
@@ -56,6 +62,7 @@
// static
const int64_t ARTPConnection::kSelectTimeoutUs = 1000LL;
+const int64_t ARTPConnection::kMinOneSecondNotifyDelayUs = 100000ll;
struct ARTPConnection::StreamInfo {
bool isIPv6;
@@ -84,7 +91,10 @@
mPollEventPending(false),
mLastReceiverReportTimeUs(-1),
mLastBitrateReportTimeUs(-1),
+ mLastCongestionNotifyTimeUs(-1),
mTargetBitrate(-1),
+ mRtpSockOptEcn(0),
+ mIsIPv6(false),
mStaticJitterTimeMs(kStaticJitterTimeMs) {
}
@@ -175,7 +185,7 @@
// static
void ARTPConnection::MakeRTPSocketPair(
int *rtpSocket, int *rtcpSocket, const char *localIp, const char *remoteIp,
- unsigned localPort, unsigned remotePort, int64_t socketNetwork) {
+ unsigned localPort, unsigned remotePort, int64_t socketNetwork, int32_t sockOptEcn) {
bool isIPv6 = false;
if (strchr(localIp, ':') != NULL)
isIPv6 = true;
@@ -204,6 +214,24 @@
}
}
+ if (sockOptEcn != 0) {
+ int sockOptForTOS = 1;
+ if (setsockopt(*rtpSocket, isIPv6 ? IPPROTO_IPV6 : IPPROTO_IP,
+ isIPv6 ? IPV6_RECVTCLASS : IP_RECVTOS,
+ (int *)&sockOptForTOS, sizeof(sockOptForTOS)) < 0) {
+ ALOGE("failed to set recv sockopt TOS on rtpsock(%d). err=%s", *rtpSocket,
+ strerror(errno));
+ } else {
+ ALOGD("successfully set recv sockopt TOS on rtpsock(%d)", *rtpSocket);
+ int result = setsockopt(*rtcpSocket, isIPv6 ? IPPROTO_IPV6 : IPPROTO_IP,
+ isIPv6 ? IPV6_RECVTCLASS : IP_RECVTOS,
+ (int *)&sockOptForTOS, sizeof(sockOptForTOS));
+ if (result >= 0) {
+ ALOGD("successfully set recv sockopt TOS on rtcpsock(%d).", *rtcpSocket);
+ }
+ }
+ }
+
bumpSocketBufferSize(*rtcpSocket);
struct sockaddr *addr;
@@ -593,32 +621,25 @@
sp<ABuffer> buffer = new ABuffer(65536);
- struct sockaddr *pRemoteRTCPAddr;
- int sizeSockSt;
- if (s->isIPv6) {
- pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr6;
- sizeSockSt = sizeof(struct sockaddr_in6);
- } else {
- pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr;
- sizeSockSt = sizeof(struct sockaddr_in);
- }
- socklen_t remoteAddrLen =
- (!receiveRTP && s->mNumRTCPPacketsReceived == 0)
- ? sizeSockSt : 0;
+ struct msghdr sMsg = {};
+ struct iovec sIov[1] = {};
- if (mFlags & kViLTEConnection) {
- remoteAddrLen = 0;
- }
+ sIov[0].iov_base = (char *) buffer->data();
+ sIov[0].iov_len = buffer->capacity();
+
+ sMsg.msg_iov = sIov;
+ sMsg.msg_iovlen = 1;
+
+ int cMsgSize = sizeof(struct cmsghdr) + sizeof(uint8_t);
+ char buf[CMSG_SPACE(cMsgSize)];
+ sMsg.msg_control = buf;
+ sMsg.msg_controllen = sizeof(buf);
+ sMsg.msg_flags = 0;
ssize_t nbytes;
do {
- nbytes = recvfrom(
- receiveRTP ? s->mRTPSocket : s->mRTCPSocket,
- buffer->data(),
- buffer->capacity(),
- 0,
- remoteAddrLen > 0 ? pRemoteRTCPAddr : NULL,
- remoteAddrLen > 0 ? &remoteAddrLen : NULL);
+        // Use recvmsg so the TOS byte of the incoming packet is delivered as ancillary data
+ nbytes = recvmsg(receiveRTP ? s->mRTPSocket : s->mRTCPSocket, &sMsg, 0);
mCumulativeBytes += nbytes;
} while (nbytes < 0 && errno == EINTR);
@@ -633,6 +654,10 @@
}
}
+ if (nbytes > 0) {
+ handleIpHeadersIfReceived(s, sMsg);
+ }
+
buffer->setRange(0, nbytes);
// ALOGI("received %d bytes.", buffer->size());
@@ -647,13 +672,68 @@
return err;
}
+/* Checks whether a TOS/TCLASS value was delivered with the received IP packet.
+ * If it is present and its CE bit is set, congestion is reported to the upper
+ * layer.
+ */
+void ARTPConnection::handleIpHeadersIfReceived(StreamInfo *s, struct msghdr sMsg) {
+ struct cmsghdr *cMsg;
+ cMsg = CMSG_FIRSTHDR(&sMsg);
+
+ if (cMsg == NULL) {
+ ALOGV("cmsg is null");
+ }
+
+ for (; cMsg != NULL; cMsg = CMSG_NXTHDR(&sMsg, cMsg)) {
+ bool isTOSHeader = ((cMsg->cmsg_level == (mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP))
+ && (cMsg->cmsg_type == (mIsIPv6 ? IPV6_TCLASS : IP_TOS))
+ && (cMsg->cmsg_len));
+ if (isTOSHeader) {
+ uint8_t receivedTOS;
+ receivedTOS = *((uint8_t *) CMSG_DATA(cMsg));
+            // check whether the CE bit is set
+ bool isCEBitMarked = ((receivedTOS & INET_ECN_MASK) == INET_ECN_CE);
+
+ ALOGV("receivedTos(value -> %d)", receivedTOS);
+
+ if (isCEBitMarked) {
+ ALOGD("receivedTos(value -> %d), is ECN CE marked = %d",
+ receivedTOS, isCEBitMarked);
+ notifyCongestionToUpperLayerIfNeeded(s);
+ }
+ break;
+ }
+ }
+}
+
+/* Notifies the upper layer about congestion detected during a video call. */
+void ARTPConnection::notifyCongestionToUpperLayerIfNeeded(StreamInfo *s) {
+ int64_t nowUs = ALooper::GetNowUs();
+
+ if (mLastCongestionNotifyTimeUs <= 0) {
+ mLastCongestionNotifyTimeUs = nowUs;
+ }
+
+ bool isNeedToUpdate = (mLastCongestionNotifyTimeUs + kMinOneSecondNotifyDelayUs <= nowUs);
+ ALOGD("ECN info set by upper layer=%d, isNeedToUpdate=%d", mRtpSockOptEcn, isNeedToUpdate);
+
+ if ((mRtpSockOptEcn != 0) && (isNeedToUpdate)) {
+ sp<AMessage> notify = s->mNotifyMsg->dup();
+ notify->setInt32("rtcp-event", 1);
+ notify->setInt32("payload-type", ARTPSource::RTP_QUALITY_CD);
+ notify->post();
+ mLastCongestionNotifyTimeUs = nowUs;
+ ALOGD("Congestion detected in n/w, Notify upper layer");
+ }
+}
+
ssize_t ARTPConnection::send(const StreamInfo *info, const sp<ABuffer> buffer) {
struct sockaddr* pRemoteRTCPAddr;
int sizeSockSt;
/* It seems this isIPv6 variable is useless.
* We should remove it to prevent confusion */
- if (info->isIPv6) {
+ if (mIsIPv6) {
pRemoteRTCPAddr = (struct sockaddr *)&info->mRemoteRTCPAddr6;
sizeSockSt = sizeof(struct sockaddr_in6);
} else {
@@ -1215,12 +1295,20 @@
mTargetBitrate = targetBitrate;
}
+void ARTPConnection::setRtpSockOptEcn(int32_t sockOptEcn) {
+ mRtpSockOptEcn = sockOptEcn;
+}
+
+void ARTPConnection::setIsIPv6(const char *localIp) {
+ mIsIPv6 = (strchr(localIp, ':') != nullptr);
+}
+
void ARTPConnection::checkRxBitrate(int64_t nowUs) {
if (mLastBitrateReportTimeUs <= 0) {
mCumulativeBytes = 0;
mLastBitrateReportTimeUs = nowUs;
}
- else if (mLastEarlyNotifyTimeUs + 100000ll <= nowUs) {
+ else if (mLastEarlyNotifyTimeUs + kMinOneSecondNotifyDelayUs <= nowUs) {
int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
mLastEarlyNotifyTimeUs = nowUs;
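The receive path above enables IP_RECVTOS / IPV6_RECVTCLASS on the sockets and replaces recvfrom with recvmsg so the kernel delivers the TOS/TCLASS byte as ancillary data. A condensed, self-contained sketch of that mechanism for a plain IPv4 POSIX socket (names are illustrative, not part of the patch):

    #include <netinet/in.h>
    #include <sys/socket.h>
    #include <sys/uio.h>
    #include <cstddef>
    #include <cstdint>

    // Reads one datagram and returns true if its ECN field is CE (both bits set).
    // Assumes the caller enabled TOS reporting beforehand:
    //   int on = 1;
    //   setsockopt(fd, IPPROTO_IP, IP_RECVTOS, &on, sizeof(on));
    static bool receiveAndCheckCe(int fd, std::uint8_t* payload, std::size_t payloadSize) {
        struct iovec iov = { payload, payloadSize };
        char control[CMSG_SPACE(sizeof(std::uint8_t))];
        struct msghdr msg = {};
        msg.msg_iov = &iov;
        msg.msg_iovlen = 1;
        msg.msg_control = control;
        msg.msg_controllen = sizeof(control);

        if (recvmsg(fd, &msg, 0) < 0) {
            return false;
        }
        for (struct cmsghdr* c = CMSG_FIRSTHDR(&msg); c != nullptr; c = CMSG_NXTHDR(&msg, c)) {
            if (c->cmsg_level == IPPROTO_IP && c->cmsg_type == IP_TOS) {
                const std::uint8_t tos = *reinterpret_cast<std::uint8_t*>(CMSG_DATA(c));
                return (tos & 0x03) == 0x03;  // lower two bits == CE
            }
        }
        return false;
    }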
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 8990f0c..41f2d67 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -255,9 +255,34 @@
if (params->findInt32(kKeyRtpCvoDegrees, &rtpCVODegrees))
mRTPCVODegrees = rtpCVODegrees;
+ bool needToSetSockOpt = false;
int32_t dscp = 0;
- if (params->findInt32(kKeyRtpDscp, &dscp))
- updateSocketDscp(dscp);
+ if (params->findInt32(kKeyRtpDscp, &dscp)) {
+ mRtpLayer3Dscp = dscp << 2;
+ needToSetSockOpt = true;
+ }
+
+ int32_t ecn = 0;
+ if (params->findInt32(kKeyRtpEcn, &ecn)) {
+ /*
+     * @ecn: possible values of the ECN field.
+ * +-----+-----+
+ * | ECN FIELD |
+ * +-----+-----+
+ * ECT CE [Obsolete] RFC 2481 names for the ECN bits.
+ * 0 0 Not-ECT
+ * 0 1 ECT (ECN-Capable Transport) (1)
+ * 1 0 ECT (ECN-Capable Transport) (0)
+ * 1 1 CE (Congestion Experienced)
+ *
+ */
+ mRtpSockOptEcn = ecn;
+ needToSetSockOpt = true;
+ }
+
+ if (needToSetSockOpt) {
+ updateSocketOpt();
+ }
int64_t sockNetwork = 0;
if (params->findInt64(kKeySocketNetwork, &sockNetwork))
@@ -1438,18 +1463,29 @@
mPayloadType = payloadType;
}
-void ARTPWriter::updateSocketDscp(int32_t dscp) {
- mRtpLayer3Dscp = dscp << 2;
+/*
+ * Sets the TOS/TCLASS socket option carrying the DSCP and ECN bits of the IP header.
+ */
+void ARTPWriter::updateSocketOpt() {
+ /*
+ * 0 1 2 3 4 5 6 7
+ * +-----+-----+-----+-----+-----+-----+-----+-----+
+ * | DS FIELD, DSCP | ECN FIELD |
+ * +-----+-----+-----+-----+-----+-----+-----+-----+
+ */
+ int sockOpt = mRtpLayer3Dscp ^ mRtpSockOptEcn;
+ ALOGD("Update socket opt with sockopt=%d, mRtpLayer3Dscp=%d, mRtpSockOptEcn=%d",
+ sockOpt, mRtpLayer3Dscp, mRtpSockOptEcn);
- /* mRtpLayer3Dscp will be mapped to WMM(Wifi) as per operator's requirement */
- if (setsockopt(mRTPSocket, IPPROTO_IP, IP_TOS,
- (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp)) < 0) {
- ALOGE("failed to set dscp on rtpsock. err=%s", strerror(errno));
+    /* sockOpt carries the combined DSCP and ECN bits written into the IP header TOS/TCLASS field */
+ if (setsockopt(mRTPSocket, mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP, mIsIPv6 ? IPV6_TCLASS : IP_TOS,
+ (int *)&sockOpt, sizeof(sockOpt)) < 0) {
+ ALOGE("failed to set sockopt on rtpsock. err=%s", strerror(errno));
} else {
- ALOGD("successfully set dscp on rtpsock. opt=%d", mRtpLayer3Dscp);
- setsockopt(mRTCPSocket, IPPROTO_IP, IP_TOS,
- (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp));
- ALOGD("successfully set dscp on rtcpsock. opt=%d", mRtpLayer3Dscp);
+ ALOGD("successfully set sockopt. opt=%d", sockOpt);
+ setsockopt(mRTCPSocket, mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP, mIsIPv6 ? IPV6_TCLASS : IP_TOS,
+ (int *)&sockOpt, sizeof(sockOpt));
+ ALOGD("successfully set sockopt rtcpsock. opt=%d", sockOpt);
}
}
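updateSocketOpt writes a single byte whose upper six bits are the DSCP and whose lower two bits are the ECN field, which is why mRtpLayer3Dscp is stored pre-shifted (dscp << 2) and can be combined directly with mRtpSockOptEcn (the '^' acts like '|' here because the two bit ranges never overlap). A small worked example with illustrative values:

    #include <cassert>
    #include <cstdint>

    int main() {
        const std::int32_t dscp = 34;                    // e.g. AF41
        const std::int32_t rtpLayer3Dscp = dscp << 2;    // DSCP occupies bits 7..2 of TOS/TCLASS
        const std::int32_t ecn = 0x02;                   // ECT(0) occupies bits 1..0
        const std::int32_t tos = rtpLayer3Dscp | ecn;    // 0x88 | 0x02 == 0x8A
        assert(tos == (rtpLayer3Dscp ^ ecn));            // identical result: no overlapping bits
        assert((tos >> 2) == dscp);                      // DSCP recovered from the upper bits
        assert((tos & 0x03) == ecn);                     // ECN field recovered from the lower bits
        return 0;
    }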
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
index 73d2866..250de71 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
@@ -20,6 +20,7 @@
#include <media/stagefright/foundation/AHandler.h>
#include <utils/List.h>
+#include <sys/socket.h>
namespace android {
@@ -48,6 +49,8 @@
void setSelfID(const uint32_t selfID);
void setStaticJitterTimeMs(const uint32_t jbTimeMs);
void setTargetBitrate(int32_t targetBitrate);
+ void setRtpSockOptEcn(int32_t sockOptEcn);
+ void setIsIPv6(const char *localIp);
// Creates a pair of UDP datagram sockets bound to adjacent ports
// (the rtpSocket is bound to an even port, the rtcpSocket to the
@@ -60,7 +63,8 @@
static void MakeRTPSocketPair(
int *rtpSocket, int *rtcpSocket,
const char *localIp, const char *remoteIp,
- unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0);
+ unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0,
+ int32_t sockOptEcn = 0);
protected:
virtual ~ARTPConnection();
@@ -77,6 +81,7 @@
};
static const int64_t kSelectTimeoutUs;
+ static const int64_t kMinOneSecondNotifyDelayUs;
uint32_t mFlags;
@@ -87,9 +92,12 @@
int64_t mLastReceiverReportTimeUs;
int64_t mLastBitrateReportTimeUs;
int64_t mLastEarlyNotifyTimeUs;
+ int64_t mLastCongestionNotifyTimeUs;
int32_t mSelfID;
int32_t mTargetBitrate;
+ int32_t mRtpSockOptEcn;
+ bool mIsIPv6;
uint32_t mStaticJitterTimeMs;
@@ -103,6 +111,8 @@
void onInjectPacket(const sp<AMessage> &msg);
void onSendReceiverReports();
void checkRxBitrate(int64_t nowUs);
+ void notifyCongestionToUpperLayerIfNeeded(StreamInfo *s);
+ void handleIpHeadersIfReceived(StreamInfo *s, struct msghdr sMsg);
status_t receive(StreamInfo *info, bool receiveRTP);
ssize_t send(const StreamInfo *info, const sp<ABuffer> buffer);
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
index e9b4942..3fa5713 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
@@ -50,6 +50,7 @@
RTCP_FIRST_PACKET = 101,
RTP_QUALITY = 102,
RTP_QUALITY_EMC = 103,
+ RTP_QUALITY_CD = 104,
RTCP_SR = 200,
RTCP_RR = 201,
RTCP_TSFB = 205,
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
index 2982cf6..ecd29d0 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
@@ -50,7 +50,7 @@
virtual status_t pause();
void updateCVODegrees(int32_t cvoDegrees);
void updatePayloadType(int32_t payloadType);
- void updateSocketDscp(int32_t dscp);
+ void updateSocketOpt();
void updateSocketNetwork(int64_t socketNetwork);
uint32_t getSequenceNum();
virtual uint64_t getAccumulativeBytes() override;
@@ -98,6 +98,7 @@
struct sockaddr_in6 mRTPAddr6;
struct sockaddr_in6 mRTCPAddr6;
int32_t mRtpLayer3Dscp;
+ int32_t mRtpSockOptEcn;
net_handle_t mRTPSockNetwork;
AString mProfileLevel;
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 05bbf4f..c6da1fe 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -283,7 +283,6 @@
return opPackageLegacy == package; }) == packages.end()) {
ALOGW("The package name(%s) provided does not correspond to the uid %d",
attributionSource.packageName.value_or("").c_str(), attributionSource.uid);
- checkedAttributionSource.packageName = std::optional<std::string>();
}
}
return checkedAttributionSource;
@@ -582,6 +581,33 @@
audio_io_handle_t io = AUDIO_IO_HANDLE_NONE;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
audio_attributes_t localAttr = *attr;
+
+ // TODO b/182392553: refactor or make clearer
+ pid_t clientPid =
+ VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
+ bool updatePid = (clientPid == (pid_t)-1);
+ const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+
+ AttributionSourceState adjAttributionSource = client.attributionSource;
+ if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+ uid_t clientUid =
+ VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
+ ALOGW_IF(clientUid != callingUid,
+ "%s uid %d tried to pass itself off as %d",
+ __FUNCTION__, callingUid, clientUid);
+ adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+ updatePid = true;
+ }
+ if (updatePid) {
+ const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+ "%s uid %d pid %d tried to pass itself off as pid %d",
+ __func__, callingUid, callingPid, clientPid);
+ adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
+ }
+ adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+ adjAttributionSource);
+
if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
audio_config_t fullConfig = AUDIO_CONFIG_INITIALIZER;
fullConfig.sample_rate = config->sample_rate;
@@ -591,7 +617,7 @@
bool isSpatialized;
ret = AudioSystem::getOutputForAttr(&localAttr, &io,
actualSessionId,
- &streamType, client.attributionSource,
+ &streamType, adjAttributionSource,
&fullConfig,
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
AUDIO_OUTPUT_FLAG_DIRECT),
@@ -602,7 +628,7 @@
ret = AudioSystem::getInputForAttr(&localAttr, &io,
RECORD_RIID_INVALID,
actualSessionId,
- client.attributionSource,
+ adjAttributionSource,
config,
AUDIO_INPUT_FLAG_MMAP_NOIRQ, deviceId, &portId);
}
@@ -1051,7 +1077,7 @@
audio_attributes_t localAttr = input.attr;
AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
- if (!isAudioServerOrMediaServerUid(callingUid)) {
+ if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
ALOGW_IF(clientUid != callingUid,
"%s uid %d tried to pass itself off as %d",
__FUNCTION__, callingUid, clientUid);
@@ -2275,7 +2301,7 @@
const uid_t callingUid = IPCThreadState::self()->getCallingUid();
const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
adjAttributionSource.uid));
- if (!isAudioServerOrMediaServerUid(callingUid)) {
+ if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
ALOGW_IF(currentUid != callingUid,
"%s uid %d tried to pass itself off as %d",
__FUNCTION__, callingUid, currentUid);
@@ -3917,7 +3943,7 @@
const uid_t callingUid = IPCThreadState::self()->getCallingUid();
adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
- if (currentPid == -1 || !isAudioServerOrMediaServerUid(callingUid)) {
+ if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
const pid_t callingPid = IPCThreadState::self()->getCallingPid();
ALOGW_IF(currentPid != -1 && currentPid != callingPid,
"%s uid %d pid %d tried to pass itself off as pid %d",
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 45dd258..b54b41f 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -313,12 +313,19 @@
patch->sources[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
patch->sources[0].flags.input : AUDIO_INPUT_FLAG_NONE;
audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
+ audio_source_t source = AUDIO_SOURCE_MIC;
+ // For telephony patches, propagate voice communication use case to record side
+ if (patch->num_sources == 2
+ && patch->sources[1].ext.mix.usecase.stream
+ == AUDIO_STREAM_VOICE_CALL) {
+ source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+ }
sp<ThreadBase> thread = mAudioFlinger.openInput_l(srcModule,
&input,
&config,
device,
address,
- AUDIO_SOURCE_MIC,
+ source,
flags,
outputDevice,
outputDeviceAddress);
@@ -516,9 +523,14 @@
audio_output_flags_t outputFlags = mAudioPatch.sinks[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
mAudioPatch.sinks[0].flags.output : AUDIO_OUTPUT_FLAG_NONE;
audio_stream_type_t streamType = AUDIO_STREAM_PATCH;
+ audio_source_t source = AUDIO_SOURCE_DEFAULT;
if (mAudioPatch.num_sources == 2 && mAudioPatch.sources[1].type == AUDIO_PORT_TYPE_MIX) {
// "reuse one existing output mix" case
streamType = mAudioPatch.sources[1].ext.mix.usecase.stream;
+ // For telephony patches, propagate voice communication use case to record side
+ if (streamType == AUDIO_STREAM_VOICE_CALL) {
+ source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+ }
}
if (mPlayback.thread()->hasFastMixer()) {
// Create a fast track if the playback thread has fast mixer to get better performance.
@@ -546,7 +558,8 @@
inChannelMask,
format,
frameCount,
- inputFlags);
+ inputFlags,
+ source);
} else {
// use a pseudo LCM between input and output framecount
int playbackShift = __builtin_ctz(playbackFrameCount);
@@ -566,7 +579,9 @@
frameCount,
nullptr,
(size_t)0 /* bufferSize */,
- inputFlags);
+ inputFlags,
+ {} /* timeout */,
+ source);
}
status = mRecord.checkTrack(tempRecordTrack.get());
if (status != NO_ERROR) {
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index e8552c4..daec57e 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -87,6 +87,10 @@
&& (flags & AUDIO_INPUT_FLAG_HW_AV_SYNC) == 0;
}
+ using SinkMetadatas = std::vector<record_track_metadata_v7_t>;
+ using MetadataInserter = std::back_insert_iterator<SinkMetadatas>;
+ virtual void copyMetadataTo(MetadataInserter& backInserter) const;
+
private:
friend class AudioFlinger; // for mState
@@ -134,7 +138,8 @@
void *buffer,
size_t bufferSize,
audio_input_flags_t flags,
- const Timeout& timeout = {});
+ const Timeout& timeout = {},
+ audio_source_t source = AUDIO_SOURCE_DEFAULT);
virtual ~PatchRecord();
virtual Source* getSource() { return nullptr; }
@@ -166,7 +171,8 @@
audio_channel_mask_t channelMask,
audio_format_t format,
size_t frameCount,
- audio_input_flags_t flags);
+ audio_input_flags_t flags,
+ audio_source_t source = AUDIO_SOURCE_DEFAULT);
Source* getSource() override { return static_cast<Source*>(this); }
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index e5c18ec..58af653 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -7321,6 +7321,27 @@
if (status != INVALID_OPERATION) {
updateHalSupportedLatencyModes_l();
}
+
+ // update priority if specified.
+ constexpr int32_t kRTPriorityMin = 1;
+ constexpr int32_t kRTPriorityMax = 3;
+ const int32_t priorityBoost =
+ property_get_int32("audio.spatializer.priority", kRTPriorityMin);
+ if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
+ const pid_t pid = getpid();
+ const pid_t tid = getTid();
+
+ if (tid == -1) {
+ // Unusual: PlaybackThread::onFirstRef() should set the threadLoop running.
+ ALOGW("%s: audio.spatializer.priority %d ignored, thread not running",
+ __func__, priorityBoost);
+ } else {
+ ALOGD("%s: audio.spatializer.priority %d, allowing real time for pid %d tid %d",
+ __func__, priorityBoost, pid, tid);
+ sendPrioConfigEvent_l(pid, tid, priorityBoost, false /*forApp*/);
+ stream()->setHalThreadPriority(priorityBoost);
+ }
+ }
}
status_t AudioFlinger::SpatializerThread::createAudioPatch_l(const struct audio_patch *patch,
@@ -8759,21 +8780,9 @@
return; // nothing to do
}
StreamInHalInterface::SinkMetadata metadata;
+ auto backInserter = std::back_inserter(metadata.tracks);
for (const sp<RecordTrack> &track : mActiveTracks) {
- // Do not forward PatchRecord metadata to audio HAL
- if (track->isPatchTrack()) {
- continue;
- }
- // No track is invalid as this is called after prepareTrack_l in the same critical section
- record_track_metadata_v7_t trackMetadata;
- trackMetadata.base = {
- .source = track->attributes().source,
- .gain = 1, // capture tracks do not have volumes
- };
- trackMetadata.channel_mask = track->channelMask(),
- strncpy(trackMetadata.tags, track->attributes().tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
-
- metadata.tracks.push_back(trackMetadata);
+ track->copyMetadataTo(backInserter);
}
mInput->stream->updateSinkMetadata(metadata);
}
@@ -10229,19 +10238,22 @@
void AudioFlinger::MmapThread::checkInvalidTracks_l()
{
+ sp<MmapStreamCallback> callback;
for (const sp<MmapTrack> &track : mActiveTracks) {
if (track->isInvalid()) {
- sp<MmapStreamCallback> callback = mCallback.promote();
- if (callback != 0) {
- mLock.unlock();
- callback->onTearDown(track->portId());
- mLock.lock();
- } else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
- ALOGW("Could not notify MMAP stream tear down: no onTearDown callback!");
+ callback = mCallback.promote();
+ if (callback == nullptr && mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+ ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
mNoCallbackWarningCount++;
}
+ break;
}
}
+ if (callback != 0) {
+ mLock.unlock();
+ callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+ mLock.lock();
+ }
}
void AudioFlinger::MmapThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index ac8909f..2a77d22 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1477,7 +1477,7 @@
}
}
- metadata.channel_mask = mChannelMask,
+ metadata.channel_mask = mChannelMask;
strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
*backInserter++ = metadata;
}
@@ -2021,7 +2021,6 @@
{
Buffer *pInBuffer;
Buffer inBuffer;
- bool outputBufferFull = false;
inBuffer.frameCount = frames;
inBuffer.raw = data;
@@ -2051,7 +2050,6 @@
ALOGV("%s(%d): thread %d no more output buffers; status %d",
__func__, mId,
(int)mThreadIoHandle, status);
- outputBufferFull = true;
break;
}
uint32_t waitTimeMs = (uint32_t)ns2ms(systemTime() - startTime);
@@ -2747,6 +2745,25 @@
}
}
+void AudioFlinger::RecordThread::RecordTrack::copyMetadataTo(MetadataInserter& backInserter) const
+{
+
+ // Do not forward PatchRecord metadata with unspecified audio source
+ if (mAttr.source == AUDIO_SOURCE_DEFAULT) {
+ return;
+ }
+
+ // No track is invalid as this is called after prepareTrack_l in the same critical section
+ record_track_metadata_v7_t metadata;
+ metadata.base = {
+ .source = mAttr.source,
+ .gain = 1, // capture tracks do not have volumes
+ };
+ metadata.channel_mask = mChannelMask;
+ strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+
+ *backInserter++ = metadata;
+}
// ----------------------------------------------------------------------------
#undef LOG_TAG
@@ -2760,9 +2777,10 @@
void *buffer,
size_t bufferSize,
audio_input_flags_t flags,
- const Timeout& timeout)
+ const Timeout& timeout,
+ audio_source_t source)
: RecordTrack(recordThread, NULL,
- audio_attributes_t{} /* currently unused for patch track */,
+ audio_attributes_t{ .source = source } ,
sampleRate, format, channelMask, frameCount,
buffer, bufferSize, AUDIO_SESSION_NONE, getpid(),
audioServerAttributionSource(getpid()), flags, TYPE_PATCH),
@@ -2873,9 +2891,10 @@
audio_channel_mask_t channelMask,
audio_format_t format,
size_t frameCount,
- audio_input_flags_t flags)
+ audio_input_flags_t flags,
+ audio_source_t source)
: PatchRecord(recordThread, sampleRate, channelMask, format, frameCount,
- nullptr /*buffer*/, 0 /*bufferSize*/, flags),
+ nullptr /*buffer*/, 0 /*bufferSize*/, flags, {} /* timeout */, source),
mPatchRecordAudioBufferProvider(*this),
mSinkBuffer(allocAligned(32, mFrameCount * mFrameSize)),
mStubBuffer(allocAligned(32, mFrameCount * mFrameSize))
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 0431619..7119b85 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -239,12 +239,13 @@
}
void setUseSwBridge() { mUseSwBridge = true; }
bool useSwBridge() const { return mUseSwBridge; }
+ bool canCloseOutput() const { return mCloseOutput; }
bool isConnected() const { return mPatchHandle != AUDIO_PATCH_HANDLE_NONE; }
audio_patch_handle_t getPatchHandle() const { return mPatchHandle; }
sp<DeviceDescriptor> srcDevice() const { return mSrcDevice; }
sp<DeviceDescriptor> sinkDevice() const { return mSinkDevice; }
wp<SwAudioOutputDescriptor> swOutput() const { return mSwOutput; }
- void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput);
+ void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput = false);
wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
@@ -258,6 +259,15 @@
wp<SwAudioOutputDescriptor> mSwOutput;
wp<HwAudioOutputDescriptor> mHwOutput;
bool mUseSwBridge = false;
+ /**
+     * For either a HW bridge associated with a SwOutput (for activity / volume) or a SW bridge
+     * (also for sample rendering / activity & volume), an existing playback thread may be
+     * reused (e.g. one not already opened at APM startup, or a Direct output).
+     * When reusing an already opened output, once that output is no longer used, the
+     * AudioFlinger patch must be updated to refine the output device(s) information and
+     * ensure the correct behavior of AudioDeviceCallback.
+ */
+ bool mCloseOutput = false;
};
/**
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 713b0ac..8b6866e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -105,9 +105,11 @@
{
}
-void SourceClientDescriptor::setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput)
+void SourceClientDescriptor::setSwOutput(
+ const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput)
{
mSwOutput = swOutput;
+ mCloseOutput = closeOutput;
}
void SourceClientDescriptor::setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput)
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index b8c39f4..e08caed 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -792,7 +792,8 @@
ALOGV("%s between source %s and sink %s", __func__,
srcDevice->toString().c_str(), sinkDevice->toString().c_str());
auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
- const audio_attributes_t aa = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
+ const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
+
struct audio_port_config source = {};
srcDevice->toAudioPortConfig(&source);
mCallTxSourceClient = new InternalSourceClientDescriptor(
@@ -4538,7 +4539,7 @@
// In case of Hw bridge, it is a Work Around. The mixPort used is the one declared
// in config XML to reach the sink so that is can be declared as available.
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- sp<SwAudioOutputDescriptor> outputDesc = nullptr;
+ sp<SwAudioOutputDescriptor> outputDesc;
if (!sourceDesc->isInternal()) {
// take care of dynamic routing for SwOutput selection,
audio_attributes_t attributes = sourceDesc->attributes();
@@ -4567,7 +4568,8 @@
ALOGE("%s output is duplicated", __func__);
return INVALID_OPERATION;
}
- sourceDesc->setSwOutput(outputDesc);
+ bool closeOutput = outputDesc->mDirectOpenCount != 0;
+ sourceDesc->setSwOutput(outputDesc, closeOutput);
} else {
// Same for "raw patches" aka created from createAudioPatch API
SortedVector<audio_io_handle_t> outputs =
@@ -4586,7 +4588,7 @@
__func__, sinkDevice->toString().c_str());
return INVALID_OPERATION;
}
- sourceDesc->setSwOutput(outputDesc);
+ sourceDesc->setSwOutput(outputDesc, /* closeOutput= */ false);
}
// create a software bridge in PatchPanel if:
// - source and sink devices are on different HW modules OR
@@ -4608,7 +4610,8 @@
audio_port_config srcMixPortConfig = {};
outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
// for volume control, we may need a valid stream
- srcMixPortConfig.ext.mix.usecase.stream = !sourceDesc->isInternal() ?
+ srcMixPortConfig.ext.mix.usecase.stream =
+ (!sourceDesc->isInternal() || isCallTxAudioSource(sourceDesc)) ?
mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
AUDIO_STREAM_PATCH;
patchBuilder.addSource(srcMixPortConfig);
@@ -4715,17 +4718,29 @@
// releaseOutput has already called closeOutput in case of direct output
return NO_ERROR;
}
- if (!outputDesc->isActive() && !sourceDesc->useSwBridge()) {
- resetOutputDevice(outputDesc);
- } else {
- // Reuse patch handle if still valid / do not force rerouting if still routed
- patchHandle = outputDesc->getPatchHandle();
- setOutputDevices(outputDesc,
- getNewOutputDevices(outputDesc, true /*fromCache*/),
- patchHandle == AUDIO_PATCH_HANDLE_NONE, /*force*/
- 0,
- patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
- }
+ patchHandle = outputDesc->getPatchHandle();
+        // When a SW bridge is released, the mixer used by this bridge releases its
+        // patch on the AudioFlinger side, so the mixer audio patch must be recreated.
+        // Reuse the patch handle so that AudioFlinger removes the initial mixer patch
+        // and updates the HAL patch handle (prevents leaks).
+        // When using a HW bridge, force reconsidering the device only if an existing output
+        // is not being reused and there is no more activity on the output (forces it to close).
+ bool force = sourceDesc->useSwBridge() ||
+ (sourceDesc->canCloseOutput() && !outputDesc->isActive());
+        // The APM pattern is to keep outputs opened / patches realized for all reachable devices.
+        // Updating the device may yield NONE (empty); coupled with force, this releases the patch.
+        // Reconsider the device only in these cases:
+ // 1 / Active Output
+ // 2 / Inactive Output previously hosting HwBridge
+ // 3 / Inactive Output previously hosting SwBridge that can be closed.
+ bool updateDevice = outputDesc->isActive() || !sourceDesc->useSwBridge() ||
+ sourceDesc->canCloseOutput();
+ setOutputDevices(outputDesc,
+ updateDevice ? getNewOutputDevices(outputDesc, true /*fromCache*/) :
+ outputDesc->devices(),
+ force,
+ 0,
+ patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
} else {
return BAD_VALUE;
}
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 87e6974..2c1db79 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -639,6 +639,10 @@
return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
}
+ bool isCallTxAudioSource(const sp<SourceClientDescriptor> &source) {
+ return mCallTxSourceClient != nullptr && source == mCallTxSourceClient;
+ }
+
void connectTelephonyRxAudioSource();
void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index df49bba..49224c5 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -352,31 +352,20 @@
ALOGV("%s()", __func__);
Mutex::Autolock _l(mLock);
- // TODO b/182392553: refactor or remove
- AttributionSourceState adjAttributionSource = attributionSource;
- const uid_t callingUid = IPCThreadState::self()->getCallingUid();
- if (!isAudioServerOrMediaServerUid(callingUid) || attributionSource.uid == -1) {
- int32_t callingUidAidl = VALUE_OR_RETURN_BINDER_STATUS(
- legacy2aidl_uid_t_int32_t(callingUid));
- ALOGW_IF(attributionSource.uid != -1 && attributionSource.uid != callingUidAidl,
- "%s uid %d tried to pass itself off as %d", __func__,
- callingUidAidl, attributionSource.uid);
- adjAttributionSource.uid = callingUidAidl;
- }
if (!mPackageManager.allowPlaybackCapture(VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_int32_t_uid_t(adjAttributionSource.uid)))) {
+ aidl2legacy_int32_t_uid_t(attributionSource.uid)))) {
attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
}
if (((attr.flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
- && !bypassInterruptionPolicyAllowed(adjAttributionSource)) {
+ && !bypassInterruptionPolicyAllowed(attributionSource)) {
attr.flags = static_cast<audio_flags_mask_t>(
attr.flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
}
if (attr.content_type == AUDIO_CONTENT_TYPE_ULTRASOUND) {
- if (!accessUltrasoundAllowed(adjAttributionSource)) {
+ if (!accessUltrasoundAllowed(attributionSource)) {
ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
- __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+ __func__, attributionSource.uid, attributionSource.pid);
return binderStatusFromStatusT(PERMISSION_DENIED);
}
}
@@ -386,7 +375,7 @@
bool isSpatialized = false;
status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
&stream,
- adjAttributionSource,
+ attributionSource,
&config,
&flags, &selectedDeviceId, &portId,
&secondaryOutputs,
@@ -401,20 +390,20 @@
break;
case AudioPolicyInterface::API_OUTPUT_TELEPHONY_TX:
if (((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)
- && !callAudioInterceptionAllowed(adjAttributionSource)) {
+ && !callAudioInterceptionAllowed(attributionSource)) {
ALOGE("%s() permission denied: call redirection not allowed for uid %d",
- __func__, adjAttributionSource.uid);
+ __func__, attributionSource.uid);
result = PERMISSION_DENIED;
- } else if (!modifyPhoneStateAllowed(adjAttributionSource)) {
+ } else if (!modifyPhoneStateAllowed(attributionSource)) {
ALOGE("%s() permission denied: modify phone state not allowed for uid %d",
- __func__, adjAttributionSource.uid);
+ __func__, attributionSource.uid);
result = PERMISSION_DENIED;
}
break;
case AudioPolicyInterface::API_OUT_MIX_PLAYBACK:
- if (!modifyAudioRoutingAllowed(adjAttributionSource)) {
+ if (!modifyAudioRoutingAllowed(attributionSource)) {
ALOGE("%s() permission denied: modify audio routing not allowed for uid %d",
- __func__, adjAttributionSource.uid);
+ __func__, attributionSource.uid);
result = PERMISSION_DENIED;
}
break;
@@ -427,7 +416,7 @@
if (result == NO_ERROR) {
sp<AudioPlaybackClient> client =
- new AudioPlaybackClient(attr, output, adjAttributionSource, session,
+ new AudioPlaybackClient(attr, output, attributionSource, session,
portId, selectedDeviceId, stream, isSpatialized);
mAudioPlaybackClients.add(portId, client);
@@ -613,33 +602,8 @@
return binderStatusFromStatusT(BAD_VALUE);
}
- // Make sure attribution source represents the current caller
- AttributionSourceState adjAttributionSource = attributionSource;
- // TODO b/182392553: refactor or remove
- bool updatePid = (attributionSource.pid == -1);
- const uid_t callingUid =IPCThreadState::self()->getCallingUid();
- const uid_t currentUid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(
- attributionSource.uid));
- if (!isAudioServerOrMediaServerUid(callingUid)) {
- ALOGW_IF(currentUid != (uid_t)-1 && currentUid != callingUid,
- "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid,
- currentUid);
- adjAttributionSource.uid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_uid_t_int32_t(
- callingUid));
- updatePid = true;
- }
-
- if (updatePid) {
- const int32_t callingPid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_pid_t_int32_t(
- IPCThreadState::self()->getCallingPid()));
- ALOGW_IF(attributionSource.pid != -1 && attributionSource.pid != callingPid,
- "%s uid %d pid %d tried to pass itself off as pid %d",
- __func__, adjAttributionSource.uid, callingPid, attributionSource.pid);
- adjAttributionSource.pid = callingPid;
- }
-
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
- adjAttributionSource)));
+ attributionSource)));
// check calling permissions.
// Capturing from the following sources does not require permission RECORD_AUDIO
@@ -650,17 +614,17 @@
// type is API_INPUT_MIX_EXT_POLICY_REROUTE and by AudioService if a media projection
// is used and input type is API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK
// - ECHO_REFERENCE source is controlled by captureAudioOutputAllowed()
- if (!(recordingAllowed(adjAttributionSource, inputSource)
+ if (!(recordingAllowed(attributionSource, inputSource)
|| inputSource == AUDIO_SOURCE_FM_TUNER
|| inputSource == AUDIO_SOURCE_REMOTE_SUBMIX
|| inputSource == AUDIO_SOURCE_ECHO_REFERENCE)) {
ALOGE("%s permission denied: recording not allowed for %s",
- __func__, adjAttributionSource.toString().c_str());
+ __func__, attributionSource.toString().c_str());
return binderStatusFromStatusT(PERMISSION_DENIED);
}
- bool canCaptureOutput = captureAudioOutputAllowed(adjAttributionSource);
- bool canInterceptCallAudio = callAudioInterceptionAllowed(adjAttributionSource);
+ bool canCaptureOutput = captureAudioOutputAllowed(attributionSource);
+ bool canInterceptCallAudio = callAudioInterceptionAllowed(attributionSource);
bool isCallAudioSource = inputSource == AUDIO_SOURCE_VOICE_UPLINK
|| inputSource == AUDIO_SOURCE_VOICE_DOWNLINK
|| inputSource == AUDIO_SOURCE_VOICE_CALL;
@@ -674,11 +638,11 @@
}
if (inputSource == AUDIO_SOURCE_FM_TUNER
&& !canCaptureOutput
- && !captureTunerAudioInputAllowed(adjAttributionSource)) {
+ && !captureTunerAudioInputAllowed(attributionSource)) {
return binderStatusFromStatusT(PERMISSION_DENIED);
}
- bool canCaptureHotword = captureHotwordAllowed(adjAttributionSource);
+ bool canCaptureHotword = captureHotwordAllowed(attributionSource);
if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
return binderStatusFromStatusT(PERMISSION_DENIED);
}
@@ -686,14 +650,14 @@
if (((flags & AUDIO_INPUT_FLAG_HW_HOTWORD) != 0)
&& !canCaptureHotword) {
ALOGE("%s: permission denied: hotword mode not allowed"
- " for uid %d pid %d", __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+ " for uid %d pid %d", __func__, attributionSource.uid, attributionSource.pid);
return binderStatusFromStatusT(PERMISSION_DENIED);
}
if (attr.source == AUDIO_SOURCE_ULTRASOUND) {
- if (!accessUltrasoundAllowed(adjAttributionSource)) {
+ if (!accessUltrasoundAllowed(attributionSource)) {
ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
- __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+ __func__, attributionSource.uid, attributionSource.pid);
return binderStatusFromStatusT(PERMISSION_DENIED);
}
}
@@ -708,7 +672,7 @@
AutoCallerClear acc;
// the audio_in_acoustics_t parameter is ignored by get_input()
status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
- adjAttributionSource, &config,
+ attributionSource, &config,
flags, &selectedDeviceId,
&inputType, &portId);
@@ -737,7 +701,7 @@
}
break;
case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
- if (!(modifyAudioRoutingAllowed(adjAttributionSource)
+ if (!(modifyAudioRoutingAllowed(attributionSource)
|| ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
&& canInterceptCallAudio))) {
ALOGE("%s permission denied for remote submix capture", __func__);
@@ -760,7 +724,7 @@
}
sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
- selectedDeviceId, adjAttributionSource,
+ selectedDeviceId, attributionSource,
canCaptureOutput, canCaptureHotword,
mOutputCommandThread);
mAudioRecordClients.add(portId, client);
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 9c989ce..be95d58 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -741,6 +741,17 @@
msg->post();
}
+void Spatializer::resetEngineHeadPose_l() {
+ ALOGV("%s mEngine %p", __func__, mEngine.get());
+ if (mEngine == nullptr) {
+ return;
+ }
+ const std::vector<float> headToStage(6, 0.0);
+ setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{SpatializerHeadTrackingMode::DISABLED});
+}
+
void Spatializer::onHeadToStagePoseMsg(const std::vector<float>& headToStage) {
ALOGV("%s", __func__);
sp<media::ISpatializerHeadTrackingCallback> callback;
@@ -792,8 +803,12 @@
}
mActualHeadTrackingMode = spatializerMode;
if (mEngine != nullptr) {
- setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+ if (spatializerMode == SpatializerHeadTrackingMode::DISABLED) {
+ resetEngineHeadPose_l();
+ } else {
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+ }
}
callback = mHeadTrackingCallback;
mLocalLog.log("%s: %s, spatializerMode %s", __func__, media::toString(mode).c_str(),
@@ -923,16 +938,25 @@
bool lowLatencySupported = mSupportedLatencyModes.empty()
|| (std::find(mSupportedLatencyModes.begin(), mSupportedLatencyModes.end(),
AUDIO_LATENCY_MODE_LOW) != mSupportedLatencyModes.end());
- if (mSupportsHeadTracking && mPoseController != nullptr) {
- if (lowLatencySupported && mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
- && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
- && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
- mPoseController->setHeadSensor(mHeadSensor);
- mPoseController->setScreenSensor(mScreenSensor);
- requestedLatencyMode = AUDIO_LATENCY_MODE_LOW;
+ if (mSupportsHeadTracking) {
+ if (mPoseController != nullptr) {
+ if (lowLatencySupported && mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
+ && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
+ && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
+ if (mEngine != nullptr) {
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
+ }
+ mPoseController->setHeadSensor(mHeadSensor);
+ mPoseController->setScreenSensor(mScreenSensor);
+ requestedLatencyMode = AUDIO_LATENCY_MODE_LOW;
+ } else {
+ mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+ mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+ resetEngineHeadPose_l();
+ }
} else {
- mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
- mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+ resetEngineHeadPose_l();
}
}
if (mOutput != AUDIO_IO_HANDLE_NONE) {
@@ -946,8 +970,6 @@
mEngine->setEnabled(true);
setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
std::vector<SpatializationLevel>{mLevel});
- setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
} else {
setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
std::vector<SpatializationLevel>{SpatializationLevel::NONE});
@@ -969,6 +991,7 @@
mPoseController->setDisplayOrientation(mDisplayOrientation);
} else if (!isControllerNeeded && mPoseController != nullptr) {
mPoseController.reset();
+ resetEngineHeadPose_l();
}
if (mPoseController != nullptr) {
mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
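Illustrative only: a minimal C++ sketch of the head-pose reset added above in Spatializer::resetEngineHeadPose_l(); the engine struct and mode enum below are hypothetical stand-ins, whereas the real code routes both values through setEffectParameter_l().

#include <vector>

// Hypothetical stand-ins for the spatializer effect engine state.
enum class HeadTrackingModeSketch { DISABLED, RELATIVE_WORLD };

struct EngineSketch {
    std::vector<float> headToStage;  // head-to-stage transform parameter
    HeadTrackingModeSketch mode = HeadTrackingModeSketch::RELATIVE_WORLD;
};

// Mirrors resetEngineHeadPose_l(): neutral pose (six zeros) and head tracking disabled.
void resetEngineHeadPose(EngineSketch* engine) {
    if (engine == nullptr) {
        return;  // engine not created yet, nothing to reset
    }
    engine->headToStage.assign(6, 0.0f);
    engine->mode = HeadTrackingModeSketch::DISABLED;
}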
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 2687b5c..bcbd92b 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -350,6 +350,12 @@
*/
void checkEngineState_l() REQUIRES(mLock);
+ /**
+ * Reset the head tracking mode and recenter the pose in the engine. Called when head
+ * tracking is disabled.
+ */
+ void resetEngineHeadPose_l() REQUIRES(mLock);
+
/** Effect engine descriptor */
const effect_descriptor_t mEngineDescriptor;
/** Callback interface to parent audio policy service */
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index a78112d..1115920 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -203,6 +203,7 @@
status_t res;
std::vector<std::string> deviceIds;
+ std::unordered_map<std::string, std::set<std::string>> unavailPhysicalIds;
{
Mutex::Autolock l(mServiceLock);
@@ -233,7 +234,7 @@
ALOGE("Failed to enumerate flash units: %s (%d)", strerror(-res), res);
}
- deviceIds = mCameraProviderManager->getCameraDeviceIds();
+ deviceIds = mCameraProviderManager->getCameraDeviceIds(&unavailPhysicalIds);
}
@@ -242,6 +243,12 @@
if (getCameraState(id8) == nullptr) {
onDeviceStatusChanged(id8, CameraDeviceStatus::PRESENT);
}
+ if (unavailPhysicalIds.count(cameraId) > 0) {
+ for (const auto& physicalId : unavailPhysicalIds[cameraId]) {
+ String8 physicalId8 = String8(physicalId.c_str());
+ onDeviceStatusChanged(id8, physicalId8, CameraDeviceStatus::NOT_PRESENT);
+ }
+ }
}
// Derive primary rear/front cameras, and filter their characteristics.
@@ -495,7 +502,7 @@
if (state == nullptr) {
ALOGE("%s: Physical camera id %s status change on a non-present ID %s",
- __FUNCTION__, id.string(), physicalId.string());
+ __FUNCTION__, physicalId.string(), id.string());
return;
}
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index a3d24ff..7d98a0b 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -41,8 +41,6 @@
namespace android {
-const static size_t kDisconnectTimeoutMs = 2500;
-
using namespace camera2;
// Interface used by CameraService
@@ -252,10 +250,16 @@
template <typename TClientBase>
binder::Status Camera2ClientBase<TClientBase>::disconnect() {
- if (mCameraServiceWatchdog != nullptr) {
+ if (mCameraServiceWatchdog != nullptr && mDevice != nullptr) {
+ // The watchdog timeout for the disconnect call should exceed getExpectedInFlightDuration(),
+ // since that duration is used by the error-handling methods in the disconnect sequence,
+ // allowing the existing error handling to execute first.
+ uint64_t maxExpectedDuration =
+ ns2ms(mDevice->getExpectedInFlightDuration() + kBufferTimeDisconnectNs);
+
// Initialization from hal succeeded, time disconnect.
return mCameraServiceWatchdog->WATCH_CUSTOM_TIMER(disconnectImpl(),
- kDisconnectTimeoutMs / kCycleLengthMs, kCycleLengthMs);
+ maxExpectedDuration / kCycleLengthMs, kCycleLengthMs);
}
return disconnectImpl();
}
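For reference, a hedged sketch of the disconnect watchdog arithmetic introduced above; kBufferTimeDisconnectNs matches the new constant, while the 100 ms cycle length and the in-flight duration used in the comment are assumed example values.

#include <cstdint>

// Constant taken from the diff; the cycle length is an assumed example value.
constexpr int64_t  kBufferTimeDisconnectNsSketch = 3000000000LL;  // 3 s on top of in-flight work
constexpr uint64_t kCycleLengthMsSketch = 100;                    // assumed watchdog cycle length

constexpr uint64_t ns2msSketch(int64_t ns) { return static_cast<uint64_t>(ns / 1000000); }

// Number of watchdog cycles granted to disconnectImpl(), e.g. 500 ms in flight
// + 3 s buffer => 3500 ms => 35 cycles of 100 ms each.
constexpr uint64_t disconnectCycles(int64_t expectedInFlightDurationNs) {
    return ns2msSketch(expectedInFlightDurationNs + kBufferTimeDisconnectNsSketch)
            / kCycleLengthMsSketch;
}

static_assert(disconnectCycles(500000000LL) == 35, "sanity check of the example above");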
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 3af781b..e51d25d 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -132,6 +132,9 @@
protected:
+ // Extra buffer time added to the watchdog timeout that monitors disconnect
+ static const nsecs_t kBufferTimeDisconnectNs = 3000000000; // 3 sec.
+
// The PID provided in the constructor call
pid_t mInitialClientPid;
bool mOverrideForPerfClass = false;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index cd23250..851a6d0 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -197,12 +197,17 @@
return std::make_pair(systemCameraCount, publicCameraCount);
}
-std::vector<std::string> CameraProviderManager::getCameraDeviceIds() const {
+std::vector<std::string> CameraProviderManager::getCameraDeviceIds(std::unordered_map<
+ std::string, std::set<std::string>>* unavailablePhysicalIds) const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
std::vector<std::string> deviceIds;
for (auto& provider : mProviders) {
for (auto& id : provider->mUniqueCameraIds) {
deviceIds.push_back(id);
+ if (unavailablePhysicalIds != nullptr &&
+ provider->mUnavailablePhysicalCameras.count(id) > 0) {
+ (*unavailablePhysicalIds)[id] = provider->mUnavailablePhysicalCameras.at(id);
+ }
}
}
return deviceIds;
@@ -843,9 +848,6 @@
void CameraProviderManager::ProviderInfo::initializeProviderInfoCommon(
const std::vector<std::string> &devices) {
-
- sp<StatusListener> listener = mManager->getStatusListener();
-
for (auto& device : devices) {
std::string id;
status_t res = addDevice(device, CameraDeviceStatus::PRESENT, &id);
@@ -860,38 +862,22 @@
mProviderName.c_str(), mDevices.size());
// Process cached status callbacks
- std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus =
- std::make_unique<std::vector<CameraStatusInfoT>>();
{
std::lock_guard<std::mutex> lock(mInitLock);
for (auto& statusInfo : mCachedStatus) {
std::string id, physicalId;
- status_t res = OK;
if (statusInfo.isPhysicalCameraStatus) {
- res = physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
+ physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
statusInfo.cameraId, statusInfo.physicalCameraId, statusInfo.status);
} else {
- res = cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
- }
- if (res == OK) {
- cachedStatus->emplace_back(statusInfo.isPhysicalCameraStatus,
- id.c_str(), physicalId.c_str(), statusInfo.status);
+ cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
}
}
mCachedStatus.clear();
mInitialized = true;
}
-
- // The cached status change callbacks cannot be fired directly from this
- // function, due to same-thread deadlock trying to acquire mInterfaceMutex
- // twice.
- if (listener != nullptr) {
- mInitialStatusCallbackFuture = std::async(std::launch::async,
- &CameraProviderManager::ProviderInfo::notifyInitialStatusChange, this,
- listener, std::move(cachedStatus));
- }
}
CameraProviderManager::ProviderInfo::DeviceInfo* CameraProviderManager::findDeviceInfoLocked(
@@ -1961,6 +1947,7 @@
for (auto it = mDevices.begin(); it != mDevices.end(); it++) {
if ((*it)->mId == id) {
mUniqueCameraIds.erase(id);
+ mUnavailablePhysicalCameras.erase(id);
if ((*it)->isAPI1Compatible()) {
mUniqueAPI1CompatibleCameraIds.erase(std::remove(
mUniqueAPI1CompatibleCameraIds.begin(),
@@ -2228,6 +2215,15 @@
return BAD_VALUE;
}
+ if (mUnavailablePhysicalCameras.count(cameraId) == 0) {
+ mUnavailablePhysicalCameras.emplace(cameraId, std::set<std::string>{});
+ }
+ if (newStatus != CameraDeviceStatus::PRESENT) {
+ mUnavailablePhysicalCameras[cameraId].insert(physicalCameraDeviceName);
+ } else {
+ mUnavailablePhysicalCameras[cameraId].erase(physicalCameraDeviceName);
+ }
+
*id = cameraId;
*physicalId = physicalCameraDeviceName.c_str();
return OK;
@@ -2286,20 +2282,6 @@
}
}
-void CameraProviderManager::ProviderInfo::notifyInitialStatusChange(
- sp<StatusListener> listener,
- std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus) {
- for (auto& statusInfo : *cachedStatus) {
- if (statusInfo.isPhysicalCameraStatus) {
- listener->onDeviceStatusChanged(String8(statusInfo.cameraId.c_str()),
- String8(statusInfo.physicalCameraId.c_str()), statusInfo.status);
- } else {
- listener->onDeviceStatusChanged(
- String8(statusInfo.cameraId.c_str()), statusInfo.status);
- }
- }
-}
-
CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
const metadata_vendor_id_t tagId, const std::string &id,
uint16_t minorVersion,
@@ -2649,9 +2631,6 @@
}
CameraProviderManager::ProviderInfo::~ProviderInfo() {
- if (mInitialStatusCallbackFuture.valid()) {
- mInitialStatusCallbackFuture.wait();
- }
// Destruction of ProviderInfo is only supposed to happen when the respective
// CameraProvider interface dies, so do not unregister callbacks.
}
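Below is a minimal, self-contained sketch of the per-logical-camera bookkeeping added in physicalCameraDeviceStatusChangeLocked() above; the status enum and function name are stand-ins for the framework types.

#include <set>
#include <string>
#include <unordered_map>

enum class StatusSketch { PRESENT, NOT_PRESENT };

using UnavailableMap = std::unordered_map<std::string, std::set<std::string>>;

// Track, per logical camera, the set of physical cameras currently reported unavailable.
void recordPhysicalStatus(UnavailableMap& unavailable, const std::string& logicalId,
                          const std::string& physicalId, StatusSketch newStatus) {
    auto& ids = unavailable[logicalId];  // creates an empty set on first use
    if (newStatus != StatusSketch::PRESENT) {
        ids.insert(physicalId);          // physical camera became unavailable
    } else {
        ids.erase(physicalId);           // physical camera became available again
    }
}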
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index d049aff..86047e8 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -23,7 +23,6 @@
#include <set>
#include <string>
#include <mutex>
-#include <future>
#include <camera/camera2/ConcurrentCamera.h>
#include <camera/CameraParameters2.h>
@@ -220,7 +219,14 @@
*/
std::pair<int, int> getCameraCount() const;
- std::vector<std::string> getCameraDeviceIds() const;
+ /**
+ * If unavailablePhysicalIds is not nullptr, then upon return it contains all
+ * of the unavailable physical camera ids, keyed by logical camera id, in
+ * the form:
+ * {[logicalCamera, {physicalCamera1, physicalCamera2, ...}], ...}.
+ */
+ std::vector<std::string> getCameraDeviceIds(std::unordered_map<
+ std::string, std::set<std::string>>* unavailablePhysicalIds = nullptr) const;
/**
* Retrieve the number of API1 compatible cameras; these are internal and
@@ -607,6 +613,7 @@
};
std::vector<std::unique_ptr<DeviceInfo>> mDevices;
std::unordered_set<std::string> mUniqueCameraIds;
+ std::unordered_map<std::string, std::set<std::string>> mUnavailablePhysicalCameras;
int mUniqueDeviceCount;
std::vector<std::string> mUniqueAPI1CompatibleCameraIds;
// The initial public camera IDs published by the camera provider.
@@ -715,8 +722,6 @@
std::vector<CameraStatusInfoT> mCachedStatus;
// End of scope for mInitLock
- std::future<void> mInitialStatusCallbackFuture;
-
std::unique_ptr<ProviderInfo::DeviceInfo>
virtual initializeDeviceInfo(
const std::string &name, const metadata_vendor_id_t tagId,
@@ -724,9 +729,6 @@
virtual status_t reCacheConcurrentStreamingCameraIdsLocked() = 0;
- void notifyInitialStatusChange(sp<StatusListener> listener,
- std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus);
-
std::vector<std::unordered_set<std::string>> mConcurrentCameraIdCombinations;
// Parse provider instance name for type and id
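A hedged caller-side sketch of the extended getCameraDeviceIds() contract declared above; ProviderManagerT and the report callback are placeholders, mirroring how CameraService drains the map earlier in this change.

#include <set>
#include <string>
#include <unordered_map>
#include <vector>

// Report every {logical id, unavailable physical id} pair to the supplied callback.
template <typename ProviderManagerT, typename ReportFn>
void reportUnavailablePhysicalCameras(ProviderManagerT& manager, ReportFn&& report) {
    std::unordered_map<std::string, std::set<std::string>> unavailablePhysicalIds;
    std::vector<std::string> ids = manager.getCameraDeviceIds(&unavailablePhysicalIds);
    for (const auto& id : ids) {
        auto it = unavailablePhysicalIds.find(id);
        if (it == unavailablePhysicalIds.end()) continue;
        for (const auto& physicalId : it->second) {
            report(id, physicalId);  // e.g. onDeviceStatusChanged(id, physicalId, NOT_PRESENT)
        }
    }
}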
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 8eb7fd0..ca70b88 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -1742,7 +1742,7 @@
}
// Calculate expected duration for flush with additional buffer time in ms for watchdog
- uint64_t maxExpectedDuration = (getExpectedInFlightDuration() + kBaseGetBufferWait) / 1e6;
+ uint64_t maxExpectedDuration = ns2ms(getExpectedInFlightDuration() + kBaseGetBufferWait);
status_t res = mCameraServiceWatchdog->WATCH_CUSTOM_TIMER(mRequestThread->flush(),
maxExpectedDuration / kCycleLengthMs, kCycleLengthMs);
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
index b3cb178..83caa00 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
@@ -69,7 +69,9 @@
}
// Cache the frame to match readout time interval, for up to kMaxFrameWaitTime
- nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval;
+ // Because the code between here and queueBuffer() takes time to execute, make sure the
+ // presentationInterval is slightly shorter than readoutInterval.
+ nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval - kFrameAdjustThreshold;
nsecs_t frameWaitTime = std::min(kMaxFrameWaitTime, expectedQueueTime - currentTime);
if (frameWaitTime > 0 && mPendingBuffers.size() < 2) {
mBufferCond.waitRelative(mLock, frameWaitTime);
@@ -78,9 +80,9 @@
}
currentTime = systemTime();
}
- ALOGV("%s: readoutInterval %" PRId64 ", queueInterval %" PRId64 ", waited for %" PRId64
+ ALOGV("%s: readoutInterval %" PRId64 ", waited for %" PRId64
", timestamp %" PRId64, __FUNCTION__, readoutInterval,
- currentTime - mLastCameraPresentTime, frameWaitTime, buffer.timestamp);
+ mPendingBuffers.size() < 2 ? frameWaitTime : 0, buffer.timestamp);
mPendingBuffers.pop();
queueBufferToClientLocked(buffer, currentTime);
return true;
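A self-contained sketch of the adjusted wait computation in PreviewFrameSpacer above; the constants are copied from PreviewFrameSpacer.h and the timestamps are example inputs supplied by the caller.

#include <algorithm>
#include <cstddef>
#include <cstdint>

using nsecs_sketch_t = int64_t;

constexpr nsecs_sketch_t kMaxFrameWaitTimeSketch     = 10000000LL;  // 10ms cap on the wait
constexpr nsecs_sketch_t kFrameAdjustThresholdSketch = 2000000LL;   // 2ms slack for the code
                                                                    // between here and queueBuffer()

// Returns how long to block before handing the buffer to the consumer (0 = queue immediately).
nsecs_sketch_t previewFrameWait(nsecs_sketch_t lastPresentTime, nsecs_sketch_t readoutInterval,
                                nsecs_sketch_t currentTime, std::size_t pendingBuffers) {
    nsecs_sketch_t expectedQueueTime =
            lastPresentTime + readoutInterval - kFrameAdjustThresholdSketch;
    nsecs_sketch_t frameWaitTime =
            std::min(kMaxFrameWaitTimeSketch, expectedQueueTime - currentTime);
    if (frameWaitTime > 0 && pendingBuffers < 2) {
        return frameWaitTime;
    }
    return 0;
}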
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
index cb9690c..f46de3d 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
@@ -86,6 +86,7 @@
static constexpr nsecs_t kWaitDuration = 5000000LL; // 5ms
static constexpr nsecs_t kFrameIntervalThreshold = 80000000LL; // 80ms
static constexpr nsecs_t kMaxFrameWaitTime = 10000000LL; // 10ms
+ static constexpr nsecs_t kFrameAdjustThreshold = 2000000LL; // 2ms
};
}; //namespace camera3
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index e9f6979..2f55def 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -102,23 +102,57 @@
sp<device::V3_2::ICameraDevice> mDeviceInterface;
hardware::hidl_vec<common::V1_0::VendorTagSection> mVendorTagSections;
+ // Whether to call a physical camera unavailable callback upon setCallback
+ bool mHasPhysicalCameraUnavailableCallback;
+ hardware::hidl_string mLogicalCameraId;
+ hardware::hidl_string mUnavailablePhysicalCameraId;
+
TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection) :
mDeviceNames(devices),
mDeviceInterface(new TestDeviceInterface(devices)),
- mVendorTagSections (vendorSection) {}
+ mVendorTagSections (vendorSection),
+ mHasPhysicalCameraUnavailableCallback(false) {}
TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection,
android::hardware::hidl_vec<uint8_t> chars) :
mDeviceNames(devices),
mDeviceInterface(new TestDeviceInterface(devices, chars)),
- mVendorTagSections (vendorSection) {}
+ mVendorTagSections (vendorSection),
+ mHasPhysicalCameraUnavailableCallback(false) {}
+
+ TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
+ const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection,
+ android::hardware::hidl_vec<uint8_t> chars,
+ const hardware::hidl_string& logicalCameraId,
+ const hardware::hidl_string& unavailablePhysicalCameraId) :
+ mDeviceNames(devices),
+ mDeviceInterface(new TestDeviceInterface(devices, chars)),
+ mVendorTagSections (vendorSection),
+ mHasPhysicalCameraUnavailableCallback(true),
+ mLogicalCameraId(logicalCameraId),
+ mUnavailablePhysicalCameraId(unavailablePhysicalCameraId) {}
virtual hardware::Return<Status> setCallback(
const sp<provider::V2_4::ICameraProviderCallback>& callbacks) override {
mCalledCounter[SET_CALLBACK]++;
mCallbacks = callbacks;
+ if (mHasPhysicalCameraUnavailableCallback) {
+ auto cast26 = provider::V2_6::ICameraProviderCallback::castFrom(callbacks);
+ if (!cast26.isOk()) {
+ ADD_FAILURE() << "Failed to cast ICameraProviderCallback to V2_6";
+ } else {
+ sp<provider::V2_6::ICameraProviderCallback> callback26 = cast26;
+ if (callback26 == nullptr) {
+ ADD_FAILURE() << "V2_6::ICameraProviderCallback is null after conversion";
+ } else {
+ callback26->physicalCameraDeviceStatusChange(mLogicalCameraId,
+ mUnavailablePhysicalCameraId,
+ android::hardware::camera::common::V1_0::CameraDeviceStatus::NOT_PRESENT);
+ }
+ }
+ }
return hardware::Return<Status>(Status::OK);
}
@@ -266,12 +300,16 @@
};
struct TestStatusListener : public CameraProviderManager::StatusListener {
+ int mPhysicalCameraStatusChangeCount = 0;
+
~TestStatusListener() {}
void onDeviceStatusChanged(const String8 &,
CameraDeviceStatus) override {}
void onDeviceStatusChanged(const String8 &, const String8 &,
- CameraDeviceStatus) override {}
+ CameraDeviceStatus) override {
+ mPhysicalCameraStatusChangeCount++;
+ }
void onTorchStatusChanged(const String8 &,
TorchModeStatus) override {}
void onTorchStatusChanged(const String8 &,
@@ -634,3 +672,46 @@
ASSERT_EQ(deviceCount, deviceNames.size()) <<
"Unexpected amount of camera devices";
}
+
+// Test that CameraProviderManager does not trigger
+// onDeviceStatusChanged(NOT_PRESENT) for physical camera before initialize()
+// returns.
+TEST(CameraProviderManagerTest, PhysicalCameraAvailabilityCallbackRaceTest) {
+ std::vector<hardware::hidl_string> deviceNames;
+ deviceNames.push_back("device@3.2/test/0");
+ hardware::hidl_vec<common::V1_0::VendorTagSection> vendorSection;
+
+ sp<CameraProviderManager> providerManager = new CameraProviderManager();
+ sp<TestStatusListener> statusListener = new TestStatusListener();
+ TestInteractionProxy serviceProxy;
+
+ android::hardware::hidl_vec<uint8_t> chars;
+ CameraMetadata meta;
+ int32_t charKeys[] = { ANDROID_REQUEST_AVAILABLE_CAPABILITIES };
+ meta.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, charKeys,
+ sizeof(charKeys) / sizeof(charKeys[0]));
+ uint8_t capabilities[] = { ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA };
+ meta.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities,
+ sizeof(capabilities)/sizeof(capabilities[0]));
+ uint8_t physicalCameraIds[] = { '2', '\0', '3', '\0' };
+ meta.update(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS, physicalCameraIds,
+ sizeof(physicalCameraIds)/sizeof(physicalCameraIds[0]));
+ camera_metadata_t* metaBuffer = const_cast<camera_metadata_t*>(meta.getAndLock());
+ chars.setToExternal(reinterpret_cast<uint8_t*>(metaBuffer),
+ get_camera_metadata_size(metaBuffer));
+
+ sp<TestICameraProvider> provider = new TestICameraProvider(deviceNames,
+ vendorSection, chars, "device@3.2/test/0", "2");
+ serviceProxy.setProvider(provider);
+
+ status_t res = providerManager->initialize(statusListener, &serviceProxy);
+ ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+ ASSERT_EQ(statusListener->mPhysicalCameraStatusChangeCount, 0)
+ << "Unexpected physical camera status change callback upon provider init.";
+
+ std::unordered_map<std::string, std::set<std::string>> unavailablePhysicalIds;
+ auto cameraIds = providerManager->getCameraDeviceIds(&unavailablePhysicalIds);
+ ASSERT_TRUE(unavailablePhysicalIds.count("0") > 0 && unavailablePhysicalIds["0"].count("2") > 0)
+ << "Unavailable physical camera Ids not set properly.";
+}
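For clarity, an illustrative parser (not the framework's) showing how the null-separated ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS payload used in the test above, { '2', '\0', '3', '\0' }, splits into the individual physical camera ids "2" and "3".

#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

// Splits a null-separated byte payload such as {'2','\0','3','\0'} into {"2", "3"}.
std::vector<std::string> splitPhysicalCameraIds(const std::uint8_t* data, std::size_t size) {
    std::vector<std::string> ids;
    std::string current;
    for (std::size_t i = 0; i < size; ++i) {
        if (data[i] == '\0') {
            if (!current.empty()) ids.push_back(current);
            current.clear();
        } else {
            current.push_back(static_cast<char>(data[i]));
        }
    }
    if (!current.empty()) ids.push_back(current);  // tolerate a missing trailing '\0'
    return ids;
}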