Merge "Update EffectUUID initialization" am: c7607bb50f am: 9e8d11d0a3
Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/2471061
Change-Id: I8cede1b49cf898a6c9b404ec024595af24624c8f
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 23d90cc..02047ae 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -696,7 +696,7 @@
CameraMetadata rawMetadata;
int targetSdkVersion = android_get_application_target_sdk_version();
binder::Status serviceRet = cs->getCameraCharacteristics(String16(cameraIdStr),
- targetSdkVersion, /*overrideToPortrait*/true, &rawMetadata);
+ targetSdkVersion, /*overrideToPortrait*/false, &rawMetadata);
if (!serviceRet.isOk()) {
switch(serviceRet.serviceSpecificErrorCode()) {
case hardware::ICameraService::ERROR_DISCONNECTED:
@@ -748,7 +748,7 @@
binder::Status serviceRet = cs->connectDevice(
callbacks, String16(cameraId), String16(""), {},
hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
- targetSdkVersion, /*overrideToPortrait*/true, /*out*/&deviceRemote);
+ targetSdkVersion, /*overrideToPortrait*/false, /*out*/&deviceRemote);
if (!serviceRet.isOk()) {
ALOGE("%s: connect camera device failed: %s", __FUNCTION__, serviceRet.toString8().string());
diff --git a/include/private/media/VideoFrame.h b/include/private/media/VideoFrame.h
index d4025e5..78ea2a1 100644
--- a/include/private/media/VideoFrame.h
+++ b/include/private/media/VideoFrame.h
@@ -42,9 +42,15 @@
mWidth(width), mHeight(height),
mDisplayWidth(displayWidth), mDisplayHeight(displayHeight),
mTileWidth(tileWidth), mTileHeight(tileHeight), mDurationUs(0),
- mRotationAngle(angle), mBytesPerPixel(bpp), mRowBytes(bpp * width),
- mSize(hasData ? (bpp * width * height) : 0),
- mIccSize(iccSize), mBitDepth(bitDepth) {
+ mRotationAngle(angle), mBytesPerPixel(bpp), mIccSize(iccSize),
+ mBitDepth(bitDepth) {
+ uint32_t multVal;
+ mRowBytes = __builtin_mul_overflow(bpp, width, &multVal) ? 0 : multVal;
+ mSize = __builtin_mul_overflow(multVal, height, &multVal) ? 0 : multVal;
+ if (hasData && (mRowBytes == 0 || mSize == 0)) {
+ ALOGE("Frame rowBytes/ size overflow %dx%d bpp %d", width, height, bpp);
+ android_errorWriteLog(0x534e4554, "233006499");
+ }
}
void init(const VideoFrame& copy, const void* iccData, size_t iccSize) {
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index a008dc2..6e183e9 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -877,6 +877,16 @@
if (msg->findInt32(KEY_PUSH_BLANK_BUFFERS_ON_STOP, &pushBlankBuffersOnStop)) {
config->mPushBlankBuffersOnStop = pushBlankBuffersOnStop == 1;
}
+ // Secure components and protected content default to the
+ // "push-blank-buffers-on-shutdown" behavior
+ if (!config->mPushBlankBuffersOnStop) {
+ int32_t usageProtected;
+ if (comp->getName().find(".secure") != std::string::npos) {
+ config->mPushBlankBuffersOnStop = true;
+ } else if (msg->findInt32("protected", &usageProtected) && usageProtected) {
+ config->mPushBlankBuffersOnStop = true;
+ }
+ }
}
}
setSurface(surface);
@@ -1868,18 +1878,14 @@
}
state->set(STOPPING);
}
- {
- Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
- const std::unique_ptr<Config> &config = *configLocked;
- if (config->mPushBlankBuffersOnStop) {
- mChannel->pushBlankBufferToOutputSurface();
- }
- }
mChannel->reset();
- (new AMessage(kWhatStop, this))->post();
+ bool pushBlankBuffer = mConfig.lock().get()->mPushBlankBuffersOnStop;
+ sp<AMessage> stopMessage(new AMessage(kWhatStop, this));
+ stopMessage->setInt32("pushBlankBuffer", pushBlankBuffer);
+ stopMessage->post();
}
-void CCodec::stop() {
+void CCodec::stop(bool pushBlankBuffer) {
std::shared_ptr<Codec2Client::Component> comp;
{
Mutexed<State>::Locked state(mState);
@@ -1898,7 +1904,7 @@
comp = state->comp;
}
status_t err = comp->stop();
- mChannel->stopUseOutputSurface();
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
if (err != C2_OK) {
// TODO: convert err into status_t
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
@@ -1963,21 +1969,16 @@
config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
}
}
- {
- Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
- const std::unique_ptr<Config> &config = *configLocked;
- if (config->mPushBlankBuffersOnStop) {
- mChannel->pushBlankBufferToOutputSurface();
- }
- }
mChannel->reset();
+ bool pushBlankBuffer = mConfig.lock().get()->mPushBlankBuffersOnStop;
// thiz holds strong ref to this while the thread is running.
sp<CCodec> thiz(this);
- std::thread([thiz, sendCallback] { thiz->release(sendCallback); }).detach();
+ std::thread([thiz, sendCallback, pushBlankBuffer]
+ { thiz->release(sendCallback, pushBlankBuffer); }).detach();
}
-void CCodec::release(bool sendCallback) {
+void CCodec::release(bool sendCallback, bool pushBlankBuffer) {
std::shared_ptr<Codec2Client::Component> comp;
{
Mutexed<State>::Locked state(mState);
@@ -1992,7 +1993,7 @@
comp = state->comp;
}
comp->release();
- mChannel->stopUseOutputSurface();
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
{
Mutexed<State>::Locked state(mState);
@@ -2006,6 +2007,7 @@
}
status_t CCodec::setSurface(const sp<Surface> &surface) {
+ bool pushBlankBuffer = false;
{
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
const std::unique_ptr<Config> &config = *configLocked;
@@ -2031,8 +2033,9 @@
return err;
}
}
+ pushBlankBuffer = config->mPushBlankBuffersOnStop;
}
- return mChannel->setSurface(surface);
+ return mChannel->setSurface(surface, pushBlankBuffer);
}
void CCodec::signalFlush() {
@@ -2331,7 +2334,11 @@
case kWhatStop: {
// C2Component::stop() should return within 500ms.
setDeadline(now, 1500ms, "stop");
- stop();
+ int32_t pushBlankBuffer;
+ if (!msg->findInt32("pushBlankBuffer", &pushBlankBuffer)) {
+ pushBlankBuffer = 0;
+ }
+ stop(static_cast<bool>(pushBlankBuffer));
break;
}
case kWhatFlush: {
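
The CCodec changes above hinge on two decisions: default mPushBlankBuffersOnStop for secure or protected playback, and carry the flag to the kWhatStop handler inside the AMessage instead of re-reading mConfig there. A minimal std-only sketch of the default decision, with the AMessage/Config plumbing elided and the function name made up for illustration:

    #include <string>

    // Returns whether blank buffers should be pushed on stop: either the app
    // asked for it explicitly, the component is a secure variant, or the
    // configured content is protected (mirrors the new block in configure()).
    static bool shouldPushBlankOnStop(const std::string &componentName,
                                      bool requestedByApp,
                                      bool contentProtected) {
        if (requestedByApp) {
            return true;  // KEY_PUSH_BLANK_BUFFERS_ON_STOP was set to 1
        }
        return componentName.find(".secure") != std::string::npos || contentProtected;
    }
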
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index fc82426..36b2b3f 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -983,7 +983,6 @@
int64_t mediaTimeUs = 0;
(void)buffer->meta()->findInt64("timeUs", &mediaTimeUs);
- mCCodecCallback->onOutputFramesRendered(mediaTimeUs, timestampNs);
trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
processRenderedFrames(qbo.frameTimestamps);
@@ -1008,8 +1007,8 @@
if (desiredRenderTimeNs < nowNs) {
desiredRenderTimeNs = nowNs;
}
- // We've just released a frame to the surface, so keep track of it and later check to see if it
- // is actually rendered.
+ // We've just queued a frame to the surface, so keep track of it and later check to see if it is
+ // actually rendered.
TrackedFrame frame;
frame.number = qbo.nextFrameNumber - 1;
frame.mediaTimeUs = mediaTimeUs;
@@ -1675,14 +1674,22 @@
mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
}
-void CCodecBufferChannel::stopUseOutputSurface() {
- if (mOutputSurface.lock()->surface) {
+void CCodecBufferChannel::stopUseOutputSurface(bool pushBlankBuffer) {
+ sp<Surface> surface = mOutputSurface.lock()->surface;
+ if (surface) {
C2BlockPool::local_id_t outputPoolId;
{
Mutexed<BlockPools>::Locked pools(mBlockPools);
outputPoolId = pools->outputPoolId;
}
if (mComponent) mComponent->stopUsingOutputSurface(outputPoolId);
+
+ if (pushBlankBuffer) {
+ sp<ANativeWindow> anw = static_cast<ANativeWindow *>(surface.get());
+ if (anw) {
+ pushBlankBuffersToNativeWindow(anw.get());
+ }
+ }
}
}
@@ -2198,14 +2205,20 @@
}
}
-status_t CCodecBufferChannel::setSurface(const sp<Surface> &newSurface) {
+status_t CCodecBufferChannel::setSurface(const sp<Surface> &newSurface, bool pushBlankBuffer) {
static std::atomic_uint32_t surfaceGeneration{0};
uint32_t generation = (getpid() << 10) |
((surfaceGeneration.fetch_add(1, std::memory_order_relaxed) + 1)
& ((1 << 10) - 1));
sp<IGraphicBufferProducer> producer;
- int maxDequeueCount = mOutputSurface.lock()->maxDequeueBuffers;
+ int maxDequeueCount;
+ sp<Surface> oldSurface;
+ {
+ Mutexed<OutputSurface>::Locked outputSurface(mOutputSurface);
+ maxDequeueCount = outputSurface->maxDequeueBuffers;
+ oldSurface = outputSurface->surface;
+ }
if (newSurface) {
newSurface->setScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
newSurface->setDequeueTimeout(kDequeueTimeoutNs);
@@ -2240,8 +2253,18 @@
Mutexed<OutputSurface>::Locked output(mOutputSurface);
output->surface = newSurface;
output->generation = generation;
+ initializeFrameTrackingFor(static_cast<ANativeWindow *>(newSurface.get()));
}
- initializeFrameTrackingFor(static_cast<ANativeWindow *>(newSurface.get()));
+
+ if (oldSurface && pushBlankBuffer) {
+ // When a ReleaseSurface has been set from MediaCodec,
+ // pushing a blank buffer to the old surface might be necessary at the end.
+ sp<ANativeWindow> anw = static_cast<ANativeWindow *>(oldSurface.get());
+ if (anw) {
+ pushBlankBuffersToNativeWindow(anw.get());
+ }
+ }
+
return OK;
}
@@ -2331,13 +2354,4 @@
}
}
-status_t CCodecBufferChannel::pushBlankBufferToOutputSurface() {
- Mutexed<OutputSurface>::Locked output(mOutputSurface);
- sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(output->surface.get());
- if (nativeWindow == nullptr) {
- return INVALID_OPERATION;
- }
- return pushBlankBuffersToNativeWindow(nativeWindow.get());
-}
-
} // namespace android
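
For reference, the buffer-queue generation that setSurface() computes above packs the PID and a 10-bit rolling counter into one 32-bit value. A standalone sketch of that packing (POSIX getpid() only):

    #include <atomic>
    #include <cstdint>
    #include <cstdio>
    #include <unistd.h>

    // Same scheme as CCodecBufferChannel::setSurface(): high bits carry the PID,
    // the low 10 bits a wrapping per-process counter, so repeated surface
    // switches within one process still get distinct generation numbers.
    static uint32_t nextSurfaceGeneration() {
        static std::atomic_uint32_t surfaceGeneration{0};
        return (static_cast<uint32_t>(getpid()) << 10) |
               ((surfaceGeneration.fetch_add(1, std::memory_order_relaxed) + 1) &
                ((1u << 10) - 1));
    }

    int main() {
        printf("%u %u\n", nextSurfaceGeneration(), nextSurfaceGeneration());
        return 0;
    }
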
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 73299d7..e2e55b9 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -104,7 +104,7 @@
/**
* Set output graphic surface for rendering.
*/
- status_t setSurface(const sp<Surface> &surface);
+ status_t setSurface(const sp<Surface> &surface, bool pushBlankBuffer);
/**
* Set GraphicBufferSource object from which the component extracts input
@@ -153,8 +153,10 @@
/**
* Stop using buffers of the current output surface for other Codec
* instances to use the surface safely.
+ *
+ * \param[in] pushBlankBuffer push a blank buffer at the end if true
*/
- void stopUseOutputSurface();
+ void stopUseOutputSurface(bool pushBlankBuffer);
/**
* Stop queueing buffers to the component. This object should never queue
@@ -203,11 +205,6 @@
void setMetaMode(MetaMode mode);
- /**
- * Push a blank buffer to the configured native output surface.
- */
- status_t pushBlankBufferToOutputSurface();
-
private:
class QueueGuard;
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index ec18128..13713bc 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -109,9 +109,9 @@
void allocate(const sp<MediaCodecInfo> &codecInfo);
void configure(const sp<AMessage> &msg);
void start();
- void stop();
+ void stop(bool pushBlankBuffer);
void flush();
- void release(bool sendCallback);
+ void release(bool sendCallback, bool pushBlankBuffer);
/**
* Creates an input surface for the current device configuration compatible with CCodec.
diff --git a/media/libaudioclient/aidl/android/media/ISpatializer.aidl b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
index a61ad58..250c450 100644
--- a/media/libaudioclient/aidl/android/media/ISpatializer.aidl
+++ b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
@@ -96,17 +96,33 @@
/**
* Sets the display orientation.
+ *
+ * This is the rotation of the displayed content relative to its natural orientation.
+ *
* Orientation is expressed in the angle of rotation from the physical "up" side of the screen
* to the logical "up" side of the content displayed the screen. Counterclockwise angles, as
* viewed while facing the screen are positive.
+ *
+ * Note: DisplayManager currently only returns this in increments of 90 degrees,
+ * so the values will be 0, PI/2, PI, 3PI/2.
*/
void setDisplayOrientation(float physicalToLogicalAngle);
/**
* Sets the hinge angle for foldable devices.
+ *
+ * Per the hinge angle sensor, the angle ranges from 0 to 2PI.
+ * The value of 0 is considered closed, and PI is considered flat open.
*/
void setHingeAngle(float hingeAngle);
+ /**
+ * Sets whether a foldable is considered "folded" or not.
+ *
+ * The fold state may affect which physical screen is active for display.
+ */
+ void setFoldState(boolean folded);
+
/** Reports the list of supported spatialization modes (see SpatializationMode.aidl).
* The list should never be empty if an ISpatializer interface was successfully
* retrieved with IAudioPolicyService.getSpatializer().
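
Given that setDisplayOrientation() documents its input as multiples of PI/2 derived from the display rotation, a hedged sketch of that conversion; rotationQuadrant is a hypothetical 0-3 input (e.g. the Surface.ROTATION_* index on the caller's side), and the sign convention is left to the caller:

    #include <cmath>

    // Hypothetical helper: map a 0..3 rotation quadrant to the radian values
    // the interface documents (0, PI/2, PI, 3PI/2).
    static float rotationQuadrantToAngle(int rotationQuadrant) {
        return static_cast<float>((rotationQuadrant & 3) * (M_PI / 2.0));
    }
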
diff --git a/media/libheadtracking/HeadTrackingProcessor.cpp b/media/libheadtracking/HeadTrackingProcessor.cpp
index 8502af0..54d08d2 100644
--- a/media/libheadtracking/HeadTrackingProcessor.cpp
+++ b/media/libheadtracking/HeadTrackingProcessor.cpp
@@ -98,7 +98,7 @@
mModeSelector.setScreenStable(mWorldToScreenTimestamp.value(), screenStable);
// Whenever the screen is unstable, recenter the head pose.
if (!screenStable) {
- recenter(true, false);
+ recenter(true, false, "calculate: screen movement");
}
mScreenHeadFusion.setWorldToScreenPose(mWorldToScreenTimestamp.value(),
worldToLogicalScreen);
@@ -110,7 +110,7 @@
// Auto-recenter.
bool headStable = mHeadStillnessDetector.calculate(timestamp);
if (headStable || !screenStable) {
- recenter(true, false);
+ recenter(true, false, "calculate: head movement");
worldToHead = mHeadPoseBias.getOutput();
}
@@ -140,16 +140,16 @@
HeadTrackingMode getActualMode() const override { return mModeSelector.getActualMode(); }
- void recenter(bool recenterHead, bool recenterScreen) override {
+ void recenter(bool recenterHead, bool recenterScreen, std::string source) override {
if (recenterHead) {
mHeadPoseBias.recenter();
mHeadStillnessDetector.reset();
- mLocalLog.log("recenter Head");
+ mLocalLog.log("recenter Head from %s", source.c_str());
}
if (recenterScreen) {
mScreenPoseBias.recenter();
mScreenStillnessDetector.reset();
- mLocalLog.log("recenter Screen");
+ mLocalLog.log("recenter Screen from %s", source.c_str());
}
// If a sensor being recentered is included in the current mode, apply rate limiting to
diff --git a/media/libheadtracking/include/media/HeadTrackingProcessor.h b/media/libheadtracking/include/media/HeadTrackingProcessor.h
index a3c1e97..d2b78f2 100644
--- a/media/libheadtracking/include/media/HeadTrackingProcessor.h
+++ b/media/libheadtracking/include/media/HeadTrackingProcessor.h
@@ -96,7 +96,8 @@
/**
* This causes the current poses for both the head and/or screen to be considered "center".
*/
- virtual void recenter(bool recenterHead = true, bool recenterScreen = true) = 0;
+ virtual void recenter(
+ bool recenterHead = true, bool recenterScreen = true, std::string source = "") = 0;
/**
* Set the predictor type.
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index f26311d..291b892 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -250,6 +250,11 @@
static_cast<Surface*>(nativeWindow)->getIGraphicBufferProducer()->allowAllocation(true);
+ // In nonblocking mode (timeout = 0), native_window_dequeue_buffer_and_wait()
+ // can fail with a timeout. Changing to blocking mode ensures that dequeue
+ // does not time out.
+ static_cast<Surface*>(nativeWindow)->getIGraphicBufferProducer()->setDequeueTimeout(-1);
+
err = nativeWindow->query(nativeWindow,
NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs);
if (err != NO_ERROR) {
diff --git a/services/audiopolicy/engine/common/include/VolumeGroup.h b/services/audiopolicy/engine/common/include/VolumeGroup.h
index 5378f64..f40ab1c 100644
--- a/services/audiopolicy/engine/common/include/VolumeGroup.h
+++ b/services/audiopolicy/engine/common/include/VolumeGroup.h
@@ -39,7 +39,7 @@
VolumeCurves *getVolumeCurves() { return &mGroupVolumeCurves; }
void addSupportedAttributes(const audio_attributes_t &attr);
- AttributesVector getSupportedAttributes() const { return mGroupVolumeCurves.getAttributes(); }
+ AttributesVector getSupportedAttributes() const;
void addSupportedStream(audio_stream_type_t stream);
StreamTypeVector getStreamTypes() const { return mGroupVolumeCurves.getStreamTypes(); }
diff --git a/services/audiopolicy/engine/common/src/VolumeGroup.cpp b/services/audiopolicy/engine/common/src/VolumeGroup.cpp
index e189807..f5ffbba 100644
--- a/services/audiopolicy/engine/common/src/VolumeGroup.cpp
+++ b/services/audiopolicy/engine/common/src/VolumeGroup.cpp
@@ -37,6 +37,17 @@
{
}
+// Used for introspection, e.g. from Java
+AttributesVector VolumeGroup::getSupportedAttributes() const
+{
+ AttributesVector supportedAttributes = {};
+ for (auto &aa : mGroupVolumeCurves.getAttributes()) {
+ aa.source = AUDIO_SOURCE_INVALID;
+ supportedAttributes.push_back(aa);
+ }
+ return supportedAttributes;
+}
+
void VolumeGroup::dump(String8 *dst, int spaces) const
{
dst->appendFormat("\n%*s-%s (id: %d)\n", spaces, "", mName.c_str(), mId);
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 45c5eac..649c63f 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -273,10 +273,15 @@
break;
case STRATEGY_PHONE: {
- devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
- if (!devices.isEmpty()) break;
+ // TODO(b/243670205): remove this logic that gives preference to last removable devices
+ // once a UX decision has been made
devices = availableOutputDevices.getFirstDevicesFromTypes(
- getLastRemovableMediaDevices(GROUP_NONE, {AUDIO_DEVICE_OUT_BLE_HEADSET}));
+ getLastRemovableMediaDevices(GROUP_NONE, {
+ // excluding HEARING_AID and BLE_HEADSET because Dialer uses
+ // setCommunicationDevice to select them explicitly
+ AUDIO_DEVICE_OUT_HEARING_AID,
+ AUDIO_DEVICE_OUT_BLE_HEADSET
+ }));
if (!devices.isEmpty()) break;
devices = availableOutputDevices.getFirstDevicesFromTypes({
AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_EARPIECE});
@@ -291,7 +296,9 @@
if ((strategy == STRATEGY_SONIFICATION) ||
(getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)) {
- devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
+ // favor dock over speaker when available
+ devices = availableOutputDevices.getFirstDevicesFromTypes({
+ AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_SPEAKER});
}
// if SCO headset is connected and we are told to use it, play ringtone over
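
The Engine changes above lean on getFirstDevicesFromTypes() picking the first entry in the given order that is actually available (dock before speaker; last removable media devices excluding HEARING_AID and BLE_HEADSET). A minimal std-only sketch of that ordered-preference selection; DeviceType and the availability set are stand-ins for the real descriptors:

    #include <optional>
    #include <unordered_set>
    #include <vector>

    using DeviceType = int;  // stand-in for audio_devices_t

    // Return the first preferred type that is available, mirroring the
    // ordered-preference semantics the strategy code relies on.
    static std::optional<DeviceType> firstAvailable(
            const std::vector<DeviceType> &preferenceOrder,
            const std::unordered_set<DeviceType> &available) {
        for (DeviceType type : preferenceOrder) {
            if (available.count(type)) {
                return type;
            }
        }
        return std::nullopt;
    }
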
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index c64497f..24c003e 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2024,6 +2024,10 @@
outputDesc->stop();
return status;
}
+ if (client->hasPreferredDevice()) {
+ // playback activity with a preferred device that impacts routing has started, inform upper layers
+ mpClientInterface->onRoutingUpdated();
+ }
if (delayMs != 0) {
usleep(delayMs * 1000);
}
@@ -2269,6 +2273,11 @@
}
sp<TrackClientDescriptor> client = outputDesc->getClient(portId);
+ if (client->hasPreferredDevice(true)) {
+ // playback activity with a preferred device that impacts routing is stopping, inform upper layers
+ mpClientInterface->onRoutingUpdated();
+ }
+
ALOGV("stopOutput() output %d, stream %d, session %d",
outputDesc->mIoHandle, client->stream(), client->session());
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 09b6f3b..281785e 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -1816,14 +1816,12 @@
void AudioPolicyService::SensorPrivacyPolicy::registerSelf() {
SensorPrivacyManager spm;
mSensorPrivacyEnabled = spm.isSensorPrivacyEnabled();
- (void)spm.addToggleSensorPrivacyListener(this);
spm.addSensorPrivacyListener(this);
}
void AudioPolicyService::SensorPrivacyPolicy::unregisterSelf() {
SensorPrivacyManager spm;
spm.removeSensorPrivacyListener(this);
- spm.removeToggleSensorPrivacyListener(this);
}
bool AudioPolicyService::SensorPrivacyPolicy::isSensorPrivacyEnabled() {
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 9ee9037..5db82f7 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -20,6 +20,7 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <algorithm>
#include <inttypes.h>
#include <limits.h>
#include <stdint.h>
@@ -94,6 +95,16 @@
return record;
}
+template<typename T>
+static constexpr const T& safe_clamp(const T& value, const T& low, const T& high) {
+ if constexpr (std::is_floating_point_v<T>) {
+ return value != value /* constexpr isnan */
+ ? low : std::clamp(value, low, high);
+ } else /* constexpr */ {
+ return std::clamp(value, low, high);
+ }
+}
+
// ---------------------------------------------------------------------------
class Spatializer::EngineCallbackHandler : public AHandler {
@@ -638,28 +649,48 @@
Status Spatializer::setDisplayOrientation(float physicalToLogicalAngle) {
ALOGV("%s physicalToLogicalAngle %f", __func__, physicalToLogicalAngle);
- if (!mSupportsHeadTracking) {
- return binderStatusFromStatusT(INVALID_OPERATION);
- }
- std::lock_guard lock(mLock);
- mDisplayOrientation = physicalToLogicalAngle;
mLocalLog.log("%s with %f", __func__, physicalToLogicalAngle);
+ const float angle = safe_clamp(physicalToLogicalAngle, 0.f, (float)(2. * M_PI));
+ // It is possible due to numerical inaccuracies to exceed the boundaries of 0 to 2 * M_PI.
+ ALOGI_IF(angle != physicalToLogicalAngle,
+ "%s: clamping %f to %f", __func__, physicalToLogicalAngle, angle);
+ std::lock_guard lock(mLock);
+ mDisplayOrientation = angle;
if (mPoseController != nullptr) {
- mPoseController->setDisplayOrientation(mDisplayOrientation);
+ // This turns on the rate-limiter.
+ mPoseController->setDisplayOrientation(angle);
}
if (mEngine != nullptr) {
setEffectParameter_l(
- SPATIALIZER_PARAM_DISPLAY_ORIENTATION, std::vector<float>{physicalToLogicalAngle});
+ SPATIALIZER_PARAM_DISPLAY_ORIENTATION, std::vector<float>{angle});
}
return Status::ok();
}
Status Spatializer::setHingeAngle(float hingeAngle) {
- std::lock_guard lock(mLock);
ALOGV("%s hingeAngle %f", __func__, hingeAngle);
+ mLocalLog.log("%s with %f", __func__, hingeAngle);
+ const float angle = safe_clamp(hingeAngle, 0.f, (float)(2. * M_PI));
+ // It is possible due to numerical inaccuracies to exceed the boundaries of 0 to 2 * M_PI.
+ ALOGI_IF(angle != hingeAngle,
+ "%s: clamping %f to %f", __func__, hingeAngle, angle);
+ std::lock_guard lock(mLock);
+ mHingeAngle = angle;
if (mEngine != nullptr) {
- mLocalLog.log("%s with %f", __func__, hingeAngle);
- setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE, std::vector<float>{hingeAngle});
+ setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE, std::vector<float>{angle});
+ }
+ return Status::ok();
+}
+
+Status Spatializer::setFoldState(bool folded) {
+ ALOGV("%s foldState %d", __func__, (int)folded);
+ mLocalLog.log("%s with %d", __func__, (int)folded);
+ std::lock_guard lock(mLock);
+ mFoldedState = folded;
+ if (mEngine != nullptr) {
+ // we don't suppress multiple calls with the same folded state - that's
+ // done at the caller.
+ setEffectParameter_l(SPATIALIZER_PARAM_FOLD_STATE, std::vector<uint8_t>{mFoldedState});
}
return Status::ok();
}
@@ -862,6 +893,14 @@
checkSensorsState_l();
}
callback = mSpatializerCallback;
+
+ // Restore common effect state.
+ setEffectParameter_l(SPATIALIZER_PARAM_DISPLAY_ORIENTATION,
+ std::vector<float>{mDisplayOrientation});
+ setEffectParameter_l(SPATIALIZER_PARAM_FOLD_STATE,
+ std::vector<uint8_t>{mFoldedState});
+ setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE,
+ std::vector<float>{mHingeAngle});
}
if (outputChanged && callback != nullptr) {
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 4d6c875..60030bd 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -119,6 +119,7 @@
binder::Status setScreenSensor(int sensorHandle) override;
binder::Status setDisplayOrientation(float physicalToLogicalAngle) override;
binder::Status setHingeAngle(float hingeAngle) override;
+ binder::Status setFoldState(bool folded) override;
binder::Status getSupportedModes(std::vector<media::SpatializationMode>* modes) override;
binder::Status registerHeadTrackingCallback(
const sp<media::ISpatializerHeadTrackingCallback>& callback) override;
@@ -376,8 +377,13 @@
int32_t mScreenSensor GUARDED_BY(mLock) = SpatializerPoseController::INVALID_SENSOR;
/** Last display orientation received */
- static constexpr float kDisplayOrientationInvalid = 1000;
- float mDisplayOrientation GUARDED_BY(mLock) = kDisplayOrientationInvalid;
+ float mDisplayOrientation GUARDED_BY(mLock) = 0.f; // aligned to natural up orientation.
+
+ /** Last folded state */
+ bool mFoldedState GUARDED_BY(mLock) = false; // foldable: true means folded.
+
+ /** Last hinge angle */
+ float mHingeAngle GUARDED_BY(mLock) = 0.f; // foldable: 0.f is closed, M_PI flat open.
std::vector<media::SpatializationLevel> mLevels;
std::vector<media::SpatializerHeadTrackingMode> mHeadTrackingModes;
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index 63f53b7..d325509 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -216,7 +216,7 @@
mHeadSensor = INVALID_SENSOR;
}
- mProcessor->recenter(true /* recenterHead */, false /* recenterScreen */);
+ mProcessor->recenter(true /* recenterHead */, false /* recenterScreen */, __func__);
}
void SpatializerPoseController::setScreenSensor(int32_t sensor) {
@@ -253,7 +253,7 @@
mScreenSensor = INVALID_SENSOR;
}
- mProcessor->recenter(false /* recenterHead */, true /* recenterScreen */);
+ mProcessor->recenter(false /* recenterHead */, true /* recenterScreen */, __func__);
}
void SpatializerPoseController::setDesiredMode(HeadTrackingMode mode) {
@@ -300,7 +300,7 @@
void SpatializerPoseController::recenter() {
std::lock_guard lock(mMutex);
- mProcessor->recenter();
+ mProcessor->recenter(true /* recenterHead */, true /* recenterScreen */, __func__);
}
void SpatializerPoseController::onPose(int64_t timestamp, int32_t sensor, const Pose3f& pose,
@@ -339,7 +339,7 @@
mProcessor->setWorldToHeadPose(timestamp, pose,
twist.value_or(Twist3f()) / kTicksPerSecond);
if (isNewReference) {
- mProcessor->recenter(true, false);
+ mProcessor->recenter(true, false, __func__);
}
}
if (sensor == mScreenSensor) {
@@ -353,7 +353,7 @@
mProcessor->setWorldToScreenPose(timestamp, pose);
if (isNewReference) {
- mProcessor->recenter(false, true);
+ mProcessor->recenter(false, true, __func__);
}
}
}
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 2a04658..a7c9bac 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -2043,6 +2043,7 @@
}
client->setImageDumpMask(mImageDumpMask);
+ client->setStreamUseCaseOverrides(mStreamUseCaseOverrides);
} // lock is destroyed, allow further connect calls
// Important: release the mutex here so the client can call back into the service from its
@@ -4419,6 +4420,13 @@
String8 activeClientString = mActiveClientManager.toString();
dprintf(fd, "Active Camera Clients:\n%s", activeClientString.string());
dprintf(fd, "Allowed user IDs: %s\n", toString(mAllowedUsers).string());
+ if (mStreamUseCaseOverrides.size() > 0) {
+ dprintf(fd, "Active stream use case overrides:");
+ for (int64_t useCaseOverride : mStreamUseCaseOverrides) {
+ dprintf(fd, " %" PRId64, useCaseOverride);
+ }
+ dprintf(fd, "\n");
+ }
dumpEventLog(fd);
@@ -4910,6 +4918,10 @@
return handleGetImageDumpMask(out);
} else if (args.size() >= 2 && args[0] == String16("set-camera-mute")) {
return handleSetCameraMute(args);
+ } else if (args.size() >= 2 && args[0] == String16("set-stream-use-case-override")) {
+ return handleSetStreamUseCaseOverrides(args);
+ } else if (args.size() >= 1 && args[0] == String16("clear-stream-use-case-override")) {
+ return handleClearStreamUseCaseOverrides();
} else if (args.size() >= 2 && args[0] == String16("watch")) {
return handleWatchCommand(args, in, out);
} else if (args.size() >= 2 && args[0] == String16("set-watchdog")) {
@@ -5082,6 +5094,43 @@
return OK;
}
+status_t CameraService::handleSetStreamUseCaseOverrides(const Vector<String16>& args) {
+ std::vector<int64_t> useCasesOverride;
+ for (size_t i = 1; i < args.size(); i++) {
+ int64_t useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ String8 arg8 = String8(args[i]);
+ if (arg8 == "DEFAULT") {
+ useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ } else if (arg8 == "PREVIEW") {
+ useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW;
+ } else if (arg8 == "STILL_CAPTURE") {
+ useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE;
+ } else if (arg8 == "VIDEO_RECORD") {
+ useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD;
+ } else if (arg8 == "PREVIEW_VIDEO_STILL") {
+ useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL;
+ } else if (arg8 == "VIDEO_CALL") {
+ useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
+ } else {
+ ALOGE("%s: Invalid stream use case %s", __FUNCTION__, String8(args[i]).c_str());
+ return BAD_VALUE;
+ }
+ useCasesOverride.push_back(useCase);
+ }
+
+ Mutex::Autolock lock(mServiceLock);
+ mStreamUseCaseOverrides = std::move(useCasesOverride);
+
+ return OK;
+}
+
+status_t CameraService::handleClearStreamUseCaseOverrides() {
+ Mutex::Autolock lock(mServiceLock);
+ mStreamUseCaseOverrides.clear();
+
+ return OK;
+}
+
status_t CameraService::handleWatchCommand(const Vector<String16>& args, int inFd, int outFd) {
if (args.size() >= 3 && args[1] == String16("start")) {
return startWatchingTags(args, outFd);
@@ -5436,6 +5485,15 @@
" Valid values 0=OFF, 1=ON for JPEG\n"
" get-image-dump-mask returns the current image-dump-mask value\n"
" set-camera-mute <0/1> enable or disable camera muting\n"
+ " set-stream-use-case-override <usecase1> <usecase2> ... override stream use cases\n"
+ " Use cases applied in descending resolutions. So usecase1 is assigned to the\n"
+ " largest resolution, usecase2 is assigned to the 2nd largest resolution, and so\n"
+ " on. In case the number of usecases is smaller than the number of streams, the\n"
+ " last use case is assigned to all the remaining streams. In case of multiple\n"
+ " streams with the same resolution, the tie-breaker is (JPEG, RAW, YUV, and PRIV)\n"
+ " Valid values are (case sensitive): DEFAULT, PREVIEW, STILL_CAPTURE, VIDEO_RECORD,\n"
+ " PREVIEW_VIDEO_STILL, VIDEO_CALL\n"
+ " clear-stream-use-case-override clear the stream use case override\n"
" watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
" help print this message\n");
}
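
A usage sketch for the new shell commands documented above, assuming the camera service is reached through "cmd media.camera" as with the existing commands:

    adb shell cmd media.camera set-stream-use-case-override STILL_CAPTURE PREVIEW
    adb shell cmd media.camera clear-stream-use-case-override

With the first command, the largest-resolution stream gets STILL_CAPTURE and every remaining stream gets PREVIEW, per the assignment rule described in the help text.
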
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 840e9b6..588cfc0 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -348,6 +348,13 @@
// Set Camera service watchdog
virtual status_t setCameraServiceWatchdog(bool enabled) = 0;
+ // Set stream use case overrides
+ virtual void setStreamUseCaseOverrides(
+ const std::vector<int64_t>& useCaseOverrides) = 0;
+
+ // Clear stream use case overrides
+ virtual void clearStreamUseCaseOverrides() = 0;
+
// The injection camera session to replace the internal camera
// session.
virtual status_t injectCamera(const String8& injectedCamId,
@@ -502,6 +509,7 @@
virtual bool canCastToApiClient(apiLevel level) const;
void setImageDumpMask(int /*mask*/) { }
+ void setStreamUseCaseOverrides(const std::vector<int64_t>& /*usecaseOverrides*/) { }
protected:
// Initialized in constructor
@@ -1216,6 +1224,12 @@
// Set the camera mute state
status_t handleSetCameraMute(const Vector<String16>& args);
+ // Set the stream use case overrides
+ status_t handleSetStreamUseCaseOverrides(const Vector<String16>& args);
+
+ // Clear the stream use case overrides
+ status_t handleClearStreamUseCaseOverrides();
+
// Handle 'watch' command as passed through 'cmd'
status_t handleWatchCommand(const Vector<String16> &args, int inFd, int outFd);
@@ -1311,6 +1325,9 @@
// Camera Service watchdog flag
bool mCameraServiceWatchdogEnabled = true;
+ // Current stream use case overrides
+ std::vector<int64_t> mStreamUseCaseOverrides;
+
/**
* A listener class that implements the IBinder::DeathRecipient interface
* for use to call back the error state injected by the external camera, and
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 0887ced..430c82b 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -2347,6 +2347,15 @@
return mDevice->setCameraMute(enabled);
}
+void Camera2Client::setStreamUseCaseOverrides(
+ const std::vector<int64_t>& useCaseOverrides) {
+ mDevice->setStreamUseCaseOverrides(useCaseOverrides);
+}
+
+void Camera2Client::clearStreamUseCaseOverrides() {
+ mDevice->clearStreamUseCaseOverrides();
+}
+
status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
if (activeRequestId != 0) {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 9c540a4..8071bcb 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -92,6 +92,10 @@
virtual status_t setCameraServiceWatchdog(bool enabled);
+ virtual void setStreamUseCaseOverrides(
+ const std::vector<int64_t>& useCaseOverrides);
+ virtual void clearStreamUseCaseOverrides();
+
/**
* Interface used by CameraService
*/
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index bc76397..dd23c2e 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -1752,6 +1752,15 @@
return mDevice->setCameraMute(enabled);
}
+void CameraDeviceClient::setStreamUseCaseOverrides(
+ const std::vector<int64_t>& useCaseOverrides) {
+ mDevice->setStreamUseCaseOverrides(useCaseOverrides);
+}
+
+void CameraDeviceClient::clearStreamUseCaseOverrides() {
+ mDevice->clearStreamUseCaseOverrides();
+}
+
binder::Status CameraDeviceClient::switchToOffline(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
const std::vector<int>& offlineOutputIds,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 6bb64d6..c95bb4a 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -210,6 +210,9 @@
virtual status_t setCameraServiceWatchdog(bool enabled);
+ virtual void setStreamUseCaseOverrides(const std::vector<int64_t>& useCaseOverrides);
+ virtual void clearStreamUseCaseOverrides() override;
+
/**
* Device listener interface
*/
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 2cb3397..52d0020 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -90,6 +90,13 @@
return INVALID_OPERATION;
}
+void CameraOfflineSessionClient::setStreamUseCaseOverrides(
+ const std::vector<int64_t>& /*useCaseOverrides*/) {
+}
+
+void CameraOfflineSessionClient::clearStreamUseCaseOverrides() {
+}
+
status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
return BasicClient::dump(fd, args);
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 8edb64a..23e1f3d 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -87,6 +87,11 @@
status_t setCameraServiceWatchdog(bool enabled) override;
+ void setStreamUseCaseOverrides(
+ const std::vector<int64_t>& useCaseOverrides) override;
+
+ void clearStreamUseCaseOverrides() override;
+
// permissions management
status_t startCameraOps() override;
status_t finishCameraOps() override;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index ad24392..bf6be64 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -343,6 +343,29 @@
}
template <typename TClientBase>
+void Camera2ClientBase<TClientBase>::notifyPhysicalCameraChange(const std::string &physicalId) {
+ // We're only interested in this notification if overrideToPortrait is turned on.
+ if (!TClientBase::mOverrideToPortrait) {
+ return;
+ }
+
+ String8 physicalId8(physicalId.c_str());
+ auto physicalCameraMetadata = mDevice->infoPhysical(physicalId8);
+ auto orientationEntry = physicalCameraMetadata.find(ANDROID_SENSOR_ORIENTATION);
+
+ if (orientationEntry.count == 1) {
+ int orientation = orientationEntry.data.i32[0];
+ int rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
+
+ if (orientation == 0 || orientation == 180) {
+ rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
+ }
+
+ static_cast<TClientBase *>(this)->setRotateAndCropOverride(rotateAndCropMode);
+ }
+}
+
+template <typename TClientBase>
status_t Camera2ClientBase<TClientBase>::notifyActive(float maxPreviewFps) {
if (!mDeviceActive) {
status_t res = TClientBase::startCameraStreamingOps();
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index d2dcdb1..705fe69 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -75,6 +75,7 @@
virtual void notifyError(int32_t errorCode,
const CaptureResultExtras& resultExtras);
+ virtual void notifyPhysicalCameraChange(const std::string &physicalId) override;
// Returns errors on app ops permission failures
virtual status_t notifyActive(float maxPreviewFps);
virtual void notifyIdle(int64_t /*requestCount*/, int64_t /*resultErrorCount*/,
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 69514f3..8f7b16d 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -464,6 +464,15 @@
void setImageDumpMask(int mask) { mImageDumpMask = mask; }
/**
+ * Set stream use case overrides
+ */
+ void setStreamUseCaseOverrides(const std::vector<int64_t>& useCaseOverrides) {
+ mStreamUseCaseOverrides = useCaseOverrides;
+ }
+
+ void clearStreamUseCaseOverrides() {}
+
+ /**
* The injection camera session to replace the internal camera
* session.
*/
@@ -477,6 +486,7 @@
protected:
bool mImageDumpMask = 0;
+ std::vector<int64_t> mStreamUseCaseOverrides;
};
}; // namespace android
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index f39b92a..63abcf0 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -40,6 +40,10 @@
// Required for API 1 and 2
virtual void notifyError(int32_t errorCode,
const CaptureResultExtras &resultExtras) = 0;
+
+ // Optional for API 1 and 2
+ virtual void notifyPhysicalCameraChange(const std::string &/*physicalId*/) {}
+
// May return an error since it checks appops
virtual status_t notifyActive(float maxPreviewFps) = 0;
virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 3aab0b1..5ab7023 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -2033,7 +2033,7 @@
}
CameraMetadata info2;
res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2,
- /*overrideToPortrait*/true);
+ /*overrideToPortrait*/false);
if (res == INVALID_OPERATION) {
dprintf(fd, " API2 not directly supported\n");
} else if (res != OK) {
@@ -2384,8 +2384,8 @@
if (overrideToPortrait) {
const auto &lensFacingEntry = characteristics->find(ANDROID_LENS_FACING);
const auto &sensorOrientationEntry = characteristics->find(ANDROID_SENSOR_ORIENTATION);
+ uint8_t lensFacing = lensFacingEntry.data.u8[0];
if (lensFacingEntry.count > 0 && sensorOrientationEntry.count > 0) {
- uint8_t lensFacing = lensFacingEntry.data.u8[0];
int32_t sensorOrientation = sensorOrientationEntry.data.i32[0];
int32_t newSensorOrientation = sensorOrientation;
@@ -2406,6 +2406,8 @@
}
if (characteristics->exists(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS)) {
+ ALOGV("%s: Erasing ANDROID_INFO_DEVICE_STATE_ORIENTATIONS for lens facing %d",
+ __FUNCTION__, lensFacing);
characteristics->erase(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS);
}
}
@@ -2441,8 +2443,8 @@
for (size_t i = 0; i < streamConfigs.count; i += 4) {
if ((streamConfigs.data.i32[i] == HAL_PIXEL_FORMAT_BLOB) && (streamConfigs.data.i32[i+3] ==
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)) {
- if (streamConfigs.data.i32[i+1] < thresholdW ||
- streamConfigs.data.i32[i+2] < thresholdH) {
+ if (streamConfigs.data.i32[i+1] * streamConfigs.data.i32[i+2] <
+ thresholdW * thresholdH) {
continue;
} else {
largeJpegCount ++;
@@ -2462,8 +2464,8 @@
mCameraCharacteristics.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
for (size_t i = 0; i < minDurations.count; i += 4) {
if (minDurations.data.i64[i] == HAL_PIXEL_FORMAT_BLOB) {
- if (minDurations.data.i64[i+1] < thresholdW ||
- minDurations.data.i64[i+2] < thresholdH) {
+ if ((int32_t)minDurations.data.i64[i+1] * (int32_t)minDurations.data.i64[i+2] <
+ thresholdW * thresholdH) {
continue;
} else {
largeJpegCount++;
@@ -2483,8 +2485,8 @@
mCameraCharacteristics.find(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
for (size_t i = 0; i < stallDurations.count; i += 4) {
if (stallDurations.data.i64[i] == HAL_PIXEL_FORMAT_BLOB) {
- if (stallDurations.data.i64[i+1] < thresholdW ||
- stallDurations.data.i64[i+2] < thresholdH) {
+ if ((int32_t)stallDurations.data.i64[i+1] * (int32_t)stallDurations.data.i64[i+2] <
+ thresholdW * thresholdH) {
continue;
} else {
largeJpegCount++;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 5e7fe7f..e631f8b 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -96,7 +96,8 @@
mLastTemplateId(-1),
mNeedFixupMonochromeTags(false),
mOverrideForPerfClass(overrideForPerfClass),
- mOverrideToPortrait(overrideToPortrait)
+ mOverrideToPortrait(overrideToPortrait),
+ mActivePhysicalId("")
{
ATRACE_CALL();
ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.string());
@@ -2345,6 +2346,9 @@
tryRemoveFakeStreamLocked();
}
+ // Override stream use case based on "adb shell command"
+ overrideStreamUseCaseLocked();
+
// Start configuring the streams
ALOGV("%s: Camera %s: Starting stream configuration", __FUNCTION__, mId.string());
@@ -4124,6 +4128,19 @@
return OK;
}
+void Camera3Device::setStreamUseCaseOverrides(
+ const std::vector<int64_t>& useCaseOverrides) {
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+ mStreamUseCaseOverrides = useCaseOverrides;
+}
+
+void Camera3Device::clearStreamUseCaseOverrides() {
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+ mStreamUseCaseOverrides.clear();
+}
+
void Camera3Device::RequestThread::cleanUpFailedRequests(bool sendRequestError) {
if (mNextRequests.empty()) {
return;
@@ -5242,4 +5259,55 @@
return mInjectionMethods->stopInjection();
}
+void Camera3Device::overrideStreamUseCaseLocked() {
+ if (mStreamUseCaseOverrides.size() == 0) {
+ return;
+ }
+
+ // Start from an array of indices into mOutputStreams, and sort them
+ // based first on size, and second on format priority [JPEG, RAW, YUV, PRIV].
+ std::vector<int> outputStreamsIndices(mOutputStreams.size());
+ for (size_t i = 0; i < outputStreamsIndices.size(); i++) {
+ outputStreamsIndices[i] = i;
+ }
+
+ std::sort(outputStreamsIndices.begin(), outputStreamsIndices.end(),
+ [&](int a, int b) -> bool {
+
+ auto formatScore = [](int format) {
+ switch (format) {
+ case HAL_PIXEL_FORMAT_BLOB:
+ return 4;
+ case HAL_PIXEL_FORMAT_RAW16:
+ case HAL_PIXEL_FORMAT_RAW10:
+ case HAL_PIXEL_FORMAT_RAW12:
+ return 3;
+ case HAL_PIXEL_FORMAT_YCBCR_420_888:
+ return 2;
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ return 1;
+ default:
+ return 0;
+ }
+ };
+
+ int sizeA = mOutputStreams[a]->getWidth() * mOutputStreams[a]->getHeight();
+ int sizeB = mOutputStreams[b]->getWidth() * mOutputStreams[b]->getHeight();
+ int formatAScore = formatScore(mOutputStreams[a]->getFormat());
+ int formatBScore = formatScore(mOutputStreams[b]->getFormat());
+ if (sizeA > sizeB ||
+ (sizeA == sizeB && formatAScore >= formatBScore)) {
+ return true;
+ } else {
+ return false;
+ }
+ });
+
+ size_t overlapSize = std::min(mStreamUseCaseOverrides.size(), mOutputStreams.size());
+ for (size_t i = 0; i < mOutputStreams.size(); i++) {
+ mOutputStreams[outputStreamsIndices[i]]->setStreamUseCase(
+ mStreamUseCaseOverrides[std::min(i, overlapSize-1)]);
+ }
+}
+
}; // namespace android
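
A self-contained sketch of what overrideStreamUseCaseLocked() above does: order stream indices by descending pixel count, break ties by format preference (JPEG > RAW > YUV > PRIV), then hand out the overrides, reusing the last one once the list runs out. StreamDesc stands in for the real output stream objects and the HAL format codes are written out numerically:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct StreamDesc {             // stand-in for Camera3OutputStream
        int width;
        int height;
        int format;                 // HAL pixel format code
        int64_t useCase = 0;
    };

    static int formatScore(int format) {
        switch (format) {
            case 0x21: return 4;                        // BLOB (JPEG)
            case 0x20: case 0x25: case 0x26: return 3;  // RAW16 / RAW10 / RAW12
            case 0x23: return 2;                        // YCBCR_420_888
            case 0x22: return 1;                        // IMPLEMENTATION_DEFINED (PRIV)
            default:   return 0;
        }
    }

    static void applyOverrides(std::vector<StreamDesc> &streams,
                               const std::vector<int64_t> &overrides) {
        if (overrides.empty() || streams.empty()) {
            return;
        }
        std::vector<int> indices(streams.size());
        for (size_t i = 0; i < indices.size(); i++) {
            indices[i] = static_cast<int>(i);
        }
        std::sort(indices.begin(), indices.end(), [&](int a, int b) {
            const int sizeA = streams[a].width * streams[a].height;
            const int sizeB = streams[b].width * streams[b].height;
            if (sizeA != sizeB) {
                return sizeA > sizeB;   // larger streams first
            }
            return formatScore(streams[a].format) > formatScore(streams[b].format);
        });
        const size_t overlap = std::min(overrides.size(), streams.size());
        for (size_t i = 0; i < streams.size(); i++) {
            streams[indices[i]].useCase = overrides[std::min(i, overlap - 1)];
        }
    }
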
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 1a50c02..746205b 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -287,6 +287,13 @@
*/
status_t setCameraServiceWatchdog(bool enabled);
+ // Set stream use case overrides
+ void setStreamUseCaseOverrides(
+ const std::vector<int64_t>& useCaseOverrides);
+
+ // Clear stream use case overrides
+ void clearStreamUseCaseOverrides();
+
// Get the status trackeer for the camera device
wp<camera3::StatusTracker> getStatusTracker() { return mStatusTracker; }
@@ -1375,6 +1382,9 @@
// app compatibility reasons.
bool mOverrideToPortrait;
+ // Current active physical id of the logical multi-camera, if any
+ std::string mActivePhysicalId;
+
// The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
nsecs_t mMinExpectedDuration = 0;
// Whether the camera device runs at fixed frame rate based on AE_MODE and
@@ -1463,6 +1473,8 @@
sp<Camera3DeviceInjectionMethods> mInjectionMethods;
+ void overrideStreamUseCaseLocked();
+
}; // class Camera3Device
}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
index a93d1da..1e9f478 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.h
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -101,6 +101,8 @@
virtual status_t setBatchSize(size_t batchSize) override;
virtual void onMinDurationChanged(nsecs_t /*duration*/, bool /*fixedFps*/) {}
+
+ virtual void setStreamUseCase(int64_t /*streamUseCase*/) {}
protected:
/**
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 396104c..ef12b64 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -1392,6 +1392,11 @@
mFixedFps = fixedFps;
}
+void Camera3OutputStream::setStreamUseCase(int64_t streamUseCase) {
+ Mutex::Autolock l(mLock);
+ camera_stream::use_case = streamUseCase;
+}
+
void Camera3OutputStream::returnPrefetchedBuffersLocked() {
std::vector<Surface::BatchBuffer> batchedBuffers;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index db988a0..a719d6b 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -253,6 +253,11 @@
virtual void onMinDurationChanged(nsecs_t duration, bool fixedFps) override;
/**
+ * Modify stream use case
+ */
+ virtual void setStreamUseCase(int64_t streamUseCase) override;
+
+ /**
* Apply ZSL related consumer usage quirk.
*/
static void applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index dbc6fe1..4baa7e8 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -117,6 +117,11 @@
* AE_TARGET_FPS_RANGE in the capture request.
*/
virtual void onMinDurationChanged(nsecs_t duration, bool fixedFps) = 0;
+
+ /**
+ * Modify the stream use case for this output.
+ */
+ virtual void setStreamUseCase(int64_t streamUseCase) = 0;
};
// Helper class to organize a synchronized mapping of stream IDs to stream instances
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 6569395..792756ab 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -521,27 +521,35 @@
if (result->partial_result != 0)
request.resultExtras.partialResultCount = result->partial_result;
- if ((result->result != nullptr) && !states.legacyClient && !states.overrideToPortrait) {
+ if (result->result != nullptr) {
camera_metadata_ro_entry entry;
auto ret = find_camera_metadata_ro_entry(result->result,
ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID, &entry);
if ((ret == OK) && (entry.count > 0)) {
std::string physicalId(reinterpret_cast<const char *>(entry.data.u8));
- auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
- if (deviceInfo != states.physicalDeviceInfoMap.end()) {
- auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
- if (orientation.count > 0) {
- ret = CameraUtils::getRotationTransform(deviceInfo->second,
- OutputConfiguration::MIRROR_MODE_AUTO, &request.transform);
- if (ret != OK) {
- ALOGE("%s: Failed to calculate current stream transformation: %s (%d)",
- __FUNCTION__, strerror(-ret), ret);
+ if (!states.activePhysicalId.empty() && physicalId != states.activePhysicalId) {
+ states.listener->notifyPhysicalCameraChange(physicalId);
+ }
+ states.activePhysicalId = physicalId;
+
+ if (!states.legacyClient && !states.overrideToPortrait) {
+ auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
+ if (deviceInfo != states.physicalDeviceInfoMap.end()) {
+ auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
+ if (orientation.count > 0) {
+ ret = CameraUtils::getRotationTransform(deviceInfo->second,
+ OutputConfiguration::MIRROR_MODE_AUTO, &request.transform);
+ if (ret != OK) {
+ ALOGE("%s: Failed to calculate current stream transformation: %s "
+ "(%d)", __FUNCTION__, strerror(-ret), ret);
+ }
+ } else {
+ ALOGE("%s: Physical device orientation absent!", __FUNCTION__);
}
} else {
- ALOGE("%s: Physical device orientation absent!", __FUNCTION__);
+ ALOGE("%s: Physical device not found in device info map found!",
+ __FUNCTION__);
}
- } else {
- ALOGE("%s: Physical device not found in device info map found!", __FUNCTION__);
}
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 019c8a8..d5328c5 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -108,6 +108,7 @@
nsecs_t& minFrameDuration;
bool& isFixedFps;
bool overrideToPortrait;
+ std::string &activePhysicalId;
};
void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result);
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 1e103f2..3fa7299 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -376,7 +376,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
*this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
- mOverrideToPortrait}, mResultMetadataQueue
+ mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
};
for (const auto& result : results) {
@@ -418,7 +418,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
*this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
- mOverrideToPortrait}, mResultMetadataQueue
+ mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index 816f96b..3c3db97 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -111,6 +111,7 @@
listener = mListener.promote();
}
+ std::string activePhysicalId(""); // Unused
AidlCaptureOutputStates states {
{mId,
mOfflineReqsLock, mLastCompletedRegularFrameNumber,
@@ -125,7 +126,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
*this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
- /*overrideToPortrait*/false}, mResultMetadataQueue
+ /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -157,6 +158,7 @@
listener = mListener.promote();
}
+ std::string activePhysicalId(""); // Unused
AidlCaptureOutputStates states {
{mId,
mOfflineReqsLock, mLastCompletedRegularFrameNumber,
@@ -171,7 +173,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
*this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
- /*overrideToPortrait*/false}, mResultMetadataQueue
+ /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 44c60cf..382b287 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -365,8 +365,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
- mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+ mActivePhysicalId}, mResultMetadataQueue
};
//HidlCaptureOutputStates hidlStates {
@@ -428,8 +428,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
- mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+ mActivePhysicalId}, mResultMetadataQueue
};
for (const auto& result : results) {
@@ -476,8 +476,8 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
- mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+ mActivePhysicalId}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index 705408d..28b2b47 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -92,6 +92,7 @@
listener = mListener.promote();
}
+ std::string activePhysicalId("");
HidlCaptureOutputStates states {
{mId,
mOfflineReqsLock, mLastCompletedRegularFrameNumber,
@@ -106,7 +107,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
- /*overrideToPortrait*/false}, mResultMetadataQueue
+ /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -133,6 +134,7 @@
hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;
+ std::string activePhysicalId("");
HidlCaptureOutputStates states {
{mId,
mOfflineReqsLock, mLastCompletedRegularFrameNumber,
@@ -147,7 +149,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
- /*overrideToPortrait*/false}, mResultMetadataQueue
+ /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -169,6 +171,7 @@
listener = mListener.promote();
}
+ std::string activePhysicalId("");
HidlCaptureOutputStates states {
{mId,
mOfflineReqsLock, mLastCompletedRegularFrameNumber,
@@ -183,7 +186,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
- /*overrideToPortrait*/false}, mResultMetadataQueue
+ /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);