Merge "camera3: Dump latest request sent with dumpsys" into klp-dev
diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp
index f203949..e9aa99d 100644
--- a/camera/tests/ProCameraTests.cpp
+++ b/camera/tests/ProCameraTests.cpp
@@ -271,7 +271,6 @@
CpuConsumer::LockedBuffer buf;
status_t ret;
- EXPECT_OK(ret);
if (OK == (ret = consumer->lockNextBuffer(&buf))) {
dout << "Frame received on streamId = " << streamId <<
@@ -482,7 +481,7 @@
* Creating a streaming request for these output streams from a template,
* and submit it
*/
- void createSubmitRequestForStreams(uint8_t* streamIds, size_t count, int requestCount=-1) {
+ void createSubmitRequestForStreams(int32_t* streamIds, size_t count, int requestCount=-1) {
ASSERT_NE((void*)NULL, streamIds);
ASSERT_LT(0u, count);
@@ -629,7 +628,7 @@
EXPECT_OK(mCamera->exclusiveTryLock());
- uint8_t streams[] = { depthStreamId };
+ int32_t streams[] = { depthStreamId };
ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(
streams,
/*count*/1));
@@ -706,7 +705,7 @@
// set the output streams to just this stream ID
// wow what a verbose API.
- uint8_t allStreams[] = { streamId, depthStreamId };
+ int32_t allStreams[] = { streamId, depthStreamId };
- // IMPORTANT. bad things will happen if its not a uint8.
+ // Stream IDs in the request metadata are int32_t.
size_t streamCount = sizeof(allStreams) / sizeof(allStreams[0]);
camera_metadata_entry_t entry;
@@ -735,7 +734,7 @@
free_camera_metadata(request);
- for (int i = 0; i < streamCount; ++i) {
+ for (size_t i = 0; i < streamCount; ++i) {
EXPECT_OK(mCamera->deleteStream(allStreams[i]));
}
EXPECT_OK(mCamera->exclusiveUnlock());
@@ -777,7 +776,7 @@
// set the output streams to just this stream ID
- uint8_t allStreams[] = { streamId };
+ int32_t allStreams[] = { streamId };
camera_metadata_entry_t entry;
uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
int find = find_camera_metadata_entry(request, tag, &entry);
@@ -848,7 +847,7 @@
// set the output streams to just this stream ID
// wow what a verbose API.
- uint8_t allStreams[] = { streamId, depthStreamId };
+ int32_t allStreams[] = { streamId, depthStreamId };
size_t streamCount = 2;
camera_metadata_entry_t entry;
uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
@@ -923,7 +922,7 @@
// set the output streams to just this stream ID
- uint8_t allStreams[] = { streamId };
+ int32_t allStreams[] = { streamId };
size_t streamCount = 1;
camera_metadata_entry_t entry;
uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
@@ -974,7 +973,7 @@
EXPECT_OK(mCamera->exclusiveTryLock());
- uint8_t streams[] = { streamId };
+ int32_t streams[] = { streamId };
ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1));
// Consume a couple of results
@@ -1002,7 +1001,7 @@
EXPECT_OK(mCamera->exclusiveTryLock());
- uint8_t streams[] = { streamId };
+ int32_t streams[] = { streamId };
ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
/*requests*/TEST_CPU_FRAME_COUNT));
@@ -1049,7 +1048,7 @@
EXPECT_OK(mCamera->exclusiveTryLock());
- uint8_t streams[] = { streamId, depthStreamId };
+ int32_t streams[] = { streamId, depthStreamId };
ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/2,
/*requests*/REQUEST_COUNT));
@@ -1128,7 +1127,7 @@
EXPECT_OK(mCamera->exclusiveTryLock());
- uint8_t streams[] = { streamId };
+ int32_t streams[] = { streamId };
ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
/*requests*/NUM_REQUESTS));
@@ -1172,7 +1171,6 @@
}
const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT;
- const int CONSECUTIVE_FAILS_ASSUME_TIME_OUT = 5;
int streamId = -1;
sp<CpuConsumer> consumer;
@@ -1183,7 +1181,7 @@
EXPECT_OK(mCamera->exclusiveTryLock());
- uint8_t streams[] = { streamId };
+ int32_t streams[] = { streamId };
ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
/*requests*/NUM_REQUESTS));
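
Note on the type change: the uint8_t -> int32_t conversions in this test file (and in the
camera service files below) track the stream-ID entries of the request metadata,
ANDROID_REQUEST_OUTPUT_STREAMS and ANDROID_REQUEST_INPUT_STREAMS, which now carry int32
data (Camera3Device below reads them back through data.i32). A minimal sketch of the
updated fill pattern with the camera_metadata C API; the request pointer and stream ID
are placeholders, not taken from this patch:

    // Sketch only: stream lists in a capture request now hold int32_t IDs.
    int32_t outputStreams[] = { previewStreamId };     // previewStreamId: placeholder
    int err = add_camera_metadata_entry(request,       // request: camera_metadata_t*
            ANDROID_REQUEST_OUTPUT_STREAMS,
            outputStreams,
            sizeof(outputStreams) / sizeof(outputStreams[0]));
    // A non-zero err means the entry could not be added (e.g. the buffer is full).
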
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index b8a6b37..626b5c2 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -98,6 +98,11 @@
size_t AudioFlinger::mTeeSinkTrackFrames = kTeeSinkTrackFramesDefault;
#endif
+//TODO: remove when effect offload is implemented
+// In order to avoid invalidating offloaded tracks each time a Visualizer is turned on and off,
+// we define a minimum time during which a global effect is considered enabled.
+static const nsecs_t kMinGlobalEffectEnabletimeNs = seconds(7200);
+
// ----------------------------------------------------------------------------
static int load_audio_interface(const char *if_name, audio_hw_device_t **dev)
@@ -141,7 +146,8 @@
mMode(AUDIO_MODE_INVALID),
mBtNrecIsOff(false),
mIsLowRamDevice(true),
- mIsDeviceTypeKnown(false)
+ mIsDeviceTypeKnown(false),
+ mGlobalEffectEnableTime(0)
{
getpid_cached = getpid();
char value[PROPERTY_VALUE_MAX];
@@ -2314,6 +2320,38 @@
return NO_ERROR;
}
+bool AudioFlinger::isGlobalEffectEnabled_l()
+{
+ if (mGlobalEffectEnableTime != 0 &&
+ ((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) {
+ return true;
+ }
+
+ for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+ sp<EffectChain> ec =
+ mPlaybackThreads.valueAt(i)->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
+ if (ec != 0 && ec->isEnabled()) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void AudioFlinger::onGlobalEffectEnable()
+{
+ Mutex::Autolock _l(mLock);
+
+ mGlobalEffectEnableTime = systemTime();
+
+ for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+ sp<PlaybackThread> t = mPlaybackThreads.valueAt(i);
+ if (t->mType == ThreadBase::OFFLOAD) {
+ t->invalidateTracks(AUDIO_STREAM_MUSIC);
+ }
+ }
+
+}
+
struct Entry {
#define MAX_NAME 32 // %Y%m%d%H%M%S_%d.wav
char mName[MAX_NAME];
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 5df04f4..0992308 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -466,6 +466,10 @@
void removeClient_l(pid_t pid);
void removeNotificationClient(pid_t pid);
+ //TODO: remove when effect offload is implemented
+ bool isGlobalEffectEnabled_l();
+ void onGlobalEffectEnable();
+
class AudioHwDevice {
public:
enum Flags {
@@ -641,6 +645,8 @@
private:
bool mIsLowRamDevice;
bool mIsDeviceTypeKnown;
+ //TODO: remove when effect offload is implemented
+ nsecs_t mGlobalEffectEnableTime; // when a global effect was last enabled
};
#undef INCLUDING_FROM_AUDIOFLINGER_H
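
Note on the audio changes: isGlobalEffectEnabled_l()/onGlobalEffectEnable() and
mGlobalEffectEnableTime are a stop-gap until effect offload is implemented. Once a global
(output-mix) effect is enabled, it is treated as enabled for kMinGlobalEffectEnabletimeNs
(7200 s), so turning a Visualizer on and off does not keep invalidating offloaded tracks.
A minimal sketch of the intended caller pattern, assuming a caller that can hold the
AudioFlinger lock (it mirrors the Tracks.cpp change later in this patch; audioFlinger and
track are illustrative):

    // Sketch only: deny or invalidate offloaded playback while a global
    // effect is, or recently was, enabled; the track is then re-created
    // on a non-offloaded output.
    Mutex::Autolock _l(audioFlinger->mLock);
    if (audioFlinger->isGlobalEffectEnabled_l()) {
        track->invalidate();               // track: illustrative offloaded track
        return PERMISSION_DENIED;
    }
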
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index d5a21a7..86671a9 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -593,17 +593,6 @@
h->setEnabled(enabled);
}
}
-//EL_FIXME not sure why this is needed?
-// sp<ThreadBase> thread = mThread.promote();
-// if (thread == 0) {
-// return NO_ERROR;
-// }
-//
-// if ((thread->type() == ThreadBase::OFFLOAD) && (enabled)) {
-// PlaybackThread *p = (PlaybackThread *)thread.get();
-// ALOGV("setEnabled: Offload, invalidate tracks");
-// p->invalidateTracks(AUDIO_STREAM_MUSIC);
-// }
}
return NO_ERROR;
}
@@ -942,6 +931,17 @@
thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId());
}
mEnabled = false;
+ } else {
+ //TODO: remove when effect offload is implemented
+ if (thread != 0) {
+ if (thread->type() == ThreadBase::OFFLOAD) {
+ PlaybackThread *t = (PlaybackThread *)thread.get();
+ t->invalidateTracks(AUDIO_STREAM_MUSIC);
+ }
+ if (mEffect->sessionId() == AUDIO_SESSION_OUTPUT_MIX) {
+ thread->mAudioFlinger->onGlobalEffectEnable();
+ }
+ }
}
return status;
}
@@ -1728,4 +1728,16 @@
}
}
+bool AudioFlinger::EffectChain::isEnabled()
+{
+ Mutex::Autolock _l(mLock);
+ size_t size = mEffects.size();
+ for (size_t i = 0; i < size; i++) {
+ if (mEffects[i]->isEnabled()) {
+ return true;
+ }
+ }
+ return false;
+}
+
}; // namespace android
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 0b7fb83..bac50f2 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -303,6 +303,10 @@
void clearInputBuffer();
+ // At least one effect in the chain is enabled
+ bool isEnabled();
+
+
void dump(int fd, const Vector<String16>& args);
protected:
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 2042050..6002aa3 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -543,7 +543,17 @@
sp<ThreadBase> thread = mThread.promote();
if (thread != 0) {
- Mutex::Autolock _l(thread->mLock);
+ //TODO: remove when effect offload is implemented
+ if (isOffloaded()) {
+ Mutex::Autolock _laf(thread->mAudioFlinger->mLock);
+ Mutex::Autolock _lth(thread->mLock);
+ sp<EffectChain> ec = thread->getEffectChain_l(mSessionId);
+ if (thread->mAudioFlinger->isGlobalEffectEnabled_l() || (ec != 0 && ec->isEnabled())) {
+ invalidate();
+ return PERMISSION_DENIED;
+ }
+ }
+ Mutex::Autolock _lth(thread->mLock);
track_state state = mState;
// here the track could be either new, or restarted
// in both cases "unstop" the track
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 0a18501..bda2887 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -743,7 +743,7 @@
return res;
}
- Vector<uint8_t> outputStreams;
+ Vector<int32_t> outputStreams;
bool callbacksEnabled = (params.previewCallbackFlags &
CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ||
params.previewCallbackSurface;
@@ -999,7 +999,7 @@
return res;
}
- Vector<uint8_t> outputStreams;
+ Vector<int32_t> outputStreams;
outputStreams.push(getPreviewStreamId());
outputStreams.push(getRecordingStreamId());
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index ad1590a..ca3198f 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -437,7 +437,8 @@
status_t res;
ATRACE_CALL();
SharedParameters::Lock l(client->getParameters());
- Vector<uint8_t> outputStreams;
+ Vector<int32_t> outputStreams;
+ uint8_t captureIntent = static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
/**
* Set up output streams in the request
@@ -456,6 +457,7 @@
if (l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
outputStreams.push(client->getRecordingStreamId());
+ captureIntent = static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
}
res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
@@ -465,6 +467,10 @@
&mCaptureId, 1);
}
if (res == OK) {
+ res = mCaptureRequest.update(ANDROID_CONTROL_CAPTURE_INTENT,
+ &captureIntent, 1);
+ }
+ if (res == OK) {
res = mCaptureRequest.sort();
}
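
Note on the capture intent: CaptureSequencer now stamps every still-capture request with
an explicit ANDROID_CONTROL_CAPTURE_INTENT, STILL_CAPTURE by default and VIDEO_SNAPSHOT
when the picture is taken during recording, so the HAL can tune its processing for the
intended use. A minimal sketch of the equivalent update on a standalone CameraMetadata
object (the object and result variable are illustrative):

    // Sketch only: the capture intent is a single byte-typed metadata entry.
    CameraMetadata request;
    uint8_t intent = static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
    status_t res = request.update(ANDROID_CONTROL_CAPTURE_INTENT, &intent, 1);
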
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index dfe8580..6076dae 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -412,7 +412,7 @@
}
status_t StreamingProcessor::startStream(StreamType type,
- const Vector<uint8_t> &outputStreams) {
+ const Vector<int32_t> &outputStreams) {
ATRACE_CALL();
status_t res;
@@ -830,8 +830,8 @@
mRecordingHeapFree = mRecordingHeapCount;
}
-bool StreamingProcessor::isStreamActive(const Vector<uint8_t> &streams,
- uint8_t recordingStreamId) {
+bool StreamingProcessor::isStreamActive(const Vector<int32_t> &streams,
+ int32_t recordingStreamId) {
for (size_t i = 0; i < streams.size(); i++) {
if (streams[i] == recordingStreamId) {
return true;
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
index d879b83..833bb8f 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
@@ -64,7 +64,7 @@
RECORD
};
status_t startStream(StreamType type,
- const Vector<uint8_t> &outputStreams);
+ const Vector<int32_t> &outputStreams);
// Toggle between paused and unpaused. Stream must be started first.
status_t togglePauseStream(bool pause);
@@ -97,7 +97,7 @@
StreamType mActiveRequest;
bool mPaused;
- Vector<uint8_t> mActiveStreamIds;
+ Vector<int32_t> mActiveStreamIds;
// Preview-related members
int32_t mPreviewRequestId;
@@ -132,8 +132,8 @@
void releaseAllRecordingFramesLocked();
// Determine if the specified stream is currently in use
- static bool isStreamActive(const Vector<uint8_t> &streams,
- uint8_t recordingStreamId);
+ static bool isStreamActive(const Vector<int32_t> &streams,
+ int32_t recordingStreamId);
};
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 3b118f4..08ab357 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -300,12 +300,12 @@
uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
res = request.update(ANDROID_REQUEST_TYPE,
&requestType, 1);
- uint8_t inputStreams[1] =
- { static_cast<uint8_t>(mZslReprocessStreamId) };
+ int32_t inputStreams[1] =
+ { mZslReprocessStreamId };
if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS,
inputStreams, 1);
- uint8_t outputStreams[1] =
- { static_cast<uint8_t>(client->getCaptureStreamId()) };
+ int32_t outputStreams[1] =
+ { client->getCaptureStreamId() };
if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
outputStreams, 1);
res = request.update(ANDROID_REQUEST_ID,
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index 7c4da50..3e05091 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -247,13 +247,13 @@
uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
res = request.update(ANDROID_REQUEST_TYPE,
&requestType, 1);
- uint8_t inputStreams[1] =
- { static_cast<uint8_t>(mZslStreamId) };
+ int32_t inputStreams[1] =
+ { mZslStreamId };
if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS,
inputStreams, 1);
// TODO: Shouldn't we also update the latest preview frame?
- uint8_t outputStreams[1] =
- { static_cast<uint8_t>(client->getCaptureStreamId()) };
+ int32_t outputStreams[1] =
+ { client->getCaptureStreamId() };
if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
outputStreams, 1);
res = request.update(ANDROID_REQUEST_ID,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index f147c06..055ea12 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -137,7 +137,7 @@
* Write in the output stream IDs which we calculate from
* the capture request's list of surface targets
*/
- Vector<uint8_t> outputStreamIds;
+ Vector<int32_t> outputStreamIds;
outputStreamIds.setCapacity(request->mSurfaceList.size());
for (size_t i = 0; i < request->mSurfaceList.size(); ++i) {
sp<Surface> surface = request->mSurfaceList[i];
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 2902340..b70a278 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -986,7 +986,7 @@
newRequest->mSettings.find(ANDROID_REQUEST_INPUT_STREAMS);
if (inputStreams.count > 0) {
if (mInputStream == NULL ||
- mInputStream->getId() != inputStreams.data.u8[0]) {
+ mInputStream->getId() != inputStreams.data.i32[0]) {
CLOGE("Request references unknown input stream %d",
- inputStreams.data.u8[0]);
+ inputStreams.data.i32[0]);
return NULL;
@@ -1015,7 +1015,7 @@
}
for (size_t i = 0; i < streams.count; i++) {
- int idx = mOutputStreams.indexOfKey(streams.data.u8[i]);
+ int idx = mOutputStreams.indexOfKey(streams.data.i32[i]);
if (idx == NAME_NOT_FOUND) {
CLOGE("Request references unknown stream %d",
- streams.data.u8[i]);
+ streams.data.i32[i]);