Merge "Cleanup comments for the new control block implementation"
diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp
index 1040415..f6c9ca1 100644
--- a/camera/ProCamera.cpp
+++ b/camera/ProCamera.cpp
@@ -251,8 +251,7 @@
sp<CpuConsumer> cc = new CpuConsumer(bq, heapCount/*, synchronousMode*/);
cc->setName(String8("ProCamera::mCpuConsumer"));
- sp<Surface> stc = new Surface(
- cc->getProducerInterface());
+ sp<Surface> stc = new Surface(bq);
status_t s = createStream(width, height, format,
stc->getIGraphicBufferProducer(),
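This hunk is the first instance of a pattern repeated throughout the patch: consumers no longer hand out a producer interface via getProducerInterface(); instead the caller creates the BufferQueue itself and passes it to both the consumer and the Surface. A minimal sketch of the new wiring, assuming the libgui headers of this era (gui/BufferQueue.h, gui/CpuConsumer.h, gui/Surface.h):

    #include <gui/BufferQueue.h>
    #include <gui/CpuConsumer.h>
    #include <gui/Surface.h>
    #include <utils/String8.h>

    using namespace android;

    // The caller owns the queue and hands it to both ends.
    static sp<Surface> makeCpuConsumerSurface(uint32_t heapCount, sp<CpuConsumer>* outConsumer) {
        sp<BufferQueue> bq = new BufferQueue();              // one queue shared by both ends
        sp<CpuConsumer> cc = new CpuConsumer(bq, heapCount); // consumer end
        cc->setName(String8("Example::CpuConsumer"));
        *outConsumer = cc;
        return new Surface(bq);                              // producer end wraps the same queue
    }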
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 3e79ee0..28fc00f 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -44,8 +44,9 @@
// Command-line parameters.
static bool gVerbose = false; // chatty on stdout
static bool gRotate = false; // rotate 90 degrees
-static uint32_t gVideoWidth = 1280; // 720p
-static uint32_t gVideoHeight = 720;
+static bool gSizeSpecified = false; // was size explicitly requested?
+static uint32_t gVideoWidth = 0; // default width+height
+static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 4000000; // 4Mbps
// Set by signal handler to stop recording.
@@ -107,6 +108,14 @@
}
/*
+ * Returns "true" if the device is rotated 90 degrees.
+ */
+static bool isDeviceRotated(int orientation) {
+ return orientation != DISPLAY_ORIENTATION_0 &&
+ orientation != DISPLAY_ORIENTATION_180;
+}
+
+/*
* Configures and starts the MediaCodec encoder. Obtains an input surface
* from the codec.
*/
@@ -114,6 +123,11 @@
sp<IGraphicBufferProducer>* pBufferProducer) {
status_t err;
+ if (gVerbose) {
+ printf("Configuring recorder for %dx%d video at %.2fMbps\n",
+ gVideoWidth, gVideoHeight, gBitRate / 1000000.0);
+ }
+
sp<AMessage> format = new AMessage;
format->setInt32("width", gVideoWidth);
format->setInt32("height", gVideoHeight);
@@ -152,6 +166,7 @@
return err;
}
+ ALOGV("Codec prepared");
*pCodec = codec;
*pBufferProducer = bufferProducer;
return 0;
@@ -169,8 +184,7 @@
// Set the region of the layer stack we're interested in, which in our
// case is "all of it". If the app is rotated (so that the width of the
// app is based on the height of the display), reverse width/height.
- bool deviceRotated = mainDpyInfo.orientation != DISPLAY_ORIENTATION_0 &&
- mainDpyInfo.orientation != DISPLAY_ORIENTATION_180;
+ bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
uint32_t sourceWidth, sourceHeight;
if (!deviceRotated) {
sourceWidth = mainDpyInfo.w;
@@ -295,6 +309,12 @@
bufIndex, size, ptsUsec);
CHECK(trackIdx != -1);
+ // If the virtual display isn't providing us with timestamps,
+ // use the current time.
+ if (ptsUsec == 0) {
+ ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
+ }
+
// The MediaMuxer docs are unclear, but it appears that we
// need to pass either the full set of BufferInfo flags, or
// (flags & BUFFER_FLAG_SYNCFRAME).
@@ -370,11 +390,6 @@
static status_t recordScreen(const char* fileName) {
status_t err;
- if (gVerbose) {
- printf("Recording %dx%d video at %.2fMbps\n",
- gVideoWidth, gVideoHeight, gBitRate / 1000000.0);
- }
-
// Configure signal handler.
err = configureSignals();
if (err != NO_ERROR) return err;
@@ -399,11 +414,31 @@
mainDpyInfo.orientation);
}
+ bool rotated = isDeviceRotated(mainDpyInfo.orientation);
+ if (gVideoWidth == 0) {
+ gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
+ }
+ if (gVideoHeight == 0) {
+ gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
+ }
+
// Configure and start the encoder.
sp<MediaCodec> encoder;
sp<IGraphicBufferProducer> bufferProducer;
err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
- if (err != NO_ERROR) return err;
+ if (err != NO_ERROR && !gSizeSpecified) {
+ ALOGV("Retrying with 720p");
+ if (gVideoWidth != 1280 || gVideoHeight != 720) {
+ fprintf(stderr, "WARNING: failed at %dx%d, retrying at 720p\n",
+ gVideoWidth, gVideoHeight);
+ gVideoWidth = 1280;
+ gVideoHeight = 720;
+ err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
+ }
+ }
+ if (err != NO_ERROR) {
+ return err;
+ }
// Configure virtual display.
sp<IBinder> dpy;
@@ -478,6 +513,8 @@
fprintf(stderr,
"Usage: screenrecord [options] <filename>\n"
"\n"
+ "Records the device's display to a .mp4 file.\n"
+ "\n"
"Options:\n"
"--size WIDTHxHEIGHT\n"
" Set the video size, e.g. \"1280x720\". For best results, use\n"
@@ -485,8 +522,7 @@
"--bit-rate RATE\n"
" Set the video bit rate, in megabits per second. Default 4Mbps.\n"
"--rotate\n"
- " Rotate the output 90 degrees. Useful for filling the frame\n"
- " when in portrait mode.\n"
+ " Rotate the output 90 degrees.\n"
"--verbose\n"
" Display interesting information on stdout.\n"
"--help\n"
@@ -536,6 +572,7 @@
gVideoWidth, gVideoHeight);
return 2;
}
+ gSizeSpecified = true;
break;
case 'b':
gBitRate = atoi(optarg);
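For reference, the --size handling above only sets gSizeSpecified after the argument parses cleanly, which is what keeps the 720p fallback available for the default (display-sized) case. A hypothetical sketch of such a parser (the real one lives outside this hunk, so the name and exact behavior here are assumptions):

    #include <cstdio>
    #include <cstdint>

    // Hypothetical WIDTHxHEIGHT parser; returns false on malformed input such as
    // "1280x720p", so the caller can print usage instead of setting gSizeSpecified.
    static bool parseWidthHeight(const char* str, uint32_t* pWidth, uint32_t* pHeight) {
        unsigned int w, h;
        char trailing;
        if (sscanf(str, "%ux%u%c", &w, &h, &trailing) != 2) {
            return false;
        }
        *pWidth = w;
        *pHeight = h;
        return true;
    }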
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 529b96c..797e0b6 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -939,7 +939,7 @@
sp<BufferQueue> bq = new BufferQueue();
sp<GLConsumer> texture = new GLConsumer(bq, 0 /* tex */);
- gSurface = new Surface(texture->getBufferQueue());
+ gSurface = new Surface(bq);
}
CHECK_EQ((status_t)OK,
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 0a3c0e5..f457261 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -35,8 +35,6 @@
{
public:
- static const int DEFAULT_SAMPLE_RATE = 8000;
-
/* Events used by AudioRecord callback function (callback_t).
* Keep in sync with frameworks/base/media/java/android/media/AudioRecord.java NATIVE_EVENT_*.
*/
@@ -62,6 +60,7 @@
size_t frameCount; // number of sample frames corresponding to size;
// on input it is the number of frames available,
// on output it is the number of frames actually drained
+ // (currently ignored, but will become the primary field in the future)
size_t size; // input/output in bytes == frameCount * frameSize
// FIXME this is redundant with respect to frameCount,
@@ -144,6 +143,7 @@
* frames are ready in record track output buffer.
* sessionId: Not yet supported.
* transferType: How data is transferred from AudioRecord.
+ * flags: See comments on audio_input_flags_t in <system/audio.h>
* threadCanCallJava: Not present in parameter list, and so is fixed at false.
*/
@@ -156,7 +156,8 @@
void* user = NULL,
int notificationFrames = 0,
int sessionId = 0,
- transfer_type transferType = TRANSFER_DEFAULT);
+ transfer_type transferType = TRANSFER_DEFAULT,
+ audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE);
/* Terminates the AudioRecord and unregisters it from AudioFlinger.
* Also destroys all resources associated with the AudioRecord.
@@ -188,7 +189,8 @@
int notificationFrames = 0,
bool threadCanCallJava = false,
int sessionId = 0,
- transfer_type transferType = TRANSFER_DEFAULT);
+ transfer_type transferType = TRANSFER_DEFAULT,
+ audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE);
/* Result of constructing the AudioRecord. This must be checked
* before using any AudioRecord API (except for set()), because using
@@ -362,7 +364,12 @@
* Input parameter 'size' is in byte units.
* This is implemented on top of obtainBuffer/releaseBuffer. For best
* performance use callbacks. Returns actual number of bytes read >= 0,
- * or a negative status code.
+ * or one of the following negative status codes:
+ * INVALID_OPERATION AudioRecord is configured for streaming mode
+ * BAD_VALUE size is invalid
+ * WOULD_BLOCK when obtainBuffer() returns the same code, or the
+ * AudioRecord was stopped during the read,
+ * or any other error code returned by IAudioRecord::start() or restoreRecord_l().
*/
ssize_t read(void* buffer, size_t size);
@@ -419,6 +426,7 @@
status_t openRecord_l(uint32_t sampleRate,
audio_format_t format,
size_t frameCount,
+ audio_input_flags_t flags,
audio_io_handle_t input,
size_t epoch);
@@ -436,10 +444,13 @@
// for client callback handler
callback_t mCbf; // callback handler for events, or NULL
- void* mUserData; // for client callback handler
+ void* mUserData;
// for notification APIs
- uint32_t mNotificationFrames; // frames between each notification callback
+ uint32_t mNotificationFramesReq; // requested number of frames between each
+ // notification callback
+ uint32_t mNotificationFramesAct; // actual number of frames between each
+ // notification callback
bool mRefreshRemaining; // processAudioBuffer() should refresh next 2
// These are private to processAudioBuffer(), and are not protected by a lock
@@ -463,6 +474,7 @@
audio_source_t mInputSource;
uint32_t mLatency; // in ms
audio_channel_mask_t mChannelMask;
+ audio_input_flags_t mFlags;
int mSessionId;
transfer_type mTransfer;
@@ -475,11 +487,13 @@
int mPreviousPriority; // before start()
SchedPolicy mPreviousSchedulingGroup;
+ bool mAwaitBoost; // thread should wait for priority boost before running
// The proxy should only be referenced while a lock is held because the proxy isn't
// multi-thread safe.
// An exception is that a blocking ClientProxy::obtainBuffer() may be called without a lock,
// provided that the caller also holds an extra reference to the proxy and shared memory to keep
+ // them around in case they are replaced during the obtainBuffer().
sp<AudioRecordClientProxy> mProxy;
bool mInOverrun; // whether recorder is currently in overrun state
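A minimal client sketch for the new trailing flags parameter, assuming the constructor head (inputSource, sampleRate, format, channelMask, frameCount, callback) that precedes the parameters visible in this hunk. Per the AudioRecord.cpp change below, AUDIO_INPUT_FLAG_FAST is only honored in TRANSFER_CALLBACK mode and is downgraded rather than failed when denied:

    #include <media/AudioRecord.h>
    #include <system/audio.h>

    using namespace android;

    static void recordCallback(int event, void* user, void* info) {
        // EVENT_MORE_DATA delivers an AudioRecord::Buffer* in 'info'.
    }

    static sp<AudioRecord> makeFastRecord() {
        sp<AudioRecord> record = new AudioRecord(
                AUDIO_SOURCE_MIC,
                48000,                       // FAST requires the hardware rate (48 kHz assumed here)
                AUDIO_FORMAT_PCM_16_BIT,
                AUDIO_CHANNEL_IN_MONO,
                0 /* frameCount: let the server choose */,
                recordCallback,
                NULL /* user */,
                0 /* notificationFrames */,
                0 /* sessionId */,
                AudioRecord::TRANSFER_CALLBACK,
                AUDIO_INPUT_FLAG_FAST);
        if (record->initCheck() != NO_ERROR) {
            record.clear();                  // failure here is unrelated to FAST denial
        }
        return record;
    }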
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index 5ba8461..ae92cdd 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -116,6 +116,7 @@
* Returned status (from utils/Errors.h) can be:
* - NO_ERROR: successful operation
* - NO_INIT: audio server or audio hardware not initialized
+ * - BAD_VALUE: unsupported configuration
*/
static status_t getMinFrameCount(size_t* frameCount,
@@ -170,9 +171,9 @@
*/
AudioTrack( audio_stream_type_t streamType,
- uint32_t sampleRate = 0,
- audio_format_t format = AUDIO_FORMAT_DEFAULT,
- audio_channel_mask_t channelMask = 0,
+ uint32_t sampleRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
int frameCount = 0,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
callback_t cbf = NULL,
@@ -194,10 +195,10 @@
*/
AudioTrack( audio_stream_type_t streamType,
- uint32_t sampleRate = 0,
- audio_format_t format = AUDIO_FORMAT_DEFAULT,
- audio_channel_mask_t channelMask = 0,
- const sp<IMemory>& sharedBuffer = 0,
+ uint32_t sampleRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ const sp<IMemory>& sharedBuffer,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
callback_t cbf = NULL,
void* user = NULL,
@@ -227,10 +228,10 @@
*
* threadCanCallJava: Whether callbacks are made from an attached thread and thus can call JNI.
*/
- status_t set(audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT,
- uint32_t sampleRate = 0,
- audio_format_t format = AUDIO_FORMAT_DEFAULT,
- audio_channel_mask_t channelMask = 0,
+ status_t set(audio_stream_type_t streamType,
+ uint32_t sampleRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
int frameCount = 0,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
callback_t cbf = NULL,
@@ -670,8 +671,9 @@
STATE_STOPPING,
} mState;
+ // for client callback handler
callback_t mCbf; // callback handler for events, or NULL
- void* mUserData; // for client callback handler
+ void* mUserData;
// for notification APIs
uint32_t mNotificationFramesReq; // requested number of frames between each
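Dropping the default arguments means every AudioTrack caller must now spell out the stream type, sample rate, format, and channel mask; only the tail parameters shown above keep their defaults. A minimal sketch of an updated call site:

    #include <media/AudioTrack.h>

    using namespace android;

    static sp<AudioTrack> makeMusicTrack() {
        // The four leading parameters are now mandatory at the call site;
        // frameCount, flags, cbf, user, etc. still default as before.
        return new AudioTrack(
                AUDIO_STREAM_MUSIC,
                44100,
                AUDIO_FORMAT_PCM_16_BIT,
                AUDIO_CHANNEL_OUT_STEREO);
    }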
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index 82aae62..49f921b 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -79,7 +79,7 @@
audio_format_t format,
audio_channel_mask_t channelMask,
size_t frameCount,
- track_flags_t flags,
+ track_flags_t *flags,
pid_t tid, // -1 means unused, otherwise must be valid non-0
int *sessionId,
status_t *status) = 0;
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index 38f9d11..6d116f0 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -97,6 +97,10 @@
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) = 0;
+ virtual status_t updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) = 0;
+
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer) = 0;
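A hedged usage sketch for the new method, mirroring the ACodec change later in this patch: after a slot's GraphicBuffer is replaced, the OMX side rewrites the gralloc handle stored in the meta-mode buffer so the two stay in sync. The names omx, node, and bufferId stand for an existing IOMX connection and are assumptions here; port index 1 follows the usual decoder output-port convention:

    #include <media/IOMX.h>
    #include <ui/GraphicBuffer.h>

    using namespace android;

    static status_t replaceOutputBuffer(const sp<IOMX>& omx, IOMX::node_id node,
                                        IOMX::buffer_id bufferId,
                                        const sp<GraphicBuffer>& newBuffer) {
        // Rewrites eType/pHandle in the buffer's VideoDecoderOutputMetaData.
        return omx->updateGraphicBufferInMeta(node, 1 /* kPortIndexOutput */,
                                              newBuffer, bufferId);
    }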
diff --git a/include/media/stagefright/MediaErrors.h b/include/media/stagefright/MediaErrors.h
index ee5e4e2..686f286 100644
--- a/include/media/stagefright/MediaErrors.h
+++ b/include/media/stagefright/MediaErrors.h
@@ -56,14 +56,11 @@
ERROR_DRM_TAMPER_DETECTED = DRM_ERROR_BASE - 7,
ERROR_DRM_NOT_PROVISIONED = DRM_ERROR_BASE - 8,
ERROR_DRM_DEVICE_REVOKED = DRM_ERROR_BASE - 9,
+ ERROR_DRM_RESOURCE_BUSY = DRM_ERROR_BASE - 10,
ERROR_DRM_VENDOR_MAX = DRM_ERROR_BASE - 500,
ERROR_DRM_VENDOR_MIN = DRM_ERROR_BASE - 999,
- // Deprecated
- ERROR_DRM_WV_VENDOR_MAX = ERROR_DRM_VENDOR_MAX,
- ERROR_DRM_WV_VENDOR_MIN = ERROR_DRM_VENDOR_MIN,
-
// Heartbeat Error Codes
HEARTBEAT_ERROR_BASE = -3000,
ERROR_HEARTBEAT_TERMINATE_REQUESTED = HEARTBEAT_ERROR_BASE,
diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h
index 7d40379..db5f947 100644
--- a/include/media/stagefright/SurfaceMediaSource.h
+++ b/include/media/stagefright/SurfaceMediaSource.h
@@ -56,7 +56,7 @@
class SurfaceMediaSource : public MediaSource,
public MediaBufferObserver,
- protected BufferQueue::ConsumerListener {
+ protected ConsumerListener {
public:
enum { MIN_UNDEQUEUED_BUFFERS = 4};
diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp b/libvideoeditor/lvpp/NativeWindowRenderer.cpp
index 84a8e15..8b362ef 100755
--- a/libvideoeditor/lvpp/NativeWindowRenderer.cpp
+++ b/libvideoeditor/lvpp/NativeWindowRenderer.cpp
@@ -570,7 +570,7 @@
, mTextureId(textureId) {
sp<BufferQueue> bq = new BufferQueue();
mST = new GLConsumer(bq, mTextureId);
- mSTC = new Surface(mST->getBufferQueue());
+ mSTC = new Surface(bq);
native_window_connect(mSTC.get(), NATIVE_WINDOW_API_MEDIA);
}
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index 0e7e17f..2718420 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -60,10 +60,9 @@
// We double the size of input buffer for ping pong use of record buffer.
size <<= 1;
- if (audio_is_linear_pcm(format)) {
- uint32_t channelCount = popcount(channelMask);
- size /= channelCount * audio_bytes_per_sample(format);
- }
+ // Assumes audio_is_linear_pcm(format)
+ uint32_t channelCount = popcount(channelMask);
+ size /= channelCount * audio_bytes_per_sample(format);
*frameCount = size;
return NO_ERROR;
@@ -87,7 +86,8 @@
void* user,
int notificationFrames,
int sessionId,
- transfer_type transferType)
+ transfer_type transferType,
+ audio_input_flags_t flags)
: mStatus(NO_INIT), mSessionId(0),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
mPreviousSchedulingGroup(SP_DEFAULT),
@@ -129,7 +129,8 @@
int notificationFrames,
bool threadCanCallJava,
int sessionId,
- transfer_type transferType)
+ transfer_type transferType,
+ audio_input_flags_t flags)
{
switch (transferType) {
case TRANSFER_DEFAULT:
@@ -176,7 +177,8 @@
}
if (sampleRate == 0) {
- sampleRate = DEFAULT_SAMPLE_RATE;
+ ALOGE("Invalid sample rate %u", sampleRate);
+ return BAD_VALUE;
}
mSampleRate = sampleRate;
@@ -205,11 +207,8 @@
uint32_t channelCount = popcount(channelMask);
mChannelCount = channelCount;
- if (audio_is_linear_pcm(format)) {
- mFrameSize = channelCount * audio_bytes_per_sample(format);
- } else {
- mFrameSize = sizeof(uint8_t);
- }
+ // Assumes audio_is_linear_pcm(format), else sizeof(uint8_t)
+ mFrameSize = channelCount * audio_bytes_per_sample(format);
if (sessionId == 0 ) {
mSessionId = AudioSystem::newAudioSessionId();
@@ -218,6 +217,8 @@
}
ALOGV("set(): mSessionId %d", mSessionId);
+ mFlags = flags;
+
audio_io_handle_t input = AudioSystem::getInput(inputSource,
sampleRate,
format,
@@ -249,7 +250,7 @@
}
// create the IAudioRecord
- status = openRecord_l(sampleRate, format, frameCount, input, 0 /*epoch*/);
+ status = openRecord_l(sampleRate, format, frameCount, mFlags, input, 0 /*epoch*/);
if (status != NO_ERROR) {
return status;
}
@@ -266,7 +267,8 @@
mActive = false;
mCbf = cbf;
- mNotificationFrames = notificationFrames;
+ mNotificationFramesReq = notificationFrames;
+ mNotificationFramesAct = 0;
mRefreshRemaining = true;
mUserData = user;
// TODO: add audio hardware input latency here
@@ -434,6 +436,7 @@
uint32_t sampleRate,
audio_format_t format,
size_t frameCount,
+ audio_input_flags_t flags,
audio_io_handle_t input,
size_t epoch)
{
@@ -444,15 +447,29 @@
return NO_INIT;
}
+ IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_DEFAULT;
pid_t tid = -1;
- // FIXME see similar logic at AudioTrack for tid
+
+ // Client can only express a preference for FAST. Server will perform additional tests.
+ // The only supported use case for FAST is callback transfer mode.
+ if (flags & AUDIO_INPUT_FLAG_FAST) {
+ if ((mTransfer != TRANSFER_CALLBACK) || (mAudioRecordThread == 0)) {
+ ALOGW("AUDIO_INPUT_FLAG_FAST denied by client");
+ // once denied, do not request again if IAudioRecord is re-created
+ flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_FAST);
+ mFlags = flags;
+ } else {
+ trackFlags |= IAudioFlinger::TRACK_FAST;
+ tid = mAudioRecordThread->getTid();
+ }
+ }
int originalSessionId = mSessionId;
sp<IAudioRecord> record = audioFlinger->openRecord(input,
sampleRate, format,
mChannelMask,
frameCount,
- IAudioFlinger::TRACK_DEFAULT,
+ &trackFlags,
tid,
&mSessionId,
&status);
@@ -476,6 +493,27 @@
mCblkMemory = iMem;
audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMem->pointer());
mCblk = cblk;
+ // FIXME missing fast track frameCount logic
+ mAwaitBoost = false;
+ mNotificationFramesAct = mNotificationFramesReq;
+ if (flags & AUDIO_INPUT_FLAG_FAST) {
+ if (trackFlags & IAudioFlinger::TRACK_FAST) {
+ ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %u", frameCount);
+ mAwaitBoost = true;
+ // double-buffering is not required for fast tracks, due to tighter scheduling
+ if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount) {
+ mNotificationFramesAct = frameCount;
+ }
+ } else {
+ ALOGV("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %u", frameCount);
+ // once denied, do not request again if IAudioRecord is re-created
+ flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_FAST);
+ mFlags = flags;
+ if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) {
+ mNotificationFramesAct = frameCount/2;
+ }
+ }
+ }
// starting address of buffers in shared memory
void *buffers = (char*)cblk + sizeof(audio_track_cblk_t);
@@ -483,7 +521,7 @@
// update proxy
mProxy = new AudioRecordClientProxy(cblk, buffers, frameCount, mFrameSize);
mProxy->setEpoch(epoch);
- mProxy->setMinimum(mNotificationFrames);
+ mProxy->setMinimum(mNotificationFramesAct);
mDeathNotifier = new DeathNotifier(this);
mAudioRecord->asBinder()->linkToDeath(mDeathNotifier, this);
@@ -665,6 +703,26 @@
nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
{
mLock.lock();
+ if (mAwaitBoost) {
+ mAwaitBoost = false;
+ mLock.unlock();
+ static const int32_t kMaxTries = 5;
+ int32_t tryCounter = kMaxTries;
+ uint32_t pollUs = 10000;
+ do {
+ int policy = sched_getscheduler(0);
+ if (policy == SCHED_FIFO || policy == SCHED_RR) {
+ break;
+ }
+ usleep(pollUs);
+ pollUs <<= 1;
+ } while (tryCounter-- > 0);
+ if (tryCounter < 0) {
+ ALOGE("did not receive expected priority boost on time");
+ }
+ // Run again immediately
+ return 0;
+ }
// Can only reference mCblk while locked
int32_t flags = android_atomic_and(~CBLK_OVERRUN, &mCblk->mFlags);
@@ -710,7 +768,7 @@
}
// Cache other fields that will be needed soon
- size_t notificationFrames = mNotificationFrames;
+ size_t notificationFrames = mNotificationFramesAct;
if (mRefreshRemaining) {
mRefreshRemaining = false;
mRemainingFrames = notificationFrames;
@@ -886,7 +944,7 @@
// It will also delete the strong references on previous IAudioRecord and IMemory
size_t position = mProxy->getPosition();
mNewPosition = position + mUpdatePeriod;
- result = openRecord_l(mSampleRate, mFormat, mFrameCount, getInput_l(), position);
+ result = openRecord_l(mSampleRate, mFormat, mFrameCount, mFlags, getInput_l(), position);
if (result == NO_ERROR) {
if (mActive) {
// callback thread or sync event hasn't changed
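Back-of-the-envelope for the priority-boost wait added to processAudioBuffer(): the do/while body runs kMaxTries + 1 = 6 times if the boost never arrives, with the poll interval doubling from 10 ms, so the worst case is 10+20+40+80+160+320 = 630 ms before the error is logged. A standalone check of that arithmetic:

    #include <cstdint>
    #include <cstdio>

    int main() {
        int32_t tryCounter = 5;      // kMaxTries
        uint32_t pollUs = 10000;
        uint64_t totalUs = 0;
        do {
            totalUs += pollUs;       // stands in for usleep(pollUs)
            pollUs <<= 1;
        } while (tryCounter-- > 0);
        printf("worst-case wait: %llu us\n", (unsigned long long) totalUs);  // 630000
        return 0;
    }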
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index c6e43e7..be818c6 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -144,7 +144,7 @@
audio_format_t format,
audio_channel_mask_t channelMask,
size_t frameCount,
- track_flags_t flags,
+ track_flags_t *flags,
pid_t tid,
int *sessionId,
status_t *status)
@@ -157,7 +157,8 @@
data.writeInt32(format);
data.writeInt32(channelMask);
data.writeInt32(frameCount);
- data.writeInt32(flags);
+ track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;
+ data.writeInt32(lFlags);
data.writeInt32((int32_t) tid);
int lSessionId = 0;
if (sessionId != NULL) {
@@ -168,6 +169,10 @@
if (lStatus != NO_ERROR) {
ALOGE("openRecord error: %s", strerror(-lStatus));
} else {
+ lFlags = reply.readInt32();
+ if (flags != NULL) {
+ *flags = lFlags;
+ }
lSessionId = reply.readInt32();
if (sessionId != NULL) {
*sessionId = lSessionId;
@@ -761,7 +766,8 @@
int sessionId = data.readInt32();
status_t status;
sp<IAudioRecord> record = openRecord(input,
- sampleRate, format, channelMask, frameCount, flags, tid, &sessionId, &status);
+ sampleRate, format, channelMask, frameCount, &flags, tid, &sessionId, &status);
+ reply->writeInt32(flags);
reply->writeInt32(sessionId);
reply->writeInt32(status);
reply->writeStrongBinder(record->asBinder());
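flags is now an in/out parameter over Binder: the proxy writes the requested flags into the transaction and reads back what the server actually granted, and the stub writes the granted flags into the reply ahead of sessionId and status. The null-pointer handling above can be factored as below; the helper names are illustrative only:

    #include <media/IAudioFlinger.h>

    using namespace android;

    static IAudioFlinger::track_flags_t requestedFlags(const IAudioFlinger::track_flags_t* flags) {
        // NULL means "use defaults and ignore the output value".
        return flags != NULL ? *flags : (IAudioFlinger::track_flags_t) IAudioFlinger::TRACK_DEFAULT;
    }

    static void reportGrantedFlags(IAudioFlinger::track_flags_t* flags,
                                   IAudioFlinger::track_flags_t granted) {
        if (flags != NULL) {
            *flags = granted;        // e.g. TRACK_FAST may have been stripped by the server
        }
    }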
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index 5bbb2f0..ef99f4f 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -52,6 +52,7 @@
OBSERVER_ON_MSG,
GET_GRAPHIC_BUFFER_USAGE,
SET_INTERNAL_OPTION,
+ UPDATE_GRAPHIC_BUFFER_IN_META,
};
class BpOMX : public BpInterface<IOMX> {
@@ -283,6 +284,21 @@
return err;
}
+ virtual status_t updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeIntPtr((intptr_t)node);
+ data.writeInt32(port_index);
+ data.write(*graphicBuffer);
+ data.writeIntPtr((intptr_t)buffer);
+ remote()->transact(UPDATE_GRAPHIC_BUFFER_IN_META, data, &reply);
+
+ status_t err = reply.readInt32();
+ return err;
+ }
+
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer) {
@@ -691,6 +707,23 @@
return NO_ERROR;
}
+ case UPDATE_GRAPHIC_BUFFER_IN_META:
+ {
+ CHECK_OMX_INTERFACE(IOMX, data, reply);
+
+ node_id node = (void*)data.readIntPtr();
+ OMX_U32 port_index = data.readInt32();
+ sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
+ data.read(*graphicBuffer);
+ buffer_id buffer = (void*)data.readIntPtr();
+
+ status_t err = updateGraphicBufferInMeta(
+ node, port_index, graphicBuffer, buffer);
+ reply->writeInt32(err);
+
+ return NO_ERROR;
+ }
+
case CREATE_INPUT_SURFACE:
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 963b04f..056cc0a 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -811,6 +811,13 @@
if (mPlayer == NULL) {
return NO_INIT;
}
+
+ if (next != NULL && !(next->mCurrentState &
+ (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_PLAYBACK_COMPLETE))) {
+ ALOGE("next player is not prepared");
+ return INVALID_OPERATION;
+ }
+
return mPlayer->setNextPlayer(next == NULL ? NULL : next->mPlayer);
}
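The client-side ordering the new check enforces, sketched with the native MediaPlayer wrapper: the next player has to reach Prepared (or Paused / PlaybackComplete) before it is chained, otherwise setNextMediaPlayer() now returns INVALID_OPERATION. A minimal sketch, assuming a synchronous prepare() is acceptable for the caller:

    #include <media/mediaplayer.h>

    using namespace android;

    static status_t chainPlayers(const sp<MediaPlayer>& current, const sp<MediaPlayer>& next) {
        status_t err = next->prepare();      // next must be prepared before chaining
        if (err != NO_ERROR) {
            return err;
        }
        return current->setNextMediaPlayer(next);
    }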
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 00804c5..5aefa58 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -833,15 +833,20 @@
oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
oldest->mStatus = BufferInfo::OWNED_BY_US;
- struct VideoDecoderOutputMetaData metaData;
- metaData.eType = kMetadataBufferTypeGrallocSource;
- metaData.pHandle = oldest->mGraphicBuffer->handle;
- memcpy(oldest->mData->base(), &metaData, sizeof(metaData));
+ mOMX->updateGraphicBufferInMeta(
+ mNode, kPortIndexOutput, oldest->mGraphicBuffer,
+ oldest->mBufferID);
- ALOGV("replaced oldest buffer #%u with age %u (%p stored in %p)",
+ VideoDecoderOutputMetaData *metaData =
+ reinterpret_cast<VideoDecoderOutputMetaData *>(
+ oldest->mData->base());
+ CHECK_EQ(metaData->eType, kMetadataBufferTypeGrallocSource);
+
+ ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
oldest - &mBuffers[kPortIndexOutput][0],
mDequeueCounter - oldest->mDequeuedAt,
- metaData.pHandle, oldest->mData->base());
+ metaData->pHandle,
+ oldest->mGraphicBuffer->handle, oldest->mData->base());
return oldest;
}
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 810d88f..9820ef5 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -83,6 +83,10 @@
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+ virtual status_t updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer);
+
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer);
@@ -287,6 +291,13 @@
node, port_index, graphicBuffer, buffer);
}
+status_t MuxOMX::updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) {
+ return getOMX(node)->updateGraphicBufferInMeta(
+ node, port_index, graphicBuffer, buffer);
+}
+
status_t MuxOMX::createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer) {
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index befd4cc..6b934d4 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -65,10 +65,8 @@
// reference once the ctor ends, as that would cause the refcount of 'this'
// to drop to 0 at the end of the ctor. Since all we need is a wp<...>
// that's what we create.
- wp<BufferQueue::ConsumerListener> listener;
- sp<BufferQueue::ConsumerListener> proxy;
- listener = static_cast<BufferQueue::ConsumerListener*>(this);
- proxy = new BufferQueue::ProxyConsumerListener(listener);
+ wp<ConsumerListener> listener = static_cast<ConsumerListener*>(this);
+ sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
status_t err = mBufferQueue->consumerConnect(proxy, false);
if (err != NO_ERROR) {
@@ -107,7 +105,7 @@
Mutex::Autolock lock(mMutex);
result.append(buffer);
- mBufferQueue->dump(result);
+ mBufferQueue->dump(result, "");
}
status_t SurfaceMediaSource::setFrameRate(int32_t fps)
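The listener wiring that this hunk only tidies up is worth spelling out: the object is both the consumer and its own ConsumerListener, so the constructor must give the BufferQueue a proxy that holds a weak reference; taking an sp to 'this' inside the constructor would drop the refcount to 0 when it goes out of scope. A minimal sketch, assuming the libgui types named in the diff and the ConsumerListener callbacks of the time (onFrameAvailable, onBuffersReleased):

    #include <gui/BufferQueue.h>

    using namespace android;

    struct ExampleSource : public ConsumerListener {
        explicit ExampleSource(const sp<BufferQueue>& bq) {
            wp<ConsumerListener> listener = static_cast<ConsumerListener*>(this);
            sp<BufferQueue::ProxyConsumerListener> proxy =
                    new BufferQueue::ProxyConsumerListener(listener);
            bq->consumerConnect(proxy, false /* controlledByApp */);  // real code checks the status
        }
        virtual void onFrameAvailable() { /* acquire/process work goes here */ }
        virtual void onBuffersReleased() {}
    };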
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 7fed7d4..7e53af3 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -79,6 +79,10 @@
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+ virtual status_t updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer);
+
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index f6ae376..ae498b4 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -66,6 +66,10 @@
OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
OMX::buffer_id *buffer);
+ status_t updateGraphicBufferInMeta(
+ OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+ OMX::buffer_id buffer);
+
status_t createInputSurface(
OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer);
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index d6fd95b..325ffcf 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -69,11 +69,8 @@
// reference once the ctor ends, as that would cause the refcount of 'this'
// to drop to 0 at the end of the ctor. Since all we need is a wp<...>
// that's what we create.
- wp<BufferQueue::ConsumerListener> listener;
- listener = static_cast<BufferQueue::ConsumerListener*>(this);
-
- sp<BufferQueue::ConsumerListener> proxy;
- proxy = new BufferQueue::ProxyConsumerListener(listener);
+ wp<BufferQueue::ConsumerListener> listener = static_cast<BufferQueue::ConsumerListener*>(this);
+ sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
mInitCheck = mBufferQueue->consumerConnect(proxy, false);
if (mInitCheck != NO_ERROR) {
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 4b1dbe6..aaa9f89 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -345,6 +345,13 @@
port_index, graphicBuffer, buffer);
}
+status_t OMX::updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) {
+ return findInstance(node)->updateGraphicBufferInMeta(
+ port_index, graphicBuffer, buffer);
+}
+
status_t OMX::createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer) {
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 525e18d..8d100f1 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -70,6 +70,10 @@
header->nFilledLen);
}
+ void setGraphicBuffer(const sp<GraphicBuffer> &graphicBuffer) {
+ mGraphicBuffer = graphicBuffer;
+ }
+
private:
sp<GraphicBuffer> mGraphicBuffer;
sp<IMemory> mMem;
@@ -566,6 +570,22 @@
return OK;
}
+status_t OMXNodeInstance::updateGraphicBufferInMeta(
+ OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
+ OMX::buffer_id buffer) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)(buffer);
+ VideoDecoderOutputMetaData *metadata =
+ (VideoDecoderOutputMetaData *)(header->pBuffer);
+ BufferMeta *bufferMeta = (BufferMeta *)(header->pAppPrivate);
+ bufferMeta->setGraphicBuffer(graphicBuffer);
+ metadata->eType = kMetadataBufferTypeGrallocSource;
+ metadata->pHandle = graphicBuffer->handle;
+
+ return OK;
+}
+
status_t OMXNodeInstance::createInputSurface(
OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer) {
Mutex::Autolock autolock(mLock);
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 00e8a57..3d65c44 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1212,7 +1212,7 @@
audio_format_t format,
audio_channel_mask_t channelMask,
size_t frameCount,
- IAudioFlinger::track_flags_t flags,
+ IAudioFlinger::track_flags_t *flags,
pid_t tid,
int *sessionId,
status_t *status)
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index d99b779..e5e4113 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -117,7 +117,7 @@
audio_format_t format,
audio_channel_mask_t channelMask,
size_t frameCount,
- IAudioFlinger::track_flags_t flags,
+ IAudioFlinger::track_flags_t *flags,
pid_t tid,
int *sessionId,
status_t *status);
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index f0c27c3..2c2931f 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -4433,7 +4433,7 @@
audio_channel_mask_t channelMask,
size_t frameCount,
int sessionId,
- IAudioFlinger::track_flags_t flags,
+ IAudioFlinger::track_flags_t *flags,
pid_t tid,
status_t *status)
{
@@ -4446,6 +4446,57 @@
goto Exit;
}
+ // client expresses a preference for FAST, but we get the final say
+ if (*flags & IAudioFlinger::TRACK_FAST) {
+ if (
+ // use case: callback handler and frame count is default or at least as large as HAL
+ (
+ (tid != -1) &&
+ ((frameCount == 0) ||
+ (frameCount >= (mFrameCount * kFastTrackMultiplier)))
+ ) &&
+ // FIXME when record supports non-PCM data, also check for audio_is_linear_pcm(format)
+ // mono or stereo
+ ( (channelMask == AUDIO_CHANNEL_IN_MONO) ||
+ (channelMask == AUDIO_CHANNEL_IN_STEREO) ) &&
+ // hardware sample rate
+ (sampleRate == mSampleRate) &&
+ // record thread has an associated fast recorder
+ hasFastRecorder()
+ // FIXME test that RecordThread for this fast track has a capable output HAL
+ // FIXME add a permission test also?
+ ) {
+ // if frameCount not specified, then it defaults to kFastTrackMultiplier times the fast recorder (HAL) frame count
+ if (frameCount == 0) {
+ frameCount = mFrameCount * kFastTrackMultiplier;
+ }
+ ALOGV("AUDIO_INPUT_FLAG_FAST accepted: frameCount=%d mFrameCount=%d",
+ frameCount, mFrameCount);
+ } else {
+ ALOGV("AUDIO_INPUT_FLAG_FAST denied: frameCount=%d "
+ "mFrameCount=%d format=%d isLinear=%d channelMask=%#x sampleRate=%u mSampleRate=%u "
+ "hasFastRecorder=%d tid=%d",
+ frameCount, mFrameCount, format,
+ audio_is_linear_pcm(format),
+ channelMask, sampleRate, mSampleRate, hasFastRecorder(), tid);
+ *flags &= ~IAudioFlinger::TRACK_FAST;
+ // For compatibility with AudioRecord calculation, buffer depth is forced
+ // to be at least 2 x the record thread frame count and cover audio hardware latency.
+ // This is probably too conservative, but legacy application code may depend on it.
+ // If you change this calculation, also review the start threshold which is related.
+ uint32_t latencyMs = 50; // FIXME mInput->stream->get_latency(mInput->stream);
+ size_t mNormalFrameCount = 2048; // FIXME
+ uint32_t minBufCount = latencyMs / ((1000 * mNormalFrameCount) / mSampleRate);
+ if (minBufCount < 2) {
+ minBufCount = 2;
+ }
+ size_t minFrameCount = mNormalFrameCount * minBufCount;
+ if (frameCount < minFrameCount) {
+ frameCount = minFrameCount;
+ }
+ }
+ }
+
// FIXME use flags and tid similar to createTrack_l()
{ // scope for mLock
@@ -4465,6 +4516,13 @@
mAudioFlinger->btNrecIsOff();
setEffectSuspended_l(FX_IID_AEC, suspend, sessionId);
setEffectSuspended_l(FX_IID_NS, suspend, sessionId);
+
+ if ((*flags & IAudioFlinger::TRACK_FAST) && (tid != -1)) {
+ pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ // we don't have CAP_SYS_NICE, nor do we want to have it as it's too powerful,
+ // so ask activity manager to do this on our behalf
+ sendPrioConfigEvent_l(callingPid, tid, kPriorityAudioApp);
+ }
}
lStatus = NO_ERROR;
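Working through the fallback sizing above with its placeholder values (latencyMs = 50, mNormalFrameCount = 2048) and an assumed 48 kHz record thread: one normal-sized buffer covers 1000*2048/48000, about 42 ms, so minBufCount comes out as 50/42 = 1, is clamped up to 2, and the minimum depth becomes 4096 frames:

    #include <cstdint>
    #include <cstdio>

    int main() {
        uint32_t latencyMs = 50;           // FIXME placeholder from the hunk
        size_t normalFrameCount = 2048;    // FIXME placeholder from the hunk
        uint32_t sampleRate = 48000;       // assumed record thread rate
        uint32_t minBufCount = latencyMs / ((1000 * normalFrameCount) / sampleRate);
        if (minBufCount < 2) {
            minBufCount = 2;
        }
        size_t minFrameCount = normalFrameCount * minBufCount;
        printf("minBufCount=%u minFrameCount=%zu\n", minBufCount, minFrameCount);  // 2, 4096
        return 0;
    }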
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index aa04fd4..31d5323 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -837,7 +837,7 @@
audio_channel_mask_t channelMask,
size_t frameCount,
int sessionId,
- IAudioFlinger::track_flags_t flags,
+ IAudioFlinger::track_flags_t *flags,
pid_t tid,
status_t *status);
@@ -879,6 +879,7 @@
void handleSyncStartEvent(const sp<SyncEvent>& event);
virtual size_t frameCount() const { return mFrameCount; }
+ bool hasFastRecorder() const { return false; }
private:
void clearSyncStartEvent();
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 12d0859..9d8c4a1 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -114,8 +114,7 @@
mCallbackConsumer = new CpuConsumer(bq, kCallbackHeapCount);
mCallbackConsumer->setFrameAvailableListener(this);
mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer"));
- mCallbackWindow = new Surface(
- mCallbackConsumer->getProducerInterface());
+ mCallbackWindow = new Surface(bq);
}
if (mCallbackStreamId != NO_STREAM) {
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index b920edf..77d5c8a 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -87,8 +87,7 @@
mCaptureConsumer = new CpuConsumer(bq, 1);
mCaptureConsumer->setFrameAvailableListener(this);
mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
- mCaptureWindow = new Surface(
- mCaptureConsumer->getProducerInterface());
+ mCaptureWindow = new Surface(bq);
// Create memory for API consumption
mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0,
"Camera2Client::CaptureHeap");
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 7e98016..dfe8580 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -325,8 +325,7 @@
mRecordingHeapCount + 1);
mRecordingConsumer->setFrameAvailableListener(this);
mRecordingConsumer->setName(String8("Camera2-RecordingConsumer"));
- mRecordingWindow = new Surface(
- mRecordingConsumer->getProducerInterface());
+ mRecordingWindow = new Surface(bq);
newConsumer = true;
// Allocate memory later, since we don't know buffer size until receipt
}
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 11a2cbb..3b118f4 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -134,8 +134,7 @@
kZslBufferDepth);
mZslConsumer->setFrameAvailableListener(this);
mZslConsumer->setName(String8("Camera2Client::ZslConsumer"));
- mZslWindow = new Surface(
- mZslConsumer->getProducerInterface());
+ mZslWindow = new Surface(bq);
}
if (mZslStreamId != NO_STREAM) {
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 74c4484..9432a59 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -77,6 +77,8 @@
virtual size_t getBufferCountLocked();
+ virtual status_t getEndpointUsage(uint32_t *usage) = 0;
+
status_t getBufferPreconditionCheckLocked() const;
status_t returnBufferPreconditionCheckLocked() const;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index e9a9c2b..c80f512 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -182,10 +182,6 @@
return OK;
}
-sp<IGraphicBufferProducer> Camera3InputStream::getProducerInterface() const {
- return mConsumer->getProducerInterface();
-}
-
void Camera3InputStream::dump(int fd, const Vector<String16> &args) const {
(void) args;
String8 lines;
@@ -234,6 +230,12 @@
return OK;
}
+status_t Camera3InputStream::getEndpointUsage(uint32_t *usage) {
+ // Per HAL3 spec, input streams have 0 for their initial usage field.
+ *usage = 0;
+ return OK;
+}
+
}; // namespace camera3
}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 8adda88..681d684 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -44,13 +44,6 @@
virtual void dump(int fd, const Vector<String16> &args) const;
- /**
- * Get the producer interface for this stream, to hand off to a producer.
- * The producer must be connected to the provided interface before
- * finishConfigure is called on this stream.
- */
- sp<IGraphicBufferProducer> getProducerInterface() const;
-
private:
typedef BufferItemConsumer::BufferItem BufferItem;
@@ -79,6 +72,8 @@
virtual status_t configureQueueLocked();
+ virtual status_t getEndpointUsage(uint32_t *usage);
+
}; // class Camera3InputStream
}; // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 0ec2b05..35cb5ba 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -364,6 +364,17 @@
return OK;
}
+status_t Camera3OutputStream::getEndpointUsage(uint32_t *usage) {
+
+ status_t res;
+ int32_t u = 0;
+ res = mConsumer->query(mConsumer.get(),
+ NATIVE_WINDOW_CONSUMER_USAGE_BITS, &u);
+ *usage = u;
+
+ return res;
+}
+
}; // namespace camera3
}; // namespace android
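getEndpointUsage() above asks the producer-side window for the usage bits its consumer registered; Camera3Stream::startConfiguration() later in this patch copies the result into camera3_stream::usage before the stream is handed to the HAL. The query in isolation, as a minimal sketch assuming <system/window.h>:

    #include <system/window.h>
    #include <cstdint>

    static int queryConsumerUsage(ANativeWindow* window, uint32_t* usage) {
        int value = 0;
        int err = window->query(window, NATIVE_WINDOW_CONSUMER_USAGE_BITS, &value);
        if (err != 0) {
            return err;            // consumer may be gone or not support the query
        }
        *usage = static_cast<uint32_t>(value);
        return 0;
    }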
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 774fbdd..6cbb9f4 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -92,6 +92,9 @@
virtual status_t configureQueueLocked();
virtual status_t disconnectLocked();
+
+ virtual status_t getEndpointUsage(uint32_t *usage);
+
}; // class Camera3OutputStream
} // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index ab563df..a6872aa 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -77,7 +77,9 @@
}
camera3_stream* Camera3Stream::startConfiguration() {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
+ status_t res;
switch (mState) {
case STATE_ERROR:
@@ -107,8 +109,15 @@
return NULL;
}
- oldUsage = usage;
- oldMaxBuffers = max_buffers;
+ oldUsage = camera3_stream::usage;
+ oldMaxBuffers = camera3_stream::max_buffers;
+
+ res = getEndpointUsage(&(camera3_stream::usage));
+ if (res != OK) {
+ ALOGE("%s: Cannot query consumer endpoint usage!",
+ __FUNCTION__);
+ return NULL;
+ }
if (mState == STATE_CONSTRUCTED) {
mState = STATE_IN_CONFIG;
@@ -125,6 +134,7 @@
}
status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
switch (mState) {
case STATE_ERROR:
@@ -147,8 +157,8 @@
// Check if the stream configuration is unchanged, and skip reallocation if
// so. As documented in hardware/camera3.h:configure_streams().
if (mState == STATE_IN_RECONFIG &&
- oldUsage == usage &&
- oldMaxBuffers == max_buffers) {
+ oldUsage == camera3_stream::usage &&
+ oldMaxBuffers == camera3_stream::max_buffers) {
mState = STATE_CONFIGURED;
return OK;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 69d81e4..b64fd86 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -263,6 +263,10 @@
// Get the total number of buffers in the queue
virtual size_t getBufferCountLocked() = 0;
+ // Get the usage flags for the other endpoint, or return
+ // INVALID_OPERATION if they cannot be obtained.
+ virtual status_t getEndpointUsage(uint32_t *usage) = 0;
+
private:
uint32_t oldUsage;
uint32_t oldMaxBuffers;
diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
index 8790c8c..04f5dc5 100644
--- a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
@@ -113,11 +113,11 @@
Camera3OutputStream(id, CAMERA3_STREAM_BIDIRECTIONAL,
width, height,
HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
- mDepth(depth),
- mProducer(new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL,
- depth)) {
+ mDepth(depth) {
- mConsumer = new Surface(mProducer->getProducerInterface());
+ sp<BufferQueue> bq = new BufferQueue();
+ mProducer = new RingBufferConsumer(bq, GRALLOC_USAGE_HW_CAMERA_ZSL, depth);
+ mConsumer = new Surface(bq);
}
Camera3ZslStream::~Camera3ZslStream() {
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index 8141f4e..ebc7ea7 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -34,13 +34,14 @@
namespace android {
-RingBufferConsumer::RingBufferConsumer(uint32_t consumerUsage,
+RingBufferConsumer::RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer,
+ uint32_t consumerUsage,
int bufferCount) :
- ConsumerBase(new BufferQueue()),
+ ConsumerBase(consumer),
mBufferCount(bufferCount)
{
- mBufferQueue->setConsumerUsageBits(consumerUsage);
- mBufferQueue->setMaxAcquiredBufferCount(bufferCount);
+ mConsumer->setConsumerUsageBits(consumerUsage);
+ mConsumer->setMaxAcquiredBufferCount(bufferCount);
assert(bufferCount > 0);
}
@@ -51,7 +52,7 @@
void RingBufferConsumer::setName(const String8& name) {
Mutex::Autolock _l(mMutex);
mName = name;
- mBufferQueue->setConsumerName(name);
+ mConsumer->setConsumerName(name);
}
sp<PinnedBufferItem> RingBufferConsumer::pinSelectedBuffer(
@@ -342,17 +343,17 @@
status_t RingBufferConsumer::setDefaultBufferSize(uint32_t w, uint32_t h) {
Mutex::Autolock _l(mMutex);
- return mBufferQueue->setDefaultBufferSize(w, h);
+ return mConsumer->setDefaultBufferSize(w, h);
}
status_t RingBufferConsumer::setDefaultBufferFormat(uint32_t defaultFormat) {
Mutex::Autolock _l(mMutex);
- return mBufferQueue->setDefaultBufferFormat(defaultFormat);
+ return mConsumer->setDefaultBufferFormat(defaultFormat);
}
status_t RingBufferConsumer::setConsumerUsage(uint32_t usage) {
Mutex::Autolock _l(mMutex);
- return mBufferQueue->setConsumerUsageBits(usage);
+ return mConsumer->setConsumerUsageBits(usage);
}
} // namespace android
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index 454fbae..b4ad824 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -63,7 +63,7 @@
// the consumer usage flags passed to the graphics allocator. The
// bufferCount parameter specifies how many buffers can be pinned for user
// access at the same time.
- RingBufferConsumer(uint32_t consumerUsage,
+ RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer, uint32_t consumerUsage,
int bufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS);
virtual ~RingBufferConsumer();
@@ -72,8 +72,6 @@
// log messages.
void setName(const String8& name);
- sp<IGraphicBufferProducer> getProducerInterface() const { return getBufferQueue(); }
-
// setDefaultBufferSize is used to set the size of buffers returned by
// requestBuffers when a width and height of zero is requested.
status_t setDefaultBufferSize(uint32_t w, uint32_t h);