Merge "Move libnbaio out of AudioFlinger" into jb-mr1-dev
diff --git a/include/media/IHDCP.h b/include/media/IHDCP.h
new file mode 100644
index 0000000..a0613c7
--- /dev/null
+++ b/include/media/IHDCP.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <binder/IInterface.h>
+#include <media/hardware/HDCPAPI.h>
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct IHDCPObserver : public IInterface {
+ DECLARE_META_INTERFACE(HDCPObserver);
+
+ virtual void notify(
+ int msg, int ext1, int ext2, const Parcel *obj) = 0;
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(IHDCPObserver);
+};
+
+struct IHDCP : public IInterface {
+ DECLARE_META_INTERFACE(HDCP);
+
+ // Called to specify the observer that receives asynchronous notifications
+ // from the HDCP implementation to signal completion/failure of asynchronous
+ // operations (such as initialization) or out of band events.
+ virtual status_t setObserver(const sp<IHDCPObserver> &observer) = 0;
+
+ // Request to setup an HDCP session with the specified host listening
+ // on the specified port.
+ virtual status_t initAsync(const char *host, unsigned port) = 0;
+
+ // Request to shutdown the active HDCP session.
+ virtual status_t shutdownAsync() = 0;
+
+ // Encrypt data according to the HDCP spec. The data is to be
+ // encrypted in place; only size bytes of data should be read/written,
+ // even if size is not a multiple of 128 bits (16 bytes).
+ // This operation is to be synchronous, i.e. this call does not return
+ // until outData contains size bytes of encrypted data.
+ // streamCTR will be assigned by the caller (to 0 for the first PES stream,
+ // 1 for the second and so on).
+ // inputCTR will be maintained by the callee for each PES stream.
+ virtual status_t encrypt(
+ const void *inData, size_t size, uint32_t streamCTR,
+ uint64_t *outInputCTR, void *outData) = 0;
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(IHDCP);
+};
+
+struct BnHDCPObserver : public BnInterface<IHDCPObserver> {
+ virtual status_t onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply,
+ uint32_t flags = 0);
+};
+
+struct BnHDCP : public BnInterface<IHDCP> {
+ virtual status_t onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply,
+ uint32_t flags = 0);
+};
+
+} // namespace android
+
+
diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h
index 76c45a0..dbcdf92 100644
--- a/include/media/IMediaPlayerService.h
+++ b/include/media/IMediaPlayerService.h
@@ -50,6 +50,8 @@
virtual sp<IOMX> getOMX() = 0;
virtual sp<ICrypto> makeCrypto() = 0;
+ virtual status_t enableRemoteDisplay(bool enable) = 0;
+
// codecs and audio devices usage tracking for the battery app
enum BatteryDataBits {
// tracking audio codec
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index 2371619..500dde6 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -25,6 +25,8 @@
#include <media/stagefright/SkipCutBuffer.h>
#include <OMX_Audio.h>
+#define TRACK_BUFFER_TIMING 0
+
namespace android {
struct ABuffer;
@@ -127,6 +129,15 @@
sp<GraphicBuffer> mGraphicBuffer;
};
+#if TRACK_BUFFER_TIMING
+ struct BufferStats {
+ int64_t mEmptyBufferTimeUs;
+ int64_t mFillBufferDoneTimeUs;
+ };
+
+ KeyedVector<int64_t, BufferStats> mBufferStats;
+#endif
+
sp<AMessage> mNotify;
sp<UninitializedState> mUninitializedState;
diff --git a/include/media/stagefright/AudioSource.h b/include/media/stagefright/AudioSource.h
index f5466e8..de7edf3 100644
--- a/include/media/stagefright/AudioSource.h
+++ b/include/media/stagefright/AudioSource.h
@@ -31,11 +31,12 @@
class AudioRecord;
struct AudioSource : public MediaSource, public MediaBufferObserver {
- // Note that the "channels" parameter is _not_ the number of channels,
- // but a bitmask of audio_channels_t constants.
+ // Note that the "channels" parameter _is_ the number of channels,
+ // _not_ a bitmask of audio_channels_t constants.
AudioSource(
- audio_source_t inputSource, uint32_t sampleRate,
- uint32_t channels = AUDIO_CHANNEL_IN_MONO);
+ audio_source_t inputSource,
+ uint32_t sampleRate,
+ uint32_t channels = 1);
status_t initCheck() const;
@@ -49,9 +50,15 @@
virtual status_t read(
MediaBuffer **buffer, const ReadOptions *options = NULL);
- status_t dataCallbackTimestamp(const AudioRecord::Buffer& buffer, int64_t timeUs);
+ status_t dataCallback(const AudioRecord::Buffer& buffer);
virtual void signalBufferReturned(MediaBuffer *buffer);
+ // If useLooperTime == true, buffers will carry absolute timestamps
+ // as returned by ALooper::GetNowUs(), otherwise systemTime() is used
+ // and buffers contain timestamps relative to start time.
+ // The default is to _not_ use looper time.
+ void setUseLooperTime(bool useLooperTime);
+
protected:
virtual ~AudioSource();
@@ -87,6 +94,8 @@
List<MediaBuffer * > mBuffersReceived;
+ bool mUseLooperTime;
+
void trackMaxAmplitude(int16_t *data, int nSamples);
// This is used to raise the volume from mute to the
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 32ee89e..d4be9fa 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -1011,6 +1011,8 @@
LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "Effect_setConfig")
if(LvmStatus != LVM_SUCCESS) return -EINVAL;
+ ActiveParams.SampleRate = SampleRate;
+
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "Effect_setConfig")
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index c8e1dc7..bcce063 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -18,6 +18,7 @@
IAudioTrack.cpp \
IAudioRecord.cpp \
ICrypto.cpp \
+ IHDCP.cpp \
AudioRecord.cpp \
AudioSystem.cpp \
mediaplayer.cpp \
diff --git a/media/libmedia/IHDCP.cpp b/media/libmedia/IHDCP.cpp
new file mode 100644
index 0000000..493f5a4
--- /dev/null
+++ b/media/libmedia/IHDCP.cpp
@@ -0,0 +1,206 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IHDCP"
+#include <utils/Log.h>
+
+#include <binder/Parcel.h>
+#include <media/IHDCP.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+enum {
+ OBSERVER_NOTIFY = IBinder::FIRST_CALL_TRANSACTION,
+ HDCP_SET_OBSERVER,
+ HDCP_INIT_ASYNC,
+ HDCP_SHUTDOWN_ASYNC,
+ HDCP_ENCRYPT,
+};
+
+struct BpHDCPObserver : public BpInterface<IHDCPObserver> {
+ BpHDCPObserver(const sp<IBinder> &impl)
+ : BpInterface<IHDCPObserver>(impl) {
+ }
+
+ virtual void notify(
+ int msg, int ext1, int ext2, const Parcel *obj) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IHDCPObserver::getInterfaceDescriptor());
+ data.writeInt32(msg);
+ data.writeInt32(ext1);
+ data.writeInt32(ext2);
+ if (obj && obj->dataSize() > 0) {
+ data.appendFrom(const_cast<Parcel *>(obj), 0, obj->dataSize());
+ }
+ remote()->transact(OBSERVER_NOTIFY, data, &reply, IBinder::FLAG_ONEWAY);
+ }
+};
+
+IMPLEMENT_META_INTERFACE(HDCPObserver, "android.hardware.IHDCPObserver");
+
+struct BpHDCP : public BpInterface<IHDCP> {
+ BpHDCP(const sp<IBinder> &impl)
+ : BpInterface<IHDCP>(impl) {
+ }
+
+ virtual status_t setObserver(const sp<IHDCPObserver> &observer) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+ data.writeStrongBinder(observer->asBinder());
+ remote()->transact(HDCP_SET_OBSERVER, data, &reply);
+ return reply.readInt32();
+ }
+
+ virtual status_t initAsync(const char *host, unsigned port) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+ data.writeCString(host);
+ data.writeInt32(port);
+ remote()->transact(HDCP_INIT_ASYNC, data, &reply);
+ return reply.readInt32();
+ }
+
+ virtual status_t shutdownAsync() {
+ Parcel data, reply;
+ data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+ remote()->transact(HDCP_SHUTDOWN_ASYNC, data, &reply);
+ return reply.readInt32();
+ }
+
+ virtual status_t encrypt(
+ const void *inData, size_t size, uint32_t streamCTR,
+ uint64_t *outInputCTR, void *outData) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+ data.writeInt32(size);
+ data.write(inData, size);
+ data.writeInt32(streamCTR);
+ remote()->transact(HDCP_ENCRYPT, data, &reply);
+
+ status_t err = reply.readInt32();
+
+ if (err != OK) {
+ *outInputCTR = 0;
+
+ return err;
+ }
+
+ *outInputCTR = reply.readInt64();
+ reply.read(outData, size);
+
+ return err;
+ }
+};
+
+IMPLEMENT_META_INTERFACE(HDCP, "android.hardware.IHDCP");
+
+status_t BnHDCPObserver::onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+ switch (code) {
+ case OBSERVER_NOTIFY:
+ {
+ CHECK_INTERFACE(IHDCPObserver, data, reply);
+
+ int msg = data.readInt32();
+ int ext1 = data.readInt32();
+ int ext2 = data.readInt32();
+
+ Parcel obj;
+ if (data.dataAvail() > 0) {
+ obj.appendFrom(
+ const_cast<Parcel *>(&data),
+ data.dataPosition(),
+ data.dataAvail());
+ }
+
+ notify(msg, ext1, ext2, &obj);
+
+ return OK;
+ }
+
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+status_t BnHDCP::onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+ switch (code) {
+ case HDCP_SET_OBSERVER:
+ {
+ CHECK_INTERFACE(IHDCP, data, reply);
+
+ sp<IHDCPObserver> observer =
+ interface_cast<IHDCPObserver>(data.readStrongBinder());
+
+ reply->writeInt32(setObserver(observer));
+ return OK;
+ }
+
+ case HDCP_INIT_ASYNC:
+ {
+ CHECK_INTERFACE(IHDCP, data, reply);
+
+ const char *host = data.readCString();
+ unsigned port = data.readInt32();
+
+ reply->writeInt32(initAsync(host, port));
+ return OK;
+ }
+
+ case HDCP_SHUTDOWN_ASYNC:
+ {
+ CHECK_INTERFACE(IHDCP, data, reply);
+
+ reply->writeInt32(shutdownAsync());
+ return OK;
+ }
+
+ case HDCP_ENCRYPT:
+ {
+ size_t size = data.readInt32();
+
+ void *inData = malloc(2 * size);
+ void *outData = (uint8_t *)inData + size;
+
+ data.read(inData, size);
+
+ uint32_t streamCTR = data.readInt32();
+ uint64_t inputCTR;
+ status_t err = encrypt(inData, size, streamCTR, &inputCTR, outData);
+
+ reply->writeInt32(err);
+
+ if (err == OK) {
+ reply->writeInt64(inputCTR);
+ reply->write(outData, size);
+ }
+
+ free(inData);
+ inData = outData = NULL;
+
+ return OK;
+ }
+
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+} // namespace android
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 9120617..41969b1 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -38,6 +38,7 @@
CREATE_METADATA_RETRIEVER,
GET_OMX,
MAKE_CRYPTO,
+ ENABLE_REMOTE_DISPLAY,
ADD_BATTERY_DATA,
PULL_BATTERY_DATA
};
@@ -120,6 +121,14 @@
return interface_cast<ICrypto>(reply.readStrongBinder());
}
+ virtual status_t enableRemoteDisplay(bool enable) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
+ data.writeInt32(enable);
+ remote()->transact(ENABLE_REMOTE_DISPLAY, data, &reply);
+ return reply.readInt32();
+ }
+
virtual void addBatteryData(uint32_t params) {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
@@ -206,6 +215,12 @@
reply->writeStrongBinder(crypto->asBinder());
return NO_ERROR;
} break;
+ case ENABLE_REMOTE_DISPLAY: {
+ CHECK_INTERFACE(IMediaPlayerService, data, reply);
+ bool enable = data.readInt32();
+ reply->writeInt32(enableRemoteDisplay(enable));
+ return NO_ERROR;
+ } break;
case ADD_BATTERY_DATA: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
uint32_t params = data.readInt32();
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 1373d3c..c7227b0 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -9,45 +9,47 @@
LOCAL_SRC_FILES:= \
ActivityManager.cpp \
Crypto.cpp \
- MediaRecorderClient.cpp \
MediaPlayerFactory.cpp \
MediaPlayerService.cpp \
+ MediaRecorderClient.cpp \
MetadataRetrieverClient.cpp \
- TestPlayerStub.cpp \
- MidiMetadataRetriever.cpp \
MidiFile.cpp \
+ MidiMetadataRetriever.cpp \
+ RemoteDisplay.cpp \
StagefrightPlayer.cpp \
- StagefrightRecorder.cpp
+ StagefrightRecorder.cpp \
+ TestPlayerStub.cpp \
-LOCAL_SHARED_LIBRARIES := \
- libcutils \
- libutils \
- libbinder \
- libvorbisidec \
- libsonivox \
- libmedia \
- libmedia_native \
- libcamera_client \
- libstagefright \
- libstagefright_omx \
- libstagefright_foundation \
- libgui \
- libdl
+LOCAL_SHARED_LIBRARIES := \
+ libbinder \
+ libcamera_client \
+ libcutils \
+ libdl \
+ libgui \
+ libmedia \
+ libmedia_native \
+ libsonivox \
+ libstagefright \
+ libstagefright_foundation \
+ libstagefright_omx \
+ libstagefright_wfd \
+ libutils \
+ libvorbisidec \
-LOCAL_STATIC_LIBRARIES := \
- libstagefright_nuplayer \
- libstagefright_rtsp \
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_nuplayer \
+ libstagefright_rtsp \
-LOCAL_C_INCLUDES := \
- $(call include-path-for, graphics corecg) \
- $(TOP)/frameworks/av/media/libstagefright/include \
- $(TOP)/frameworks/av/media/libstagefright/rtsp \
- $(TOP)/frameworks/native/include/media/openmax \
- $(TOP)/external/tremolo/Tremolo \
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, graphics corecg) \
+ $(TOP)/frameworks/av/media/libstagefright/include \
+ $(TOP)/frameworks/av/media/libstagefright/rtsp \
+ $(TOP)/frameworks/av/media/libstagefright/wifi-display \
+ $(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/external/tremolo/Tremolo \
LOCAL_MODULE:= libmediaplayerservice
include $(BUILD_SHARED_LIBRARY)
include $(call all-makefiles-under,$(LOCAL_PATH))
-
diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp
new file mode 100644
index 0000000..6f8a465
--- /dev/null
+++ b/media/libmediaplayerservice/HDCP.cpp
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HDCP"
+#include <utils/Log.h>
+
+#include "HDCP.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <dlfcn.h>
+
+namespace android {
+
+HDCP::HDCP()
+ : mLibHandle(NULL),
+ mHDCPModule(NULL) {
+ mLibHandle = dlopen("libstagefright_hdcp.so", RTLD_NOW);
+
+ if (mLibHandle == NULL) {
+ ALOGE("Unable to locate libstagefright_hdcp.so");
+ return;
+ }
+
+ typedef HDCPModule *(*CreateHDCPModuleFunc)();
+ CreateHDCPModuleFunc createHDCPModule =
+ (CreateHDCPModuleFunc)dlsym(mLibHandle, "createHDCPModule");
+
+ if (createHDCPModule == NULL) {
+ ALOGE("Unable to find symbol 'createHDCPModule'.");
+ } else if ((mHDCPModule = createHDCPModule()) == NULL) {
+ ALOGE("createHDCPModule failed.");
+ }
+}
+
+HDCP::~HDCP() {
+ if (mHDCPModule != NULL) {
+ delete mHDCPModule;
+ mHDCPModule = NULL;
+ }
+
+ if (mLibHandle != NULL) {
+ dlclose(mLibHandle);
+ mLibHandle = NULL;
+ }
+}
+
+status_t HDCP::setObserver(const sp<IHDCPObserver> &observer) {
+ if (mHDCPModule == NULL) {
+ return NO_INIT;
+ }
+
+ mObserver = observer;
+
+ return OK;
+}
+
+status_t HDCP::initAsync(const char *host, unsigned port) {
+ if (mHDCPModule == NULL) {
+ return NO_INIT;
+ }
+
+ return mHDCPModule->initAsync(host, port);
+}
+
+status_t HDCP::shutdownAsync() {
+ if (mHDCPModule == NULL) {
+ return NO_INIT;
+ }
+
+ return mHDCPModule->shutdownAsync();
+}
+
+status_t HDCP::encrypt(
+ const void *inData, size_t size, uint32_t streamCTR,
+ uint64_t *outInputCTR, void *outData) {
+ if (mHDCPModule == NULL) {
+ *outInputCTR = 0;
+
+ return NO_INIT;
+ }
+
+ return mHDCPModule->encrypt(inData, size, streamCTR, outInputCTR, outData);
+}
+
+} // namespace android
+
diff --git a/media/libmediaplayerservice/HDCP.h b/media/libmediaplayerservice/HDCP.h
new file mode 100644
index 0000000..2e27689
--- /dev/null
+++ b/media/libmediaplayerservice/HDCP.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HDCP_H_
+
+#define HDCP_H_
+
+#include <media/IHDCP.h>
+
+namespace android {
+
+struct HDCP : public BnHDCP {
+ HDCP();
+ virtual ~HDCP();
+
+ virtual status_t setObserver(const sp<IHDCPObserver> &observer);
+ virtual status_t initAsync(const char *host, unsigned port);
+ virtual status_t shutdownAsync();
+
+ virtual status_t encrypt(
+ const void *inData, size_t size, uint32_t streamCTR,
+ uint64_t *outInputCTR, void *outData);
+
+private:
+ void *mLibHandle;
+ HDCPModule *mHDCPModule;
+ sp<IHDCPObserver> mObserver;
+
+ DISALLOW_EVIL_CONSTRUCTORS(HDCP);
+};
+
+} // namespace android
+
+#endif // HDCP_H_
+
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 6346363..5fe446f 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -70,6 +70,7 @@
#include <OMX.h>
#include "Crypto.h"
+#include "RemoteDisplay.h"
namespace {
using android::media::Metadata;
@@ -278,6 +279,28 @@
return new Crypto;
}
+status_t MediaPlayerService::enableRemoteDisplay(bool enable) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (enable && mRemoteDisplay == NULL) {
+ mRemoteDisplay = new RemoteDisplay;
+
+ status_t err = mRemoteDisplay->start();
+
+ if (err != OK) {
+ mRemoteDisplay.clear();
+ return err;
+ }
+
+ return OK;
+ } else if (!enable && mRemoteDisplay != NULL) {
+ mRemoteDisplay->stop();
+ mRemoteDisplay.clear();
+ }
+
+ return OK;
+}
+
status_t MediaPlayerService::AudioCache::dump(int fd, const Vector<String16>& args) const
{
const size_t SIZE = 256;
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 6ede9a4..8fbc5d5 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -42,6 +42,7 @@
class IMediaMetadataRetriever;
class IOMX;
class MediaRecorderClient;
+struct RemoteDisplay;
#define CALLBACK_ANTAGONIZER 0
#if CALLBACK_ANTAGONIZER
@@ -247,6 +248,7 @@
virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat);
virtual sp<IOMX> getOMX();
virtual sp<ICrypto> makeCrypto();
+ virtual status_t enableRemoteDisplay(bool enable);
virtual status_t dump(int fd, const Vector<String16>& args);
@@ -423,6 +425,7 @@
int32_t mNextConnId;
sp<IOMX> mOMX;
sp<ICrypto> mCrypto;
+ sp<RemoteDisplay> mRemoteDisplay;
};
// ----------------------------------------------------------------------------
diff --git a/media/libmediaplayerservice/RemoteDisplay.cpp b/media/libmediaplayerservice/RemoteDisplay.cpp
new file mode 100644
index 0000000..855824a
--- /dev/null
+++ b/media/libmediaplayerservice/RemoteDisplay.cpp
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "RemoteDisplay.h"
+
+#include "ANetworkSession.h"
+#include "source/WifiDisplaySource.h"
+
+namespace android {
+
+RemoteDisplay::RemoteDisplay()
+ : mInitCheck(NO_INIT),
+ mLooper(new ALooper),
+ mNetSession(new ANetworkSession),
+ mSource(new WifiDisplaySource(mNetSession)) {
+ mLooper->registerHandler(mSource);
+}
+
+RemoteDisplay::~RemoteDisplay() {
+}
+
+status_t RemoteDisplay::start() {
+ mNetSession->start();
+ mLooper->start();
+
+ // XXX replace with 8554 for bcom dongle (it doesn't respect the
+ // default port or the one advertised in the wfd IE).
+ mSource->start(WifiDisplaySource::kWifiDisplayDefaultPort);
+
+ return OK;
+}
+
+status_t RemoteDisplay::stop() {
+ mSource->stop();
+
+ mLooper->stop();
+ mNetSession->stop();
+
+ return OK;
+}
+
+} // namespace android
+
diff --git a/media/libmediaplayerservice/RemoteDisplay.h b/media/libmediaplayerservice/RemoteDisplay.h
new file mode 100644
index 0000000..6b37afb
--- /dev/null
+++ b/media/libmediaplayerservice/RemoteDisplay.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef REMOTE_DISPLAY_H_
+
+#define REMOTE_DISPLAY_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct ALooper;
+struct ANetworkSession;
+struct WifiDisplaySource;
+
+struct RemoteDisplay : public RefBase {
+ RemoteDisplay();
+
+ status_t start();
+ status_t stop();
+
+protected:
+ virtual ~RemoteDisplay();
+
+private:
+ status_t mInitCheck;
+
+ sp<ALooper> mNetLooper;
+ sp<ALooper> mLooper;
+ sp<ANetworkSession> mNetSession;
+ sp<WifiDisplaySource> mSource;
+
+ DISALLOW_EVIL_CONSTRUCTORS(RemoteDisplay);
+};
+
+} // namespace android
+
+#endif // REMOTE_DISPLAY_H_
+
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index c37d2ca..3dd5d60 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -861,6 +861,20 @@
return INVALID_OPERATION;
}
+ int32_t storeMeta;
+ if (encoder
+ && msg->findInt32("store-metadata-in-buffers", &storeMeta)
+ && storeMeta != 0) {
+ err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE);
+
+ if (err != OK) {
+ ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
+ mComponentName.c_str(), err);
+
+ return err;
+ }
+ }
+
if (!strncasecmp(mime, "video/", 6)) {
if (encoder) {
err = setupVideoEncoder(mime, msg);
@@ -2424,6 +2438,21 @@
CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
info->mStatus = BufferInfo::OWNED_BY_US;
+ const sp<AMessage> &bufferMeta = info->mData->meta();
+ void *mediaBuffer;
+ if (bufferMeta->findPointer("mediaBuffer", &mediaBuffer)
+ && mediaBuffer != NULL) {
+ // We're in "store-metadata-in-buffers" mode, the underlying
+ // OMX component had access to data that's implicitly refcounted
+ // by this "mediaBuffer" object. Now that the OMX component has
+ // told us that it's done with the input buffer, we can decrement
+ // the mediaBuffer's reference count.
+ ((MediaBuffer *)mediaBuffer)->release();
+ mediaBuffer = NULL;
+
+ bufferMeta->setPointer("mediaBuffer", NULL);
+ }
+
PortMode mode = getPortMode(kPortIndexInput);
switch (mode) {
@@ -2531,10 +2560,10 @@
}
if (buffer != info->mData) {
- if (0 && !(flags & OMX_BUFFERFLAG_CODECCONFIG)) {
- ALOGV("[%s] Needs to copy input data.",
- mCodec->mComponentName.c_str());
- }
+ ALOGV("[%s] Needs to copy input data for buffer %p. (%p != %p)",
+ mCodec->mComponentName.c_str(),
+ bufferID,
+ buffer.get(), info->mData.get());
CHECK_LE(buffer->size(), info->mData->capacity());
memcpy(info->mData->data(), buffer->data(), buffer->size());
@@ -2547,10 +2576,22 @@
ALOGV("[%s] calling emptyBuffer %p w/ EOS",
mCodec->mComponentName.c_str(), bufferID);
} else {
+#if TRACK_BUFFER_TIMING
+ ALOGI("[%s] calling emptyBuffer %p w/ time %lld us",
+ mCodec->mComponentName.c_str(), bufferID, timeUs);
+#else
ALOGV("[%s] calling emptyBuffer %p w/ time %lld us",
mCodec->mComponentName.c_str(), bufferID, timeUs);
+#endif
}
+#if TRACK_BUFFER_TIMING
+ ACodec::BufferStats stats;
+ stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
+ stats.mFillBufferDoneTimeUs = -1ll;
+ mCodec->mBufferStats.add(timeUs, stats);
+#endif
+
CHECK_EQ(mCodec->mOMX->emptyBuffer(
mCodec->mNode,
bufferID,
@@ -2647,6 +2688,22 @@
mCodec->mComponentName.c_str(), bufferID, timeUs, flags);
ssize_t index;
+
+#if TRACK_BUFFER_TIMING
+ index = mCodec->mBufferStats.indexOfKey(timeUs);
+ if (index >= 0) {
+ ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
+ stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();
+
+ ALOGI("frame PTS %lld: %lld",
+ timeUs,
+ stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);
+
+ mCodec->mBufferStats.removeItemsAt(index);
+ stats = NULL;
+ }
+#endif
+
BufferInfo *info =
mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
@@ -2891,7 +2948,7 @@
AString mime;
AString componentName;
- uint32_t quirks;
+ uint32_t quirks = 0;
if (msg->findString("componentName", &componentName)) {
ssize_t index = matchingCodecs.add();
OMXCodec::CodecNameAndQuirks *entry = &matchingCodecs.editItemAt(index);
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index ed142a4..3248dbc 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -24,6 +24,7 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
#include <cutils/properties.h>
#include <stdlib.h>
@@ -33,7 +34,7 @@
AudioSource *source = (AudioSource *) user;
switch (event) {
case AudioRecord::EVENT_MORE_DATA: {
- source->dataCallbackTimestamp(*((AudioRecord::Buffer *) info), systemTime() / 1000);
+ source->dataCallback(*((AudioRecord::Buffer *) info));
break;
}
case AudioRecord::EVENT_OVERRUN: {
@@ -53,7 +54,8 @@
mSampleRate(sampleRate),
mPrevSampleTimeUs(0),
mNumFramesReceived(0),
- mNumClientOwnedBuffers(0) {
+ mNumClientOwnedBuffers(0),
+ mUseLooperTime(false) {
ALOGV("sampleRate: %d, channelCount: %d", sampleRate, channelCount);
CHECK(channelCount == 1 || channelCount == 2);
@@ -100,6 +102,12 @@
return mInitCheck;
}
+void AudioSource::setUseLooperTime(bool useLooperTime) {
+ CHECK(!mStarted);
+
+ mUseLooperTime = useLooperTime;
+}
+
status_t AudioSource::start(MetaData *params) {
Mutex::Autolock autoLock(mLock);
if (mStarted) {
@@ -271,8 +279,10 @@
return;
}
-status_t AudioSource::dataCallbackTimestamp(
- const AudioRecord::Buffer& audioBuffer, int64_t timeUs) {
+status_t AudioSource::dataCallback(const AudioRecord::Buffer& audioBuffer) {
+ int64_t timeUs =
+ mUseLooperTime ? ALooper::GetNowUs() : (systemTime() / 1000ll);
+
ALOGV("dataCallbackTimestamp: %lld us", timeUs);
Mutex::Autolock autoLock(mLock);
if (!mStarted) {
@@ -290,12 +300,15 @@
if (mNumFramesReceived == 0 && mPrevSampleTimeUs == 0) {
mInitialReadTimeUs = timeUs;
// Initial delay
- if (mStartTimeUs > 0) {
+ if (mUseLooperTime) {
+ mStartTimeUs = timeUs;
+ } else if (mStartTimeUs > 0) {
mStartTimeUs = timeUs - mStartTimeUs;
} else {
// Assume latency is constant.
mStartTimeUs += mRecord->latency() * 1000;
}
+
mPrevSampleTimeUs = mStartTimeUs;
}
diff --git a/media/libstagefright/wifi-display/ANetworkSession.h b/media/libstagefright/wifi-display/ANetworkSession.h
index 0402317..d4cd14f 100644
--- a/media/libstagefright/wifi-display/ANetworkSession.h
+++ b/media/libstagefright/wifi-display/ANetworkSession.h
@@ -27,6 +27,8 @@
struct AMessage;
+// Helper class to manage a number of live sockets (datagram and stream-based)
+// on a single thread. Clients are notified about activity through AMessages.
struct ANetworkSession : public RefBase {
ANetworkSession();
diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk
index 114ff62..b035a51 100644
--- a/media/libstagefright/wifi-display/Android.mk
+++ b/media/libstagefright/wifi-display/Android.mk
@@ -3,9 +3,42 @@
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
- udptest.cpp \
- ANetworkSession.cpp \
- ParsedMessage.cpp \
+ ANetworkSession.cpp \
+ ParsedMessage.cpp \
+ source/Converter.cpp \
+ source/PlaybackSession.cpp \
+ source/RepeaterSource.cpp \
+ source/Serializer.cpp \
+ source/TSPacketizer.cpp \
+ source/WifiDisplaySource.cpp \
+
+LOCAL_C_INCLUDES:= \
+ $(TOP)/frameworks/av/media/libstagefright \
+ $(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/av/media/libstagefright/mpeg2ts \
+
+LOCAL_SHARED_LIBRARIES:= \
+ libbinder \
+ libcutils \
+ libgui \
+ libmedia \
+ libstagefright \
+ libstagefright_foundation \
+ libui \
+ libutils \
+
+LOCAL_MODULE:= libstagefright_wfd
+
+LOCAL_MODULE_TAGS:= optional
+
+include $(BUILD_SHARED_LIBRARY)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ wfd.cpp \
LOCAL_SHARED_LIBRARIES:= \
libbinder \
@@ -13,6 +46,29 @@
libmedia \
libstagefright \
libstagefright_foundation \
+ libstagefright_wfd \
+ libutils \
+
+LOCAL_MODULE:= wfd
+
+LOCAL_MODULE_TAGS := debug
+
+include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ udptest.cpp \
+
+LOCAL_SHARED_LIBRARIES:= \
+ libbinder \
+ libgui \
+ libmedia \
+ libstagefright \
+ libstagefright_foundation \
+ libstagefright_wfd \
libutils \
LOCAL_MODULE:= udptest
diff --git a/media/libstagefright/wifi-display/ParsedMessage.h b/media/libstagefright/wifi-display/ParsedMessage.h
index 00f578f..e9a1859 100644
--- a/media/libstagefright/wifi-display/ParsedMessage.h
+++ b/media/libstagefright/wifi-display/ParsedMessage.h
@@ -21,6 +21,8 @@
namespace android {
+// Encapsulates an "HTTP/RTSP style" response, i.e. a status line,
+// key/value pairs making up the headers and an optional body/content.
struct ParsedMessage : public RefBase {
static sp<ParsedMessage> Parse(
const char *data, size_t size, bool noMoreData, size_t *length);
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
new file mode 100644
index 0000000..ee05e45
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/Converter.cpp
@@ -0,0 +1,281 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Converter"
+#include <utils/Log.h>
+
+#include "Converter.h"
+
+#include <gui/SurfaceTextureClient.h>
+#include <media/ICrypto.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+
+namespace android {
+
+Converter::Converter(
+ const sp<AMessage> ¬ify,
+ const sp<ALooper> &codecLooper,
+ const sp<AMessage> &format)
+ : mInitCheck(NO_INIT),
+ mNotify(notify),
+ mCodecLooper(codecLooper),
+ mInputFormat(format),
+ mDoMoreWorkPending(false) {
+ mInitCheck = initEncoder();
+}
+
+Converter::~Converter() {
+ if (mEncoder != NULL) {
+ mEncoder->release();
+ mEncoder.clear();
+ }
+}
+
+status_t Converter::initCheck() const {
+ return mInitCheck;
+}
+
+sp<AMessage> Converter::getOutputFormat() const {
+ return mOutputFormat;
+}
+
+status_t Converter::initEncoder() {
+ AString inputMIME;
+ CHECK(mInputFormat->findString("mime", &inputMIME));
+
+ AString outputMIME;
+ bool isAudio = false;
+ if (!strcasecmp(inputMIME.c_str(), MEDIA_MIMETYPE_AUDIO_RAW)) {
+ outputMIME = MEDIA_MIMETYPE_AUDIO_AAC;
+ isAudio = true;
+ } else if (!strcasecmp(inputMIME.c_str(), MEDIA_MIMETYPE_VIDEO_RAW)) {
+ outputMIME = MEDIA_MIMETYPE_VIDEO_AVC;
+ } else {
+ TRESPASS();
+ }
+
+ mEncoder = MediaCodec::CreateByType(
+ mCodecLooper, outputMIME.c_str(), true /* encoder */);
+
+ if (mEncoder == NULL) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ mOutputFormat = mInputFormat->dup();
+ mOutputFormat->setString("mime", outputMIME.c_str());
+
+ if (isAudio) {
+ mOutputFormat->setInt32("bitrate", 64000); // 64 kBit/sec
+ } else {
+ mOutputFormat->setInt32("bitrate", 5000000); // 5Mbit/sec
+ mOutputFormat->setInt32("frame-rate", 60);
+ mOutputFormat->setInt32("i-frame-interval", 3); // Iframes every 3 secs
+ }
+
+ ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());
+
+ status_t err = mEncoder->configure(
+ mOutputFormat,
+ NULL /* nativeWindow */,
+ NULL /* crypto */,
+ MediaCodec::CONFIGURE_FLAG_ENCODE);
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = mEncoder->start();
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = mEncoder->getInputBuffers(&mEncoderInputBuffers);
+
+ if (err != OK) {
+ return err;
+ }
+
+ return mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
+}
+
+void Converter::feedAccessUnit(const sp<ABuffer> &accessUnit) {
+ sp<AMessage> msg = new AMessage(kWhatFeedAccessUnit, id());
+ msg->setBuffer("accessUnit", accessUnit);
+ msg->post();
+}
+
+void Converter::signalEOS() {
+ (new AMessage(kWhatInputEOS, id()))->post();
+}
+
+void Converter::notifyError(status_t err) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatError);
+ notify->setInt32("err", err);
+ notify->post();
+}
+
+void Converter::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatFeedAccessUnit:
+ {
+ sp<ABuffer> accessUnit;
+ CHECK(msg->findBuffer("accessUnit", &accessUnit));
+
+ mInputBufferQueue.push_back(accessUnit);
+
+ feedEncoderInputBuffers();
+
+ scheduleDoMoreWork();
+ break;
+ }
+
+ case kWhatInputEOS:
+ {
+ mInputBufferQueue.push_back(NULL);
+
+ feedEncoderInputBuffers();
+
+ scheduleDoMoreWork();
+ break;
+ }
+
+ case kWhatDoMoreWork:
+ {
+ mDoMoreWorkPending = false;
+ status_t err = doMoreWork();
+
+ if (err != OK) {
+ notifyError(err);
+ } else {
+ scheduleDoMoreWork();
+ }
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+void Converter::scheduleDoMoreWork() {
+ if (mDoMoreWorkPending) {
+ return;
+ }
+
+ mDoMoreWorkPending = true;
+ (new AMessage(kWhatDoMoreWork, id()))->post(1000ll);
+}
+
+status_t Converter::feedEncoderInputBuffers() {
+ while (!mInputBufferQueue.empty()
+ && !mAvailEncoderInputIndices.empty()) {
+ sp<ABuffer> buffer = *mInputBufferQueue.begin();
+ mInputBufferQueue.erase(mInputBufferQueue.begin());
+
+ size_t bufferIndex = *mAvailEncoderInputIndices.begin();
+ mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());
+
+ int64_t timeUs = 0ll;
+ uint32_t flags = 0;
+
+ if (buffer != NULL) {
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ memcpy(mEncoderInputBuffers.itemAt(bufferIndex)->data(),
+ buffer->data(),
+ buffer->size());
+
+ void *mediaBuffer;
+ if (buffer->meta()->findPointer("mediaBuffer", &mediaBuffer)
+ && mediaBuffer != NULL) {
+ mEncoderInputBuffers.itemAt(bufferIndex)->meta()
+ ->setPointer("mediaBuffer", mediaBuffer);
+
+ buffer->meta()->setPointer("mediaBuffer", NULL);
+ }
+ } else {
+ flags = MediaCodec::BUFFER_FLAG_EOS;
+ }
+
+ status_t err = mEncoder->queueInputBuffer(
+ bufferIndex, 0, (buffer == NULL) ? 0 : buffer->size(),
+ timeUs, flags);
+
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ return OK;
+}
+
+status_t Converter::doMoreWork() {
+ size_t bufferIndex;
+ status_t err = mEncoder->dequeueInputBuffer(&bufferIndex);
+
+ if (err == OK) {
+ mAvailEncoderInputIndices.push_back(bufferIndex);
+ feedEncoderInputBuffers();
+ }
+
+ size_t offset;
+ size_t size;
+ int64_t timeUs;
+ uint32_t flags;
+ err = mEncoder->dequeueOutputBuffer(
+ &bufferIndex, &offset, &size, &timeUs, &flags);
+
+ if (err == OK) {
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatEOS);
+ notify->post();
+ } else {
+ sp<ABuffer> buffer = new ABuffer(size);
+ buffer->meta()->setInt64("timeUs", timeUs);
+
+ memcpy(buffer->data(),
+ mEncoderOutputBuffers.itemAt(bufferIndex)->base() + offset,
+ size);
+
+ if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
+ mOutputFormat->setBuffer("csd-0", buffer);
+ } else {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatAccessUnit);
+ notify->setBuffer("accessUnit", buffer);
+ notify->post();
+ }
+ }
+
+ err = mEncoder->releaseOutputBuffer(bufferIndex);
+ } else if (err == -EAGAIN) {
+ err = OK;
+ }
+
+ return err;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h
new file mode 100644
index 0000000..6700a32
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/Converter.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CONVERTER_H_
+
+#define CONVERTER_H_
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct MediaCodec;
+
+// Utility class that receives media access units and converts them into
+// media access units of a different format.
+// Right now this'll convert raw video into H.264 and raw audio into AAC.
+struct Converter : public AHandler {
+ Converter(
+ const sp<AMessage> ¬ify,
+ const sp<ALooper> &codecLooper,
+ const sp<AMessage> &format);
+
+ status_t initCheck() const;
+
+ sp<AMessage> getOutputFormat() const;
+
+ void feedAccessUnit(const sp<ABuffer> &accessUnit);
+ void signalEOS();
+
+ enum {
+ kWhatAccessUnit,
+ kWhatEOS,
+ kWhatError,
+ };
+
+protected:
+ virtual ~Converter();
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+ enum {
+ kWhatFeedAccessUnit,
+ kWhatInputEOS,
+ kWhatDoMoreWork
+ };
+
+ status_t mInitCheck;
+ sp<AMessage> mNotify;
+ sp<ALooper> mCodecLooper;
+ sp<AMessage> mInputFormat;
+ sp<AMessage> mOutputFormat;
+
+ sp<MediaCodec> mEncoder;
+
+ Vector<sp<ABuffer> > mEncoderInputBuffers;
+ Vector<sp<ABuffer> > mEncoderOutputBuffers;
+
+ List<size_t> mAvailEncoderInputIndices;
+
+ List<sp<ABuffer> > mInputBufferQueue;
+
+ bool mDoMoreWorkPending;
+
+ status_t initEncoder();
+
+ status_t feedEncoderInputBuffers();
+
+ void scheduleDoMoreWork();
+ status_t doMoreWork();
+
+ void notifyError(status_t err);
+
+ DISALLOW_EVIL_CONSTRUCTORS(Converter);
+};
+
+} // namespace android
+
+#endif // CONVERTER_H_
+
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
new file mode 100644
index 0000000..c99a11e
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
@@ -0,0 +1,1000 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "PlaybackSession"
+#include <utils/Log.h>
+
+#include "PlaybackSession.h"
+
+#include "Converter.h"
+#include "RepeaterSource.h"
+#include "Serializer.h"
+#include "TSPacketizer.h"
+
+#include <binder/IServiceManager.h>
+#include <gui/ISurfaceComposer.h>
+#include <gui/SurfaceComposerClient.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/AudioSource.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MPEG2TSWriter.h>
+#include <media/stagefright/SurfaceMediaSource.h>
+#include <media/stagefright/Utils.h>
+#include <ui/DisplayInfo.h>
+
+#include <OMX_IVCommon.h>
+
+#define FAKE_VIDEO 0
+
+namespace android {
+
+static size_t kMaxRTPPacketSize = 1500;
+static size_t kMaxNumTSPacketsPerRTPPacket = (kMaxRTPPacketSize - 12) / 188;
+
+struct WifiDisplaySource::PlaybackSession::Track : public RefBase {
+ Track(const sp<Converter> &converter);
+ Track(const sp<AMessage> &format);
+
+ sp<AMessage> getFormat();
+
+ const sp<Converter> &converter() const;
+ ssize_t packetizerTrackIndex() const;
+
+ void setPacketizerTrackIndex(size_t index);
+
+protected:
+ virtual ~Track();
+
+private:
+ sp<Converter> mConverter;
+ sp<AMessage> mFormat;
+ ssize_t mPacketizerTrackIndex;
+
+ DISALLOW_EVIL_CONSTRUCTORS(Track);
+};
+
+WifiDisplaySource::PlaybackSession::Track::Track(const sp<Converter> &converter)
+ : mConverter(converter),
+ mPacketizerTrackIndex(-1) {
+}
+
+WifiDisplaySource::PlaybackSession::Track::Track(const sp<AMessage> &format)
+ : mFormat(format),
+ mPacketizerTrackIndex(-1) {
+}
+
+WifiDisplaySource::PlaybackSession::Track::~Track() {
+}
+
+sp<AMessage> WifiDisplaySource::PlaybackSession::Track::getFormat() {
+ if (mFormat != NULL) {
+ return mFormat;
+ }
+
+ return mConverter->getOutputFormat();
+}
+
+const sp<Converter> &WifiDisplaySource::PlaybackSession::Track::converter() const {
+ return mConverter;
+}
+
+ssize_t WifiDisplaySource::PlaybackSession::Track::packetizerTrackIndex() const {
+ return mPacketizerTrackIndex;
+}
+
+void WifiDisplaySource::PlaybackSession::Track::setPacketizerTrackIndex(size_t index) {
+ CHECK_LT(mPacketizerTrackIndex, 0);
+ mPacketizerTrackIndex = index;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+WifiDisplaySource::PlaybackSession::PlaybackSession(
+ const sp<ANetworkSession> &netSession,
+ const sp<AMessage> ¬ify)
+ : mNetSession(netSession),
+ mNotify(notify),
+ mLastLifesignUs(),
+ mTSQueue(new ABuffer(12 + kMaxNumTSPacketsPerRTPPacket * 188)),
+ mPrevTimeUs(-1ll),
+ mUseInterleavedTCP(false),
+ mRTPChannel(0),
+ mRTCPChannel(0),
+ mRTPPort(0),
+ mRTPSessionID(0),
+ mRTCPSessionID(0),
+ mRTPSeqNo(0),
+ mLastNTPTime(0),
+ mLastRTPTime(0),
+ mNumRTPSent(0),
+ mNumRTPOctetsSent(0),
+ mNumSRsSent(0),
+ mSendSRPending(false),
+ mFirstPacketTimeUs(-1ll),
+ mHistoryLength(0)
+#if LOG_TRANSPORT_STREAM
+ ,mLogFile(NULL)
+#endif
+{
+ mTSQueue->setRange(0, 12);
+
+#if LOG_TRANSPORT_STREAM
+ mLogFile = fopen("/system/etc/log.ts", "wb");
+#endif
+}
+
+status_t WifiDisplaySource::PlaybackSession::init(
+ const char *clientIP, int32_t clientRtp, int32_t clientRtcp,
+ bool useInterleavedTCP) {
+ status_t err = setupPacketizer();
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (useInterleavedTCP) {
+ mUseInterleavedTCP = true;
+ mRTPChannel = clientRtp;
+ mRTCPChannel = clientRtcp;
+ mRTPPort = 0;
+ mRTPSessionID = 0;
+ mRTCPSessionID = 0;
+
+ updateLiveness();
+ return OK;
+ }
+
+ mUseInterleavedTCP = false;
+ mRTPChannel = 0;
+ mRTCPChannel = 0;
+
+ int serverRtp;
+
+ sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id());
+ sp<AMessage> rtcpNotify = new AMessage(kWhatRTCPNotify, id());
+ for (serverRtp = 15550;; serverRtp += 2) {
+ int32_t rtpSession;
+ err = mNetSession->createUDPSession(
+ serverRtp, clientIP, clientRtp,
+ rtpNotify, &rtpSession);
+
+ if (err != OK) {
+ ALOGI("failed to create RTP socket on port %d", serverRtp);
+ continue;
+ }
+
+ if (clientRtcp < 0) {
+ // No RTCP.
+
+ mRTPPort = serverRtp;
+ mRTPSessionID = rtpSession;
+ mRTCPSessionID = 0;
+
+ ALOGI("rtpSessionId = %d", rtpSession);
+ break;
+ }
+
+ int32_t rtcpSession;
+ err = mNetSession->createUDPSession(
+ serverRtp + 1, clientIP, clientRtcp,
+ rtcpNotify, &rtcpSession);
+
+ if (err == OK) {
+ mRTPPort = serverRtp;
+ mRTPSessionID = rtpSession;
+ mRTCPSessionID = rtcpSession;
+
+ ALOGI("rtpSessionID = %d, rtcpSessionID = %d", rtpSession, rtcpSession);
+ break;
+ }
+
+ ALOGI("failed to create RTCP socket on port %d", serverRtp + 1);
+ mNetSession->destroySession(rtpSession);
+ }
+
+ if (mRTPPort == 0) {
+ return UNKNOWN_ERROR;
+ }
+
+ updateLiveness();
+
+ return OK;
+}
+
+WifiDisplaySource::PlaybackSession::~PlaybackSession() {
+#if LOG_TRANSPORT_STREAM
+ if (mLogFile != NULL) {
+ fclose(mLogFile);
+ mLogFile = NULL;
+ }
+#endif
+
+ mTracks.clear();
+
+ if (mCodecLooper != NULL) {
+ mCodecLooper->stop();
+ mCodecLooper.clear();
+ }
+
+ mPacketizer.clear();
+
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("SurfaceFlinger"));
+ sp<ISurfaceComposer> service = interface_cast<ISurfaceComposer>(binder);
+ CHECK(service != NULL);
+
+ if (mSerializer != NULL) {
+ mSerializer->stop();
+
+ looper()->unregisterHandler(mSerializer->id());
+ mSerializer.clear();
+ }
+
+ if (mSerializerLooper != NULL) {
+ mSerializerLooper->stop();
+ mSerializerLooper.clear();
+ }
+
+ service->connectDisplay(NULL);
+
+ if (mRTCPSessionID != 0) {
+ mNetSession->destroySession(mRTCPSessionID);
+ }
+
+ if (mRTPSessionID != 0) {
+ mNetSession->destroySession(mRTPSessionID);
+ }
+}
+
+int32_t WifiDisplaySource::PlaybackSession::getRTPPort() const {
+ return mRTPPort;
+}
+
+int64_t WifiDisplaySource::PlaybackSession::getLastLifesignUs() const {
+ return mLastLifesignUs;
+}
+
+void WifiDisplaySource::PlaybackSession::updateLiveness() {
+ mLastLifesignUs = ALooper::GetNowUs();
+}
+
+status_t WifiDisplaySource::PlaybackSession::play() {
+ updateLiveness();
+
+ if (mRTCPSessionID != 0) {
+ scheduleSendSR();
+ }
+
+ return mSerializer->start();
+}
+
+status_t WifiDisplaySource::PlaybackSession::pause() {
+ updateLiveness();
+
+ return OK;
+}
+
+void WifiDisplaySource::PlaybackSession::onMessageReceived(
+ const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatRTPNotify:
+ case kWhatRTCPNotify:
+ {
+ int32_t reason;
+ CHECK(msg->findInt32("reason", &reason));
+
+ switch (reason) {
+ case ANetworkSession::kWhatError:
+ {
+ int32_t sessionID;
+ CHECK(msg->findInt32("sessionID", &sessionID));
+
+ int32_t err;
+ CHECK(msg->findInt32("err", &err));
+
+ int32_t errorOccuredDuringSend;
+ CHECK(msg->findInt32("send", &errorOccuredDuringSend));
+
+ AString detail;
+ CHECK(msg->findString("detail", &detail));
+
+ if (msg->what() == kWhatRTPNotify
+ && !errorOccuredDuringSend) {
+ // This is ok, we don't expect to receive anything on
+ // the RTP socket.
+ break;
+ }
+
+ ALOGE("An error occurred during %s in session %d "
+ "(%d, '%s' (%s)).",
+ errorOccuredDuringSend ? "send" : "receive",
+ sessionID,
+ err,
+ detail.c_str(),
+ strerror(-err));
+
+ mNetSession->destroySession(sessionID);
+
+ if (sessionID == mRTPSessionID) {
+ mRTPSessionID = 0;
+ } else if (sessionID == mRTCPSessionID) {
+ mRTCPSessionID = 0;
+ }
+
+ // Inform WifiDisplaySource of our premature death (wish).
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatSessionDead);
+ notify->post();
+ break;
+ }
+
+ case ANetworkSession::kWhatDatagram:
+ {
+ int32_t sessionID;
+ CHECK(msg->findInt32("sessionID", &sessionID));
+
+ sp<ABuffer> data;
+ CHECK(msg->findBuffer("data", &data));
+
+ status_t err;
+ if (msg->what() == kWhatRTCPNotify) {
+ err = parseRTCP(data);
+ }
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+ break;
+ }
+
+ case kWhatSendSR:
+ {
+ mSendSRPending = false;
+
+ if (mRTCPSessionID == 0) {
+ break;
+ }
+
+ onSendSR();
+
+ scheduleSendSR();
+ break;
+ }
+
+ case kWhatSerializerNotify:
+ {
+ int32_t what;
+ CHECK(msg->findInt32("what", &what));
+
+ if (what == Serializer::kWhatEOS) {
+ ALOGI("input eos");
+
+ for (size_t i = 0; i < mTracks.size(); ++i) {
+#if FAKE_VIDEO
+ sp<AMessage> msg = new AMessage(kWhatConverterNotify, id());
+ msg->setInt32("what", Converter::kWhatEOS);
+ msg->setSize("trackIndex", i);
+ msg->post();
+#else
+ mTracks.valueAt(i)->converter()->signalEOS();
+#endif
+ }
+ } else {
+ CHECK_EQ(what, Serializer::kWhatAccessUnit);
+
+ size_t trackIndex;
+ CHECK(msg->findSize("trackIndex", &trackIndex));
+
+ sp<ABuffer> accessUnit;
+ CHECK(msg->findBuffer("accessUnit", &accessUnit));
+
+#if FAKE_VIDEO
+ int64_t timeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+ void *mbuf;
+ CHECK(accessUnit->meta()->findPointer("mediaBuffer", &mbuf));
+
+ ((MediaBuffer *)mbuf)->release();
+ mbuf = NULL;
+
+ sp<AMessage> msg = new AMessage(kWhatConverterNotify, id());
+ msg->setInt32("what", Converter::kWhatAccessUnit);
+ msg->setSize("trackIndex", trackIndex);
+ msg->setBuffer("accessUnit", accessUnit);
+ msg->post();
+#else
+ mTracks.valueFor(trackIndex)->converter()
+ ->feedAccessUnit(accessUnit);
+#endif
+ }
+ break;
+ }
+
+ case kWhatConverterNotify:
+ {
+ int32_t what;
+ CHECK(msg->findInt32("what", &what));
+
+ size_t trackIndex;
+ CHECK(msg->findSize("trackIndex", &trackIndex));
+
+ if (what == Converter::kWhatAccessUnit) {
+ const sp<Track> &track = mTracks.valueFor(trackIndex);
+
+ uint32_t flags = 0;
+
+ ssize_t packetizerTrackIndex = track->packetizerTrackIndex();
+ if (packetizerTrackIndex < 0) {
+ flags = TSPacketizer::EMIT_PAT_AND_PMT;
+
+ packetizerTrackIndex =
+ mPacketizer->addTrack(track->getFormat());
+
+ if (packetizerTrackIndex >= 0) {
+ track->setPacketizerTrackIndex(packetizerTrackIndex);
+ }
+ }
+
+ if (packetizerTrackIndex >= 0) {
+ sp<ABuffer> accessUnit;
+ CHECK(msg->findBuffer("accessUnit", &accessUnit));
+
+ int64_t timeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+ if (mPrevTimeUs < 0ll || mPrevTimeUs + 100000ll >= timeUs) {
+ flags |= TSPacketizer::EMIT_PCR;
+ mPrevTimeUs = timeUs;
+ }
+
+ sp<ABuffer> packets;
+ mPacketizer->packetize(
+ packetizerTrackIndex, accessUnit, &packets, flags);
+
+ for (size_t offset = 0;
+ offset < packets->size(); offset += 188) {
+ bool lastTSPacket = (offset + 188 >= packets->size());
+
+ appendTSData(
+ packets->data() + offset,
+ 188,
+ true /* timeDiscontinuity */,
+ lastTSPacket /* flush */);
+ }
+
+#if LOG_TRANSPORT_STREAM
+ if (mLogFile != NULL) {
+ fwrite(packets->data(), 1, packets->size(), mLogFile);
+ }
+#endif
+ }
+ } else if (what == Converter::kWhatEOS) {
+ CHECK_EQ(what, Converter::kWhatEOS);
+
+ ALOGI("output EOS on track %d", trackIndex);
+
+ ssize_t index = mTracks.indexOfKey(trackIndex);
+ CHECK_GE(index, 0);
+
+#if !FAKE_VIDEO
+ const sp<Converter> &converter =
+ mTracks.valueAt(index)->converter();
+ looper()->unregisterHandler(converter->id());
+#endif
+
+ mTracks.removeItemsAt(index);
+
+ if (mTracks.isEmpty()) {
+ ALOGI("Reached EOS");
+ }
+ } else {
+ CHECK_EQ(what, Converter::kWhatError);
+
+ status_t err;
+ CHECK(msg->findInt32("err", &err));
+
+ ALOGE("converter signaled error %d", err);
+ }
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+status_t WifiDisplaySource::PlaybackSession::setupPacketizer() {
+ sp<AMessage> msg = new AMessage(kWhatSerializerNotify, id());
+
+ mSerializerLooper = new ALooper;
+ mSerializerLooper->start();
+
+ mSerializer = new Serializer(
+#if FAKE_VIDEO
+ true /* throttled */
+#else
+ false /* throttled */
+#endif
+ , msg);
+ mSerializerLooper->registerHandler(mSerializer);
+
+ mPacketizer = new TSPacketizer;
+
+#if FAKE_VIDEO
+ DataSource::RegisterDefaultSniffers();
+
+ sp<DataSource> dataSource =
+ DataSource::CreateFromURI(
+ "/system/etc/inception_1500.mp4");
+
+ CHECK(dataSource != NULL);
+
+ sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
+ CHECK(extractor != NULL);
+
+ bool haveAudio = false;
+ bool haveVideo = false;
+ for (size_t i = 0; i < extractor->countTracks(); ++i) {
+ sp<MetaData> meta = extractor->getTrackMetaData(i);
+
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ bool useTrack = false;
+ if (!strncasecmp(mime, "audio/", 6) && !haveAudio) {
+ useTrack = true;
+ haveAudio = true;
+ } else if (!strncasecmp(mime, "video/", 6) && !haveVideo) {
+ useTrack = true;
+ haveVideo = true;
+ }
+
+ if (!useTrack) {
+ continue;
+ }
+
+ sp<MediaSource> source = extractor->getTrack(i);
+
+ ssize_t index = mSerializer->addSource(source);
+ CHECK_GE(index, 0);
+
+ sp<AMessage> format;
+ status_t err = convertMetaDataToMessage(source->getFormat(), &format);
+ CHECK_EQ(err, (status_t)OK);
+
+ mTracks.add(index, new Track(format));
+ }
+ CHECK(haveAudio || haveVideo);
+#else
+ mCodecLooper = new ALooper;
+ mCodecLooper->start();
+
+ DisplayInfo info;
+ SurfaceComposerClient::getDisplayInfo(0, &info);
+
+ // sp<SurfaceMediaSource> source = new SurfaceMediaSource(info.w, info.h);
+ sp<SurfaceMediaSource> source = new SurfaceMediaSource(720, 1280);
+
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("SurfaceFlinger"));
+ sp<ISurfaceComposer> service = interface_cast<ISurfaceComposer>(binder);
+ CHECK(service != NULL);
+
+ service->connectDisplay(source->getBufferQueue());
+
+#if 0
+ {
+ ALOGI("reading buffer");
+
+ CHECK_EQ((status_t)OK, source->start());
+ MediaBuffer *mbuf;
+ CHECK_EQ((status_t)OK, source->read(&mbuf));
+ mbuf->release();
+ mbuf = NULL;
+
+ ALOGI("got buffer");
+ }
+#endif
+
+#if 0
+ ssize_t index = mSerializer->addSource(source);
+#else
+ ssize_t index = mSerializer->addSource(
+ new RepeaterSource(source, 55.0 /* rateHz */));
+#endif
+
+ CHECK_GE(index, 0);
+
+ sp<AMessage> format;
+ status_t err = convertMetaDataToMessage(source->getFormat(), &format);
+ CHECK_EQ(err, (status_t)OK);
+
+ format->setInt32("store-metadata-in-buffers", true);
+
+ format->setInt32(
+ "color-format", OMX_COLOR_FormatAndroidOpaque);
+
+ sp<AMessage> notify = new AMessage(kWhatConverterNotify, id());
+ notify->setSize("trackIndex", index);
+
+ sp<Converter> converter =
+ new Converter(notify, mCodecLooper, format);
+
+ looper()->registerHandler(converter);
+
+ mTracks.add(index, new Track(converter));
+#endif
+
+#if 0
+ sp<AudioSource> audioSource = new AudioSource(
+ AUDIO_SOURCE_MIC,
+ 48000 /* sampleRate */,
+ 2 /* channelCount */); // XXX AUDIO_CHANNEL_IN_STEREO?
+
+ CHECK_EQ((status_t)OK, audioSource->initCheck());
+
+ audioSource->setUseLooperTime(true);
+
+ index = mSerializer->addSource(audioSource);
+ CHECK_GE(index, 0);
+
+ sp<AMessage> audioFormat;
+ err = convertMetaDataToMessage(audioSource->getFormat(), &audioFormat);
+ CHECK_EQ(err, (status_t)OK);
+
+ sp<AMessage> audioNotify = new AMessage(kWhatConverterNotify, id());
+ audioNotify->setSize("trackIndex", index);
+
+ converter = new Converter(audioNotify, mCodecLooper, audioFormat);
+ looper()->registerHandler(converter);
+
+ mTracks.add(index, new Track(converter));
+#endif
+
+ return OK;
+}
+
+void WifiDisplaySource::PlaybackSession::scheduleSendSR() {
+ if (mSendSRPending) {
+ return;
+ }
+
+ mSendSRPending = true;
+ (new AMessage(kWhatSendSR, id()))->post(kSendSRIntervalUs);
+}
+
+void WifiDisplaySource::PlaybackSession::addSR(const sp<ABuffer> &buffer) {
+ uint8_t *data = buffer->data() + buffer->size();
+
+ // TODO: Use macros/utility functions to clean up all the bitshifts below.
+
+ data[0] = 0x80 | 0;
+ data[1] = 200; // SR
+ data[2] = 0;
+ data[3] = 6;
+ data[4] = kSourceID >> 24;
+ data[5] = (kSourceID >> 16) & 0xff;
+ data[6] = (kSourceID >> 8) & 0xff;
+ data[7] = kSourceID & 0xff;
+
+ data[8] = mLastNTPTime >> (64 - 8);
+ data[9] = (mLastNTPTime >> (64 - 16)) & 0xff;
+ data[10] = (mLastNTPTime >> (64 - 24)) & 0xff;
+ data[11] = (mLastNTPTime >> 32) & 0xff;
+ data[12] = (mLastNTPTime >> 24) & 0xff;
+ data[13] = (mLastNTPTime >> 16) & 0xff;
+ data[14] = (mLastNTPTime >> 8) & 0xff;
+ data[15] = mLastNTPTime & 0xff;
+
+ data[16] = (mLastRTPTime >> 24) & 0xff;
+ data[17] = (mLastRTPTime >> 16) & 0xff;
+ data[18] = (mLastRTPTime >> 8) & 0xff;
+ data[19] = mLastRTPTime & 0xff;
+
+ data[20] = mNumRTPSent >> 24;
+ data[21] = (mNumRTPSent >> 16) & 0xff;
+ data[22] = (mNumRTPSent >> 8) & 0xff;
+ data[23] = mNumRTPSent & 0xff;
+
+ data[24] = mNumRTPOctetsSent >> 24;
+ data[25] = (mNumRTPOctetsSent >> 16) & 0xff;
+ data[26] = (mNumRTPOctetsSent >> 8) & 0xff;
+ data[27] = mNumRTPOctetsSent & 0xff;
+
+ buffer->setRange(buffer->offset(), buffer->size() + 28);
+}
+
+void WifiDisplaySource::PlaybackSession::addSDES(const sp<ABuffer> &buffer) {
+ uint8_t *data = buffer->data() + buffer->size();
+ data[0] = 0x80 | 1;
+ data[1] = 202; // SDES
+ data[4] = kSourceID >> 24;
+ data[5] = (kSourceID >> 16) & 0xff;
+ data[6] = (kSourceID >> 8) & 0xff;
+ data[7] = kSourceID & 0xff;
+
+ size_t offset = 8;
+
+ data[offset++] = 1; // CNAME
+
+ static const char *kCNAME = "someone@somewhere";
+ data[offset++] = strlen(kCNAME);
+
+ memcpy(&data[offset], kCNAME, strlen(kCNAME));
+ offset += strlen(kCNAME);
+
+ data[offset++] = 7; // NOTE
+
+ static const char *kNOTE = "Hell's frozen over.";
+ data[offset++] = strlen(kNOTE);
+
+ memcpy(&data[offset], kNOTE, strlen(kNOTE));
+ offset += strlen(kNOTE);
+
+ data[offset++] = 0;
+
+ if ((offset % 4) > 0) {
+ size_t count = 4 - (offset % 4);
+ switch (count) {
+ case 3:
+ data[offset++] = 0;
+ case 2:
+ data[offset++] = 0;
+ case 1:
+ data[offset++] = 0;
+ }
+ }
+
+ size_t numWords = (offset / 4) - 1;
+ data[2] = numWords >> 8;
+ data[3] = numWords & 0xff;
+
+ buffer->setRange(buffer->offset(), buffer->size() + offset);
+}
+
+// static
+uint64_t WifiDisplaySource::PlaybackSession::GetNowNTP() {
+ uint64_t nowUs = ALooper::GetNowUs();
+
+ nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
+
+ uint64_t hi = nowUs / 1000000ll;
+ uint64_t lo = ((1ll << 32) * (nowUs % 1000000ll)) / 1000000ll;
+
+ return (hi << 32) | lo;
+}
+
+void WifiDisplaySource::PlaybackSession::onSendSR() {
+ sp<ABuffer> buffer = new ABuffer(1500);
+ buffer->setRange(0, 0);
+
+ addSR(buffer);
+ addSDES(buffer);
+
+ if (mUseInterleavedTCP) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatBinaryData);
+ notify->setInt32("channel", mRTCPChannel);
+ notify->setBuffer("data", buffer);
+ notify->post();
+ } else {
+ mNetSession->sendRequest(
+ mRTCPSessionID, buffer->data(), buffer->size());
+ }
+
+ ++mNumSRsSent;
+}
+
+ssize_t WifiDisplaySource::PlaybackSession::appendTSData(
+ const void *data, size_t size, bool timeDiscontinuity, bool flush) {
+ CHECK_EQ(size, 188);
+
+ CHECK_LE(mTSQueue->size() + size, mTSQueue->capacity());
+
+ memcpy(mTSQueue->data() + mTSQueue->size(), data, size);
+ mTSQueue->setRange(0, mTSQueue->size() + size);
+
+ if (flush || mTSQueue->size() == mTSQueue->capacity()) {
+ // flush
+
+ int64_t nowUs = ALooper::GetNowUs();
+ if (mFirstPacketTimeUs < 0ll) {
+ mFirstPacketTimeUs = nowUs;
+ }
+
+ // 90kHz time scale
+ uint32_t rtpTime = ((nowUs - mFirstPacketTimeUs) * 9ll) / 100ll;
+
+ uint8_t *rtp = mTSQueue->data();
+ rtp[0] = 0x80;
+ rtp[1] = 33 | (timeDiscontinuity ? (1 << 7) : 0); // M-bit
+ rtp[2] = (mRTPSeqNo >> 8) & 0xff;
+ rtp[3] = mRTPSeqNo & 0xff;
+ rtp[4] = rtpTime >> 24;
+ rtp[5] = (rtpTime >> 16) & 0xff;
+ rtp[6] = (rtpTime >> 8) & 0xff;
+ rtp[7] = rtpTime & 0xff;
+ rtp[8] = kSourceID >> 24;
+ rtp[9] = (kSourceID >> 16) & 0xff;
+ rtp[10] = (kSourceID >> 8) & 0xff;
+ rtp[11] = kSourceID & 0xff;
+
+ ++mRTPSeqNo;
+ ++mNumRTPSent;
+ mNumRTPOctetsSent += mTSQueue->size() - 12;
+
+ mLastRTPTime = rtpTime;
+ mLastNTPTime = GetNowNTP();
+
+ if (mUseInterleavedTCP) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatBinaryData);
+
+ sp<ABuffer> data = new ABuffer(mTSQueue->size());
+ memcpy(data->data(), rtp, mTSQueue->size());
+
+ notify->setInt32("channel", mRTPChannel);
+ notify->setBuffer("data", data);
+ notify->post();
+ } else {
+ mNetSession->sendRequest(
+ mRTPSessionID, rtp, mTSQueue->size());
+ }
+
+ mTSQueue->setInt32Data(mRTPSeqNo - 1);
+ mHistory.push_back(mTSQueue);
+ ++mHistoryLength;
+
+ if (mHistoryLength > kMaxHistoryLength) {
+ mTSQueue = *mHistory.begin();
+ mHistory.erase(mHistory.begin());
+
+ --mHistoryLength;
+ } else {
+ mTSQueue = new ABuffer(12 + kMaxNumTSPacketsPerRTPPacket * 188);
+ }
+
+ mTSQueue->setRange(0, 12);
+ }
+
+ return size;
+}
+
+status_t WifiDisplaySource::PlaybackSession::parseRTCP(
+ const sp<ABuffer> &buffer) {
+ const uint8_t *data = buffer->data();
+ size_t size = buffer->size();
+
+ while (size > 0) {
+ if (size < 8) {
+ // Too short to be a valid RTCP header
+ return ERROR_MALFORMED;
+ }
+
+ if ((data[0] >> 6) != 2) {
+ // Unsupported version.
+ return ERROR_UNSUPPORTED;
+ }
+
+ if (data[0] & 0x20) {
+ // Padding present.
+
+ size_t paddingLength = data[size - 1];
+
+ if (paddingLength + 12 > size) {
+ // If we removed this much padding we'd end up with something
+                // that's too short to be a valid RTCP header.
+ return ERROR_MALFORMED;
+ }
+
+ size -= paddingLength;
+ }
+
+ size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4;
+
+ if (size < headerLength) {
+ // Only received a partial packet?
+ return ERROR_MALFORMED;
+ }
+
+ switch (data[1]) {
+ case 200:
+ case 201: // RR
+ case 202: // SDES
+ case 203:
+ case 204: // APP
+ break;
+
+ case 205: // TSFB (transport layer specific feedback)
+ parseTSFB(data, headerLength);
+ break;
+
+ case 206: // PSFB (payload specific feedback)
+ hexdump(data, headerLength);
+ break;
+
+ default:
+ {
+ ALOGW("Unknown RTCP packet type %u of size %d",
+ (unsigned)data[1], headerLength);
+ break;
+ }
+ }
+
+ data += headerLength;
+ size -= headerLength;
+ }
+
+ return OK;
+}
+
+status_t WifiDisplaySource::PlaybackSession::parseTSFB(
+ const uint8_t *data, size_t size) {
+ if ((data[0] & 0x1f) != 1) {
+ return ERROR_UNSUPPORTED; // We only support NACK for now.
+ }
+
+ uint32_t srcId = U32_AT(&data[8]);
+ if (srcId != kSourceID) {
+ return ERROR_MALFORMED;
+ }
+
+ for (size_t i = 12; i < size; i += 4) {
+ uint16_t seqNo = U16_AT(&data[i]);
+ uint16_t blp = U16_AT(&data[i + 2]);
+
+ List<sp<ABuffer> >::iterator it = mHistory.begin();
+ bool found = false;
+ while (it != mHistory.end()) {
+ const sp<ABuffer> &buffer = *it;
+
+ uint16_t bufferSeqNo = buffer->int32Data() & 0xffff;
+
+ if (bufferSeqNo == seqNo) {
+ mNetSession->sendRequest(
+ mRTPSessionID, buffer->data(), buffer->size());
+
+ found = true;
+ break;
+ }
+
+ ++it;
+ }
+
+ if (found) {
+ ALOGI("retransmitting seqNo %d", seqNo);
+ } else {
+ ALOGI("seqNo %d no longer available", seqNo);
+ }
+ }
+
+ return OK;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h
new file mode 100644
index 0000000..a6c9f27
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.h
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PLAYBACK_SESSION_H_
+
+#define PLAYBACK_SESSION_H_
+
+#include "WifiDisplaySource.h"
+
+namespace android {
+
+struct ABuffer;
+struct Serializer;
+struct TSPacketizer;
+
+#define LOG_TRANSPORT_STREAM 0
+
+// Encapsulates the state of an RTP/RTCP session in the context of wifi
+// display.
+struct WifiDisplaySource::PlaybackSession : public AHandler {
+ PlaybackSession(
+ const sp<ANetworkSession> &netSession, const sp<AMessage> ¬ify);
+
+ // Sets up the RTP (and optionally RTCP) transport towards the client,
+ // either over UDP or interleaved on the existing TCP connection.
+ status_t init(
+ const char *clientIP, int32_t clientRtp, int32_t clientRtcp,
+ bool useInterleavedTCP);
+
+ int32_t getRTPPort() const;
+
+ // Liveness tracking; the owner can tear down sessions that have shown
+ // no sign of life for too long.
+ int64_t getLastLifesignUs() const;
+ void updateLiveness();
+
+ status_t play();
+ status_t pause();
+
+ // Notification "what" codes posted through the notify message supplied
+ // at construction time.
+ enum {
+ kWhatSessionDead,
+ kWhatBinaryData,
+ };
+
+protected:
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+ virtual ~PlaybackSession();
+
+private:
+ struct Track;
+
+ // Internal message codes handled on this handler's looper.
+ enum {
+ kWhatSendSR,
+ kWhatRTPNotify,
+ kWhatRTCPNotify,
+ kWhatSerializerNotify,
+ kWhatConverterNotify,
+ kWhatUpdateSurface,
+ };
+
+ // Interval between RTCP sender reports, our SSRC, and the maximum
+ // number of sent packets retained for NACK-driven retransmission.
+ static const int64_t kSendSRIntervalUs = 10000000ll;
+ static const uint32_t kSourceID = 0xdeadbeef;
+ static const size_t kMaxHistoryLength = 128;
+
+ sp<ANetworkSession> mNetSession;
+ sp<AMessage> mNotify;
+
+ int64_t mLastLifesignUs;
+
+ sp<ALooper> mSerializerLooper;
+ sp<Serializer> mSerializer;
+ sp<TSPacketizer> mPacketizer;
+ sp<ALooper> mCodecLooper;
+
+ KeyedVector<size_t, sp<Track> > mTracks;
+
+ // Transport stream data accumulated until it is flushed out as RTP.
+ sp<ABuffer> mTSQueue;
+ int64_t mPrevTimeUs;
+
+ bool mUseInterleavedTCP;
+
+ // in TCP mode
+ int32_t mRTPChannel;
+ int32_t mRTCPChannel;
+
+ // in UDP mode
+ int32_t mRTPPort;
+ int32_t mRTPSessionID;
+ int32_t mRTCPSessionID;
+
+
+ uint32_t mRTPSeqNo;
+
+ // RTCP sender-report statistics.
+ uint64_t mLastNTPTime;
+ uint32_t mLastRTPTime;
+ uint32_t mNumRTPSent;
+ uint32_t mNumRTPOctetsSent;
+ uint32_t mNumSRsSent;
+
+ bool mSendSRPending;
+
+ int64_t mFirstPacketTimeUs;
+
+ // Recently sent RTP packets kept around for retransmission on NACK.
+ List<sp<ABuffer> > mHistory;
+ size_t mHistoryLength;
+
+#if LOG_TRANSPORT_STREAM
+ FILE *mLogFile;
+#endif
+
+ void onSendSR();
+ void addSR(const sp<ABuffer> &buffer);
+ void addSDES(const sp<ABuffer> &buffer);
+ static uint64_t GetNowNTP();
+
+ status_t setupPacketizer();
+
+ ssize_t appendTSData(
+ const void *data, size_t size, bool timeDiscontinuity, bool flush);
+
+ void scheduleSendSR();
+
+ status_t parseRTCP(const sp<ABuffer> &buffer);
+ status_t parseTSFB(const uint8_t *data, size_t size);
+
+ DISALLOW_EVIL_CONSTRUCTORS(PlaybackSession);
+};
+
+} // namespace android
+
+#endif // PLAYBACK_SESSION_H_
+
diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
new file mode 100644
index 0000000..8af4fdf
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
@@ -0,0 +1,140 @@
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RepeaterSource"
+#include <utils/Log.h>
+
+#include "RepeaterSource.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+// Wraps |source| so that downstream consumers see buffers delivered at a
+// fixed rate of |rateHz| per second, repeating the latest buffer when the
+// wrapped source produces nothing new in time.
+RepeaterSource::RepeaterSource(const sp<MediaSource> &source, double rateHz)
+ : mSource(source),
+ mRateHz(rateHz),
+ mBuffer(NULL),
+ mResult(OK),
+ mStartTimeUs(-1ll),
+ mFrameCount(0) {
+}
+
+RepeaterSource::~RepeaterSource() {
+ stop(); // safe to call even if never started; also stops mSource
+}
+
+// Starts the wrapped source, then spins up a private looper that keeps a
+// read permanently in flight (see onMessageReceived / kWhatRead).
+status_t RepeaterSource::start(MetaData *params) {
+ status_t err = mSource->start(params);
+
+ if (err != OK) {
+ return err;
+ }
+
+ // Reset pacing state from any previous run.
+ mBuffer = NULL;
+ mResult = OK;
+ mStartTimeUs = -1ll;
+ mFrameCount = 0;
+
+ mLooper = new ALooper;
+ mLooper->start();
+
+ mReflector = new AHandlerReflector<RepeaterSource>(this);
+ mLooper->registerHandler(mReflector);
+
+ // Kick off the first asynchronous read.
+ postRead();
+
+ return OK;
+}
+
+// Stops the read-ahead looper (if running), then the wrapped source.
+// NOTE(review): a buffer still held in mBuffer is not released here and
+// may leak — verify whether a caller always drains it first.
+status_t RepeaterSource::stop() {
+ if (mLooper != NULL) {
+ mLooper->stop();
+ mLooper.clear();
+
+ mReflector.clear();
+ }
+
+ return mSource->stop();
+}
+
+sp<MetaData> RepeaterSource::getFormat() {
+ return mSource->getFormat();
+}
+
+// Hands out the most recently captured buffer, timestamped so successive
+// frames are spaced exactly 1/mRateHz seconds apart. Blocks until the
+// first buffer (or an error) arrives; afterwards it sleeps as needed to
+// hold the target rate, repeating the cached buffer if nothing newer has
+// arrived.
+status_t RepeaterSource::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ int64_t seekTimeUs;
+ ReadOptions::SeekMode seekMode;
+ // Seeking is not supported by this source.
+ CHECK(options == NULL || !options->getSeekTo(&seekTimeUs, &seekMode));
+
+ int64_t bufferTimeUs = -1ll;
+
+ if (mStartTimeUs < 0ll) {
+ // First read: wait for the looper thread to publish a buffer.
+ Mutex::Autolock autoLock(mLock);
+ while (mBuffer == NULL && mResult == OK) {
+ mCondition.wait(mLock);
+ }
+
+ mStartTimeUs = ALooper::GetNowUs();
+ bufferTimeUs = mStartTimeUs;
+ } else {
+ // Compute this frame's nominal timestamp and sleep (without the
+ // lock held) until it is due.
+ bufferTimeUs = mStartTimeUs + (mFrameCount * 1000000ll) / mRateHz;
+
+ int64_t nowUs = ALooper::GetNowUs();
+ int64_t delayUs = bufferTimeUs - nowUs;
+
+ if (delayUs > 0ll) {
+ usleep(delayUs);
+ }
+ }
+
+ Mutex::Autolock autoLock(mLock);
+ if (mResult != OK) {
+ CHECK(mBuffer == NULL);
+ return mResult;
+ }
+
+ // Hand out an extra reference to the cached buffer; the same buffer may
+ // be handed out again on the next read if no fresh one arrives in time.
+ mBuffer->add_ref();
+ *buffer = mBuffer;
+ (*buffer)->meta_data()->setInt64(kKeyTime, bufferTimeUs);
+
+ ++mFrameCount;
+
+ return OK;
+}
+
+// Queues a kWhatRead on the internal looper.
+void RepeaterSource::postRead() {
+ (new AMessage(kWhatRead, mReflector->id()))->post();
+}
+
+// Runs on mLooper: performs one blocking read from the wrapped source,
+// publishes the result under the lock and immediately schedules the next
+// read unless an error was seen.
+void RepeaterSource::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatRead:
+ {
+ MediaBuffer *buffer;
+ status_t err = mSource->read(&buffer);
+
+ Mutex::Autolock autoLock(mLock);
+ // Replace a previous (possibly never-consumed) buffer with the
+ // freshest one.
+ if (mBuffer != NULL) {
+ mBuffer->release();
+ mBuffer = NULL;
+ }
+ mBuffer = buffer;
+ mResult = err;
+
+ // Wake a reader blocked waiting for the first buffer.
+ mCondition.broadcast();
+
+ if (err == OK) {
+ postRead();
+ }
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+} // namespace android
diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.h b/media/libstagefright/wifi-display/source/RepeaterSource.h
new file mode 100644
index 0000000..31eb5cd
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/RepeaterSource.h
@@ -0,0 +1,55 @@
+#ifndef REPEATER_SOURCE_H_
+
+#define REPEATER_SOURCE_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AHandlerReflector.h>
+#include <media/stagefright/MediaSource.h>
+
+namespace android {
+
+// This MediaSource delivers frames at a constant rate by repeating buffers
+// if necessary.
+struct RepeaterSource : public MediaSource {
+ RepeaterSource(const sp<MediaSource> &source, double rateHz);
+
+ virtual status_t start(MetaData *params);
+ virtual status_t stop();
+ virtual sp<MetaData> getFormat();
+
+ // Blocks to pace output at rateHz, repeating the latest buffer from the
+ // wrapped source as necessary.
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options);
+
+ // Invoked by the AHandlerReflector on the internal looper thread.
+ void onMessageReceived(const sp<AMessage> &msg);
+
+protected:
+ virtual ~RepeaterSource();
+
+private:
+ enum {
+ kWhatRead,
+ };
+
+ // Guards mBuffer/mResult, shared between read() and the looper thread.
+ Mutex mLock;
+ Condition mCondition;
+
+ sp<MediaSource> mSource;
+ double mRateHz;
+
+ sp<ALooper> mLooper;
+ sp<AHandlerReflector<RepeaterSource> > mReflector;
+
+ // Most recent buffer (owned reference) and the last read status.
+ MediaBuffer *mBuffer;
+ status_t mResult;
+
+ // Wall-clock anchor of the first frame and frames emitted since then.
+ int64_t mStartTimeUs;
+ int32_t mFrameCount;
+
+ void postRead();
+
+ DISALLOW_EVIL_CONSTRUCTORS(RepeaterSource);
+};
+
+} // namespace android
+
+#endif // REPEATER_SOURCE_H_
diff --git a/media/libstagefright/wifi-display/source/Serializer.cpp b/media/libstagefright/wifi-display/source/Serializer.cpp
new file mode 100644
index 0000000..bd53fc8
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/Serializer.cpp
@@ -0,0 +1,366 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Serializer"
+#include <utils/Log.h>
+
+#include "Serializer.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+// A Track wraps a single MediaSource and caches at most one read-ahead
+// MediaBuffer together with its timestamp, so the Serializer can compare
+// timestamps across tracks before draining.
+struct Serializer::Track : public RefBase {
+ Track(const sp<MediaSource> &source);
+
+ status_t start();
+ status_t stop();
+
+ // Pulls the next buffer from the source if none is currently cached.
+ void readBufferIfNecessary();
+
+ bool reachedEOS() const;
+ int64_t bufferTimeUs() const;
+
+ // Relinquishes the cached buffer, wrapped in an ABuffer.
+ sp<ABuffer> drainBuffer();
+
+protected:
+ virtual ~Track();
+
+private:
+ sp<MediaSource> mSource;
+
+ bool mStarted;
+ status_t mFinalResult;
+ MediaBuffer *mBuffer;
+ int64_t mBufferTimeUs;
+
+ DISALLOW_EVIL_CONSTRUCTORS(Track);
+};
+
+Serializer::Track::Track(const sp<MediaSource> &source)
+ : mSource(source),
+ mStarted(false),
+ mFinalResult(OK),
+ mBuffer(NULL),
+ mBufferTimeUs(-1ll) {
+}
+
+Serializer::Track::~Track() {
+ stop(); // releases any pending buffer and stops the source
+}
+
+status_t Serializer::Track::start() {
+ if (mStarted) {
+ return OK;
+ }
+
+ status_t err = mSource->start();
+
+ if (err == OK) {
+ mStarted = true;
+ }
+
+ return err;
+}
+
+status_t Serializer::Track::stop() {
+ if (!mStarted) {
+ return OK;
+ }
+
+ // Drop the cached read-ahead buffer before stopping the source.
+ if (mBuffer != NULL) {
+ mBuffer->release();
+ mBuffer = NULL;
+
+ mBufferTimeUs = -1ll;
+ }
+
+ status_t err = mSource->stop();
+
+ mStarted = false;
+
+ return err;
+}
+
+void Serializer::Track::readBufferIfNecessary() {
+ if (mBuffer != NULL) {
+ return;
+ }
+
+ mFinalResult = mSource->read(&mBuffer);
+
+ if (mFinalResult != OK) {
+ ALOGI("read failed w/ err %d", mFinalResult);
+ return;
+ }
+
+ CHECK(mBuffer->meta_data()->findInt64(kKeyTime, &mBufferTimeUs));
+}
+
+// Any read error (including end-of-stream) counts as EOS for this track.
+bool Serializer::Track::reachedEOS() const {
+ return mFinalResult != OK;
+}
+
+int64_t Serializer::Track::bufferTimeUs() const {
+ return mBufferTimeUs;
+}
+
+sp<ABuffer> Serializer::Track::drainBuffer() {
+ sp<ABuffer> accessUnit = new ABuffer(mBuffer->range_length());
+
+ memcpy(accessUnit->data(),
+ (const uint8_t *)mBuffer->data() + mBuffer->range_offset(),
+ mBuffer->range_length());
+
+ accessUnit->meta()->setInt64("timeUs", mBufferTimeUs);
+ // NOTE(review): ownership of the MediaBuffer is handed off through this
+ // meta pointer without a release here — presumably the consumer
+ // releases it; verify against the receiving side.
+ accessUnit->meta()->setPointer("mediaBuffer", mBuffer);
+
+ mBuffer = NULL;
+ mBufferTimeUs = -1ll;
+
+ return accessUnit;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+// In throttled mode access units are released no faster than their media
+// timestamps dictate; otherwise they are emitted as fast as possible.
+Serializer::Serializer(bool throttle, const sp<AMessage> ¬ify)
+ : mThrottle(throttle),
+ mNotify(notify),
+ mPollGeneration(0),
+ mStartTimeUs(-1ll) {
+}
+
+Serializer::~Serializer() {
+}
+
+// Posts |msg| to this handler's looper and blocks for the reply,
+// translating a missing "err" field into OK.
+status_t Serializer::postSynchronouslyAndReturnError(
+ const sp<AMessage> &msg) {
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!response->findInt32("err", &err)) {
+ err = OK;
+ }
+
+ return err;
+}
+
+// Returns the new track's index on success, a negative error otherwise.
+ssize_t Serializer::addSource(const sp<MediaSource> &source) {
+ sp<AMessage> msg = new AMessage(kWhatAddSource, id());
+ // Raw pointer stays valid: the caller's sp<> keeps the source alive
+ // across this synchronous round-trip.
+ msg->setPointer("source", source.get());
+
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!response->findInt32("err", &err)) {
+ size_t index;
+ CHECK(response->findSize("index", &index));
+
+ return index;
+ }
+
+ return err;
+}
+
+status_t Serializer::start() {
+ return postSynchronouslyAndReturnError(new AMessage(kWhatStart, id()));
+}
+
+status_t Serializer::stop() {
+ return postSynchronouslyAndReturnError(new AMessage(kWhatStop, id()));
+}
+
+// All public entry points funnel through messages handled here on the
+// looper thread, so member state needs no extra locking.
+void Serializer::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatAddSource:
+ {
+ ssize_t index = onAddSource(msg);
+
+ sp<AMessage> response = new AMessage;
+
+ // A negative index doubles as the error code.
+ if (index < 0) {
+ response->setInt32("err", index);
+ } else {
+ response->setSize("index", index);
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatStart:
+ case kWhatStop:
+ {
+ status_t err = (msg->what() == kWhatStart) ? onStart() : onStop();
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatPoll:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ // Stale poll queued before the last cancelPoll(); ignore it.
+ if (generation != mPollGeneration) {
+ break;
+ }
+
+ // onPoll() returns the delay until the next poll, or < 0 to stop.
+ int64_t delayUs = onPoll();
+ if (delayUs >= 0ll) {
+ schedulePoll(delayUs);
+ }
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+// Wraps the source in a Track and returns its index in mTracks.
+ssize_t Serializer::onAddSource(const sp<AMessage> &msg) {
+ void *obj;
+ CHECK(msg->findPointer("source", &obj));
+
+ sp<MediaSource> source = static_cast<MediaSource *>(obj);
+
+ sp<Track> track = new Track(source);
+ return mTracks.add(track);
+}
+
+// Starts every track, bailing out on the first failure; polling is only
+// scheduled if all tracks started successfully.
+status_t Serializer::onStart() {
+ status_t err = OK;
+ for (size_t i = 0; i < mTracks.size(); ++i) {
+ err = mTracks.itemAt(i)->start();
+
+ if (err != OK) {
+ break;
+ }
+ }
+
+ if (err == OK) {
+#if 0
+ schedulePoll();
+#else
+ // XXX the dongle doesn't appear to have setup the RTP connection
+ // fully at the time PLAY is called. We have to delay sending data
+ // for a little bit.
+ schedulePoll(500000ll);
+#endif
+ }
+
+ return err;
+}
+
+// Stops all tracks and invalidates any queued poll messages.
+status_t Serializer::onStop() {
+ for (size_t i = 0; i < mTracks.size(); ++i) {
+ mTracks.itemAt(i)->stop();
+ }
+
+ cancelPoll();
+
+ return OK;
+}
+
+// Emits the access unit with the smallest buffered timestamp across all
+// tracks via mNotify. Returns the delay in us before the next poll, or
+// -1 to stop polling (all tracks at EOS; kWhatEOS has been posted).
+int64_t Serializer::onPoll() {
+ int64_t minTimeUs = -1ll;
+ ssize_t minTrackIndex = -1;
+
+ // Find the non-EOS track with the earliest buffered timestamp.
+ for (size_t i = 0; i < mTracks.size(); ++i) {
+ const sp<Track> &track = mTracks.itemAt(i);
+
+ track->readBufferIfNecessary();
+
+ if (!track->reachedEOS()) {
+ int64_t timeUs = track->bufferTimeUs();
+
+ if (minTrackIndex < 0 || timeUs < minTimeUs) {
+ minTimeUs = timeUs;
+ minTrackIndex = i;
+ }
+ }
+ }
+
+ if (minTrackIndex < 0) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatEOS);
+ notify->post();
+
+ return -1ll;
+ }
+
+ if (mThrottle) {
+ int64_t nowUs = ALooper::GetNowUs();
+
+ // Anchor media time zero to the wall clock on the first emission.
+ if (mStartTimeUs < 0ll) {
+ mStartTimeUs = nowUs;
+ }
+
+ int64_t lateByUs = nowUs - (minTimeUs + mStartTimeUs);
+
+ if (lateByUs < 0ll) {
+ // Too early
+ return -lateByUs;
+ }
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+
+ notify->setInt32("what", kWhatAccessUnit);
+ notify->setSize("trackIndex", minTrackIndex);
+
+ notify->setBuffer(
+ "accessUnit", mTracks.itemAt(minTrackIndex)->drainBuffer());
+
+ notify->post();
+
+ return 0ll;
+}
+
+// Queues a poll message after |delayUs|, stamped with the current
+// generation so stale polls can be recognized and dropped.
+void Serializer::schedulePoll(int64_t delayUs) {
+ sp<AMessage> msg = new AMessage(kWhatPoll, id());
+ msg->setInt32("generation", mPollGeneration);
+ msg->post(delayUs);
+}
+
+// Invalidates all in-flight poll messages by bumping the generation.
+void Serializer::cancelPoll() {
+ ++mPollGeneration;
+}
+
+} // namespace android
diff --git a/media/libstagefright/wifi-display/source/Serializer.h b/media/libstagefright/wifi-display/source/Serializer.h
new file mode 100644
index 0000000..07950fa
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/Serializer.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SERIALIZER_H_
+
+#define SERIALIZER_H_
+
+#include <media/stagefright/foundation/AHandler.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+struct AMessage;
+struct MediaSource;
+
+// After adding a number of MediaSource objects and starting the Serializer,
+// it'll emit their access units in order of increasing timestamps.
+struct Serializer : public AHandler {
+ // Notification "what" codes posted through the notify message.
+ enum {
+ kWhatEOS,
+ kWhatAccessUnit
+ };
+
+ // In throttled operation, data is emitted at a pace corresponding
+ // to the incoming media timestamps.
+ Serializer(bool throttle, const sp<AMessage> ¬ify);
+
+ // Returns the new track's index, or a negative error code.
+ ssize_t addSource(const sp<MediaSource> &source);
+
+ status_t start();
+ status_t stop();
+
+protected:
+ virtual void onMessageReceived(const sp<AMessage> &what);
+ virtual ~Serializer();
+
+private:
+ // Internal message codes; all work happens on this handler's looper.
+ enum {
+ kWhatAddSource,
+ kWhatStart,
+ kWhatStop,
+ kWhatPoll
+ };
+
+ struct Track;
+
+ bool mThrottle;
+ sp<AMessage> mNotify;
+ Vector<sp<Track> > mTracks;
+
+ // Bumped by cancelPoll() so queued poll messages can be ignored.
+ int32_t mPollGeneration;
+
+ // Wall-clock anchor for throttled emission; -1 until the first unit.
+ int64_t mStartTimeUs;
+
+ status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
+
+ ssize_t onAddSource(const sp<AMessage> &msg);
+ status_t onStart();
+ status_t onStop();
+ int64_t onPoll();
+
+ void schedulePoll(int64_t delayUs = 0ll);
+ void cancelPoll();
+
+ DISALLOW_EVIL_CONSTRUCTORS(Serializer);
+};
+
+} // namespace android
+
+#endif // SERIALIZER_H_
+
diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
new file mode 100644
index 0000000..b9a3e9b
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
@@ -0,0 +1,694 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TSPacketizer"
+#include <utils/Log.h>
+
+#include "TSPacketizer.h"
+#include "include/avc_utils.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include <arpa/inet.h>
+
+namespace android {
+
+// Per-elementary-stream state: assigned PID / stream_type / stream_id,
+// the TS continuity counter and cached codec specific data (SPS/PPS for
+// AVC, AudioSpecificConfig for AAC).
+struct TSPacketizer::Track : public RefBase {
+ Track(const sp<AMessage> &format,
+ unsigned PID, unsigned streamType, unsigned streamID);
+
+ unsigned PID() const;
+ unsigned streamType() const;
+ unsigned streamID() const;
+
+ // Returns the previous value.
+ unsigned incrementContinuityCounter();
+
+ bool isAudio() const;
+ bool isVideo() const;
+
+ bool isH264() const;
+ bool lacksADTSHeader() const;
+
+ sp<ABuffer> prependCSD(const sp<ABuffer> &accessUnit) const;
+ sp<ABuffer> prependADTSHeader(const sp<ABuffer> &accessUnit) const;
+
+protected:
+ virtual ~Track();
+
+private:
+ sp<AMessage> mFormat;
+
+ unsigned mPID;
+ unsigned mStreamType;
+ unsigned mStreamID;
+ unsigned mContinuityCounter;
+
+ AString mMIME;
+ Vector<sp<ABuffer> > mCSD;
+
+ DISALLOW_EVIL_CONSTRUCTORS(Track);
+};
+
+TSPacketizer::Track::Track(
+ const sp<AMessage> &format,
+ unsigned PID, unsigned streamType, unsigned streamID)
+ : mFormat(format),
+ mPID(PID),
+ mStreamType(streamType),
+ mStreamID(streamID),
+ mContinuityCounter(0) {
+ CHECK(format->findString("mime", &mMIME));
+
+ // Collect all codec-specific-data buffers ("csd-0", "csd-1", ...).
+ if (!strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)
+ || !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
+ for (size_t i = 0;; ++i) {
+ sp<ABuffer> csd;
+ // NOTE(review): "%d" with a size_t argument relies on matching
+ // integer widths; "%zu" would be the portable choice.
+ if (!format->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) {
+ break;
+ }
+
+ mCSD.push(csd);
+ }
+ }
+}
+
+TSPacketizer::Track::~Track() {
+}
+
+unsigned TSPacketizer::Track::PID() const {
+ return mPID;
+}
+
+unsigned TSPacketizer::Track::streamType() const {
+ return mStreamType;
+}
+
+unsigned TSPacketizer::Track::streamID() const {
+ return mStreamID;
+}
+
+// The 4-bit TS continuity counter wraps modulo 16.
+unsigned TSPacketizer::Track::incrementContinuityCounter() {
+ unsigned prevCounter = mContinuityCounter;
+
+ if (++mContinuityCounter == 16) {
+ mContinuityCounter = 0;
+ }
+
+ return prevCounter;
+}
+
+bool TSPacketizer::Track::isAudio() const {
+ return !strncasecmp("audio/", mMIME.c_str(), 6);
+}
+
+bool TSPacketizer::Track::isVideo() const {
+ return !strncasecmp("video/", mMIME.c_str(), 6);
+}
+
+bool TSPacketizer::Track::isH264() const {
+ return !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+// True for raw AAC tracks whose access units still need an ADTS header.
+bool TSPacketizer::Track::lacksADTSHeader() const {
+ if (strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
+ return false;
+ }
+
+ int32_t isADTS;
+ if (mFormat->findInt32("is-adts", &isADTS) && isADTS != 0) {
+ return false;
+ }
+
+ return true;
+}
+
+// Returns a copy of |accessUnit| with all csd buffers (e.g. SPS/PPS)
+// prepended in order.
+sp<ABuffer> TSPacketizer::Track::prependCSD(
+ const sp<ABuffer> &accessUnit) const {
+ size_t size = 0;
+ for (size_t i = 0; i < mCSD.size(); ++i) {
+ size += mCSD.itemAt(i)->size();
+ }
+
+ sp<ABuffer> dup = new ABuffer(accessUnit->size() + size);
+ size_t offset = 0;
+ for (size_t i = 0; i < mCSD.size(); ++i) {
+ const sp<ABuffer> &csd = mCSD.itemAt(i);
+
+ memcpy(dup->data() + offset, csd->data(), csd->size());
+ offset += csd->size();
+ }
+
+ memcpy(dup->data() + offset, accessUnit->data(), accessUnit->size());
+
+ return dup;
+}
+
+// Returns a copy of |accessUnit| preceded by a 7-byte ADTS header whose
+// fields are derived from the track's AudioSpecificConfig (csd-0).
+sp<ABuffer> TSPacketizer::Track::prependADTSHeader(
+ const sp<ABuffer> &accessUnit) const {
+ CHECK_EQ(mCSD.size(), 1u);
+
+ const uint8_t *codec_specific_data = mCSD.itemAt(0)->data();
+
+ // aac_frame_length covers the header itself plus the payload.
+ const uint32_t aac_frame_length = accessUnit->size() + 7;
+
+ sp<ABuffer> dup = new ABuffer(aac_frame_length);
+
+ // Unpack audioObjectType, sampling frequency index and channel config
+ // from the first two bytes of the AudioSpecificConfig.
+ unsigned profile = (codec_specific_data[0] >> 3) - 1;
+
+ unsigned sampling_freq_index =
+ ((codec_specific_data[0] & 7) << 1)
+ | (codec_specific_data[1] >> 7);
+
+ unsigned channel_configuration =
+ (codec_specific_data[1] >> 3) & 0x0f;
+
+ uint8_t *ptr = dup->data();
+
+ *ptr++ = 0xff;
+ *ptr++ = 0xf1; // b11110001, ID=0, layer=0, protection_absent=1
+
+ *ptr++ =
+ profile << 6
+ | sampling_freq_index << 2
+ | ((channel_configuration >> 2) & 1); // private_bit=0
+
+ // original_copy=0, home=0, copyright_id_bit=0, copyright_id_start=0
+ *ptr++ =
+ (channel_configuration & 3) << 6
+ | aac_frame_length >> 11;
+ *ptr++ = (aac_frame_length >> 3) & 0xff;
+ *ptr++ = (aac_frame_length & 7) << 5;
+
+ // adts_buffer_fullness=0, number_of_raw_data_blocks_in_frame=0
+ *ptr++ = 0;
+
+ memcpy(ptr, accessUnit->data(), accessUnit->size());
+
+ return dup;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+TSPacketizer::TSPacketizer()
+ : mPATContinuityCounter(0),
+ mPMTContinuityCounter(0) {
+ initCrcTable();
+}
+
+TSPacketizer::~TSPacketizer() {
+}
+
+// Assigns the new track a PID, stream_type and PES stream_id based on its
+// mime type and on how many similar tracks already exist. Returns the
+// track index or a negative error.
+ssize_t TSPacketizer::addTrack(const sp<AMessage> &format) {
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+
+ // PID bases: video PIDs are allocated upwards from 0x1011, audio PIDs
+ // from 0x1100.
+ unsigned PIDStart;
+ bool isVideo = !strncasecmp("video/", mime.c_str(), 6);
+ bool isAudio = !strncasecmp("audio/", mime.c_str(), 6);
+
+ if (isVideo) {
+ PIDStart = 0x1011;
+ } else if (isAudio) {
+ PIDStart = 0x1100;
+ } else {
+ return ERROR_UNSUPPORTED;
+ }
+
+ unsigned streamType;
+ unsigned streamIDStart;
+ unsigned streamIDStop;
+
+ // stream_type and PES stream_id ranges per the MPEG-2 systems spec:
+ // AVC video -> 0x1b / 0xe0..0xef, AAC audio -> 0x0f / 0xc0..0xdf.
+ if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
+ streamType = 0x1b;
+ streamIDStart = 0xe0;
+ streamIDStop = 0xef;
+ } else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
+ streamType = 0x0f;
+ streamIDStart = 0xc0;
+ streamIDStop = 0xdf;
+ } else {
+ return ERROR_UNSUPPORTED;
+ }
+
+ // Count existing tracks of the same stream type (for the stream_id)
+ // and of the same audio/video class (for the PID).
+ size_t numTracksOfThisType = 0;
+ unsigned PID = PIDStart;
+
+ for (size_t i = 0; i < mTracks.size(); ++i) {
+ const sp<Track> &track = mTracks.itemAt(i);
+
+ if (track->streamType() == streamType) {
+ ++numTracksOfThisType;
+ }
+
+ if ((isAudio && track->isAudio()) || (isVideo && track->isVideo())) {
+ ++PID;
+ }
+ }
+
+ unsigned streamID = streamIDStart + numTracksOfThisType;
+ if (streamID > streamIDStop) {
+ return -ERANGE;
+ }
+
+ sp<Track> track = new Track(format, PID, streamType, streamID);
+ return mTracks.add(track);
+}
+
+// Wraps one access unit of track |trackIndex| into a sequence of 188-byte
+// transport stream packets, optionally preceded by PAT/PMT and a PCR
+// packet as requested via |flags|. All emitted packets are returned as a
+// single contiguous buffer in |*packets|.
+status_t TSPacketizer::packetize(
+ size_t trackIndex,
+ const sp<ABuffer> &_accessUnit,
+ sp<ABuffer> *packets,
+ uint32_t flags) {
+ sp<ABuffer> accessUnit = _accessUnit;
+
+ packets->clear();
+
+ if (trackIndex >= mTracks.size()) {
+ return -ERANGE;
+ }
+
+ int64_t timeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+ const sp<Track> &track = mTracks.itemAt(trackIndex);
+
+ if (track->isH264()) {
+ if (IsIDR(accessUnit)) {
+ // prepend codec specific data, i.e. SPS and PPS.
+ accessUnit = track->prependCSD(accessUnit);
+ }
+ } else if (track->lacksADTSHeader()) {
+ accessUnit = track->prependADTSHeader(accessUnit);
+ }
+
+ // 0x47
+ // transport_error_indicator = b0
+ // payload_unit_start_indicator = b1
+ // transport_priority = b0
+ // PID
+ // transport_scrambling_control = b00
+ // adaptation_field_control = b??
+ // continuity_counter = b????
+ // -- payload follows
+ // packet_startcode_prefix = 0x000001
+ // stream_id
+ // PES_packet_length = 0x????
+ // reserved = b10
+ // PES_scrambling_control = b00
+ // PES_priority = b0
+ // data_alignment_indicator = b1
+ // copyright = b0
+ // original_or_copy = b0
+ // PTS_DTS_flags = b10 (PTS only)
+ // ESCR_flag = b0
+ // ES_rate_flag = b0
+ // DSM_trick_mode_flag = b0
+ // additional_copy_info_flag = b0
+ // PES_CRC_flag = b0
+ // PES_extension_flag = b0
+ // PES_header_data_length = 0x05
+ // reserved = b0010 (PTS)
+ // PTS[32..30] = b???
+ // reserved = b1
+ // PTS[29..15] = b??? ???? ???? ???? (15 bits)
+ // reserved = b1
+ // PTS[14..0] = b??? ???? ???? ???? (15 bits)
+ // reserved = b1
+ // the first fragment of "buffer" follows
+
+ // First packet carries up to 170 payload bytes (18 bytes of TS + PES
+ // headers), each subsequent one up to 184 (4-byte TS header only).
+ size_t numTSPackets;
+ if (accessUnit->size() <= 170) {
+ numTSPackets = 1;
+ } else {
+ numTSPackets = 1 + ((accessUnit->size() - 170) + 183) / 184;
+ }
+
+ if (flags & EMIT_PAT_AND_PMT) {
+ numTSPackets += 2;
+ }
+
+ if (flags & EMIT_PCR) {
+ ++numTSPackets;
+ }
+
+ sp<ABuffer> buffer = new ABuffer(numTSPackets * 188);
+ uint8_t *packetDataStart = buffer->data();
+
+ if (flags & EMIT_PAT_AND_PMT) {
+ // Program Association Table (PAT):
+ // 0x47
+ // transport_error_indicator = b0
+ // payload_unit_start_indicator = b1
+ // transport_priority = b0
+ // PID = b0000000000000 (13 bits)
+ // transport_scrambling_control = b00
+ // adaptation_field_control = b01 (no adaptation field, payload only)
+ // continuity_counter = b????
+ // skip = 0x00
+ // --- payload follows
+ // table_id = 0x00
+ // section_syntax_indicator = b1
+ // must_be_zero = b0
+ // reserved = b11
+ // section_length = 0x00d
+ // transport_stream_id = 0x0000
+ // reserved = b11
+ // version_number = b00001
+ // current_next_indicator = b1
+ // section_number = 0x00
+ // last_section_number = 0x00
+ // one program follows:
+ // program_number = 0x0001
+ // reserved = b111
+ // program_map_PID = kPID_PMT (13 bits!)
+ // CRC = 0x????????
+
+ if (++mPATContinuityCounter == 16) {
+ mPATContinuityCounter = 0;
+ }
+
+ uint8_t *ptr = packetDataStart;
+ *ptr++ = 0x47;
+ *ptr++ = 0x40;
+ *ptr++ = 0x00;
+ *ptr++ = 0x10 | mPATContinuityCounter;
+ *ptr++ = 0x00;
+
+ const uint8_t *crcDataStart = ptr;
+ *ptr++ = 0x00;
+ *ptr++ = 0xb0;
+ *ptr++ = 0x0d;
+ *ptr++ = 0x00;
+ *ptr++ = 0x00;
+ *ptr++ = 0xc3;
+ *ptr++ = 0x00;
+ *ptr++ = 0x00;
+ *ptr++ = 0x00;
+ *ptr++ = 0x01;
+ *ptr++ = 0xe0 | (kPID_PMT >> 8);
+ *ptr++ = kPID_PMT & 0xff;
+
+ CHECK_EQ(ptr - crcDataStart, 12);
+ uint32_t crc = htonl(crc32(crcDataStart, ptr - crcDataStart));
+ memcpy(ptr, &crc, 4);
+ ptr += 4;
+
+ // Stuff the remainder of the 188-byte packet with 0xff.
+ size_t sizeLeft = packetDataStart + 188 - ptr;
+ memset(ptr, 0xff, sizeLeft);
+
+ packetDataStart += 188;
+
+ // Program Map (PMT):
+ // 0x47
+ // transport_error_indicator = b0
+ // payload_unit_start_indicator = b1
+ // transport_priority = b0
+ // PID = kPID_PMT (13 bits)
+ // transport_scrambling_control = b00
+ // adaptation_field_control = b01 (no adaptation field, payload only)
+ // continuity_counter = b????
+ // skip = 0x00
+ // -- payload follows
+ // table_id = 0x02
+ // section_syntax_indicator = b1
+ // must_be_zero = b0
+ // reserved = b11
+ // section_length = 0x???
+ // program_number = 0x0001
+ // reserved = b11
+ // version_number = b00001
+ // current_next_indicator = b1
+ // section_number = 0x00
+ // last_section_number = 0x00
+ // reserved = b111
+ // PCR_PID = kPCR_PID (13 bits)
+ // reserved = b1111
+ // program_info_length = 0x000
+ // one or more elementary stream descriptions follow:
+ // stream_type = 0x??
+ // reserved = b111
+ // elementary_PID = b? ???? ???? ???? (13 bits)
+ // reserved = b1111
+ // ES_info_length = 0x000
+ // CRC = 0x????????
+
+ if (++mPMTContinuityCounter == 16) {
+ mPMTContinuityCounter = 0;
+ }
+
+ // 9 fixed bytes + 5 per stream description + 4 CRC bytes.
+ size_t section_length = 5 * mTracks.size() + 4 + 9;
+
+ ptr = packetDataStart;
+ *ptr++ = 0x47;
+ *ptr++ = 0x40 | (kPID_PMT >> 8);
+ *ptr++ = kPID_PMT & 0xff;
+ *ptr++ = 0x10 | mPMTContinuityCounter;
+ *ptr++ = 0x00;
+
+ crcDataStart = ptr;
+ *ptr++ = 0x02;
+ *ptr++ = 0xb0 | (section_length >> 8);
+ *ptr++ = section_length & 0xff;
+ *ptr++ = 0x00;
+ *ptr++ = 0x01;
+ *ptr++ = 0xc3;
+ *ptr++ = 0x00;
+ *ptr++ = 0x00;
+ *ptr++ = 0xe0 | (kPID_PCR >> 8);
+ *ptr++ = kPID_PCR & 0xff;
+ *ptr++ = 0xf0;
+ *ptr++ = 0x00;
+
+ for (size_t i = 0; i < mTracks.size(); ++i) {
+ const sp<Track> &track = mTracks.itemAt(i);
+
+ *ptr++ = track->streamType();
+ *ptr++ = 0xe0 | (track->PID() >> 8);
+ *ptr++ = track->PID() & 0xff;
+ *ptr++ = 0xf0;
+ *ptr++ = 0x00;
+ }
+
+ CHECK_EQ(ptr - crcDataStart, 12 + mTracks.size() * 5);
+ crc = htonl(crc32(crcDataStart, ptr - crcDataStart));
+ memcpy(ptr, &crc, 4);
+ ptr += 4;
+
+ sizeLeft = packetDataStart + 188 - ptr;
+ memset(ptr, 0xff, sizeLeft);
+
+ packetDataStart += 188;
+ }
+
+ if (flags & EMIT_PCR) {
+ // PCR stream
+ // 0x47
+ // transport_error_indicator = b0
+ // payload_unit_start_indicator = b1
+ // transport_priority = b0
+ // PID = kPCR_PID (13 bits)
+ // transport_scrambling_control = b00
+ // adaptation_field_control = b10 (adaptation field only, no payload)
+ // continuity_counter = b0000 (does not increment)
+ // adaptation_field_length = 183
+ // discontinuity_indicator = b0
+ // random_access_indicator = b0
+ // elementary_stream_priority_indicator = b0
+ // PCR_flag = b1
+ // OPCR_flag = b0
+ // splicing_point_flag = b0
+ // transport_private_data_flag = b0
+ // adaptation_field_extension_flag = b0
+ // program_clock_reference_base = b?????????????????????????????????
+ // reserved = b111111
+ // program_clock_reference_extension = b?????????
+
+#if 0
+ int64_t nowUs = ALooper::GetNowUs();
+#else
+ int64_t nowUs = timeUs;
+#endif
+
+ uint64_t PCR = nowUs * 27; // PCR based on a 27MHz clock
+ uint64_t PCR_base = PCR / 300;
+ uint32_t PCR_ext = PCR % 300;
+
+ uint8_t *ptr = packetDataStart;
+ *ptr++ = 0x47;
+ *ptr++ = 0x40 | (kPID_PCR >> 8);
+ *ptr++ = kPID_PCR & 0xff;
+ *ptr++ = 0x20;
+ *ptr++ = 0xb7; // adaptation_field_length
+ *ptr++ = 0x10;
+ *ptr++ = (PCR_base >> 25) & 0xff;
+ *ptr++ = (PCR_base >> 17) & 0xff;
+ *ptr++ = (PCR_base >> 9) & 0xff;
+ *ptr++ = ((PCR_base & 1) << 7) | 0x7e | ((PCR_ext >> 8) & 1);
+ *ptr++ = (PCR_ext & 0xff);
+
+ size_t sizeLeft = packetDataStart + 188 - ptr;
+ memset(ptr, 0xff, sizeLeft);
+
+ packetDataStart += 188;
+ }
+
+ // PTS in 90kHz units.
+ uint32_t PTS = (timeUs * 9ll) / 100ll;
+
+ size_t PES_packet_length = accessUnit->size() + 8;
+ // If the whole access unit fits into the first packet, an adaptation
+ // field with stuffing is inserted so the packet is exactly 188 bytes.
+ bool padding = (accessUnit->size() < (188 - 18));
+
+ if (PES_packet_length >= 65536) {
+ // This really should only happen for video.
+ CHECK(track->isVideo());
+
+ // It's valid to set this to 0 for video according to the specs.
+ PES_packet_length = 0;
+ }
+
+ uint8_t *ptr = packetDataStart;
+ *ptr++ = 0x47;
+ *ptr++ = 0x40 | (track->PID() >> 8);
+ *ptr++ = track->PID() & 0xff;
+ *ptr++ = (padding ? 0x30 : 0x10) | track->incrementContinuityCounter();
+
+ if (padding) {
+ size_t paddingSize = 188 - 18 - accessUnit->size();
+ *ptr++ = paddingSize - 1;
+ if (paddingSize >= 2) {
+ *ptr++ = 0x00;
+ memset(ptr, 0xff, paddingSize - 2);
+ ptr += paddingSize - 2;
+ }
+ }
+
+ *ptr++ = 0x00;
+ *ptr++ = 0x00;
+ *ptr++ = 0x01;
+ *ptr++ = track->streamID();
+ *ptr++ = PES_packet_length >> 8;
+ *ptr++ = PES_packet_length & 0xff;
+ *ptr++ = 0x84;
+ *ptr++ = 0x80;
+ *ptr++ = 0x05;
+ *ptr++ = 0x20 | (((PTS >> 30) & 7) << 1) | 1;
+ *ptr++ = (PTS >> 22) & 0xff;
+ *ptr++ = (((PTS >> 15) & 0x7f) << 1) | 1;
+ *ptr++ = (PTS >> 7) & 0xff;
+ *ptr++ = ((PTS & 0x7f) << 1) | 1;
+
+ // 18 bytes of TS/PES header leave 188 - 18 = 170 bytes for the payload
+
+ size_t sizeLeft = packetDataStart + 188 - ptr;
+ size_t copy = accessUnit->size();
+ if (copy > sizeLeft) {
+ copy = sizeLeft;
+ }
+
+ memcpy(ptr, accessUnit->data(), copy);
+ ptr += copy;
+ // NOTE(review): the CHECK below asserts an exact fit (stuffing was
+ // already emitted above), which makes the following memset a no-op.
+ CHECK_EQ(sizeLeft, copy);
+ memset(ptr, 0xff, sizeLeft - copy);
+
+ packetDataStart += 188;
+
+ size_t offset = copy;
+ while (offset < accessUnit->size()) {
+ // Stuff the final fragment's packet if the remainder is short.
+ bool padding = (accessUnit->size() - offset) < (188 - 4);
+
+ // for subsequent fragments of "buffer":
+ // 0x47
+ // transport_error_indicator = b0
+ // payload_unit_start_indicator = b0
+ // transport_priority = b0
+ // PID = b0 0001 1110 ???? (13 bits) [0x1e0 + 1 + sourceIndex]
+ // transport_scrambling_control = b00
+ // adaptation_field_control = b??
+ // continuity_counter = b????
+ // the fragment of "buffer" follows.
+
+ uint8_t *ptr = packetDataStart;
+ *ptr++ = 0x47;
+ *ptr++ = 0x00 | (track->PID() >> 8);
+ *ptr++ = track->PID() & 0xff;
+
+ *ptr++ = (padding ? 0x30 : 0x10) | track->incrementContinuityCounter();
+
+ if (padding) {
+ size_t paddingSize = 188 - 4 - (accessUnit->size() - offset);
+ *ptr++ = paddingSize - 1;
+ if (paddingSize >= 2) {
+ *ptr++ = 0x00;
+ memset(ptr, 0xff, paddingSize - 2);
+ ptr += paddingSize - 2;
+ }
+ }
+
+ // 4 bytes of TS header leave 188 - 4 = 184 bytes for the payload
+
+ size_t sizeLeft = packetDataStart + 188 - ptr;
+ size_t copy = accessUnit->size() - offset;
+ if (copy > sizeLeft) {
+ copy = sizeLeft;
+ }
+
+ memcpy(ptr, accessUnit->data() + offset, copy);
+ ptr += copy;
+ CHECK_EQ(sizeLeft, copy);
+ memset(ptr, 0xff, sizeLeft - copy);
+
+ offset += copy;
+ packetDataStart += 188;
+ }
+
+ // Sanity check: we filled exactly the number of packets we sized for.
+ CHECK(packetDataStart == buffer->data() + buffer->capacity());
+
+ *packets = buffer;
+
+ return OK;
+}
+
+// Builds the 256-entry lookup table for the MSB-first CRC-32 with
+// polynomial 0x04C11DB7 used over PSI sections (PAT/PMT).
+void TSPacketizer::initCrcTable() {
+ uint32_t poly = 0x04C11DB7;
+
+ for (int i = 0; i < 256; i++) {
+ uint32_t crc = i << 24;
+ for (int j = 0; j < 8; j++) {
+ crc = (crc << 1) ^ ((crc & 0x80000000) ? (poly) : 0);
+ }
+ mCrcTable[i] = crc;
+ }
+}
+
+// CRC-32/MPEG-2 style: initial value 0xFFFFFFFF, no reflection and no
+// final xor.
+uint32_t TSPacketizer::crc32(const uint8_t *start, size_t size) const {
+ uint32_t crc = 0xFFFFFFFF;
+ const uint8_t *p;
+
+ for (p = start; p < start + size; ++p) {
+ crc = (crc << 8) ^ mCrcTable[((crc >> 24) ^ *p) & 0xFF];
+ }
+
+ return crc;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.h b/media/libstagefright/wifi-display/source/TSPacketizer.h
new file mode 100644
index 0000000..9dbeb27
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/TSPacketizer.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TS_PACKETIZER_H_
+
+#define TS_PACKETIZER_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+
+// Forms the packets of a transport stream given access units.
+// Emits metadata tables (PAT and PMT) and timestamp stream (PCR) based
+// on flags.
+struct TSPacketizer : public RefBase {
+ TSPacketizer();
+
+ // Returns trackIndex or error.
+ ssize_t addTrack(const sp<AMessage> &format);
+
+ // Flags for packetize(); PAT/PMT and PCR emission is opt-in per call.
+ enum {
+ EMIT_PAT_AND_PMT = 1,
+ EMIT_PCR = 2,
+ };
+ // Packetizes one access unit of the given track into a run of
+ // 188-byte TS packets returned in *packets.
+ status_t packetize(
+ size_t trackIndex, const sp<ABuffer> &accessUnit,
+ sp<ABuffer> *packets,
+ uint32_t flags);
+
+protected:
+ virtual ~TSPacketizer();
+
+private:
+ // Fixed PIDs used for the program map table and the clock reference.
+ enum {
+ kPID_PMT = 0x100,
+ kPID_PCR = 0x1000,
+ };
+
+ struct Track;
+
+ Vector<sp<Track> > mTracks;
+
+ // Continuity counters for the PSI packets we emit ourselves.
+ unsigned mPATContinuityCounter;
+ unsigned mPMTContinuityCounter;
+
+ // Lookup table for crc32(), filled once by initCrcTable().
+ uint32_t mCrcTable[256];
+
+ void initCrcTable();
+ uint32_t crc32(const uint8_t *start, size_t size) const;
+
+ DISALLOW_EVIL_CONSTRUCTORS(TSPacketizer);
+};
+
+} // namespace android
+
+#endif // TS_PACKETIZER_H_
+
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
new file mode 100644
index 0000000..3f75bc3
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
@@ -0,0 +1,944 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "WifiDisplaySource"
+#include <utils/Log.h>
+
+#include "WifiDisplaySource.h"
+#include "PlaybackSession.h"
+#include "ParsedMessage.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+namespace android {
+
+WifiDisplaySource::WifiDisplaySource(const sp<ANetworkSession> &netSession)
+ // mSessionID 0 means "no RTSP server session yet"; CSeq numbering for
+ // outgoing requests starts at 1.
+ : mNetSession(netSession),
+ mSessionID(0),
+ mReaperPending(false),
+ mNextCSeq(1) {
+}
+
+// Nothing to tear down explicitly; sp<> members release themselves.
+WifiDisplaySource::~WifiDisplaySource() {
+}
+
+// Ask the looper thread to create the RTSP server on "port" and block
+// until it replies with the outcome.
+status_t WifiDisplaySource::start(int32_t port) {
+    sp<AMessage> msg = new AMessage(kWhatStart, id());
+    msg->setInt32("port", port);
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+
+    // A reply without an "err" entry counts as success.
+    if (err == OK && !response->findInt32("err", &err)) {
+        err = OK;
+    }
+
+    return err;
+}
+
+// Ask the looper thread to tear down all playback sessions and block
+// until it replies.
+status_t WifiDisplaySource::stop() {
+    sp<AMessage> msg = new AMessage(kWhatStop, id());
+
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+
+    // A reply without an "err" entry counts as success.
+    if (err == OK && !response->findInt32("err", &err)) {
+        err = OK;
+    }
+
+    return err;
+}
+
+// Central message pump: all public entry points and network callbacks are
+// serialized onto the looper thread and dispatched here.
+void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ // start(): bring up the RTSP server and reply with the result.
+ case kWhatStart:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ int32_t port;
+ CHECK(msg->findInt32("port", &port));
+
+ sp<AMessage> notify = new AMessage(kWhatRTSPNotify, id());
+
+ status_t err = mNetSession->createRTSPServer(
+ port, notify, &mSessionID);
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ // Events from the network session owning the RTSP server socket.
+ case kWhatRTSPNotify:
+ {
+ int32_t reason;
+ CHECK(msg->findInt32("reason", &reason));
+
+ switch (reason) {
+ case ANetworkSession::kWhatError:
+ {
+ int32_t sessionID;
+ CHECK(msg->findInt32("sessionID", &sessionID));
+
+ int32_t err;
+ CHECK(msg->findInt32("err", &err));
+
+ AString detail;
+ CHECK(msg->findString("detail", &detail));
+
+ ALOGE("An error occurred in session %d (%d, '%s/%s').",
+ sessionID,
+ err,
+ detail.c_str(),
+ strerror(-err));
+
+ // Drop the broken connection and forget its addresses.
+ mNetSession->destroySession(sessionID);
+
+ mClientIPs.removeItem(sessionID);
+ break;
+ }
+
+ case ANetworkSession::kWhatClientConnected:
+ {
+ int32_t sessionID;
+ CHECK(msg->findInt32("sessionID", &sessionID));
+
+ ClientInfo info;
+ CHECK(msg->findString("client-ip", &info.mRemoteIP));
+ CHECK(msg->findString("server-ip", &info.mLocalIP));
+ CHECK(msg->findInt32("server-port", &info.mLocalPort));
+
+ ALOGI("We now have a client (%d) connected.", sessionID);
+
+ mClientIPs.add(sessionID, info);
+
+ // Kick off the wfd handshake with message M1.
+ status_t err = sendM1(sessionID);
+ CHECK_EQ(err, (status_t)OK);
+ break;
+ }
+
+ case ANetworkSession::kWhatData:
+ {
+ onReceiveClientData(msg);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+ break;
+ }
+
+ // stop(): unregister and drop every playback session, then reply.
+ case kWhatStop:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ // Iterate backwards so removal doesn't disturb the indices
+ // still to be visited.
+ for (size_t i = mPlaybackSessions.size(); i-- > 0;) {
+ const sp<PlaybackSession> &playbackSession =
+ mPlaybackSessions.valueAt(i);
+
+ looper()->unregisterHandler(playbackSession->id());
+ mPlaybackSessions.removeItemsAt(i);
+ }
+
+ status_t err = OK;
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ // Periodic sweep: drop sessions whose keep-alives have stopped.
+ case kWhatReapDeadClients:
+ {
+ mReaperPending = false;
+
+ for (size_t i = mPlaybackSessions.size(); i-- > 0;) {
+ const sp<PlaybackSession> &playbackSession =
+ mPlaybackSessions.valueAt(i);
+
+ if (playbackSession->getLastLifesignUs()
+ + kPlaybackSessionTimeoutUs < ALooper::GetNowUs()) {
+ ALOGI("playback session %d timed out, reaping.",
+ mPlaybackSessions.keyAt(i));
+
+ looper()->unregisterHandler(playbackSession->id());
+ mPlaybackSessions.removeItemsAt(i);
+ }
+ }
+
+ // Re-arm only while sessions remain to be watched.
+ if (!mPlaybackSessions.isEmpty()) {
+ scheduleReaper();
+ }
+ break;
+ }
+
+ // Notifications posted by a PlaybackSession: either it died or it
+ // has binary data to be interleaved onto the RTSP connection.
+ case kWhatPlaybackSessionNotify:
+ {
+ int32_t playbackSessionID;
+ CHECK(msg->findInt32("playbackSessionID", &playbackSessionID));
+
+ int32_t what;
+ CHECK(msg->findInt32("what", &what));
+
+ ssize_t index = mPlaybackSessions.indexOfKey(playbackSessionID);
+ if (index >= 0) {
+ const sp<PlaybackSession> &playbackSession =
+ mPlaybackSessions.valueAt(index);
+
+ if (what == PlaybackSession::kWhatSessionDead) {
+ ALOGI("playback sessions %d wants to quit.",
+ playbackSessionID);
+
+ looper()->unregisterHandler(playbackSession->id());
+ mPlaybackSessions.removeItemsAt(index);
+ } else {
+ CHECK_EQ(what, PlaybackSession::kWhatBinaryData);
+
+ int32_t channel;
+ CHECK(msg->findInt32("channel", &channel));
+
+ sp<ABuffer> data;
+ CHECK(msg->findBuffer("data", &data));
+
+ // The interleaved framing only allows one byte of
+ // channel and two bytes of length.
+ CHECK_LE(channel, 0xffu);
+ CHECK_LE(data->size(), 0xffffu);
+
+ int32_t sessionID;
+ CHECK(msg->findInt32("sessionID", &sessionID));
+
+ // RFC 2326 "$"-framed interleaved binary header.
+ char header[4];
+ header[0] = '$';
+ header[1] = channel;
+ header[2] = data->size() >> 8;
+ header[3] = data->size() & 0xff;
+
+ mNetSession->sendRequest(
+ sessionID, header, sizeof(header));
+
+ mNetSession->sendRequest(
+ sessionID, data->data(), data->size());
+ }
+ }
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+// Remember which member function should process the RTSP response that
+// will arrive for this (session, CSeq) pair.
+void WifiDisplaySource::registerResponseHandler(
+        int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func) {
+    ResponseID key;
+    key.mSessionID = sessionID;
+    key.mCSeq = cseq;
+    mResponseHandlers.add(key, func);
+}
+
+// M1: announce wfd1.0 support to the newly connected sink with an RTSP
+// OPTIONS request; the reply is routed to onReceiveM1Response.
+status_t WifiDisplaySource::sendM1(int32_t sessionID) {
+    AString request("OPTIONS * RTSP/1.0\r\n");
+    AppendCommonResponse(&request, mNextCSeq);
+    request.append("Require: org.wfa.wfd1.0\r\n");
+    request.append("\r\n");
+
+    status_t err = mNetSession->sendRequest(
+            sessionID, request.c_str(), request.size());
+
+    if (err == OK) {
+        registerResponseHandler(
+                sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM1Response);
+        ++mNextCSeq;
+    }
+
+    return err;
+}
+
+// M3: query the sink's capabilities (video formats, audio codecs and the
+// RTP ports it wants us to stream to) via GET_PARAMETER.
+status_t WifiDisplaySource::sendM3(int32_t sessionID) {
+    AString body =
+        "wfd_video_formats\r\n"
+        "wfd_audio_codecs\r\n"
+        "wfd_client_rtp_ports\r\n";
+
+    AString request = "GET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
+    AppendCommonResponse(&request, mNextCSeq);
+
+    request.append("Content-Type: text/parameters\r\n");
+
+    // Cast: AString::size() returns size_t, which does not match "%d" on
+    // LP64 targets.
+    request.append(StringPrintf("Content-Length: %d\r\n", (int)body.size()));
+    request.append("\r\n");
+    request.append(body);
+
+    status_t err =
+        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
+
+    if (err != OK) {
+        return err;
+    }
+
+    registerResponseHandler(
+            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM3Response);
+
+    ++mNextCSeq;
+
+    return OK;
+}
+
+// M4: tell the sink the chosen formats and our presentation URL via
+// SET_PARAMETER. The layout of wfd_video_formats is:
+//   1 byte "native"
+//   1 byte "preferred-display-mode-supported" 0 or 1
+//   one or more avc codec structures
+//     1 byte profile
+//     1 byte level
+//     4 byte CEA mask
+//     4 byte VESA mask
+//     4 byte HH mask
+//     1 byte latency
+//     2 byte min-slice-slice
+//     2 byte slice-enc-params
+//     1 byte framerate-control-support
+//     max-hres (none or 2 byte)
+//     max-vres (none or 2 byte)
+status_t WifiDisplaySource::sendM4(int32_t sessionID) {
+    const ClientInfo &info = mClientIPs.valueFor(sessionID);
+
+    AString body = StringPrintf(
+        "wfd_video_formats: "
+        "30 00 02 02 00000040 00000000 00000000 00 0000 0000 00 none none\r\n"
+        "wfd_audio_codecs: AAC 00000001 00\r\n"  // 2 ch AAC 48kHz
+        "wfd_presentation_URL: rtsp://%s:%d/wfd1.0/streamid=0 none\r\n"
+        "wfd_client_rtp_ports: RTP/AVP/UDP;unicast 19000 0 mode=play\r\n",
+        info.mLocalIP.c_str(), info.mLocalPort);
+
+    AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
+    AppendCommonResponse(&request, mNextCSeq);
+
+    request.append("Content-Type: text/parameters\r\n");
+
+    // Cast: AString::size() returns size_t, which does not match "%d" on
+    // LP64 targets.
+    request.append(StringPrintf("Content-Length: %d\r\n", (int)body.size()));
+    request.append("\r\n");
+    request.append(body);
+
+    status_t err =
+        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
+
+    if (err != OK) {
+        return err;
+    }
+
+    registerResponseHandler(
+            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM4Response);
+
+    ++mNextCSeq;
+
+    return OK;
+}
+
+// M5: trigger the sink to issue a SETUP request.
+status_t WifiDisplaySource::sendM5(int32_t sessionID) {
+    AString body = "wfd_trigger_method: SETUP\r\n";
+
+    AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
+    AppendCommonResponse(&request, mNextCSeq);
+
+    request.append("Content-Type: text/parameters\r\n");
+
+    // Cast: AString::size() returns size_t, which does not match "%d" on
+    // LP64 targets.
+    request.append(StringPrintf("Content-Length: %d\r\n", (int)body.size()));
+    request.append("\r\n");
+    request.append(body);
+
+    status_t err =
+        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
+
+    if (err != OK) {
+        return err;
+    }
+
+    registerResponseHandler(
+            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM5Response);
+
+    ++mNextCSeq;
+
+    return OK;
+}
+
+// The sink's reply to our M1 OPTIONS request; only the status matters.
+status_t WifiDisplaySource::onReceiveM1Response(
+        int32_t sessionID, const sp<ParsedMessage> &msg) {
+    int32_t statusCode;
+    if (!msg->getStatusCode(&statusCode)) {
+        return ERROR_MALFORMED;
+    }
+
+    return (statusCode == 200) ? OK : ERROR_UNSUPPORTED;
+}
+
+// The sink's reply to our M3 capability query; on success advance the
+// handshake to M4.
+status_t WifiDisplaySource::onReceiveM3Response(
+        int32_t sessionID, const sp<ParsedMessage> &msg) {
+    int32_t statusCode;
+    if (!msg->getStatusCode(&statusCode)) {
+        return ERROR_MALFORMED;
+    }
+
+    return (statusCode == 200) ? sendM4(sessionID) : ERROR_UNSUPPORTED;
+}
+
+// The sink's reply to our M4 SET_PARAMETER; on success advance the
+// handshake to M5.
+status_t WifiDisplaySource::onReceiveM4Response(
+        int32_t sessionID, const sp<ParsedMessage> &msg) {
+    int32_t statusCode;
+    if (!msg->getStatusCode(&statusCode)) {
+        return ERROR_MALFORMED;
+    }
+
+    return (statusCode == 200) ? sendM5(sessionID) : ERROR_UNSUPPORTED;
+}
+
+// The sink's reply to our M5 trigger; the sink will now send SETUP, so
+// there is nothing further to initiate here.
+status_t WifiDisplaySource::onReceiveM5Response(
+        int32_t sessionID, const sp<ParsedMessage> &msg) {
+    int32_t statusCode;
+    if (!msg->getStatusCode(&statusCode)) {
+        return ERROR_MALFORMED;
+    }
+
+    return (statusCode == 200) ? OK : ERROR_UNSUPPORTED;
+}
+
+// Arm the dead-client reaper, keeping at most one kWhatReapDeadClients
+// message in flight.
+void WifiDisplaySource::scheduleReaper() {
+    if (!mReaperPending) {
+        mReaperPending = true;
+        sp<AMessage> msg = new AMessage(kWhatReapDeadClients, id());
+        msg->post(kReaperIntervalUs);
+    }
+}
+
+// Dispatch one inbound RTSP message: responses are routed to the handler
+// registered for their (session, CSeq) pair; requests go to the
+// per-method on...Request handlers. (Also removes an unused local
+// "AString uri" present in the original.)
+void WifiDisplaySource::onReceiveClientData(const sp<AMessage> &msg) {
+    int32_t sessionID;
+    CHECK(msg->findInt32("sessionID", &sessionID));
+
+    sp<RefBase> obj;
+    CHECK(msg->findObject("data", &obj));
+
+    sp<ParsedMessage> data =
+        static_cast<ParsedMessage *>(obj.get());
+
+    ALOGV("session %d received '%s'",
+          sessionID, data->debugString().c_str());
+
+    AString method;
+    data->getRequestField(0, &method);
+
+    // Every message we handle must carry a CSeq header.
+    int32_t cseq;
+    if (!data->findInt32("cseq", &cseq)) {
+        sendErrorResponse(sessionID, "400 Bad Request", -1 /* cseq */);
+        return;
+    }
+
+    if (method.startsWith("RTSP/")) {
+        // This is a response.
+
+        ResponseID id;
+        id.mSessionID = sessionID;
+        id.mCSeq = cseq;
+
+        ssize_t index = mResponseHandlers.indexOfKey(id);
+
+        if (index < 0) {
+            ALOGW("Received unsolicited server response, cseq %d", cseq);
+            return;
+        }
+
+        // Handlers are one-shot: unregister before invoking.
+        HandleRTSPResponseFunc func = mResponseHandlers.valueAt(index);
+        mResponseHandlers.removeItemsAt(index);
+
+        status_t err = (this->*func)(sessionID, data);
+
+        if (err != OK) {
+            ALOGW("Response handler for session %d, cseq %d returned "
+                  "err %d (%s)",
+                  sessionID, cseq, err, strerror(-err));
+        }
+    } else {
+        // This is a request; we only speak RTSP/1.0.
+        AString version;
+        data->getRequestField(2, &version);
+        if (!(version == AString("RTSP/1.0"))) {
+            sendErrorResponse(sessionID, "505 RTSP Version not supported", cseq);
+            return;
+        }
+
+        if (method == "DESCRIBE") {
+            onDescribeRequest(sessionID, cseq, data);
+        } else if (method == "OPTIONS") {
+            onOptionsRequest(sessionID, cseq, data);
+        } else if (method == "SETUP") {
+            onSetupRequest(sessionID, cseq, data);
+        } else if (method == "PLAY") {
+            onPlayRequest(sessionID, cseq, data);
+        } else if (method == "PAUSE") {
+            onPauseRequest(sessionID, cseq, data);
+        } else if (method == "TEARDOWN") {
+            onTeardownRequest(sessionID, cseq, data);
+        } else if (method == "GET_PARAMETER") {
+            onGetParameterRequest(sessionID, cseq, data);
+        } else if (method == "SET_PARAMETER") {
+            onSetParameterRequest(sessionID, cseq, data);
+        } else {
+            sendErrorResponse(sessionID, "405 Method Not Allowed", cseq);
+        }
+    }
+}
+
+// Handle an RTSP DESCRIBE request by returning a static SDP description
+// of the MPEG2-TS-over-RTP stream.
+void WifiDisplaySource::onDescribeRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    int64_t nowUs = ALooper::GetNowUs();
+
+    AString sdp;
+    sdp.append("v=0\r\n");
+
+    // The current time serves as both sess-id and sess-version of the
+    // origin line.
+    sdp.append(StringPrintf(
+            "o=- %lld %lld IN IP4 0.0.0.0\r\n", nowUs, nowUs));
+
+    // BUGFIX: the original appended a second "o=" line here
+    // ("o=- 0 0 IN IP4 127.0.0.0"); SDP permits exactly one origin line,
+    // so the duplicate has been dropped.
+    sdp.append(
+            "s=Sample\r\n"
+            "c=IN IP4 0.0.0.0\r\n"
+            "b=AS:502\r\n"
+            "t=0 0\r\n"
+            "a=control:*\r\n"
+            "a=range:npt=now-\r\n"
+            "m=video 0 RTP/AVP 33\r\n"
+            "a=rtpmap:33 MP2T/90000\r\n"
+            "a=control:\r\n");
+
+    AString response = "RTSP/1.0 200 OK\r\n";
+    AppendCommonResponse(&response, cseq);
+
+    response.append("Content-Type: application/sdp\r\n");
+
+    // response.append("Content-Base: rtsp://0.0.0.0:7236\r\n");
+
+    // Cast: AString::size() returns size_t, which does not match "%d" on
+    // LP64 targets.
+    response.append(StringPrintf("Content-Length: %d\r\n", (int)sdp.size()));
+    response.append("\r\n");
+    response.append(sdp);
+
+    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
+    CHECK_EQ(err, (status_t)OK);
+}
+
+// Handle an RTSP OPTIONS request. It doubles as a keep-alive for an
+// existing playback session, and we answer it by launching our own M3
+// capability query.
+void WifiDisplaySource::onOptionsRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    int32_t playbackSessionID;
+    sp<PlaybackSession> session = findPlaybackSession(data, &playbackSessionID);
+
+    if (session != NULL) {
+        session->updateLiveness();
+    }
+
+    AString response("RTSP/1.0 200 OK\r\n");
+    AppendCommonResponse(&response, cseq);
+    response.append(
+            "Public: org.wfa.wfd1.0, DESCRIBE, SETUP, TEARDOWN, PLAY, PAUSE, "
+            "GET_PARAMETER, SET_PARAMETER\r\n");
+    response.append("\r\n");
+
+    CHECK_EQ(mNetSession->sendRequest(sessionID, response.c_str()),
+             (status_t)OK);
+
+    CHECK_EQ(sendM3(sessionID), (status_t)OK);
+}
+
+// Handle an RTSP SETUP request: parse the Transport header (interleaved
+// TCP or unicast UDP), create and initialize a PlaybackSession, and reply
+// with the negotiated transport.
+void WifiDisplaySource::onSetupRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    AString transport;
+    if (!data->findString("transport", &transport)) {
+        sendErrorResponse(sessionID, "400 Bad Request", cseq);
+        return;
+    }
+
+    bool useInterleavedTCP = false;
+
+    int clientRtp, clientRtcp;
+    if (transport.startsWith("RTP/AVP/TCP;")) {
+        // RTP/RTCP interleaved on the RTSP control connection.
+        AString interleaved;
+        if (!ParsedMessage::GetAttribute(
+                    transport.c_str(), "interleaved", &interleaved)
+                || sscanf(interleaved.c_str(), "%d-%d",
+                          &clientRtp, &clientRtcp) != 2) {
+            sendErrorResponse(sessionID, "400 Bad Request", cseq);
+            return;
+        }
+
+        useInterleavedTCP = true;
+    } else if (transport.startsWith("RTP/AVP;unicast;")
+            || transport.startsWith("RTP/AVP/UDP;unicast;")) {
+        bool badRequest = false;
+
+        AString clientPort;
+        if (!ParsedMessage::GetAttribute(
+                    transport.c_str(), "client_port", &clientPort)) {
+            badRequest = true;
+        } else if (sscanf(clientPort.c_str(), "%d-%d",
+                          &clientRtp, &clientRtcp) == 2) {
+        } else if (sscanf(clientPort.c_str(), "%d", &clientRtp) == 1) {
+            // No RTCP.
+            clientRtcp = -1;
+        } else {
+            badRequest = true;
+        }
+
+        if (badRequest) {
+            sendErrorResponse(sessionID, "400 Bad Request", cseq);
+            return;
+        }
+#if 1
+    // The LG dongle doesn't specify client_port=xxx apparently.
+    } else if (transport == "RTP/AVP/UDP;unicast") {
+        clientRtp = 19000;
+        clientRtcp = clientRtp + 1;
+#endif
+    } else {
+        sendErrorResponse(sessionID, "461 Unsupported Transport", cseq);
+        return;
+    }
+
+    int32_t playbackSessionID = makeUniquePlaybackSessionID();
+
+    sp<AMessage> notify = new AMessage(kWhatPlaybackSessionNotify, id());
+    notify->setInt32("playbackSessionID", playbackSessionID);
+    notify->setInt32("sessionID", sessionID);
+
+    sp<PlaybackSession> playbackSession =
+        new PlaybackSession(mNetSession, notify);
+
+    looper()->registerHandler(playbackSession);
+
+    AString uri;
+    data->getRequestField(1, &uri);
+
+    if (strncasecmp("rtsp://", uri.c_str(), 7)) {
+        sendErrorResponse(sessionID, "400 Bad Request", cseq);
+        return;
+    }
+
+    if (!(uri.startsWith("rtsp://") && uri.endsWith("/wfd1.0/streamid=0"))) {
+        sendErrorResponse(sessionID, "404 Not found", cseq);
+        return;
+    }
+
+    const ClientInfo &info = mClientIPs.valueFor(sessionID);
+
+    status_t err = playbackSession->init(
+            info.mRemoteIP.c_str(),
+            clientRtp,
+            clientRtcp,
+            useInterleavedTCP);
+
+    // Unregister eagerly on failure; the switch below picks the status.
+    if (err != OK) {
+        looper()->unregisterHandler(playbackSession->id());
+        playbackSession.clear();
+    }
+
+    switch (err) {
+        case OK:
+            break;
+        case -ENOENT:
+            sendErrorResponse(sessionID, "404 Not Found", cseq);
+            return;
+        default:
+            sendErrorResponse(sessionID, "403 Forbidden", cseq);
+            return;
+    }
+
+    mPlaybackSessions.add(playbackSessionID, playbackSession);
+
+    AString response = "RTSP/1.0 200 OK\r\n";
+    AppendCommonResponse(&response, cseq, playbackSessionID);
+
+    if (useInterleavedTCP) {
+        // BUGFIX: this Transport header was missing its terminating CRLF,
+        // which merged it with the blank line ending the header block.
+        response.append(
+                StringPrintf(
+                    "Transport: RTP/AVP/TCP;interleaved=%d-%d\r\n",
+                    clientRtp, clientRtcp));
+    } else {
+        int32_t serverRtp = playbackSession->getRTPPort();
+
+        if (clientRtcp >= 0) {
+            response.append(
+                    StringPrintf(
+                        "Transport: RTP/AVP;unicast;client_port=%d-%d;"
+                        "server_port=%d-%d\r\n",
+                        clientRtp, clientRtcp, serverRtp, serverRtp + 1));
+        } else {
+            response.append(
+                    StringPrintf(
+                        "Transport: RTP/AVP;unicast;client_port=%d;"
+                        "server_port=%d\r\n",
+                        clientRtp, serverRtp));
+        }
+    }
+
+    response.append("\r\n");
+
+    err = mNetSession->sendRequest(sessionID, response.c_str());
+    CHECK_EQ(err, (status_t)OK);
+
+#if 0
+    // XXX the dongle does not currently send keep-alives.
+    scheduleReaper();
+#endif
+}
+
+// Handle an RTSP PLAY request for an existing playback session.
+void WifiDisplaySource::onPlayRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    int32_t playbackSessionID;
+    sp<PlaybackSession> session = findPlaybackSession(data, &playbackSessionID);
+
+    if (session == NULL) {
+        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
+        return;
+    }
+
+    CHECK_EQ(session->play(), (status_t)OK);
+
+    AString response("RTSP/1.0 200 OK\r\n");
+    AppendCommonResponse(&response, cseq, playbackSessionID);
+    response.append("Range: npt=now-\r\n");
+    response.append("\r\n");
+
+    CHECK_EQ(mNetSession->sendRequest(sessionID, response.c_str()),
+             (status_t)OK);
+}
+
+// Handle an RTSP PAUSE request for an existing playback session.
+void WifiDisplaySource::onPauseRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    int32_t playbackSessionID;
+    sp<PlaybackSession> session = findPlaybackSession(data, &playbackSessionID);
+
+    if (session == NULL) {
+        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
+        return;
+    }
+
+    CHECK_EQ(session->pause(), (status_t)OK);
+
+    AString response("RTSP/1.0 200 OK\r\n");
+    AppendCommonResponse(&response, cseq, playbackSessionID);
+    response.append("\r\n");
+
+    CHECK_EQ(mNetSession->sendRequest(sessionID, response.c_str()),
+             (status_t)OK);
+}
+
+// Handle an RTSP TEARDOWN request: destroy the referenced playback
+// session and acknowledge.
+void WifiDisplaySource::onTeardownRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    int32_t playbackSessionID;
+    sp<PlaybackSession> session = findPlaybackSession(data, &playbackSessionID);
+
+    if (session == NULL) {
+        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
+        return;
+    }
+
+    // Stop routing looper messages to the session before dropping it.
+    looper()->unregisterHandler(session->id());
+    mPlaybackSessions.removeItem(playbackSessionID);
+
+    AString response("RTSP/1.0 200 OK\r\n");
+    AppendCommonResponse(&response, cseq, playbackSessionID);
+    response.append("\r\n");
+
+    CHECK_EQ(mNetSession->sendRequest(sessionID, response.c_str()),
+             (status_t)OK);
+}
+
+// Handle an RTSP GET_PARAMETER request; used by the sink as a session
+// keep-alive.
+void WifiDisplaySource::onGetParameterRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    int32_t playbackSessionID;
+    sp<PlaybackSession> session = findPlaybackSession(data, &playbackSessionID);
+
+    if (session == NULL) {
+        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
+        return;
+    }
+
+    session->updateLiveness();
+
+    AString response("RTSP/1.0 200 OK\r\n");
+    AppendCommonResponse(&response, cseq, playbackSessionID);
+    response.append("\r\n");
+
+    CHECK_EQ(mNetSession->sendRequest(sessionID, response.c_str()),
+             (status_t)OK);
+}
+
+// Handle an RTSP SET_PARAMETER request; currently treated purely as a
+// session keep-alive.
+void WifiDisplaySource::onSetParameterRequest(
+ int32_t sessionID,
+ int32_t cseq,
+ const sp<ParsedMessage> &data) {
+ int32_t playbackSessionID;
+#if 0
+ // XXX the dongle does not include a "Session:" header in this request.
+ sp<PlaybackSession> playbackSession =
+ findPlaybackSession(data, &playbackSessionID);
+
+ if (playbackSession == NULL) {
+ sendErrorResponse(sessionID, "454 Session Not Found", cseq);
+ return;
+ }
+#else
+ // Workaround: assume the single existing playback session is the
+ // target. NOTE(review): this CHECK aborts if no session exists yet —
+ // confirm the sink never sends SET_PARAMETER before SETUP.
+ CHECK_EQ(mPlaybackSessions.size(), 1u);
+ playbackSessionID = mPlaybackSessions.keyAt(0);
+ sp<PlaybackSession> playbackSession = mPlaybackSessions.valueAt(0);
+#endif
+
+ playbackSession->updateLiveness();
+
+ AString response = "RTSP/1.0 200 OK\r\n";
+ AppendCommonResponse(&response, cseq, playbackSessionID);
+ response.append("\r\n");
+
+ status_t err = mNetSession->sendRequest(sessionID, response.c_str());
+ CHECK_EQ(err, (status_t)OK);
+}
+
+// static
+// Append the headers common to every RTSP message we emit: Date, Server,
+// CSeq (when cseq >= 0) and, when playbackSessionID >= 0, a Session
+// header carrying the keep-alive timeout.
+void WifiDisplaySource::AppendCommonResponse(
+ AString *response, int32_t cseq, int32_t playbackSessionID) {
+ // NOTE(review): gmtime() returns a shared static buffer and is not
+ // thread-safe; acceptable while all callers run on the looper thread —
+ // confirm.
+ time_t now = time(NULL);
+ struct tm *now2 = gmtime(&now);
+ char buf[128];
+ strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", now2);
+
+ response->append("Date: ");
+ response->append(buf);
+ response->append("\r\n");
+
+ response->append("Server: Mine/1.0\r\n");
+
+ if (cseq >= 0) {
+ response->append(StringPrintf("CSeq: %d\r\n", cseq));
+ }
+
+ if (playbackSessionID >= 0ll) {
+ response->append(
+ StringPrintf(
+ "Session: %d;timeout=%lld\r\n",
+ playbackSessionID, kPlaybackSessionTimeoutSecs));
+ }
+}
+
+void WifiDisplaySource::sendErrorResponse(
+ int32_t sessionID,
+ const char *errorDetail,
+ int32_t cseq) {
+ AString response;
+ response.append("RTSP/1.0 ");
+ response.append(errorDetail);
+ response.append("\r\n");
+
+ AppendCommonResponse(&response, cseq);
+
+ response.append("\r\n");
+
+ status_t err = mNetSession->sendRequest(sessionID, response.c_str());
+ CHECK_EQ(err, (status_t)OK);
+}
+
+// Draw random ids until one is found that no active playback session is
+// already using.
+int32_t WifiDisplaySource::makeUniquePlaybackSessionID() const {
+    for (;;) {
+        int32_t playbackSessionID = rand();
+
+        // BUGFIX: the original inner loop's "continue" only advanced the
+        // scan over mPlaybackSessions and never retried the outer loop,
+        // so duplicate ids could be returned. Check membership directly
+        // and retry on collision instead.
+        if (mPlaybackSessions.indexOfKey(playbackSessionID) < 0) {
+            return playbackSessionID;
+        }
+    }
+}
+
+// Look up the playback session named by the message's "Session:" header.
+// *playbackSessionID receives the parsed id (0 if absent); returns NULL
+// when the header is missing or names no live session.
+sp<WifiDisplaySource::PlaybackSession> WifiDisplaySource::findPlaybackSession(
+        const sp<ParsedMessage> &data, int32_t *playbackSessionID) const {
+    if (!data->findInt32("session", playbackSessionID)) {
+        *playbackSessionID = 0;
+        return NULL;
+    }
+
+    ssize_t idx = mPlaybackSessions.indexOfKey(*playbackSessionID);
+    if (idx >= 0) {
+        return mPlaybackSessions.valueAt(idx);
+    }
+
+    return NULL;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
new file mode 100644
index 0000000..95c3560
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WIFI_DISPLAY_SOURCE_H_
+
+#define WIFI_DISPLAY_SOURCE_H_
+
+#include "ANetworkSession.h"
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ParsedMessage;
+
+// Represents the RTSP server acting as a wifi display source.
+// Manages incoming connections, sets up Playback sessions as necessary.
+struct WifiDisplaySource : public AHandler {
+ static const unsigned kWifiDisplayDefaultPort = 7236;
+
+ WifiDisplaySource(const sp<ANetworkSession> &netSession);
+
+ // Both block until the looper thread has processed the request.
+ status_t start(int32_t port);
+ status_t stop();
+
+protected:
+ virtual ~WifiDisplaySource();
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+ struct PlaybackSession;
+
+ enum {
+ kWhatStart,
+ kWhatRTSPNotify,
+ kWhatStop,
+ kWhatReapDeadClients,
+ kWhatPlaybackSessionNotify,
+ };
+
+ // Key identifying an outstanding RTSP request: the network session it
+ // was sent on plus its CSeq number.
+ struct ResponseID {
+ int32_t mSessionID;
+ int32_t mCSeq;
+
+ bool operator<(const ResponseID &other) const {
+ return mSessionID < other.mSessionID
+ || (mSessionID == other.mSessionID
+ && mCSeq < other.mCSeq);
+ }
+ };
+
+ typedef status_t (WifiDisplaySource::*HandleRTSPResponseFunc)(
+ int32_t sessionID, const sp<ParsedMessage> &msg);
+
+ static const int64_t kReaperIntervalUs = 1000000ll;
+
+ static const int64_t kPlaybackSessionTimeoutSecs = 30;
+
+ static const int64_t kPlaybackSessionTimeoutUs =
+ kPlaybackSessionTimeoutSecs * 1000000ll;
+
+ sp<ANetworkSession> mNetSession;
+ // Network session id of the RTSP server socket (0 until start()).
+ int32_t mSessionID;
+
+ // Addresses recorded for each connected client, keyed by network
+ // session id.
+ struct ClientInfo {
+ AString mRemoteIP;
+ AString mLocalIP;
+ int32_t mLocalPort;
+ };
+ KeyedVector<int32_t, ClientInfo> mClientIPs;
+
+ // True while a kWhatReapDeadClients message is in flight.
+ bool mReaperPending;
+
+ // CSeq to use for the next outgoing RTSP request.
+ int32_t mNextCSeq;
+
+ // One-shot handlers for responses to requests we originated.
+ KeyedVector<ResponseID, HandleRTSPResponseFunc> mResponseHandlers;
+
+ KeyedVector<int32_t, sp<PlaybackSession> > mPlaybackSessions;
+
+ // wfd handshake messages M1..M5 initiated by the source side.
+ status_t sendM1(int32_t sessionID);
+ status_t sendM3(int32_t sessionID);
+ status_t sendM4(int32_t sessionID);
+ status_t sendM5(int32_t sessionID);
+
+ status_t onReceiveM1Response(
+ int32_t sessionID, const sp<ParsedMessage> &msg);
+
+ status_t onReceiveM3Response(
+ int32_t sessionID, const sp<ParsedMessage> &msg);
+
+ status_t onReceiveM4Response(
+ int32_t sessionID, const sp<ParsedMessage> &msg);
+
+ status_t onReceiveM5Response(
+ int32_t sessionID, const sp<ParsedMessage> &msg);
+
+ void registerResponseHandler(
+ int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func);
+
+ // Dispatches one inbound RTSP request or response.
+ void onReceiveClientData(const sp<AMessage> &msg);
+
+ // Per-method handlers for RTSP requests received from the sink.
+ void onDescribeRequest(
+ int32_t sessionID,
+ int32_t cseq,
+ const sp<ParsedMessage> &data);
+
+ void onOptionsRequest(
+ int32_t sessionID,
+ int32_t cseq,
+ const sp<ParsedMessage> &data);
+
+ void onSetupRequest(
+ int32_t sessionID,
+ int32_t cseq,
+ const sp<ParsedMessage> &data);
+
+ void onPlayRequest(
+ int32_t sessionID,
+ int32_t cseq,
+ const sp<ParsedMessage> &data);
+
+ void onPauseRequest(
+ int32_t sessionID,
+ int32_t cseq,
+ const sp<ParsedMessage> &data);
+
+ void onTeardownRequest(
+ int32_t sessionID,
+ int32_t cseq,
+ const sp<ParsedMessage> &data);
+
+ void onGetParameterRequest(
+ int32_t sessionID,
+ int32_t cseq,
+ const sp<ParsedMessage> &data);
+
+ void onSetParameterRequest(
+ int32_t sessionID,
+ int32_t cseq,
+ const sp<ParsedMessage> &data);
+
+ void sendErrorResponse(
+ int32_t sessionID,
+ const char *errorDetail,
+ int32_t cseq);
+
+ static void AppendCommonResponse(
+ AString *response, int32_t cseq, int32_t playbackSessionID = -1ll);
+
+ void scheduleReaper();
+
+ int32_t makeUniquePlaybackSessionID() const;
+
+ sp<PlaybackSession> findPlaybackSession(
+ const sp<ParsedMessage> &data, int32_t *playbackSessionID) const;
+
+ DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySource);
+};
+
+} // namespace android
+
+#endif // WIFI_DISPLAY_SOURCE_H_
diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp
new file mode 100644
index 0000000..32cdf3f
--- /dev/null
+++ b/media/libstagefright/wifi-display/wfd.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "wfd"
+#include <utils/Log.h>
+
+#define SUPPORT_SINK 0
+
+#if SUPPORT_SINK
+#include "sink/WifiDisplaySink.h"
+#endif
+
+#include <binder/ProcessState.h>
+#include <binder/IServiceManager.h>
+#include <media/IMediaPlayerService.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+static void enableDisableRemoteDisplay(bool enable) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.player"));
+
+ sp<IMediaPlayerService> service =
+ interface_cast<IMediaPlayerService>(binder);
+
+ CHECK(service.get() != NULL);
+
+ service->enableRemoteDisplay(enable);
+}
+
+} // namespace android
+
+static void usage(const char *me) {
+ fprintf(stderr,
+ "usage:\n"
+#if SUPPORT_SINK
+ " %s -c host[:port]\tconnect to wifi source\n"
+ " -u uri \tconnect to an rtsp uri\n"
+#endif
+ " -e \tenable remote display\n"
+ " -d \tdisable remote display\n",
+ me);
+}
+
+int main(int argc, char **argv) {
+ using namespace android;
+
+ ProcessState::self()->startThreadPool();
+
+ DataSource::RegisterDefaultSniffers();
+
+ AString connectToHost;
+ int32_t connectToPort = -1;
+ AString uri;
+
+ int res;
+ while ((res = getopt(argc, argv, "hc:u:ed")) >= 0) {
+ switch (res) {
+#if SUPPORT_SINK
+ case 'c':
+ {
+ const char *colonPos = strrchr(optarg, ':');
+
+ if (colonPos == NULL) {
+ connectToHost = optarg;
+ connectToPort = WifiDisplaySource::kWifiDisplayDefaultPort;
+ } else {
+ connectToHost.setTo(optarg, colonPos - optarg);
+
+ char *end;
+ connectToPort = strtol(colonPos + 1, &end, 10);
+
+ if (*end != '\0' || end == colonPos + 1
+ || connectToPort < 1 || connectToPort > 65535) {
+ fprintf(stderr, "Illegal port specified.\n");
+ exit(1);
+ }
+ }
+ break;
+ }
+
+ case 'u':
+ {
+ uri = optarg;
+ break;
+ }
+#endif
+
+ case 'e':
+ case 'd':
+ {
+ enableDisableRemoteDisplay(res == 'e');
+ exit(0);
+ break;
+ }
+
+ case '?':
+ case 'h':
+ default:
+ usage(argv[0]);
+ exit(1);
+ }
+ }
+
+#if SUPPORT_SINK
+ if (connectToPort < 0 && uri.empty()) {
+ fprintf(stderr,
+ "You need to select either source host or uri.\n");
+
+ exit(1);
+ }
+
+ if (connectToPort >= 0 && !uri.empty()) {
+ fprintf(stderr,
+ "You need to either connect to a wfd host or an rtsp url, "
+ "not both.\n");
+ exit(1);
+ }
+
+ sp<ANetworkSession> session = new ANetworkSession;
+ session->start();
+
+ sp<ALooper> looper = new ALooper;
+
+ sp<WifiDisplaySink> sink = new WifiDisplaySink(session);
+ looper->registerHandler(sink);
+
+ if (connectToPort >= 0) {
+ sink->start(connectToHost.c_str(), connectToPort);
+ } else {
+ sink->start(uri.c_str());
+ }
+
+ looper->start(true /* runOnCallingThread */);
+#endif
+
+ return 0;
+}
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 391c319..1370c62 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -12,7 +12,10 @@
Camera2Client.cpp \
Camera2Device.cpp \
camera2/CameraMetadata.cpp \
- camera2/Parameters.cpp
+ camera2/Parameters.cpp \
+ camera2/FrameProcessor.cpp \
+ camera2/CaptureProcessor.cpp \
+ camera2/CallbackProcessor.cpp
LOCAL_SHARED_LIBRARIES:= \
libui \
diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp
index 9b6401b..acd290d 100644
--- a/services/camera/libcameraservice/Camera2Client.cpp
+++ b/services/camera/libcameraservice/Camera2Client.cpp
@@ -52,11 +52,9 @@
int clientPid):
Client(cameraService, cameraClient,
cameraId, cameraFacing, clientPid),
+ mSharedCameraClient(cameraClient),
mParameters(cameraId, cameraFacing),
mPreviewStreamId(NO_STREAM),
- mCallbackStreamId(NO_STREAM),
- mCallbackHeapId(0),
- mCaptureStreamId(NO_STREAM),
mRecordingStreamId(NO_STREAM),
mRecordingHeapCount(kDefaultRecordingHeapCount)
{
@@ -84,11 +82,6 @@
ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
status_t res;
- mFrameProcessor = new FrameProcessor(this);
- String8 frameThreadName = String8::format("Camera2Client[%d]::FrameProcessor",
- mCameraId);
- mFrameProcessor->run(frameThreadName.string());
-
res = mDevice->initialize(module);
if (res != OK) {
ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
@@ -107,6 +100,21 @@
return NO_INIT;
}
+ mFrameProcessor = new FrameProcessor(this);
+ String8 frameThreadName = String8::format("Camera2Client[%d]::FrameProcessor",
+ mCameraId);
+ mFrameProcessor->run(frameThreadName.string());
+
+ mCaptureProcessor = new CaptureProcessor(this);
+ String8 captureThreadName =
+ String8::format("Camera2Client[%d]::CaptureProcessor", mCameraId);
+ mCaptureProcessor->run(captureThreadName.string());
+
+ mCallbackProcessor = new CallbackProcessor(this);
+ String8 callbackThreadName =
+ String8::format("Camera2Client[%d]::CallbackProcessor", mCameraId);
+ mCallbackProcessor->run(callbackThreadName.string());
+
if (gLogLevel >= 1) {
ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
mCameraId);
@@ -292,7 +300,8 @@
result.append(" Current streams:\n");
result.appendFormat(" Preview stream ID: %d\n", mPreviewStreamId);
- result.appendFormat(" Capture stream ID: %d\n", mCaptureStreamId);
+ result.appendFormat(" Capture stream ID: %d\n",
+ mCaptureProcessor->getStreamId());
result.appendFormat(" Recording stream ID: %d\n", mRecordingStreamId);
result.append(" Current requests:\n");
@@ -357,20 +366,14 @@
mPreviewStreamId = NO_STREAM;
}
- if (mCaptureStreamId != NO_STREAM) {
- mDevice->deleteStream(mCaptureStreamId);
- mCaptureStreamId = NO_STREAM;
- }
+ mCaptureProcessor->deleteStream();
if (mRecordingStreamId != NO_STREAM) {
mDevice->deleteStream(mRecordingStreamId);
mRecordingStreamId = NO_STREAM;
}
- if (mCallbackStreamId != NO_STREAM) {
- mDevice->deleteStream(mCallbackStreamId);
- mCallbackStreamId = NO_STREAM;
- }
+ mCallbackProcessor->deleteStream();
mDevice.clear();
SharedParameters::Lock l(mParameters);
@@ -393,8 +396,8 @@
mClientPid = getCallingPid();
- Mutex::Autolock iccl(mICameraClientLock);
mCameraClient = client;
+ mSharedCameraClient = client;
SharedParameters::Lock l(mParameters);
l.mParameters.state = Parameters::STOPPED;
@@ -433,10 +436,9 @@
// TODO: Check for uninterruptable conditions
if (mClientPid == getCallingPid()) {
- Mutex::Autolock iccl(mICameraClientLock);
-
mClientPid = 0;
mCameraClient.clear();
+ mSharedCameraClient.clear();
return OK;
}
@@ -614,7 +616,7 @@
bool callbacksEnabled = params.previewCallbackFlags &
CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK;
if (callbacksEnabled) {
- res = updateCallbackStream(params);
+ res = mCallbackProcessor->updateStream(params);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
@@ -633,7 +635,7 @@
if (callbacksEnabled) {
uint8_t outputStreams[2] =
- { mPreviewStreamId, mCallbackStreamId };
+ { mPreviewStreamId, mCallbackProcessor->getStreamId() };
res = mPreviewRequest.update(
ANDROID_REQUEST_OUTPUT_STREAMS,
outputStreams, 2);
@@ -797,7 +799,7 @@
bool callbacksEnabled = params.previewCallbackFlags &
CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK;
if (callbacksEnabled) {
- res = updateCallbackStream(params);
+ res = mCallbackProcessor->updateStream(params);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
@@ -816,7 +818,8 @@
if (callbacksEnabled) {
uint8_t outputStreams[3] =
- { mPreviewStreamId, mRecordingStreamId, mCallbackStreamId };
+ { mPreviewStreamId, mRecordingStreamId,
+ mCallbackProcessor->getStreamId() };
res = mRecordingRequest.update(
ANDROID_REQUEST_OUTPUT_STREAMS,
outputStreams, 3);
@@ -1029,7 +1032,7 @@
ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId);
- res = updateCaptureStream(l.mParameters);
+ res = mCaptureProcessor->updateStream(l.mParameters);
if (res != OK) {
ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
@@ -1049,32 +1052,47 @@
CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK;
bool recordingEnabled = (l.mParameters.state == Parameters::RECORD);
+ int captureStreamId = mCaptureProcessor->getStreamId();
+
int streamSwitch = (callbacksEnabled ? 0x2 : 0x0) +
(recordingEnabled ? 0x1 : 0x0);
switch ( streamSwitch ) {
case 0: { // No recording, callbacks
- uint8_t streamIds[2] = { mPreviewStreamId, mCaptureStreamId };
+ uint8_t streamIds[2] = {
+ mPreviewStreamId,
+ captureStreamId
+ };
res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
streamIds, 2);
break;
}
case 1: { // Recording
- uint8_t streamIds[3] = { mPreviewStreamId, mRecordingStreamId,
- mCaptureStreamId };
+ uint8_t streamIds[3] = {
+ mPreviewStreamId,
+ mRecordingStreamId,
+ captureStreamId
+ };
res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
streamIds, 3);
break;
}
case 2: { // Callbacks
- uint8_t streamIds[3] = { mPreviewStreamId, mCallbackStreamId,
- mCaptureStreamId };
+ uint8_t streamIds[3] = {
+ mPreviewStreamId,
+ mCallbackProcessor->getStreamId(),
+ captureStreamId
+ };
res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
streamIds, 3);
break;
}
case 3: { // Both
- uint8_t streamIds[4] = { mPreviewStreamId, mCallbackStreamId,
- mRecordingStreamId, mCaptureStreamId };
+ uint8_t streamIds[4] = {
+ mPreviewStreamId,
+ mCallbackProcessor->getStreamId(),
+ mRecordingStreamId,
+ captureStreamId
+ };
res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
streamIds, 4);
break;
@@ -1457,22 +1475,25 @@
case Parameters::FOCUS_MODE_FIXED:
default:
if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
- ALOGE("%s: Unexpected AF state change %d (ID %d) in focus mode %d",
- __FUNCTION__, newState, triggerId, l.mParameters.focusMode);
+ ALOGE("%s: Unexpected AF state change %d "
+ "(ID %d) in focus mode %d",
+ __FUNCTION__, newState, triggerId,
+ l.mParameters.focusMode);
}
}
}
if (sendMovingMessage) {
- Mutex::Autolock iccl(mICameraClientLock);
- if (mCameraClient != 0) {
- mCameraClient->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
+ SharedCameraClient::Lock l(mSharedCameraClient);
+ if (l.mCameraClient != 0) {
+ l.mCameraClient->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
afInMotion ? 1 : 0, 0);
}
}
if (sendCompletedMessage) {
- Mutex::Autolock iccl(mICameraClientLock);
- if (mCameraClient != 0) {
- mCameraClient->notifyCallback(CAMERA_MSG_FOCUS, success ? 1 : 0, 0);
+ SharedCameraClient::Lock l(mSharedCameraClient);
+ if (l.mCameraClient != 0) {
+ l.mCameraClient->notifyCallback(CAMERA_MSG_FOCUS,
+ success ? 1 : 0, 0);
}
}
}
@@ -1487,377 +1508,42 @@
__FUNCTION__, newState, triggerId);
}
-Camera2Client::FrameProcessor::FrameProcessor(wp<Camera2Client> client):
- Thread(false), mClient(client) {
+int Camera2Client::getCameraId() {
+ return mCameraId;
}
-Camera2Client::FrameProcessor::~FrameProcessor() {
- ALOGV("%s: Exit", __FUNCTION__);
+const sp<Camera2Device>& Camera2Client::getCameraDevice() {
+ return mDevice;
}
-void Camera2Client::FrameProcessor::dump(int fd, const Vector<String16>& args) {
- String8 result(" Latest received frame:\n");
- write(fd, result.string(), result.size());
- mLastFrame.dump(fd, 2, 6);
+camera2::SharedParameters& Camera2Client::getParameters() {
+ return mParameters;
}
-bool Camera2Client::FrameProcessor::threadLoop() {
- status_t res;
-
- sp<Camera2Device> device;
- {
- sp<Camera2Client> client = mClient.promote();
- if (client == 0) return false;
- device = client->mDevice;
- }
-
- res = device->waitForNextFrame(kWaitDuration);
- if (res == OK) {
- sp<Camera2Client> client = mClient.promote();
- if (client == 0) return false;
- processNewFrames(client);
- } else if (res != TIMED_OUT) {
- ALOGE("Camera2Client::FrameProcessor: Error waiting for new "
- "frames: %s (%d)", strerror(-res), res);
- }
-
- return true;
+Camera2Client::SharedCameraClient::Lock::Lock(SharedCameraClient &client):
+ mCameraClient(client.mCameraClient),
+ mSharedClient(client) {
+ mSharedClient.mCameraClientLock.lock();
}
-void Camera2Client::FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
- status_t res;
- CameraMetadata frame;
- while ( (res = client->mDevice->getNextFrame(&frame)) == OK) {
- camera_metadata_entry_t entry;
- entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
- __FUNCTION__, client->mCameraId, strerror(-res), res);
- break;
- }
-
- res = processFaceDetect(frame, client);
- if (res != OK) break;
-
- mLastFrame.acquire(frame);
- }
- if (res != NOT_ENOUGH_DATA) {
- ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
- __FUNCTION__, client->mCameraId, strerror(-res), res);
- return;
- }
-
- return;
+Camera2Client::SharedCameraClient::Lock::~Lock() {
+ mSharedClient.mCameraClientLock.unlock();
}
-status_t Camera2Client::FrameProcessor::processFaceDetect(
- const CameraMetadata &frame, sp<Camera2Client> &client) {
- status_t res;
- camera_metadata_ro_entry_t entry;
- bool enableFaceDetect;
- int maxFaces;
- {
- SharedParameters::Lock l(client->mParameters);
- enableFaceDetect = l.mParameters.enableFaceDetect;
- }
- entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE);
-
- // TODO: This should be an error once implementations are compliant
- if (entry.count == 0) {
- return OK;
- }
-
- uint8_t faceDetectMode = entry.data.u8[0];
-
- camera_frame_metadata metadata;
- Vector<camera_face_t> faces;
- metadata.number_of_faces = 0;
-
- if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) {
- SharedParameters::Lock l(client->mParameters);
- entry = frame.find(ANDROID_STATS_FACE_RECTANGLES);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: Unable to read face rectangles",
- __FUNCTION__, client->mCameraId);
- return res;
- }
- metadata.number_of_faces = entry.count / 4;
- if (metadata.number_of_faces >
- l.mParameters.fastInfo.maxFaces) {
- ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
- __FUNCTION__, client->mCameraId,
- metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
- return res;
- }
- const int32_t *faceRects = entry.data.i32;
-
- entry = frame.find(ANDROID_STATS_FACE_SCORES);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: Unable to read face scores",
- __FUNCTION__, client->mCameraId);
- return res;
- }
- const uint8_t *faceScores = entry.data.u8;
-
- const int32_t *faceLandmarks = NULL;
- const int32_t *faceIds = NULL;
-
- if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
- entry = frame.find(ANDROID_STATS_FACE_LANDMARKS);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: Unable to read face landmarks",
- __FUNCTION__, client->mCameraId);
- return res;
- }
- faceLandmarks = entry.data.i32;
-
- entry = frame.find(ANDROID_STATS_FACE_IDS);
-
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: Unable to read face IDs",
- __FUNCTION__, client->mCameraId);
- return res;
- }
- faceIds = entry.data.i32;
- }
-
- faces.setCapacity(metadata.number_of_faces);
-
- for (int i = 0; i < metadata.number_of_faces; i++) {
- camera_face_t face;
-
- face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
- face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
- face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
- face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);
-
- face.score = faceScores[i];
- if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
- face.id = faceIds[i];
- face.left_eye[0] =
- l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
- face.left_eye[1] =
- l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
- face.right_eye[0] =
- l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
- face.right_eye[1] =
- l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
- face.mouth[0] =
- l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
- face.mouth[1] =
- l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
- } else {
- face.id = 0;
- face.left_eye[0] = face.left_eye[1] = -2000;
- face.right_eye[0] = face.right_eye[1] = -2000;
- face.mouth[0] = face.mouth[1] = -2000;
- }
- faces.push_back(face);
- }
-
- metadata.faces = faces.editArray();
- }
-
- if (metadata.number_of_faces != 0) {
- Mutex::Autolock iccl(client->mICameraClientLock);
- if (client->mCameraClient != NULL) {
- client->mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
- NULL, &metadata);
- }
- }
- return OK;
+Camera2Client::SharedCameraClient::SharedCameraClient(const sp<ICameraClient>&client):
+ mCameraClient(client) {
}
-void Camera2Client::onCallbackAvailable() {
- ATRACE_CALL();
- status_t res;
- ALOGV("%s: Camera %d: Preview callback available", __FUNCTION__, mCameraId);
-
- int callbackHeapId;
- sp<Camera2Heap> callbackHeap;
- size_t heapIdx;
-
- CpuConsumer::LockedBuffer imgBuffer;
- ALOGV("%s: Getting buffer", __FUNCTION__);
- res = mCallbackConsumer->lockNextBuffer(&imgBuffer);
- if (res != OK) {
- ALOGE("%s: Camera %d: Error receiving next callback buffer: "
- "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
- return;
- }
-
- {
- SharedParameters::Lock l(mParameters);
-
- if ( l.mParameters.state != Parameters::PREVIEW
- && l.mParameters.state != Parameters::RECORD
- && l.mParameters.state != Parameters::VIDEO_SNAPSHOT) {
- ALOGV("%s: Camera %d: No longer streaming",
- __FUNCTION__, mCameraId);
- mCallbackConsumer->unlockBuffer(imgBuffer);
- return;
- }
-
- if (! (l.mParameters.previewCallbackFlags &
- CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ) {
- ALOGV("%s: No longer enabled, dropping", __FUNCTION__);
- mCallbackConsumer->unlockBuffer(imgBuffer);
- return;
- }
- if ((l.mParameters.previewCallbackFlags &
- CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) &&
- !l.mParameters.previewCallbackOneShot) {
- ALOGV("%s: One shot mode, already sent, dropping", __FUNCTION__);
- mCallbackConsumer->unlockBuffer(imgBuffer);
- return;
- }
-
- if (imgBuffer.format != l.mParameters.previewFormat) {
- ALOGE("%s: Camera %d: Unexpected format for callback: "
- "%x, expected %x", __FUNCTION__, mCameraId,
- imgBuffer.format, l.mParameters.previewFormat);
- mCallbackConsumer->unlockBuffer(imgBuffer);
- return;
- }
-
- size_t bufferSize = calculateBufferSize(imgBuffer.width, imgBuffer.height,
- imgBuffer.format, imgBuffer.stride);
- size_t currentBufferSize = (mCallbackHeap == 0) ?
- 0 : (mCallbackHeap->mHeap->getSize() / kCallbackHeapCount);
- if (bufferSize != currentBufferSize) {
- mCallbackHeap.clear();
- mCallbackHeap = new Camera2Heap(bufferSize, kCallbackHeapCount,
- "Camera2Client::CallbackHeap");
- if (mCallbackHeap->mHeap->getSize() == 0) {
- ALOGE("%s: Camera %d: Unable to allocate memory for callbacks",
- __FUNCTION__, mCameraId);
- mCallbackConsumer->unlockBuffer(imgBuffer);
- return;
- }
-
- mCallbackHeapHead = 0;
- mCallbackHeapFree = kCallbackHeapCount;
- mCallbackHeapId++;
- }
-
- if (mCallbackHeapFree == 0) {
- ALOGE("%s: Camera %d: No free callback buffers, dropping frame",
- __FUNCTION__, mCameraId);
- mCallbackConsumer->unlockBuffer(imgBuffer);
- return;
- }
- heapIdx = mCallbackHeapHead;
- callbackHeap = mCallbackHeap;
- callbackHeapId = mCallbackHeapId;
-
- mCallbackHeapHead = (mCallbackHeapHead + 1) & kCallbackHeapCount;
- mCallbackHeapFree--;
-
- // TODO: Get rid of this memcpy by passing the gralloc queue all the way
- // to app
-
- ssize_t offset;
- size_t size;
- sp<IMemoryHeap> heap =
- mCallbackHeap->mBuffers[heapIdx]->getMemory(&offset,
- &size);
- uint8_t *data = (uint8_t*)heap->getBase() + offset;
- memcpy(data, imgBuffer.data, bufferSize);
-
- ALOGV("%s: Freeing buffer", __FUNCTION__);
- mCallbackConsumer->unlockBuffer(imgBuffer);
-
- // In one-shot mode, stop sending callbacks after the first one
- if (l.mParameters.previewCallbackFlags &
- CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) {
- ALOGV("%s: clearing oneshot", __FUNCTION__);
- l.mParameters.previewCallbackOneShot = false;
- }
- }
-
- // Call outside parameter lock to allow re-entrancy from notification
- {
- Mutex::Autolock iccl(mICameraClientLock);
- if (mCameraClient != 0) {
- ALOGV("%s: Camera %d: Invoking client data callback",
- __FUNCTION__, mCameraId);
- mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_FRAME,
- callbackHeap->mBuffers[heapIdx], NULL);
- }
- }
-
- SharedParameters::Lock l(mParameters);
- // Only increment free if we're still using the same heap
- if (mCallbackHeapId == callbackHeapId) {
- mCallbackHeapFree++;
- }
-
- ALOGV("%s: exit", __FUNCTION__);
+Camera2Client::SharedCameraClient& Camera2Client::SharedCameraClient::operator=(
+ const sp<ICameraClient>&client) {
+ Mutex::Autolock l(mCameraClientLock);
+ mCameraClient = client;
+ return *this;
}
-void Camera2Client::onCaptureAvailable() {
- ATRACE_CALL();
- status_t res;
- sp<Camera2Heap> captureHeap;
- ALOGV("%s: Camera %d: Still capture available", __FUNCTION__, mCameraId);
-
- {
- SharedParameters::Lock l(mParameters);
- CpuConsumer::LockedBuffer imgBuffer;
-
- res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
- if (res != OK) {
- ALOGE("%s: Camera %d: Error receiving still image buffer: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
- return;
- }
-
- // TODO: Signal errors here upstream
- if (l.mParameters.state != Parameters::STILL_CAPTURE &&
- l.mParameters.state != Parameters::VIDEO_SNAPSHOT) {
- ALOGE("%s: Camera %d: Still image produced unexpectedly!",
- __FUNCTION__, mCameraId);
- mCaptureConsumer->unlockBuffer(imgBuffer);
- return;
- }
-
- if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
- ALOGE("%s: Camera %d: Unexpected format for still image: "
- "%x, expected %x", __FUNCTION__, mCameraId,
- imgBuffer.format,
- HAL_PIXEL_FORMAT_BLOB);
- mCaptureConsumer->unlockBuffer(imgBuffer);
- return;
- }
-
- // TODO: Optimize this to avoid memcopy
- void* captureMemory = mCaptureHeap->mHeap->getBase();
- size_t size = mCaptureHeap->mHeap->getSize();
- memcpy(captureMemory, imgBuffer.data, size);
-
- mCaptureConsumer->unlockBuffer(imgBuffer);
-
- switch (l.mParameters.state) {
- case Parameters::STILL_CAPTURE:
- l.mParameters.state = Parameters::STOPPED;
- break;
- case Parameters::VIDEO_SNAPSHOT:
- l.mParameters.state = Parameters::RECORD;
- break;
- default:
- ALOGE("%s: Camera %d: Unexpected state %d", __FUNCTION__,
- mCameraId, l.mParameters.state);
- break;
- }
-
- captureHeap = mCaptureHeap;
- }
- // Call outside parameter locks to allow re-entrancy from notification
- Mutex::Autolock iccl(mICameraClientLock);
- if (mCameraClient != 0) {
- mCameraClient->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE,
- captureHeap->mBuffers[0], NULL);
- }
+void Camera2Client::SharedCameraClient::clear() {
+ Mutex::Autolock l(mCameraClientLock);
+ mCameraClient.clear();
}
void Camera2Client::onRecordingFrameAvailable() {
@@ -1951,9 +1637,9 @@
}
// Call outside locked parameters to allow re-entrancy from notification
- Mutex::Autolock iccl(mICameraClientLock);
- if (mCameraClient != 0) {
- mCameraClient->dataCallbackTimestamp(timestamp,
+ SharedCameraClient::Lock l(mSharedCameraClient);
+ if (l.mCameraClient != 0) {
+ l.mCameraClient->dataCallbackTimestamp(timestamp,
CAMERA_MSG_VIDEO_FRAME,
recordingHeap->mBuffers[heapIdx]);
}
@@ -2086,132 +1772,6 @@
return OK;
}
-status_t Camera2Client::updateCallbackStream(const Parameters ¶ms) {
- status_t res;
-
- if (mCallbackConsumer == 0) {
- // Create CPU buffer queue endpoint
- mCallbackConsumer = new CpuConsumer(kCallbackHeapCount);
- mCallbackWaiter = new CallbackWaiter(this);
- mCallbackConsumer->setFrameAvailableListener(mCallbackWaiter);
- mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer"));
- mCallbackWindow = new SurfaceTextureClient(
- mCallbackConsumer->getProducerInterface());
- }
-
- if (mCallbackStreamId != NO_STREAM) {
- // Check if stream parameters have to change
- uint32_t currentWidth, currentHeight, currentFormat;
- res = mDevice->getStreamInfo(mCallbackStreamId,
- ¤tWidth, ¤tHeight, ¤tFormat);
- if (res != OK) {
- ALOGE("%s: Camera %d: Error querying callback output stream info: "
- "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
- return res;
- }
- if (currentWidth != (uint32_t)params.previewWidth ||
- currentHeight != (uint32_t)params.previewHeight ||
- currentFormat != (uint32_t)params.previewFormat) {
- // Since size should only change while preview is not running,
- // assuming that all existing use of old callback stream is
- // completed.
- res = mDevice->deleteStream(mCallbackStreamId);
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to delete old output stream "
- "for callbacks: %s (%d)", __FUNCTION__, mCameraId,
- strerror(-res), res);
- return res;
- }
- mCallbackStreamId = NO_STREAM;
- }
- }
-
- if (mCallbackStreamId == NO_STREAM) {
- ALOGV("Creating callback stream: %d %d format 0x%x",
- params.previewWidth, params.previewHeight,
- params.previewFormat);
- res = mDevice->createStream(mCallbackWindow,
- params.previewWidth, params.previewHeight,
- params.previewFormat, 0, &mCallbackStreamId);
- if (res != OK) {
- ALOGE("%s: Camera %d: Can't create output stream for callbacks: "
- "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
- return res;
- }
- }
-
- return OK;
-}
-
-
-status_t Camera2Client::updateCaptureStream(const Parameters ¶ms) {
- ATRACE_CALL();
- status_t res;
- // Find out buffer size for JPEG
- camera_metadata_ro_entry_t maxJpegSize =
- mParameters.staticInfo(ANDROID_JPEG_MAX_SIZE);
- if (maxJpegSize.count == 0) {
- ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
- __FUNCTION__, mCameraId);
- return INVALID_OPERATION;
- }
-
- if (mCaptureConsumer == 0) {
- // Create CPU buffer queue endpoint
- mCaptureConsumer = new CpuConsumer(1);
- mCaptureConsumer->setFrameAvailableListener(new CaptureWaiter(this));
- mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
- mCaptureWindow = new SurfaceTextureClient(
- mCaptureConsumer->getProducerInterface());
- // Create memory for API consumption
- mCaptureHeap = new Camera2Heap(maxJpegSize.data.i32[0], 1,
- "Camera2Client::CaptureHeap");
- if (mCaptureHeap->mHeap->getSize() == 0) {
- ALOGE("%s: Camera %d: Unable to allocate memory for capture",
- __FUNCTION__, mCameraId);
- return NO_MEMORY;
- }
- }
-
- if (mCaptureStreamId != NO_STREAM) {
- // Check if stream parameters have to change
- uint32_t currentWidth, currentHeight;
- res = mDevice->getStreamInfo(mCaptureStreamId,
- ¤tWidth, ¤tHeight, 0);
- if (res != OK) {
- ALOGE("%s: Camera %d: Error querying capture output stream info: "
- "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
- return res;
- }
- if (currentWidth != (uint32_t)params.pictureWidth ||
- currentHeight != (uint32_t)params.pictureHeight) {
- res = mDevice->deleteStream(mCaptureStreamId);
- if (res != OK) {
- ALOGE("%s: Camera %d: Unable to delete old output stream "
- "for capture: %s (%d)", __FUNCTION__, mCameraId,
- strerror(-res), res);
- return res;
- }
- mCaptureStreamId = NO_STREAM;
- }
- }
-
- if (mCaptureStreamId == NO_STREAM) {
- // Create stream for HAL production
- res = mDevice->createStream(mCaptureWindow,
- params.pictureWidth, params.pictureHeight,
- HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0],
- &mCaptureStreamId);
- if (res != OK) {
- ALOGE("%s: Camera %d: Can't create output stream for capture: "
- "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
- return res;
- }
-
- }
- return OK;
-}
-
status_t Camera2Client::updateCaptureRequest(const Parameters ¶ms) {
ATRACE_CALL();
status_t res;
diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h
index 4023397..b2fd636 100644
--- a/services/camera/libcameraservice/Camera2Client.h
+++ b/services/camera/libcameraservice/Camera2Client.h
@@ -20,6 +20,9 @@
#include "Camera2Device.h"
#include "CameraService.h"
#include "camera2/Parameters.h"
+#include "camera2/FrameProcessor.h"
+#include "camera2/CaptureProcessor.h"
+#include "camera2/CallbackProcessor.h"
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <gui/CpuConsumer.h>
@@ -36,7 +39,9 @@
public Camera2Device::NotificationListener
{
public:
- // ICamera interface (see ICamera for details)
+ /**
+ * ICamera interface (see ICamera for details)
+ */
virtual void disconnect();
virtual status_t connect(const sp<ICameraClient>& client);
@@ -61,7 +66,9 @@
virtual String8 getParameters() const;
virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
- // Interface used by CameraService
+ /**
+ * Interface used by CameraService
+ */
Camera2Client(const sp<CameraService>& cameraService,
const sp<ICameraClient>& cameraClient,
@@ -74,7 +81,9 @@
virtual status_t dump(int fd, const Vector<String16>& args);
- // Interface used by CameraDevice
+ /**
+ * Interface used by Camera2Device
+ */
virtual void notifyError(int errorCode, int arg1, int arg2);
virtual void notifyShutter(int frameNumber, nsecs_t timestamp);
@@ -82,6 +91,38 @@
virtual void notifyAutoExposure(uint8_t newState, int triggerId);
virtual void notifyAutoWhitebalance(uint8_t newState, int triggerId);
+ /**
+ * Interface used by independent components of Camera2Client.
+ */
+
+ int getCameraId();
+ const sp<Camera2Device>& getCameraDevice();
+ camera2::SharedParameters& getParameters();
+
+ // Simple class to ensure that access to ICameraClient is serialized by
+ // requiring mCameraClientLock to be locked before access to mCameraClient
+ // is possible.
+ class SharedCameraClient {
+ public:
+ class Lock {
+ public:
+ Lock(SharedCameraClient &client);
+ ~Lock();
+ sp<ICameraClient> &mCameraClient;
+ private:
+ SharedCameraClient &mSharedClient;
+ };
+ SharedCameraClient(const sp<ICameraClient>& client);
+ SharedCameraClient& operator=(const sp<ICameraClient>& client);
+ void clear();
+ private:
+ sp<ICameraClient> mCameraClient;
+ mutable Mutex mCameraClientLock;
+ } mSharedCameraClient;
+
+ static size_t calculateBufferSize(int width, int height,
+ int format, int stride);
+
private:
/** ICamera interface-related private members */
@@ -91,11 +132,6 @@
// they're called
mutable Mutex mICameraLock;
- // Mutex that must be locked by methods accessing the base Client's
- // mCameraClient ICameraClient interface member, for sending notifications
- // up to the camera user
- mutable Mutex mICameraClientLock;
-
typedef camera2::Parameters Parameters;
typedef camera2::CameraMetadata CameraMetadata;
@@ -123,37 +159,13 @@
/** Camera device-related private members */
- class Camera2Heap;
-
void setPreviewCallbackFlagL(Parameters ¶ms, int flag);
status_t updateRequests(const Parameters ¶ms);
// Used with stream IDs
static const int NO_STREAM = -1;
- /* Output frame metadata processing thread. This thread waits for new
- * frames from the device, and analyzes them as necessary.
- */
- class FrameProcessor: public Thread {
- public:
- FrameProcessor(wp<Camera2Client> client);
- ~FrameProcessor();
-
- void dump(int fd, const Vector<String16>& args);
- private:
- static const nsecs_t kWaitDuration = 10000000; // 10 ms
- wp<Camera2Client> mClient;
-
- virtual bool threadLoop();
-
- void processNewFrames(sp<Camera2Client> &client);
- status_t processFaceDetect(const CameraMetadata &frame,
- sp<Camera2Client> &client);
-
- CameraMetadata mLastFrame;
- };
-
- sp<FrameProcessor> mFrameProcessor;
+ sp<camera2::FrameProcessor> mFrameProcessor;
/* Preview related members */
@@ -167,50 +179,13 @@
/** Preview callback related members */
- int mCallbackStreamId;
- static const size_t kCallbackHeapCount = 6;
- sp<CpuConsumer> mCallbackConsumer;
- sp<ANativeWindow> mCallbackWindow;
- // Simple listener that forwards frame available notifications from
- // a CPU consumer to the callback notification
- class CallbackWaiter: public CpuConsumer::FrameAvailableListener {
- public:
- CallbackWaiter(Camera2Client *parent) : mParent(parent) {}
- void onFrameAvailable() { mParent->onCallbackAvailable(); }
- private:
- Camera2Client *mParent;
- };
- sp<CallbackWaiter> mCallbackWaiter;
- sp<Camera2Heap> mCallbackHeap;
- int mCallbackHeapId;
- size_t mCallbackHeapHead, mCallbackHeapFree;
- // Handle callback image buffers
- void onCallbackAvailable();
-
- status_t updateCallbackStream(const Parameters &params);
+ sp<camera2::CallbackProcessor> mCallbackProcessor;
/* Still image capture related members */
- int mCaptureStreamId;
- sp<CpuConsumer> mCaptureConsumer;
- sp<ANativeWindow> mCaptureWindow;
- // Simple listener that forwards frame available notifications from
- // a CPU consumer to the capture notification
- class CaptureWaiter: public CpuConsumer::FrameAvailableListener {
- public:
- CaptureWaiter(Camera2Client *parent) : mParent(parent) {}
- void onFrameAvailable() { mParent->onCaptureAvailable(); }
- private:
- Camera2Client *mParent;
- };
- sp<CaptureWaiter> mCaptureWaiter;
+ sp<camera2::CaptureProcessor> mCaptureProcessor;
CameraMetadata mCaptureRequest;
- sp<Camera2Heap> mCaptureHeap;
- // Handle captured image buffers
- void onCaptureAvailable();
-
 status_t updateCaptureRequest(const Parameters &params);
- status_t updateCaptureStream(const Parameters &params);
/* Recording related members */
@@ -229,7 +204,7 @@
};
sp<RecordingWaiter> mRecordingWaiter;
CameraMetadata mRecordingRequest;
- sp<Camera2Heap> mRecordingHeap;
+ sp<camera2::Camera2Heap> mRecordingHeap;
static const size_t kDefaultRecordingHeapCount = 8;
size_t mRecordingHeapCount;
@@ -254,32 +229,6 @@
// Verify that caller is the owner of the camera
status_t checkPid(const char *checkLocation) const;
- // Utility class for managing a set of IMemory blocks
- class Camera2Heap : public RefBase {
- public:
- Camera2Heap(size_t buf_size, uint_t num_buffers = 1,
- const char *name = NULL) :
- mBufSize(buf_size),
- mNumBufs(num_buffers) {
- mHeap = new MemoryHeapBase(buf_size * num_buffers, 0, name);
- mBuffers = new sp<MemoryBase>[mNumBufs];
- for (uint_t i = 0; i < mNumBufs; i++)
- mBuffers[i] = new MemoryBase(mHeap,
- i * mBufSize,
- mBufSize);
- }
-
- virtual ~Camera2Heap()
- {
- delete [] mBuffers;
- }
-
- size_t mBufSize;
- uint_t mNumBufs;
- sp<MemoryHeapBase> mHeap;
- sp<MemoryBase> *mBuffers;
- };
-
// Update parameters all requests use, based on mParameters
 status_t updateRequestCommon(CameraMetadata *request, const Parameters &params) const;
@@ -291,9 +240,6 @@
int arrayXToNormalized(int width) const;
int arrayYToNormalized(int height) const;
-
- static size_t calculateBufferSize(int width, int height,
- int format, int stride);
};
}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
new file mode 100644
index 0000000..854b890
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
@@ -0,0 +1,293 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2Client::CallbackProcessor"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include "CallbackProcessor.h"
+#include <gui/SurfaceTextureClient.h>
+#include "../Camera2Device.h"
+#include "../Camera2Client.h"
+
+
+namespace android {
+namespace camera2 {
+
+// Starts idle with no stream; the callback stream is created lazily by
+// updateStream(). Thread(false): the thread never attaches to the Java VM.
+CallbackProcessor::CallbackProcessor(wp<Camera2Client> client):
+        Thread(false),
+        mClient(client),
+        mCallbackAvailable(false),
+        mCallbackStreamId(NO_STREAM) {
+}
+
+// Destructor only logs; thread shutdown is expected to be driven by the
+// owner (requestExit/join) - TODO confirm, not visible in this file.
+CallbackProcessor::~CallbackProcessor() {
+    ALOGV("%s: Exit", __FUNCTION__);
+}
+
+// CpuConsumer listener: wakes threadLoop() when a new callback buffer is
+// queued. Multiple notifications coalesce into a single pending flag.
+void CallbackProcessor::onFrameAvailable() {
+    Mutex::Autolock l(mInputMutex);
+    if (!mCallbackAvailable) {
+        mCallbackAvailable = true;
+        mCallbackAvailableSignal.signal();
+    }
+}
+
+/**
+ * Create or resize the device output stream used for preview callbacks so
+ * it matches the current preview size/format. Lazily creates the CPU
+ * consumer endpoint on first use. Returns OK if the client is gone.
+ * (Fixed mis-encoded "&" characters: "¶ms" -> "&params",
+ * "¤t..." -> "&current...".)
+ */
+status_t CallbackProcessor::updateStream(const Parameters &params) {
+    ATRACE_CALL();
+    status_t res;
+
+    Mutex::Autolock l(mInputMutex);
+
+    sp<Camera2Client> client = mClient.promote();
+    if (client == 0) return OK;
+    sp<Camera2Device> device = client->getCameraDevice();
+
+    if (mCallbackConsumer == 0) {
+        // Create CPU buffer queue endpoint
+        mCallbackConsumer = new CpuConsumer(kCallbackHeapCount);
+        mCallbackConsumer->setFrameAvailableListener(this);
+        mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer"));
+        mCallbackWindow = new SurfaceTextureClient(
+            mCallbackConsumer->getProducerInterface());
+    }
+
+    if (mCallbackStreamId != NO_STREAM) {
+        // Check if stream parameters have to change
+        uint32_t currentWidth, currentHeight, currentFormat;
+        res = device->getStreamInfo(mCallbackStreamId,
+                &currentWidth, &currentHeight, &currentFormat);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Error querying callback output stream info: "
+                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    strerror(-res), res);
+            return res;
+        }
+        if (currentWidth != (uint32_t)params.previewWidth ||
+                currentHeight != (uint32_t)params.previewHeight ||
+                currentFormat != (uint32_t)params.previewFormat) {
+            // Since size should only change while preview is not running,
+            // assuming that all existing use of old callback stream is
+            // completed.
+            res = device->deleteStream(mCallbackStreamId);
+            if (res != OK) {
+                ALOGE("%s: Camera %d: Unable to delete old output stream "
+                        "for callbacks: %s (%d)", __FUNCTION__, client->getCameraId(),
+                        strerror(-res), res);
+                return res;
+            }
+            mCallbackStreamId = NO_STREAM;
+        }
+    }
+
+    if (mCallbackStreamId == NO_STREAM) {
+        ALOGV("Creating callback stream: %d %d format 0x%x",
+                params.previewWidth, params.previewHeight,
+                params.previewFormat);
+        res = device->createStream(mCallbackWindow,
+                params.previewWidth, params.previewHeight,
+                params.previewFormat, 0, &mCallbackStreamId);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Can't create output stream for callbacks: "
+                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    strerror(-res), res);
+            return res;
+        }
+    }
+
+    return OK;
+}
+
+/**
+ * Delete the callback stream from the device, if one exists. Safe to call
+ * when no stream was ever created; returns OK if the client is gone.
+ */
+status_t CallbackProcessor::deleteStream() {
+    ATRACE_CALL();
+    status_t res;
+
+    Mutex::Autolock l(mInputMutex);
+
+    if (mCallbackStreamId != NO_STREAM) {
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return OK;
+        sp<Camera2Device> device = client->getCameraDevice();
+
+        // NOTE(review): deleteStream's result is ignored here - confirm
+        // failures are acceptable to drop during teardown.
+        device->deleteStream(mCallbackStreamId);
+        mCallbackStreamId = NO_STREAM;
+    }
+    return OK;
+}
+
+// Returns the device stream ID used for callbacks, or NO_STREAM (-1) if no
+// stream is currently configured.
+int CallbackProcessor::getStreamId() const {
+    Mutex::Autolock l(mInputMutex);
+    return mCallbackStreamId;
+}
+
+// No callback-specific state is dumped yet; placeholder so the processor
+// participates in the client's dump interface.
+void CallbackProcessor::dump(int fd, const Vector<String16>& args) {
+}
+
+// Main loop: wait (up to kWaitDuration per iteration) for the
+// frame-available signal, then drain all pending callback buffers.
+// Returns false (exiting the thread) only when the client is gone.
+bool CallbackProcessor::threadLoop() {
+    status_t res;
+
+    {
+        Mutex::Autolock l(mInputMutex);
+        while (!mCallbackAvailable) {
+            res = mCallbackAvailableSignal.waitRelative(mInputMutex,
+                    kWaitDuration);
+            // Time out regularly so the loop can notice thread-exit requests
+            if (res == TIMED_OUT) return true;
+        }
+        mCallbackAvailable = false;
+    }
+
+    do {
+        // Re-promote each iteration; stop if the client has been destroyed
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return false;
+        res = processNewCallback(client);
+    } while (res == OK);
+
+    return true;
+}
+
+/**
+ * Copy one preview-callback frame from the CPU consumer into the IMemory
+ * ring heap and deliver it to the client via CAMERA_MSG_PREVIEW_FRAME.
+ * Returns the consumer error (typically BAD_VALUE) when no more buffers
+ * are pending, which ends the drain loop in threadLoop().
+ */
+status_t CallbackProcessor::processNewCallback(sp<Camera2Client> &client) {
+    ATRACE_CALL();
+    status_t res;
+
+    size_t heapIdx;
+
+    CpuConsumer::LockedBuffer imgBuffer;
+    ALOGV("%s: Getting buffer", __FUNCTION__);
+    res = mCallbackConsumer->lockNextBuffer(&imgBuffer);
+    if (res != OK) {
+        // BAD_VALUE just means the queue is drained; anything else is a
+        // real error worth logging
+        if (res != BAD_VALUE) {
+            ALOGE("%s: Camera %d: Error receiving next callback buffer: "
+                    "%s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        }
+        return res;
+    }
+    ALOGV("%s: Camera %d: Preview callback available", __FUNCTION__,
+            client->getCameraId());
+
+    {
+        SharedParameters::Lock l(client->getParameters());
+
+        // Drop the frame if streaming has stopped, callbacks are disabled,
+        // or a one-shot callback was already delivered
+        if ( l.mParameters.state != Parameters::PREVIEW
+                && l.mParameters.state != Parameters::RECORD
+                && l.mParameters.state != Parameters::VIDEO_SNAPSHOT) {
+            ALOGV("%s: Camera %d: No longer streaming",
+                    __FUNCTION__, client->getCameraId());
+            mCallbackConsumer->unlockBuffer(imgBuffer);
+            return OK;
+        }
+
+        if (! (l.mParameters.previewCallbackFlags &
+                CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ) {
+            ALOGV("%s: No longer enabled, dropping", __FUNCTION__);
+            mCallbackConsumer->unlockBuffer(imgBuffer);
+            return OK;
+        }
+        if ((l.mParameters.previewCallbackFlags &
+                CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) &&
+                !l.mParameters.previewCallbackOneShot) {
+            ALOGV("%s: One shot mode, already sent, dropping", __FUNCTION__);
+            mCallbackConsumer->unlockBuffer(imgBuffer);
+            return OK;
+        }
+
+        if (imgBuffer.format != l.mParameters.previewFormat) {
+            ALOGE("%s: Camera %d: Unexpected format for callback: "
+                    "%x, expected %x", __FUNCTION__, client->getCameraId(),
+                    imgBuffer.format, l.mParameters.previewFormat);
+            mCallbackConsumer->unlockBuffer(imgBuffer);
+            return INVALID_OPERATION;
+        }
+
+        // In one-shot mode, stop sending callbacks after the first one
+        if (l.mParameters.previewCallbackFlags &
+                CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) {
+            ALOGV("%s: clearing oneshot", __FUNCTION__);
+            l.mParameters.previewCallbackOneShot = false;
+        }
+    }
+
+    // (Re)allocate the ring heap if the per-frame buffer size changed
+    size_t bufferSize = Camera2Client::calculateBufferSize(
+            imgBuffer.width, imgBuffer.height,
+            imgBuffer.format, imgBuffer.stride);
+    size_t currentBufferSize = (mCallbackHeap == 0) ?
+            0 : (mCallbackHeap->mHeap->getSize() / kCallbackHeapCount);
+    if (bufferSize != currentBufferSize) {
+        mCallbackHeap.clear();
+        mCallbackHeap = new Camera2Heap(bufferSize, kCallbackHeapCount,
+                "Camera2Client::CallbackHeap");
+        if (mCallbackHeap->mHeap->getSize() == 0) {
+            ALOGE("%s: Camera %d: Unable to allocate memory for callbacks",
+                    __FUNCTION__, client->getCameraId());
+            mCallbackConsumer->unlockBuffer(imgBuffer);
+            return INVALID_OPERATION;
+        }
+
+        mCallbackHeapHead = 0;
+        mCallbackHeapFree = kCallbackHeapCount;
+    }
+
+    if (mCallbackHeapFree == 0) {
+        ALOGE("%s: Camera %d: No free callback buffers, dropping frame",
+                __FUNCTION__, client->getCameraId());
+        mCallbackConsumer->unlockBuffer(imgBuffer);
+        return OK;
+    }
+
+    heapIdx = mCallbackHeapHead;
+
+    // Advance the ring-buffer head with modulo, NOT bitwise AND:
+    // kCallbackHeapCount is 6 (not a power of two), so the previous
+    // `& kCallbackHeapCount` yielded the out-of-range index 6.
+    mCallbackHeapHead = (mCallbackHeapHead + 1) % kCallbackHeapCount;
+    mCallbackHeapFree--;
+
+    // TODO: Get rid of this memcpy by passing the gralloc queue all the way
+    // to app
+
+    ssize_t offset;
+    size_t size;
+    sp<IMemoryHeap> heap =
+            mCallbackHeap->mBuffers[heapIdx]->getMemory(&offset,
+                    &size);
+    uint8_t *data = (uint8_t*)heap->getBase() + offset;
+    memcpy(data, imgBuffer.data, bufferSize);
+
+    ALOGV("%s: Freeing buffer", __FUNCTION__);
+    mCallbackConsumer->unlockBuffer(imgBuffer);
+
+    // Call outside parameter lock to allow re-entrancy from notification
+    {
+        Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient);
+        if (l.mCameraClient != 0) {
+            ALOGV("%s: Camera %d: Invoking client data callback",
+                    __FUNCTION__, client->getCameraId());
+            l.mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_FRAME,
+                    mCallbackHeap->mBuffers[heapIdx], NULL);
+        }
+    }
+
+    // Only increment free if we're still using the same heap
+    // TODO(review): increment is unconditional; if mCallbackHeap was
+    // replaced while the callback ran this over-counts - confirm.
+    mCallbackHeapFree++;
+
+    ALOGV("%s: exit", __FUNCTION__);
+
+    return OK;
+}
+
+}; // namespace camera2
+}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/camera2/CallbackProcessor.h
new file mode 100644
index 0000000..36c51a3
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/CallbackProcessor.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CALLBACKPROCESSOR_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_CALLBACKPROCESSOR_H
+
+#include <utils/Thread.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include <gui/CpuConsumer.h>
+#include "Parameters.h"
+#include "CameraMetadata.h"
+#include "Camera2Heap.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+/***
+ * Preview-frame callback output processing: copies frames from a CPU
+ * consumer stream into an IMemory heap and forwards them to the client.
+ * (Original header comment said "Still image capture" - a copy-paste from
+ * CaptureProcessor.h; also fixed mis-encoded "¶ms" -> "&params".)
+ */
+class CallbackProcessor:
+            public Thread, public CpuConsumer::FrameAvailableListener {
+  public:
+    CallbackProcessor(wp<Camera2Client> client);
+    ~CallbackProcessor();
+
+    // CpuConsumer::FrameAvailableListener; wakes the processing thread
+    void onFrameAvailable();
+
+    // Create/recreate the callback stream to match params
+    status_t updateStream(const Parameters &params);
+    status_t deleteStream();
+    int getStreamId() const;
+
+    void dump(int fd, const Vector<String16>& args);
+  private:
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+    wp<Camera2Client> mClient;
+
+    // Guards all state below
+    mutable Mutex mInputMutex;
+    bool mCallbackAvailable;
+    Condition mCallbackAvailableSignal;
+
+    enum {
+        NO_STREAM = -1
+    };
+
+    int mCallbackStreamId;
+    static const size_t kCallbackHeapCount = 6;
+    sp<CpuConsumer> mCallbackConsumer;
+    sp<ANativeWindow> mCallbackWindow;
+    sp<Camera2Heap> mCallbackHeap;
+    int mCallbackHeapId;
+    size_t mCallbackHeapHead, mCallbackHeapFree;
+
+    virtual bool threadLoop();
+
+    status_t processNewCallback(sp<Camera2Client> &client);
+
+};
+
+
+}; //namespace camera2
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/camera2/Camera2Heap.h b/services/camera/libcameraservice/camera2/Camera2Heap.h
new file mode 100644
index 0000000..9c72d76
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/Camera2Heap.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2HEAP_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2HEAP_H
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+
+namespace android {
+namespace camera2 {
+
+// Utility class for managing a set of IMemory blocks
+// Allocates one MemoryHeapBase of buf_size * num_buffers bytes and carves
+// it into num_buffers equal MemoryBase slices.
+// NOTE(review): no copy constructor/assignment is declared, so a copy would
+// share the raw mBuffers array and double-delete it in the destructor -
+// instances appear to be held only via sp<>; confirm.
+// NOTE(review): buf_size * num_buffers may overflow size_t for very large
+// requests - callers are presumed to pass sane sizes.
+class Camera2Heap : public RefBase {
+  public:
+    Camera2Heap(size_t buf_size, uint_t num_buffers = 1,
+            const char *name = NULL) :
+            mBufSize(buf_size),
+            mNumBufs(num_buffers) {
+        mHeap = new MemoryHeapBase(buf_size * num_buffers, 0, name);
+        mBuffers = new sp<MemoryBase>[mNumBufs];
+        for (uint_t i = 0; i < mNumBufs; i++)
+            // Each buffer is an offset/length window into the shared heap
+            mBuffers[i] = new MemoryBase(mHeap,
+                    i * mBufSize,
+                    mBufSize);
+    }
+
+    virtual ~Camera2Heap()
+    {
+        delete [] mBuffers;
+    }
+
+    size_t mBufSize;           // size of one buffer, in bytes
+    uint_t mNumBufs;           // number of buffers carved from mHeap
+    sp<MemoryHeapBase> mHeap;  // single backing allocation
+    sp<MemoryBase> *mBuffers;  // array of mNumBufs slices into mHeap
+};
+
+}; // namespace camera2
+}; // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/camera2/CaptureProcessor.cpp b/services/camera/libcameraservice/camera2/CaptureProcessor.cpp
new file mode 100644
index 0000000..b17f9d2
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/CaptureProcessor.cpp
@@ -0,0 +1,248 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2Client::CaptureProcessor"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include "CaptureProcessor.h"
+#include <gui/SurfaceTextureClient.h>
+#include "../Camera2Device.h"
+#include "../Camera2Client.h"
+
+
+namespace android {
+namespace camera2 {
+
+// Starts idle with no stream; the JPEG stream and capture heap are created
+// lazily by updateStream(). Thread(false): never attaches to the Java VM.
+CaptureProcessor::CaptureProcessor(wp<Camera2Client> client):
+        Thread(false),
+        mClient(client),
+        mCaptureAvailable(false),
+        mCaptureStreamId(NO_STREAM) {
+}
+
+// Destructor only logs; thread shutdown is expected to be driven by the
+// owner (requestExit/join) - TODO confirm, not visible in this file.
+CaptureProcessor::~CaptureProcessor() {
+    ALOGV("%s: Exit", __FUNCTION__);
+}
+
+// CpuConsumer listener: wakes threadLoop() when a completed still image is
+// queued. Multiple notifications coalesce into a single pending flag.
+void CaptureProcessor::onFrameAvailable() {
+    Mutex::Autolock l(mInputMutex);
+    if (!mCaptureAvailable) {
+        mCaptureAvailable = true;
+        mCaptureAvailableSignal.signal();
+    }
+}
+
+/**
+ * Create or resize the blob (JPEG) output stream to match the current
+ * picture size, and allocate the IMemory heap handed to the client on
+ * first use. Returns OK if the client is gone.
+ * (Fixed mis-encoded "&" characters: "¶ms" -> "&params",
+ * "¤t..." -> "&current...".)
+ */
+status_t CaptureProcessor::updateStream(const Parameters &params) {
+    ATRACE_CALL();
+    ALOGV("%s", __FUNCTION__);
+    status_t res;
+
+    Mutex::Autolock l(mInputMutex);
+
+    sp<Camera2Client> client = mClient.promote();
+    if (client == 0) return OK;
+    sp<Camera2Device> device = client->getCameraDevice();
+
+    // Find out buffer size for JPEG
+    camera_metadata_ro_entry_t maxJpegSize =
+            params.staticInfo(ANDROID_JPEG_MAX_SIZE);
+    if (maxJpegSize.count == 0) {
+        ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
+                __FUNCTION__, client->getCameraId());
+        return INVALID_OPERATION;
+    }
+
+    if (mCaptureConsumer == 0) {
+        // Create CPU buffer queue endpoint
+        mCaptureConsumer = new CpuConsumer(1);
+        mCaptureConsumer->setFrameAvailableListener(this);
+        mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
+        mCaptureWindow = new SurfaceTextureClient(
+            mCaptureConsumer->getProducerInterface());
+        // Create memory for API consumption
+        mCaptureHeap = new Camera2Heap(maxJpegSize.data.i32[0], 1,
+                                       "Camera2Client::CaptureHeap");
+        if (mCaptureHeap->mHeap->getSize() == 0) {
+            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
+                    __FUNCTION__, client->getCameraId());
+            return NO_MEMORY;
+        }
+    }
+
+    if (mCaptureStreamId != NO_STREAM) {
+        // Check if stream parameters have to change
+        uint32_t currentWidth, currentHeight;
+        res = device->getStreamInfo(mCaptureStreamId,
+                &currentWidth, &currentHeight, 0);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Error querying capture output stream info: "
+                    "%s (%d)", __FUNCTION__,
+                    client->getCameraId(), strerror(-res), res);
+            return res;
+        }
+        if (currentWidth != (uint32_t)params.pictureWidth ||
+                currentHeight != (uint32_t)params.pictureHeight) {
+            res = device->deleteStream(mCaptureStreamId);
+            if (res != OK) {
+                ALOGE("%s: Camera %d: Unable to delete old output stream "
+                        "for capture: %s (%d)", __FUNCTION__,
+                        client->getCameraId(), strerror(-res), res);
+                return res;
+            }
+            mCaptureStreamId = NO_STREAM;
+        }
+    }
+
+    if (mCaptureStreamId == NO_STREAM) {
+        // Create stream for HAL production
+        res = device->createStream(mCaptureWindow,
+                params.pictureWidth, params.pictureHeight,
+                HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0],
+                &mCaptureStreamId);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Can't create output stream for capture: "
+                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    strerror(-res), res);
+            return res;
+        }
+
+    }
+    return OK;
+}
+
+/**
+ * Delete the capture stream from the device, if one exists. Safe to call
+ * when no stream was ever created; returns OK if the client is gone.
+ */
+status_t CaptureProcessor::deleteStream() {
+    ATRACE_CALL();
+    status_t res;
+
+    Mutex::Autolock l(mInputMutex);
+
+    if (mCaptureStreamId != NO_STREAM) {
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return OK;
+        sp<Camera2Device> device = client->getCameraDevice();
+
+        // NOTE(review): deleteStream's result is ignored here - confirm
+        // failures are acceptable to drop during teardown.
+        device->deleteStream(mCaptureStreamId);
+        mCaptureStreamId = NO_STREAM;
+    }
+    return OK;
+}
+
+// Returns the device stream ID used for still capture, or NO_STREAM (-1)
+// if no stream is currently configured.
+int CaptureProcessor::getStreamId() const {
+    Mutex::Autolock l(mInputMutex);
+    return mCaptureStreamId;
+}
+
+// No capture-specific state is dumped yet; placeholder so the processor
+// participates in the client's dump interface.
+void CaptureProcessor::dump(int fd, const Vector<String16>& args) {
+}
+
+// Main loop: wait (up to kWaitDuration per iteration) for the
+// capture-available signal, then drain all pending still images.
+// Returns false (exiting the thread) only when the client is gone.
+bool CaptureProcessor::threadLoop() {
+    status_t res;
+
+    {
+        Mutex::Autolock l(mInputMutex);
+        while (!mCaptureAvailable) {
+            res = mCaptureAvailableSignal.waitRelative(mInputMutex,
+                    kWaitDuration);
+            // Time out regularly so the loop can notice thread-exit requests
+            if (res == TIMED_OUT) return true;
+        }
+        mCaptureAvailable = false;
+    }
+
+    do {
+        // Re-promote each iteration; stop if the client has been destroyed
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return false;
+        res = processNewCapture(client);
+    } while (res == OK);
+
+    return true;
+}
+
+/**
+ * Copy a completed JPEG out of the CPU consumer into mCaptureHeap and send
+ * it to the client via CAMERA_MSG_COMPRESSED_IMAGE. Also transitions the
+ * parameter state machine out of the capture state.
+ */
+status_t CaptureProcessor::processNewCapture(sp<Camera2Client> &client) {
+    ATRACE_CALL();
+    status_t res;
+    sp<Camera2Heap> captureHeap;
+
+    CpuConsumer::LockedBuffer imgBuffer;
+
+    res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
+    if (res != OK) {
+        // BAD_VALUE means no buffer pending - normal end of the drain loop
+        if (res != BAD_VALUE) {
+            ALOGE("%s: Camera %d: Error receiving still image buffer: "
+                    "%s (%d)", __FUNCTION__,
+                    client->getCameraId(), strerror(-res), res);
+        }
+        return res;
+    }
+
+    ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
+            client->getCameraId());
+
+    // TODO: Signal errors here upstream
+    {
+        SharedParameters::Lock l(client->getParameters());
+
+        // Leave the capture state now that the image has been produced
+        switch (l.mParameters.state) {
+            case Parameters::STILL_CAPTURE:
+                l.mParameters.state = Parameters::STOPPED;
+                break;
+            case Parameters::VIDEO_SNAPSHOT:
+                l.mParameters.state = Parameters::RECORD;
+                break;
+            default:
+                ALOGE("%s: Camera %d: Still image produced unexpectedly "
+                        "in state %s!",
+                        __FUNCTION__, client->getCameraId(),
+                        Parameters::getStateName(l.mParameters.state));
+                mCaptureConsumer->unlockBuffer(imgBuffer);
+                return BAD_VALUE;
+        }
+    }
+
+    if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
+        ALOGE("%s: Camera %d: Unexpected format for still image: "
+                "%x, expected %x", __FUNCTION__, client->getCameraId(),
+                imgBuffer.format,
+                HAL_PIXEL_FORMAT_BLOB);
+        mCaptureConsumer->unlockBuffer(imgBuffer);
+        return OK;
+    }
+
+    // TODO: Optimize this to avoid memcopy
+    // NOTE(review): this copies the full heap size (ANDROID_JPEG_MAX_SIZE),
+    // not the actual JPEG length, and may read past the locked buffer's
+    // valid data - confirm the producer buffer is always max-size.
+    void* captureMemory = mCaptureHeap->mHeap->getBase();
+    size_t size = mCaptureHeap->mHeap->getSize();
+    memcpy(captureMemory, imgBuffer.data, size);
+
+    mCaptureConsumer->unlockBuffer(imgBuffer);
+
+    // Local strong reference keeps the heap alive while the callback runs,
+    // in case the member is replaced concurrently
+    captureHeap = mCaptureHeap;
+
+    Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient);
+    ALOGV("%s: Sending still image to client", __FUNCTION__);
+    if (l.mCameraClient != 0) {
+        l.mCameraClient->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE,
+                captureHeap->mBuffers[0], NULL);
+    } else {
+        ALOGV("%s: No client!", __FUNCTION__);
+    }
+    return OK;
+}
+
+}; // namespace camera2
+}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/CaptureProcessor.h b/services/camera/libcameraservice/camera2/CaptureProcessor.h
new file mode 100644
index 0000000..8e35739
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/CaptureProcessor.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CAPTUREPROCESSOR_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_CAPTUREPROCESSOR_H
+
+#include <utils/Thread.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include <gui/CpuConsumer.h>
+#include "Parameters.h"
+#include "CameraMetadata.h"
+#include "Camera2Heap.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+/***
+ * Still image capture output image processing: copies completed JPEGs from
+ * a CPU consumer stream into an IMemory heap and forwards them to the
+ * client. (Fixed mis-encoded "¶ms" -> "&params".)
+ */
+class CaptureProcessor:
+            public Thread, public CpuConsumer::FrameAvailableListener {
+  public:
+    CaptureProcessor(wp<Camera2Client> client);
+    ~CaptureProcessor();
+
+    // CpuConsumer::FrameAvailableListener; wakes the processing thread
+    void onFrameAvailable();
+
+    // Create/recreate the JPEG (blob) stream to match params
+    status_t updateStream(const Parameters &params);
+    status_t deleteStream();
+    int getStreamId() const;
+
+    void dump(int fd, const Vector<String16>& args);
+  private:
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+    wp<Camera2Client> mClient;
+
+    // Guards all state below
+    mutable Mutex mInputMutex;
+    bool mCaptureAvailable;
+    Condition mCaptureAvailableSignal;
+
+    enum {
+        NO_STREAM = -1
+    };
+
+    int mCaptureStreamId;
+    sp<CpuConsumer> mCaptureConsumer;
+    sp<ANativeWindow> mCaptureWindow;
+    sp<Camera2Heap> mCaptureHeap;
+
+    virtual bool threadLoop();
+
+    status_t processNewCapture(sp<Camera2Client> &client);
+
+};
+
+
+}; //namespace camera2
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
new file mode 100644
index 0000000..5059754
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
@@ -0,0 +1,214 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2Client::FrameProcessor"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include "FrameProcessor.h"
+#include "../Camera2Device.h"
+#include "../Camera2Client.h"
+
+namespace android {
+namespace camera2 {
+
+// Thread(false): the frame-processing thread never attaches to the Java VM.
+// The owner is expected to run() the thread - TODO confirm, start-up is not
+// visible in this file.
+FrameProcessor::FrameProcessor(wp<Camera2Client> client):
+        Thread(false), mClient(client) {
+}
+
+// Destructor only logs; thread shutdown is handled by the owner.
+FrameProcessor::~FrameProcessor() {
+    ALOGV("%s: Exit", __FUNCTION__);
+}
+
+// Write the most recently processed frame's full metadata to fd.
+void FrameProcessor::dump(int fd, const Vector<String16>& args) {
+    String8 result(" Latest received frame:\n");
+    write(fd, result.string(), result.size());
+    mLastFrame.dump(fd, 2, 6);
+}
+
+// Main loop: block (up to kWaitDuration) for a new result frame from the
+// device, then process all pending frames. Returns false (exiting the
+// thread) only when the client has been destroyed.
+bool FrameProcessor::threadLoop() {
+    status_t res;
+
+    sp<Camera2Device> device;
+    {
+        // Promote only long enough to grab the device, so a strong client
+        // reference isn't held while blocked in waitForNextFrame
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return false;
+        device = client->getCameraDevice();
+    }
+
+    res = device->waitForNextFrame(kWaitDuration);
+    if (res == OK) {
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return false;
+        processNewFrames(client);
+    } else if (res != TIMED_OUT) {
+        ALOGE("Camera2Client::FrameProcessor: Error waiting for new "
+                "frames: %s (%d)", strerror(-res), res);
+    }
+
+    return true;
+}
+
+/**
+ * Drain all pending result frames from the device, running face-detect
+ * processing on each and keeping the last frame for dump().
+ * Fix: the trailing error check previously fired with res == OK after the
+ * missing-frame-count break, logging a bogus "error 0"; OK is now treated
+ * as a non-error exit, and the inner log no longer formats a meaningless
+ * errno value.
+ */
+void FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
+    status_t res;
+    CameraMetadata frame;
+    while ( (res = client->getCameraDevice()->getNextFrame(&frame)) == OK) {
+        camera_metadata_entry_t entry;
+        entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
+        if (entry.count == 0) {
+            // getNextFrame succeeded, so there is no errno to report here
+            ALOGE("%s: Camera %d: Error reading frame number",
+                    __FUNCTION__, client->getCameraId());
+            break;
+        }
+
+        res = processFaceDetect(frame, client);
+        if (res != OK) break;
+
+        // Keep the most recent frame's metadata for dump()
+        mLastFrame.acquire(frame);
+    }
+    // NOT_ENOUGH_DATA is the normal "queue drained" exit; OK means we broke
+    // out above after already logging a specific error
+    if (res != NOT_ENOUGH_DATA && res != OK) {
+        ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        return;
+    }
+
+    return;
+}
+
+/**
+ * Extract face-detect metadata from a result frame and forward it to the
+ * client as CAMERA_MSG_PREVIEW_METADATA.
+ * Returns OK when face data is absent, disabled, or delivered; BAD_VALUE
+ * when expected face metadata entries are missing or inconsistent.
+ * Fix: the error paths previously did `return res` with `res` never
+ * assigned (returned an uninitialized value); they now return BAD_VALUE.
+ * Also removed the unused local `maxFaces`.
+ */
+status_t FrameProcessor::processFaceDetect(
+        const CameraMetadata &frame, sp<Camera2Client> &client) {
+    camera_metadata_ro_entry_t entry;
+    bool enableFaceDetect;
+    {
+        SharedParameters::Lock l(client->getParameters());
+        enableFaceDetect = l.mParameters.enableFaceDetect;
+    }
+    entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE);
+
+    // TODO: This should be an error once implementations are compliant
+    if (entry.count == 0) {
+        return OK;
+    }
+
+    uint8_t faceDetectMode = entry.data.u8[0];
+
+    camera_frame_metadata metadata;
+    Vector<camera_face_t> faces;
+    metadata.number_of_faces = 0;
+
+    if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) {
+        SharedParameters::Lock l(client->getParameters());
+        entry = frame.find(ANDROID_STATS_FACE_RECTANGLES);
+        if (entry.count == 0) {
+            ALOGE("%s: Camera %d: Unable to read face rectangles",
+                    __FUNCTION__, client->getCameraId());
+            return BAD_VALUE;
+        }
+        metadata.number_of_faces = entry.count / 4;
+        if (metadata.number_of_faces >
+                l.mParameters.fastInfo.maxFaces) {
+            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
+                    __FUNCTION__, client->getCameraId(),
+                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
+            return BAD_VALUE;
+        }
+        const int32_t *faceRects = entry.data.i32;
+
+        entry = frame.find(ANDROID_STATS_FACE_SCORES);
+        if (entry.count == 0) {
+            ALOGE("%s: Camera %d: Unable to read face scores",
+                    __FUNCTION__, client->getCameraId());
+            return BAD_VALUE;
+        }
+        const uint8_t *faceScores = entry.data.u8;
+
+        const int32_t *faceLandmarks = NULL;
+        const int32_t *faceIds = NULL;
+
+        if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
+            entry = frame.find(ANDROID_STATS_FACE_LANDMARKS);
+            if (entry.count == 0) {
+                ALOGE("%s: Camera %d: Unable to read face landmarks",
+                        __FUNCTION__, client->getCameraId());
+                return BAD_VALUE;
+            }
+            faceLandmarks = entry.data.i32;
+
+            entry = frame.find(ANDROID_STATS_FACE_IDS);
+
+            if (entry.count == 0) {
+                ALOGE("%s: Camera %d: Unable to read face IDs",
+                        __FUNCTION__, client->getCameraId());
+                return BAD_VALUE;
+            }
+            faceIds = entry.data.i32;
+        }
+
+        faces.setCapacity(metadata.number_of_faces);
+
+        // Convert all coordinates from the sensor active array space to the
+        // normalized [-1000, 1000] range the camera1 API expects
+        for (int i = 0; i < metadata.number_of_faces; i++) {
+            camera_face_t face;
+
+            face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
+            face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
+            face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
+            face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);
+
+            face.score = faceScores[i];
+            if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
+                face.id = faceIds[i];
+                face.left_eye[0] =
+                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
+                face.left_eye[1] =
+                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
+                face.right_eye[0] =
+                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
+                face.right_eye[1] =
+                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
+                face.mouth[0] =
+                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
+                face.mouth[1] =
+                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
+            } else {
+                // SIMPLE mode provides no landmarks; use sentinel values
+                face.id = 0;
+                face.left_eye[0] = face.left_eye[1] = -2000;
+                face.right_eye[0] = face.right_eye[1] = -2000;
+                face.mouth[0] = face.mouth[1] = -2000;
+            }
+            faces.push_back(face);
+        }
+
+        metadata.faces = faces.editArray();
+    }
+
+    if (metadata.number_of_faces != 0) {
+        Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient);
+        if (l.mCameraClient != NULL) {
+            l.mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
+                    NULL, &metadata);
+        }
+    }
+    return OK;
+}
+
+
+}; // namespace camera2
+}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.h b/services/camera/libcameraservice/camera2/FrameProcessor.h
new file mode 100644
index 0000000..2cdf7f0
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/FrameProcessor.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_FRAMEPROCESSOR_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_FRAMEPROCESSOR_H
+
+#include <utils/Thread.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+#include "CameraMetadata.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+/* Output frame metadata processing thread. This thread waits for new
+ * frames from the device, and analyzes them as necessary.
+ */
+class FrameProcessor: public Thread { // runs until the owning client drops its strong refs
+ public:
+ FrameProcessor(wp<Camera2Client> client); // weak ref only: must not keep the client alive
+ virtual ~FrameProcessor(); // virtual: destroyed through Thread/RefBase base pointers
+
+ void dump(int fd, const Vector<String16>& args); // write processor state to fd (args unused? confirm in .cpp)
+ private:
+ static const nsecs_t kWaitDuration = 10000000; // 10 ms per wait for a new frame
+ wp<Camera2Client> mClient; // promoted to sp<> before each use; NULL-checked there
+
+ virtual bool threadLoop(); // Thread hook; one wait/process iteration per call
+
+ void processNewFrames(sp<Camera2Client> &client); // presumably drains pending device frames — see .cpp
+ status_t processFaceDetect(const CameraMetadata &frame,
+ sp<Camera2Client> &client); // maps face metadata to CAMERA_MSG_PREVIEW_METADATA callback
+
+ CameraMetadata mLastFrame; // most recent frame metadata, kept for dump/inspection — confirm in .cpp
+};
+
+
+}; //namespace camera2
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp
index 91c5141..2f7d023 100644
--- a/services/camera/libcameraservice/camera2/Parameters.cpp
+++ b/services/camera/libcameraservice/camera2/Parameters.cpp
@@ -208,7 +208,7 @@
CameraParameters::PIXEL_FORMAT_JPEG);
camera_metadata_ro_entry_t availableJpegThumbnailSizes =
- staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 2);
+ staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 4);
if (!availableJpegThumbnailSizes.count) return NO_INIT;
// TODO: Pick default thumbnail size sensibly