Merge "Fix retries when server doesn't support range requests." into jb-mr1-dev
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index bfe20cc..723a6e5 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -34,6 +34,7 @@
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/NuMediaExtractor.h>
+#include <gui/ISurfaceComposer.h>
#include <gui/SurfaceComposerClient.h>
#include <ui/DisplayInfo.h>
@@ -379,8 +380,10 @@
composerClient = new SurfaceComposerClient;
CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+ sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
+ ISurfaceComposer::eDisplayIdMain));
DisplayInfo info;
- SurfaceComposerClient::getDisplayInfo(0, &info);
+ SurfaceComposerClient::getDisplayInfo(display, &info);
ssize_t displayWidth = info.w;
ssize_t displayHeight = info.h;
@@ -388,7 +391,6 @@
control = composerClient->createSurface(
String8("A Surface"),
- 0,
displayWidth,
displayHeight,
PIXEL_FORMAT_RGB_565,
diff --git a/cmds/stagefright/sf2.cpp b/cmds/stagefright/sf2.cpp
index 3bbfbdc..c817443 100644
--- a/cmds/stagefright/sf2.cpp
+++ b/cmds/stagefright/sf2.cpp
@@ -612,7 +612,6 @@
control = composerClient->createSurface(
String8("A Surface"),
- 0,
1280,
800,
PIXEL_FORMAT_RGB_565,
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 0362f39..b92a8a0 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -922,7 +922,6 @@
control = composerClient->createSurface(
String8("A Surface"),
- 0,
1280,
800,
PIXEL_FORMAT_RGB_565,
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index ac88d1f..7329dcc 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -33,6 +33,7 @@
#include <binder/IServiceManager.h>
#include <media/IMediaPlayerService.h>
+#include <gui/ISurfaceComposer.h>
#include <gui/SurfaceComposerClient.h>
#include <fcntl.h>
@@ -306,8 +307,10 @@
sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+ sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
+ ISurfaceComposer::eDisplayIdMain));
DisplayInfo info;
- SurfaceComposerClient::getDisplayInfo(0, &info);
+ SurfaceComposerClient::getDisplayInfo(display, &info);
ssize_t displayWidth = info.w;
ssize_t displayHeight = info.h;
@@ -316,7 +319,6 @@
sp<SurfaceControl> control =
composerClient->createSurface(
String8("A Surface"),
- 0,
displayWidth,
displayHeight,
PIXEL_FORMAT_RGB_565,
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index bb9e595..e6739ae 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -354,6 +354,8 @@
const void *data, size_t size,
unsigned *profile, unsigned *level);
+ status_t stopOmxComponent_l();
+
OMXCodec(const OMXCodec &);
OMXCodec &operator=(const OMXCodec &);
};
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 58e4723..6346363 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1042,9 +1042,14 @@
void* cookie, int msg, int ext1, int ext2, const Parcel *obj)
{
Client* client = static_cast<Client*>(cookie);
+ if (client == NULL) {
+ return;
+ }
+ sp<IMediaPlayerClient> c;
{
Mutex::Autolock l(client->mLock);
+ c = client->mClient;
if (msg == MEDIA_PLAYBACK_COMPLETE && client->mNextClient != NULL) {
if (client->mAudioOutput != NULL)
client->mAudioOutput->switchToNextOutput();
@@ -1065,8 +1070,11 @@
// also access mMetadataUpdated and clears it.
client->addNewMetadataUpdate(metadata_type);
}
- ALOGV("[%d] notify (%p, %d, %d, %d)", client->mConnId, cookie, msg, ext1, ext2);
- client->mClient->notify(msg, ext1, ext2, obj);
+
+ if (c != NULL) {
+ ALOGV("[%d] notify (%p, %d, %d, %d)", client->mConnId, cookie, msg, ext1, ext2);
+ c->notify(msg, ext1, ext2, obj);
+ }
}
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 5615d0f..d0e306c 100755
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -3621,7 +3621,11 @@
}
params->setInt32(kKeyNumBuffers, mPortBuffers[kPortIndexInput].size());
- return mSource->start(params.get());
+ err = mSource->start(params.get());
+ if (err != OK) {
+ stopOmxComponent_l();
+ }
+ return err;
}
// Decoder case
@@ -3633,8 +3637,16 @@
status_t OMXCodec::stop() {
CODEC_LOGV("stop mState=%d", mState);
-
Mutex::Autolock autoLock(mLock);
+ status_t err = stopOmxComponent_l();
+ mSource->stop();
+
+ CODEC_LOGV("stopped in state %d", mState);
+ return err;
+}
+
+status_t OMXCodec::stopOmxComponent_l() {
+ CODEC_LOGV("stopOmxComponent_l mState=%d", mState);
while (isIntermediateState(mState)) {
mAsyncCompletion.wait(mLock);
@@ -3732,10 +3744,6 @@
mLeftOverBuffer = NULL;
}
- mSource->stop();
-
- CODEC_LOGV("stopped in state %d", mState);
-
return OK;
}
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
index cc2aca7..a61d6a2 100644
--- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp
+++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
@@ -90,7 +90,7 @@
ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());
mSurfaceControl = mComposerClient->createSurface(
- String8("Test Surface"), 0,
+ String8("Test Surface"),
getSurfaceWidth(), getSurfaceHeight(),
PIXEL_FORMAT_RGB_888, 0);
diff --git a/media/libstagefright/wifi-display/ANetworkSession.cpp b/media/libstagefright/wifi-display/ANetworkSession.cpp
new file mode 100644
index 0000000..ee0600c
--- /dev/null
+++ b/media/libstagefright/wifi-display/ANetworkSession.cpp
@@ -0,0 +1,994 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NetworkSession"
+#include <utils/Log.h>
+
+#include "ANetworkSession.h"
+#include "ParsedMessage.h"
+
+#include <arpa/inet.h>
+#include <fcntl.h>
+#include <net/if.h>
+#include <netdb.h>
+#include <netinet/in.h>
+#include <sys/socket.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static const size_t kMaxUDPSize = 1500;
+
+struct ANetworkSession::NetworkThread : public Thread {
+ NetworkThread(ANetworkSession *session);
+
+protected:
+ virtual ~NetworkThread();
+
+private:
+ ANetworkSession *mSession;
+
+ virtual bool threadLoop();
+
+ DISALLOW_EVIL_CONSTRUCTORS(NetworkThread);
+};
+
+struct ANetworkSession::Session : public RefBase {
+ enum State {
+ CONNECTING,
+ CONNECTED,
+ LISTENING,
+ DATAGRAM,
+ };
+
+ Session(int32_t sessionID,
+ State state,
+ int s,
+ const sp<AMessage> &notify);
+
+ int32_t sessionID() const;
+ int socket() const;
+ sp<AMessage> getNotificationMessage() const;
+
+ bool isListening() const;
+
+ bool wantsToRead();
+ bool wantsToWrite();
+
+ status_t readMore();
+ status_t writeMore();
+
+ status_t sendRequest(const void *data, ssize_t size);
+
+protected:
+ virtual ~Session();
+
+private:
+ int32_t mSessionID;
+ State mState;
+ int mSocket;
+ sp<AMessage> mNotify;
+ bool mSawReceiveFailure, mSawSendFailure;
+
+ AString mOutBuffer;
+ List<size_t> mOutBufferSizes;
+
+ AString mInBuffer;
+
+ void notifyError(bool send, status_t err, const char *detail);
+ void notify(NotificationReason reason);
+
+ DISALLOW_EVIL_CONSTRUCTORS(Session);
+};
+////////////////////////////////////////////////////////////////////////////////
+
+ANetworkSession::NetworkThread::NetworkThread(ANetworkSession *session)
+ : mSession(session) {
+}
+
+ANetworkSession::NetworkThread::~NetworkThread() {
+}
+
+bool ANetworkSession::NetworkThread::threadLoop() {
+ mSession->threadLoop();
+
+ return true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ANetworkSession::Session::Session(
+ int32_t sessionID,
+ State state,
+ int s,
+ const sp<AMessage> &notify)
+ : mSessionID(sessionID),
+ mState(state),
+ mSocket(s),
+ mNotify(notify),
+ mSawReceiveFailure(false),
+ mSawSendFailure(false) {
+ if (mState == CONNECTED) {
+ struct sockaddr_in localAddr;
+ socklen_t localAddrLen = sizeof(localAddr);
+
+ int res = getsockname(
+ mSocket, (struct sockaddr *)&localAddr, &localAddrLen);
+ CHECK_GE(res, 0);
+
+ struct sockaddr_in remoteAddr;
+ socklen_t remoteAddrLen = sizeof(remoteAddr);
+
+ res = getpeername(
+ mSocket, (struct sockaddr *)&remoteAddr, &remoteAddrLen);
+ CHECK_GE(res, 0);
+
+ in_addr_t addr = ntohl(localAddr.sin_addr.s_addr);
+ AString localAddrString = StringPrintf(
+ "%d.%d.%d.%d",
+ (addr >> 24),
+ (addr >> 16) & 0xff,
+ (addr >> 8) & 0xff,
+ addr & 0xff);
+
+ addr = ntohl(remoteAddr.sin_addr.s_addr);
+ AString remoteAddrString = StringPrintf(
+ "%d.%d.%d.%d",
+ (addr >> 24),
+ (addr >> 16) & 0xff,
+ (addr >> 8) & 0xff,
+ addr & 0xff);
+
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("sessionID", mSessionID);
+ msg->setInt32("reason", kWhatClientConnected);
+ msg->setString("server-ip", localAddrString.c_str());
+ msg->setInt32("server-port", ntohs(localAddr.sin_port));
+ msg->setString("client-ip", remoteAddrString.c_str());
+ msg->setInt32("client-port", ntohs(remoteAddr.sin_port));
+ msg->post();
+ }
+}
+
+ANetworkSession::Session::~Session() {
+ ALOGI("Session %d gone", mSessionID);
+
+ close(mSocket);
+ mSocket = -1;
+}
+
+int32_t ANetworkSession::Session::sessionID() const {
+ return mSessionID;
+}
+
+int ANetworkSession::Session::socket() const {
+ return mSocket;
+}
+
+sp<AMessage> ANetworkSession::Session::getNotificationMessage() const {
+ return mNotify;
+}
+
+bool ANetworkSession::Session::isListening() const {
+ return mState == LISTENING;
+}
+
+bool ANetworkSession::Session::wantsToRead() {
+ return !mSawReceiveFailure && mState != CONNECTING;
+}
+
+bool ANetworkSession::Session::wantsToWrite() {
+ return !mSawSendFailure
+ && (mState == CONNECTING
+ || ((mState == CONNECTED || mState == DATAGRAM)
+ && !mOutBuffer.empty()));
+}
+
+status_t ANetworkSession::Session::readMore() {
+ if (mState == DATAGRAM) {
+ status_t err;
+ do {
+ sp<ABuffer> buf = new ABuffer(kMaxUDPSize);
+
+ struct sockaddr_in remoteAddr;
+ socklen_t remoteAddrLen = sizeof(remoteAddr);
+
+ ssize_t n;
+ do {
+ n = recvfrom(
+ mSocket, buf->data(), buf->capacity(), 0,
+ (struct sockaddr *)&remoteAddr, &remoteAddrLen);
+ } while (n < 0 && errno == EINTR);
+
+ err = OK;
+ if (n < 0) {
+ err = -errno;
+ } else if (n == 0) {
+ err = -ECONNRESET;
+ } else {
+ buf->setRange(0, n);
+
+ int64_t nowUs = ALooper::GetNowUs();
+ buf->meta()->setInt64("arrivalTimeUs", nowUs);
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("sessionID", mSessionID);
+ notify->setInt32("reason", kWhatDatagram);
+
+ uint32_t ip = ntohl(remoteAddr.sin_addr.s_addr);
+ notify->setString(
+ "fromAddr",
+ StringPrintf(
+ "%u.%u.%u.%u",
+ ip >> 24,
+ (ip >> 16) & 0xff,
+ (ip >> 8) & 0xff,
+ ip & 0xff).c_str());
+
+ notify->setInt32("fromPort", ntohs(remoteAddr.sin_port));
+
+ notify->setBuffer("data", buf);
+ notify->post();
+ }
+ } while (err == OK);
+
+ if (err == -EAGAIN) {
+ err = OK;
+ }
+
+ if (err != OK) {
+ notifyError(false /* send */, err, "Recvfrom failed.");
+ mSawReceiveFailure = true;
+ }
+
+ return err;
+ }
+
+ char tmp[512];
+ ssize_t n;
+ do {
+ n = recv(mSocket, tmp, sizeof(tmp), 0);
+ } while (n < 0 && errno == EINTR);
+
+ status_t err = OK;
+
+ if (n > 0) {
+ mInBuffer.append(tmp, n);
+
+#if 0
+ ALOGI("in:");
+ hexdump(tmp, n);
+#endif
+ } else if (n < 0) {
+ err = -errno;
+ } else {
+ err = -ECONNRESET;
+ }
+
+ for (;;) {
+ size_t length;
+
+ if (mInBuffer.size() > 0 && mInBuffer.c_str()[0] == '$') {
+ if (mInBuffer.size() < 4) {
+ break;
+ }
+
+ length = U16_AT((const uint8_t *)mInBuffer.c_str() + 2);
+
+ if (mInBuffer.size() < 4 + length) {
+ break;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("sessionID", mSessionID);
+ notify->setInt32("reason", kWhatBinaryData);
+ notify->setInt32("channel", mInBuffer.c_str()[1]);
+
+ sp<ABuffer> data = new ABuffer(length);
+ memcpy(data->data(), mInBuffer.c_str() + 4, length);
+
+ int64_t nowUs = ALooper::GetNowUs();
+ data->meta()->setInt64("arrivalTimeUs", nowUs);
+
+ notify->setBuffer("data", data);
+ notify->post();
+
+ mInBuffer.erase(0, 4 + length);
+ continue;
+ }
+
+ sp<ParsedMessage> msg =
+ ParsedMessage::Parse(
+ mInBuffer.c_str(), mInBuffer.size(), err != OK, &length);
+
+ if (msg == NULL) {
+ break;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("sessionID", mSessionID);
+ notify->setInt32("reason", kWhatData);
+ notify->setObject("data", msg);
+ notify->post();
+
+#if 1
+ // XXX The dongle sends the wrong content length header on a
+ // SET_PARAMETER request that signals a "wfd_idr_request".
+ // (17 instead of 19).
+ const char *content = msg->getContent();
+ if (content && !memcmp(content, "wfd_idr_request\r\n", 17)) {
+ length += 2;
+ }
+#endif
+
+ mInBuffer.erase(0, length);
+
+ if (err != OK) {
+ break;
+ }
+ }
+
+ if (err != OK) {
+ notifyError(false /* send */, err, "Recv failed.");
+ mSawReceiveFailure = true;
+ }
+
+ return err;
+}
+
+status_t ANetworkSession::Session::writeMore() {
+ if (mState == DATAGRAM) {
+ CHECK(!mOutBufferSizes.empty());
+
+ status_t err;
+ do {
+ size_t size = *mOutBufferSizes.begin();
+
+ CHECK_GE(mOutBuffer.size(), size);
+
+ int n;
+ do {
+ n = send(mSocket, mOutBuffer.c_str(), size, 0);
+ } while (n < 0 && errno == EINTR);
+
+ err = OK;
+
+ if (n > 0) {
+ mOutBufferSizes.erase(mOutBufferSizes.begin());
+ mOutBuffer.erase(0, n);
+ } else if (n < 0) {
+ err = -errno;
+ } else if (n == 0) {
+ err = -ECONNRESET;
+ }
+ } while (err == OK && !mOutBufferSizes.empty());
+
+ if (err == -EAGAIN) {
+ err = OK;
+ }
+
+ if (err != OK) {
+ notifyError(true /* send */, err, "Send datagram failed.");
+ mSawSendFailure = true;
+ }
+
+ return err;
+ }
+
+ if (mState == CONNECTING) {
+ int err;
+ socklen_t optionLen = sizeof(err);
+ CHECK_EQ(getsockopt(mSocket, SOL_SOCKET, SO_ERROR, &err, &optionLen), 0);
+ CHECK_EQ(optionLen, (socklen_t)sizeof(err));
+
+ if (err != 0) {
+ notifyError(kWhatError, -err, "Connection failed");
+ mSawSendFailure = true;
+
+ return UNKNOWN_ERROR;
+ }
+
+ mState = CONNECTED;
+ notify(kWhatConnected);
+
+ return OK;
+ }
+
+ CHECK_EQ(mState, CONNECTED);
+ CHECK(!mOutBuffer.empty());
+
+ ssize_t n;
+ do {
+ n = send(mSocket, mOutBuffer.c_str(), mOutBuffer.size(), 0);
+ } while (n < 0 && errno == EINTR);
+
+ status_t err = OK;
+
+ if (n > 0) {
+ ALOGI("out:");
+ hexdump(mOutBuffer.c_str(), n);
+
+ mOutBuffer.erase(0, n);
+ } else if (n < 0) {
+ err = -errno;
+ } else if (n == 0) {
+ err = -ECONNRESET;
+ }
+
+ if (err != OK) {
+ notifyError(true /* send */, err, "Send failed.");
+ mSawSendFailure = true;
+ }
+
+ return err;
+}
+
+status_t ANetworkSession::Session::sendRequest(const void *data, ssize_t size) {
+ CHECK(mState == CONNECTED || mState == DATAGRAM);
+
+ mOutBuffer.append(
+ (const char *)data,
+ (size >= 0) ? size : strlen((const char *)data));
+
+ if (mState == DATAGRAM) {
+ CHECK_GE(size, 0);
+ mOutBufferSizes.push_back(size);
+ }
+
+ return OK;
+}
+
+void ANetworkSession::Session::notifyError(
+ bool send, status_t err, const char *detail) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("sessionID", mSessionID);
+ msg->setInt32("reason", kWhatError);
+ msg->setInt32("send", send);
+ msg->setInt32("err", err);
+ msg->setString("detail", detail);
+ msg->post();
+}
+
+void ANetworkSession::Session::notify(NotificationReason reason) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("sessionID", mSessionID);
+ msg->setInt32("reason", reason);
+ msg->post();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ANetworkSession::ANetworkSession()
+ : mNextSessionID(1) {
+ mPipeFd[0] = mPipeFd[1] = -1;
+}
+
+ANetworkSession::~ANetworkSession() {
+ stop();
+}
+
+status_t ANetworkSession::start() {
+ if (mThread != NULL) {
+ return INVALID_OPERATION;
+ }
+
+ int res = pipe(mPipeFd);
+ if (res != 0) {
+ mPipeFd[0] = mPipeFd[1] = -1;
+ return -errno;
+ }
+
+ mThread = new NetworkThread(this);
+
+ status_t err = mThread->run("ANetworkSession", ANDROID_PRIORITY_AUDIO);
+
+ if (err != OK) {
+ mThread.clear();
+
+ close(mPipeFd[0]);
+ close(mPipeFd[1]);
+ mPipeFd[0] = mPipeFd[1] = -1;
+
+ return err;
+ }
+
+ return OK;
+}
+
+status_t ANetworkSession::stop() {
+ if (mThread == NULL) {
+ return INVALID_OPERATION;
+ }
+
+ mThread->requestExit();
+ interrupt();
+ mThread->requestExitAndWait();
+
+ mThread.clear();
+
+ close(mPipeFd[0]);
+ close(mPipeFd[1]);
+ mPipeFd[0] = mPipeFd[1] = -1;
+
+ return OK;
+}
+
+status_t ANetworkSession::createRTSPClient(
+ const char *host, unsigned port, const sp<AMessage> &notify,
+ int32_t *sessionID) {
+ return createClientOrServer(
+ kModeCreateRTSPClient,
+ 0 /* port */,
+ host,
+ port,
+ notify,
+ sessionID);
+}
+
+status_t ANetworkSession::createRTSPServer(
+ unsigned port, const sp<AMessage> &notify, int32_t *sessionID) {
+ return createClientOrServer(
+ kModeCreateRTSPServer,
+ port,
+ NULL /* remoteHost */,
+ 0 /* remotePort */,
+ notify,
+ sessionID);
+}
+
+status_t ANetworkSession::createUDPSession(
+ unsigned localPort, const sp<AMessage> &notify, int32_t *sessionID) {
+ return createUDPSession(localPort, NULL, 0, notify, sessionID);
+}
+
+status_t ANetworkSession::createUDPSession(
+ unsigned localPort,
+ const char *remoteHost,
+ unsigned remotePort,
+ const sp<AMessage> &notify,
+ int32_t *sessionID) {
+ return createClientOrServer(
+ kModeCreateUDPSession,
+ localPort,
+ remoteHost,
+ remotePort,
+ notify,
+ sessionID);
+}
+
+status_t ANetworkSession::destroySession(int32_t sessionID) {
+ Mutex::Autolock autoLock(mLock);
+
+ ssize_t index = mSessions.indexOfKey(sessionID);
+
+ if (index < 0) {
+ return -ENOENT;
+ }
+
+ mSessions.removeItemsAt(index);
+
+ interrupt();
+
+ return OK;
+}
+
+// static
+status_t ANetworkSession::MakeSocketNonBlocking(int s) {
+ int flags = fcntl(s, F_GETFL, 0);
+ if (flags < 0) {
+ flags = 0;
+ }
+
+ int res = fcntl(s, F_SETFL, flags | O_NONBLOCK);
+ if (res < 0) {
+ return -errno;
+ }
+
+ return OK;
+}
+
+status_t ANetworkSession::createClientOrServer(
+ Mode mode,
+ unsigned port,
+ const char *remoteHost,
+ unsigned remotePort,
+ const sp<AMessage> &notify,
+ int32_t *sessionID) {
+ Mutex::Autolock autoLock(mLock);
+
+ *sessionID = 0;
+ status_t err = OK;
+ int s, res;
+ sp<Session> session;
+
+ s = socket(
+ AF_INET,
+ (mode == kModeCreateUDPSession) ? SOCK_DGRAM : SOCK_STREAM,
+ 0);
+
+ if (s < 0) {
+ err = -errno;
+ goto bail;
+ }
+
+ if (mode == kModeCreateRTSPServer) {
+ const int yes = 1;
+ res = setsockopt(s, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(yes));
+
+ if (res < 0) {
+ err = -errno;
+ goto bail2;
+ }
+ }
+
+ if (mode == kModeCreateUDPSession) {
+ int size = 256 * 1024;
+
+ res = setsockopt(s, SOL_SOCKET, SO_RCVBUF, &size, sizeof(size));
+
+ if (res < 0) {
+ err = -errno;
+ goto bail2;
+ }
+
+ res = setsockopt(s, SOL_SOCKET, SO_SNDBUF, &size, sizeof(size));
+
+ if (res < 0) {
+ err = -errno;
+ goto bail2;
+ }
+ }
+
+ err = MakeSocketNonBlocking(s);
+
+ if (err != OK) {
+ goto bail2;
+ }
+
+ struct sockaddr_in addr;
+ memset(addr.sin_zero, 0, sizeof(addr.sin_zero));
+ addr.sin_family = AF_INET;
+
+ if (mode == kModeCreateRTSPClient) {
+ struct hostent *ent= gethostbyname(remoteHost);
+ if (ent == NULL) {
+ err = -h_errno;
+ goto bail2;
+ }
+
+ addr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr;
+ addr.sin_port = htons(remotePort);
+ } else {
+ addr.sin_addr.s_addr = INADDR_ANY;
+ addr.sin_port = htons(port);
+ }
+
+ if (mode == kModeCreateRTSPClient) {
+ res = connect(s, (const struct sockaddr *)&addr, sizeof(addr));
+
+ CHECK_LT(res, 0);
+ if (errno == EINPROGRESS) {
+ res = 0;
+ }
+ } else {
+ res = bind(s, (const struct sockaddr *)&addr, sizeof(addr));
+
+ if (res == 0) {
+ if (mode == kModeCreateRTSPServer) {
+ res = listen(s, 4);
+ } else {
+ CHECK_EQ(mode, kModeCreateUDPSession);
+
+ if (remoteHost != NULL) {
+ struct sockaddr_in remoteAddr;
+ memset(remoteAddr.sin_zero, 0, sizeof(remoteAddr.sin_zero));
+ remoteAddr.sin_family = AF_INET;
+ remoteAddr.sin_port = htons(remotePort);
+
+ struct hostent *ent= gethostbyname(remoteHost);
+ if (ent == NULL) {
+ err = -h_errno;
+ goto bail2;
+ }
+
+ remoteAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr;
+
+ res = connect(
+ s,
+ (const struct sockaddr *)&remoteAddr,
+ sizeof(remoteAddr));
+ }
+ }
+ }
+ }
+
+ if (res < 0) {
+ err = -errno;
+ goto bail2;
+ }
+
+ Session::State state;
+ switch (mode) {
+ case kModeCreateRTSPClient:
+ state = Session::CONNECTING;
+ break;
+
+ case kModeCreateRTSPServer:
+ state = Session::LISTENING;
+ break;
+
+ default:
+ CHECK_EQ(mode, kModeCreateUDPSession);
+ state = Session::DATAGRAM;
+ break;
+ }
+
+ session = new Session(
+ mNextSessionID++,
+ state,
+ s,
+ notify);
+
+ mSessions.add(session->sessionID(), session);
+
+ interrupt();
+
+ *sessionID = session->sessionID();
+
+ goto bail;
+
+bail2:
+ close(s);
+ s = -1;
+
+bail:
+ return err;
+}
+
+status_t ANetworkSession::connectUDPSession(
+ int32_t sessionID, const char *remoteHost, unsigned remotePort) {
+ Mutex::Autolock autoLock(mLock);
+
+ ssize_t index = mSessions.indexOfKey(sessionID);
+
+ if (index < 0) {
+ return -ENOENT;
+ }
+
+ const sp<Session> session = mSessions.valueAt(index);
+ int s = session->socket();
+
+ struct sockaddr_in remoteAddr;
+ memset(remoteAddr.sin_zero, 0, sizeof(remoteAddr.sin_zero));
+ remoteAddr.sin_family = AF_INET;
+ remoteAddr.sin_port = htons(remotePort);
+
+ status_t err = OK;
+ struct hostent *ent= gethostbyname(remoteHost);
+ if (ent == NULL) {
+ err = -h_errno;
+ } else {
+ remoteAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr;
+
+ int res = connect(
+ s,
+ (const struct sockaddr *)&remoteAddr,
+ sizeof(remoteAddr));
+
+ if (res < 0) {
+ err = -errno;
+ }
+ }
+
+ return err;
+}
+
+status_t ANetworkSession::sendRequest(
+ int32_t sessionID, const void *data, ssize_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ ssize_t index = mSessions.indexOfKey(sessionID);
+
+ if (index < 0) {
+ return -ENOENT;
+ }
+
+ const sp<Session> session = mSessions.valueAt(index);
+
+ status_t err = session->sendRequest(data, size);
+
+ interrupt();
+
+ return err;
+}
+
+void ANetworkSession::interrupt() {
+ static const char dummy = 0;
+
+ ssize_t n;
+ do {
+ n = write(mPipeFd[1], &dummy, 1);
+ } while (n < 0 && errno == EINTR);
+
+ if (n < 0) {
+ ALOGW("Error writing to pipe (%s)", strerror(errno));
+ }
+}
+
+void ANetworkSession::threadLoop() {
+ fd_set rs, ws;
+ FD_ZERO(&rs);
+ FD_ZERO(&ws);
+
+ FD_SET(mPipeFd[0], &rs);
+ int maxFd = mPipeFd[0];
+
+ {
+ Mutex::Autolock autoLock(mLock);
+
+ for (size_t i = 0; i < mSessions.size(); ++i) {
+ const sp<Session> &session = mSessions.valueAt(i);
+
+ int s = session->socket();
+
+ if (s < 0) {
+ continue;
+ }
+
+ if (session->wantsToRead()) {
+ FD_SET(s, &rs);
+ if (s > maxFd) {
+ maxFd = s;
+ }
+ }
+
+ if (session->wantsToWrite()) {
+ FD_SET(s, &ws);
+ if (s > maxFd) {
+ maxFd = s;
+ }
+ }
+ }
+ }
+
+ int res = select(maxFd + 1, &rs, &ws, NULL, NULL /* tv */);
+
+ if (res == 0) {
+ return;
+ }
+
+ if (res < 0) {
+ if (errno == EINTR) {
+ return;
+ }
+
+ ALOGE("select failed w/ error %d (%s)", errno, strerror(errno));
+ return;
+ }
+
+ if (FD_ISSET(mPipeFd[0], &rs)) {
+ char c;
+ ssize_t n;
+ do {
+ n = read(mPipeFd[0], &c, 1);
+ } while (n < 0 && errno == EINTR);
+
+ if (n < 0) {
+ ALOGW("Error reading from pipe (%s)", strerror(errno));
+ }
+
+ --res;
+ }
+
+ {
+ Mutex::Autolock autoLock(mLock);
+
+ List<sp<Session> > sessionsToAdd;
+
+ for (size_t i = mSessions.size(); res > 0 && i-- > 0;) {
+ const sp<Session> &session = mSessions.valueAt(i);
+
+ int s = session->socket();
+
+ if (s < 0) {
+ continue;
+ }
+
+ if (FD_ISSET(s, &rs) || FD_ISSET(s, &ws)) {
+ --res;
+ }
+
+ if (FD_ISSET(s, &rs)) {
+ if (session->isListening()) {
+ struct sockaddr_in remoteAddr;
+ socklen_t remoteAddrLen = sizeof(remoteAddr);
+
+ int clientSocket = accept(
+ s, (struct sockaddr *)&remoteAddr, &remoteAddrLen);
+
+ if (clientSocket >= 0) {
+ status_t err = MakeSocketNonBlocking(clientSocket);
+
+ if (err != OK) {
+ ALOGE("Unable to make client socket non blocking, "
+ "failed w/ error %d (%s)",
+ err, strerror(-err));
+
+ close(clientSocket);
+ clientSocket = -1;
+ } else {
+ in_addr_t addr = ntohl(remoteAddr.sin_addr.s_addr);
+
+ ALOGI("incoming connection from %d.%d.%d.%d:%d "
+ "(socket %d)",
+ (addr >> 24),
+ (addr >> 16) & 0xff,
+ (addr >> 8) & 0xff,
+ addr & 0xff,
+ ntohs(remoteAddr.sin_port),
+ clientSocket);
+
+ sp<Session> clientSession =
+ // using socket sd as sessionID
+ new Session(
+ mNextSessionID++,
+ Session::CONNECTED,
+ clientSocket,
+ session->getNotificationMessage());
+
+ sessionsToAdd.push_back(clientSession);
+ }
+ } else {
+ ALOGE("accept returned error %d (%s)",
+ errno, strerror(errno));
+ }
+ } else {
+ status_t err = session->readMore();
+ if (err != OK) {
+ ALOGI("readMore on socket %d failed w/ error %d (%s)",
+ s, err, strerror(-err));
+ }
+ }
+ }
+
+ if (FD_ISSET(s, &ws)) {
+ status_t err = session->writeMore();
+ if (err != OK) {
+ ALOGI("writeMore on socket %d failed w/ error %d (%s)",
+ s, err, strerror(-err));
+ }
+ }
+ }
+
+ while (!sessionsToAdd.empty()) {
+ sp<Session> session = *sessionsToAdd.begin();
+ sessionsToAdd.erase(sessionsToAdd.begin());
+
+ mSessions.add(session->sessionID(), session);
+
+ ALOGI("added clientSession %d", session->sessionID());
+ }
+ }
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/wifi-display/ANetworkSession.h b/media/libstagefright/wifi-display/ANetworkSession.h
new file mode 100644
index 0000000..0402317
--- /dev/null
+++ b/media/libstagefright/wifi-display/ANetworkSession.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_NETWORK_SESSION_H_
+
+#define A_NETWORK_SESSION_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/KeyedVector.h>
+#include <utils/RefBase.h>
+#include <utils/Thread.h>
+
+namespace android {
+
+struct AMessage;
+
+struct ANetworkSession : public RefBase {
+ ANetworkSession();
+
+ status_t start();
+ status_t stop();
+
+ status_t createRTSPClient(
+ const char *host, unsigned port, const sp<AMessage> &notify,
+ int32_t *sessionID);
+
+ status_t createRTSPServer(
+ unsigned port, const sp<AMessage> &notify, int32_t *sessionID);
+
+ status_t createUDPSession(
+ unsigned localPort, const sp<AMessage> &notify, int32_t *sessionID);
+
+ status_t createUDPSession(
+ unsigned localPort,
+ const char *remoteHost,
+ unsigned remotePort,
+ const sp<AMessage> &notify,
+ int32_t *sessionID);
+
+ status_t connectUDPSession(
+ int32_t sessionID, const char *remoteHost, unsigned remotePort);
+
+ status_t destroySession(int32_t sessionID);
+
+ status_t sendRequest(
+ int32_t sessionID, const void *data, ssize_t size = -1);
+
+ enum NotificationReason {
+ kWhatError,
+ kWhatConnected,
+ kWhatClientConnected,
+ kWhatData,
+ kWhatDatagram,
+ kWhatBinaryData,
+ };
+
+protected:
+ virtual ~ANetworkSession();
+
+private:
+ struct NetworkThread;
+ struct Session;
+
+ Mutex mLock;
+ sp<Thread> mThread;
+
+ int32_t mNextSessionID;
+
+ int mPipeFd[2];
+
+ KeyedVector<int32_t, sp<Session> > mSessions;
+
+ enum Mode {
+ kModeCreateUDPSession,
+ kModeCreateRTSPServer,
+ kModeCreateRTSPClient,
+ };
+ status_t createClientOrServer(
+ Mode mode,
+ unsigned port,
+ const char *remoteHost,
+ unsigned remotePort,
+ const sp<AMessage> &notify,
+ int32_t *sessionID);
+
+ void threadLoop();
+ void interrupt();
+
+ static status_t MakeSocketNonBlocking(int s);
+
+ DISALLOW_EVIL_CONSTRUCTORS(ANetworkSession);
+};
+
+} // namespace android
+
+#endif // A_NETWORK_SESSION_H_
diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk
new file mode 100644
index 0000000..114ff62
--- /dev/null
+++ b/media/libstagefright/wifi-display/Android.mk
@@ -0,0 +1,22 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ udptest.cpp \
+ ANetworkSession.cpp \
+ ParsedMessage.cpp \
+
+LOCAL_SHARED_LIBRARIES:= \
+ libbinder \
+ libgui \
+ libmedia \
+ libstagefright \
+ libstagefright_foundation \
+ libutils \
+
+LOCAL_MODULE:= udptest
+
+LOCAL_MODULE_TAGS := debug
+
+include $(BUILD_EXECUTABLE)
diff --git a/media/libstagefright/wifi-display/ParsedMessage.cpp b/media/libstagefright/wifi-display/ParsedMessage.cpp
new file mode 100644
index 0000000..c0e60c3
--- /dev/null
+++ b/media/libstagefright/wifi-display/ParsedMessage.cpp
@@ -0,0 +1,284 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ParsedMessage.h"
+
+#include <ctype.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+// static
+sp<ParsedMessage> ParsedMessage::Parse(
+ const char *data, size_t size, bool noMoreData, size_t *length) {
+ sp<ParsedMessage> msg = new ParsedMessage;
+ ssize_t res = msg->parse(data, size, noMoreData);
+
+ if (res < 0) {
+ *length = 0;
+ return NULL;
+ }
+
+ *length = res;
+ return msg;
+}
+
+ParsedMessage::ParsedMessage() {
+}
+
+ParsedMessage::~ParsedMessage() {
+}
+
+bool ParsedMessage::findString(const char *name, AString *value) const {
+ AString key = name;
+ key.tolower();
+
+ ssize_t index = mDict.indexOfKey(key);
+
+ if (index < 0) {
+ value->clear();
+
+ return false;
+ }
+
+ *value = mDict.valueAt(index);
+ return true;
+}
+
+bool ParsedMessage::findInt32(const char *name, int32_t *value) const {
+ AString stringValue;
+
+ if (!findString(name, &stringValue)) {
+ return false;
+ }
+
+ char *end;
+ *value = strtol(stringValue.c_str(), &end, 10);
+
+ if (end == stringValue.c_str() || *end != '\0') {
+ *value = 0;
+ return false;
+ }
+
+ return true;
+}
+
+const char *ParsedMessage::getContent() const {
+ return mContent.c_str();
+}
+
+ssize_t ParsedMessage::parse(const char *data, size_t size, bool noMoreData) {
+ if (size == 0) {
+ return -1;
+ }
+
+ ssize_t lastDictIndex = -1;
+
+ size_t offset = 0;
+ while (offset < size) {
+ size_t lineEndOffset = offset;
+ while (lineEndOffset + 1 < size
+ && (data[lineEndOffset] != '\r'
+ || data[lineEndOffset + 1] != '\n')) {
+ ++lineEndOffset;
+ }
+
+ if (lineEndOffset + 1 >= size) {
+ return -1;
+ }
+
+ AString line(&data[offset], lineEndOffset - offset);
+
+ if (offset == 0) {
+ // Special handling for the request/status line.
+
+ mDict.add(AString("_"), line);
+ offset = lineEndOffset + 2;
+
+ continue;
+ }
+
+ if (lineEndOffset == offset) {
+ offset += 2;
+ break;
+ }
+
+ if (line.c_str()[0] == ' ' || line.c_str()[0] == '\t') {
+ // Support for folded header values.
+
+ if (lastDictIndex >= 0) {
+ // Otherwise it's malformed since the first header line
+ // cannot continue anything...
+
+ AString &value = mDict.editValueAt(lastDictIndex);
+ value.append(line);
+ }
+
+ offset = lineEndOffset + 2;
+ continue;
+ }
+
+ ssize_t colonPos = line.find(":");
+ if (colonPos >= 0) {
+ AString key(line, 0, colonPos);
+ key.trim();
+ key.tolower();
+
+ line.erase(0, colonPos + 1);
+
+ lastDictIndex = mDict.add(key, line);
+ }
+
+ offset = lineEndOffset + 2;
+ }
+
+ for (size_t i = 0; i < mDict.size(); ++i) {
+ mDict.editValueAt(i).trim();
+ }
+
+ // Found the end of headers.
+
+ int32_t contentLength;
+ if (!findInt32("content-length", &contentLength) || contentLength < 0) {
+ contentLength = 0;
+ }
+
+ size_t totalLength = offset + contentLength;
+
+ if (size < totalLength) {
+ return -1;
+ }
+
+ mContent.setTo(&data[offset], contentLength);
+
+ return totalLength;
+}
+
+void ParsedMessage::getRequestField(size_t index, AString *field) const {
+ AString line;
+ CHECK(findString("_", &line));
+
+ size_t prevOffset = 0;
+ size_t offset = 0;
+ for (size_t i = 0; i <= index; ++i) {
+ ssize_t spacePos = line.find(" ", offset);
+
+ if (spacePos < 0) {
+ spacePos = line.size();
+ }
+
+ prevOffset = offset;
+ offset = spacePos + 1;
+ }
+
+ field->setTo(line, prevOffset, offset - prevOffset - 1);
+}
+
+bool ParsedMessage::getStatusCode(int32_t *statusCode) const {
+ AString statusCodeString;
+ getRequestField(1, &statusCodeString);
+
+ char *end;
+ *statusCode = strtol(statusCodeString.c_str(), &end, 10);
+
+ if (*end != '\0' || end == statusCodeString.c_str()
+ || (*statusCode) < 100 || (*statusCode) > 999) {
+ *statusCode = 0;
+ return false;
+ }
+
+ return true;
+}
+
+AString ParsedMessage::debugString() const {
+ AString line;
+ CHECK(findString("_", &line));
+
+ line.append("\n");
+
+ for (size_t i = 0; i < mDict.size(); ++i) {
+ const AString &key = mDict.keyAt(i);
+ const AString &value = mDict.valueAt(i);
+
+ if (key == AString("_")) {
+ continue;
+ }
+
+ line.append(key);
+ line.append(": ");
+ line.append(value);
+ line.append("\n");
+ }
+
+ line.append("\n");
+ line.append(mContent);
+
+ return line;
+}
+
+// static
+bool ParsedMessage::GetAttribute(
+ const char *s, const char *key, AString *value) {
+ value->clear();
+
+ size_t keyLen = strlen(key);
+
+ for (;;) {
+ while (isspace(*s)) {
+ ++s;
+ }
+
+ const char *colonPos = strchr(s, ';');
+
+ size_t len =
+ (colonPos == NULL) ? strlen(s) : colonPos - s;
+
+ if (len >= keyLen + 1 && s[keyLen] == '=' && !strncmp(s, key, keyLen)) {
+ value->setTo(&s[keyLen + 1], len - keyLen - 1);
+ return true;
+ }
+
+ if (colonPos == NULL) {
+ return false;
+ }
+
+ s = colonPos + 1;
+ }
+}
+
+// static
+bool ParsedMessage::GetInt32Attribute(
+ const char *s, const char *key, int32_t *value) {
+ AString stringValue;
+ if (!GetAttribute(s, key, &stringValue)) {
+ *value = 0;
+ return false;
+ }
+
+ char *end;
+ *value = strtol(stringValue.c_str(), &end, 10);
+
+ if (end == stringValue.c_str() || *end != '\0') {
+ *value = 0;
+ return false;
+ }
+
+ return true;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/wifi-display/ParsedMessage.h b/media/libstagefright/wifi-display/ParsedMessage.h
new file mode 100644
index 0000000..00f578f
--- /dev/null
+++ b/media/libstagefright/wifi-display/ParsedMessage.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AString.h>
+#include <utils/KeyedVector.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct ParsedMessage : public RefBase {
+ static sp<ParsedMessage> Parse(
+ const char *data, size_t size, bool noMoreData, size_t *length);
+
+ bool findString(const char *name, AString *value) const;
+ bool findInt32(const char *name, int32_t *value) const;
+
+ const char *getContent() const;
+
+ void getRequestField(size_t index, AString *field) const;
+ bool getStatusCode(int32_t *statusCode) const;
+
+ AString debugString() const;
+
+ static bool GetAttribute(const char *s, const char *key, AString *value);
+
+ static bool GetInt32Attribute(
+ const char *s, const char *key, int32_t *value);
+
+
+protected:
+ virtual ~ParsedMessage();
+
+private:
+ KeyedVector<AString, AString> mDict;
+ AString mContent;
+
+ ParsedMessage();
+
+ ssize_t parse(const char *data, size_t size, bool noMoreData);
+
+ DISALLOW_EVIL_CONSTRUCTORS(ParsedMessage);
+};
+
+} // namespace android
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 8cccf49..079599a 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -10,7 +10,8 @@
CameraService.cpp \
CameraClient.cpp \
Camera2Client.cpp \
- Camera2Device.cpp
+ Camera2Device.cpp \
+ CameraMetadata.cpp
LOCAL_SHARED_LIBRARIES:= \
libui \
diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp
index aa30501..3f12ed0 100644
--- a/services/camera/libcameraservice/Camera2Client.cpp
+++ b/services/camera/libcameraservice/Camera2Client.cpp
@@ -54,16 +54,14 @@
cameraId, cameraFacing, clientPid),
mDeviceInfo(NULL),
mPreviewStreamId(NO_STREAM),
- mPreviewRequest(NULL),
mCallbackStreamId(NO_STREAM),
mCallbackHeapId(0),
mCaptureStreamId(NO_STREAM),
- mCaptureRequest(NULL),
mRecordingStreamId(NO_STREAM),
- mRecordingRequest(NULL),
mRecordingHeapCount(kDefaultRecordingHeapCount)
{
ATRACE_CALL();
+ ALOGV("%s: Created client for camera %d", __FUNCTION__, cameraId);
mDevice = new Camera2Device(cameraId);
@@ -83,9 +81,14 @@
status_t Camera2Client::initialize(camera_module_t *module)
{
ATRACE_CALL();
- ALOGV("%s: E", __FUNCTION__);
+ ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
status_t res;
+ mFrameProcessor = new FrameProcessor(this);
+ String8 frameThreadName = String8::format("Camera2Client[%d]::FrameProcessor",
+ mCameraId);
+ mFrameProcessor->run(frameThreadName.string());
+
res = mDevice->initialize(module);
if (res != OK) {
ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
@@ -94,7 +97,6 @@
}
res = mDevice->setNotifyCallback(this);
- res = mDevice->setFrameListener(this);
res = buildDeviceInfo();
res = buildDefaultParameters();
@@ -116,13 +118,16 @@
Camera2Client::~Camera2Client() {
ATRACE_CALL();
- ALOGV("%s: Camera %d: Shutting down", __FUNCTION__, mCameraId);
+ ALOGV("%s: Camera %d: Shutting down client.", __FUNCTION__, mCameraId);
mDestructionStarted = true;
// Rewrite mClientPid to allow shutdown by CameraService
mClientPid = getCallingPid();
disconnect();
+
+ mFrameProcessor->requestExit();
+ ALOGV("%s: Camera %d: Shutdown complete", __FUNCTION__, mCameraId);
}
status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
@@ -291,33 +296,35 @@
result.appendFormat(" Recording stream ID: %d\n", mRecordingStreamId);
result.append(" Current requests:\n");
- if (mPreviewRequest != NULL) {
+ if (mPreviewRequest.entryCount() != 0) {
result.append(" Preview request:\n");
write(fd, result.string(), result.size());
- dump_indented_camera_metadata(mPreviewRequest, fd, 2, 6);
+ mPreviewRequest.dump(fd, 2, 6);
} else {
result.append(" Preview request: undefined\n");
write(fd, result.string(), result.size());
}
- if (mCaptureRequest != NULL) {
+ if (mCaptureRequest.entryCount() != 0) {
result = " Capture request:\n";
write(fd, result.string(), result.size());
- dump_indented_camera_metadata(mCaptureRequest, fd, 2, 6);
+ mCaptureRequest.dump(fd, 2, 6);
} else {
result = " Capture request: undefined\n";
write(fd, result.string(), result.size());
}
- if (mRecordingRequest != NULL) {
+ if (mRecordingRequest.entryCount() != 0) {
result = " Recording request:\n";
write(fd, result.string(), result.size());
- dump_indented_camera_metadata(mRecordingRequest, fd, 2, 6);
+ mRecordingRequest.dump(fd, 2, 6);
} else {
result = " Recording request: undefined\n";
write(fd, result.string(), result.size());
}
+ mFrameProcessor->dump(fd, args);
+
result = " Device dump:\n";
write(fd, result.string(), result.size());
@@ -523,7 +530,7 @@
// Already running preview - need to stop and create a new stream
// TODO: Optimize this so that we don't wait for old stream to drain
// before spinning up new stream
- mDevice->setStreamingRequest(NULL);
+ mDevice->clearStreamingRequest();
k.mParameters.state = WAITING_FOR_PREVIEW_WINDOW;
break;
}
@@ -634,7 +641,7 @@
}
}
- if (mPreviewRequest == NULL) {
+ if (mPreviewRequest.entryCount() == 0) {
res = updatePreviewRequest(params);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to create preview request: %s (%d)",
@@ -646,20 +653,21 @@
if (callbacksEnabled) {
uint8_t outputStreams[2] =
{ mPreviewStreamId, mCallbackStreamId };
- res = updateEntry(mPreviewRequest,
+ res = mPreviewRequest.update(
ANDROID_REQUEST_OUTPUT_STREAMS,
outputStreams, 2);
} else {
- res = updateEntry(mPreviewRequest,
+ uint8_t outputStreams[1] = { mPreviewStreamId };
+ res = mPreviewRequest.update(
ANDROID_REQUEST_OUTPUT_STREAMS,
- &mPreviewStreamId, 1);
+ outputStreams, 1);
}
if (res != OK) {
ALOGE("%s: Camera %d: Unable to set up preview request: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
return res;
}
- res = sort_camera_metadata(mPreviewRequest);
+ res = mPreviewRequest.sort();
if (res != OK) {
ALOGE("%s: Camera %d: Error sorting preview request: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
@@ -709,7 +717,7 @@
case RECORD:
// no break - identical to preview
case PREVIEW:
- mDevice->setStreamingRequest(NULL);
+ mDevice->clearStreamingRequest();
mDevice->waitUntilDrained();
// no break
case WAITING_FOR_PREVIEW_WINDOW: {
@@ -814,7 +822,7 @@
}
}
- if (mRecordingRequest == NULL) {
+ if (mRecordingRequest.entryCount() == 0) {
res = updateRecordingRequest(params);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to create recording request: %s (%d)",
@@ -826,12 +834,12 @@
if (callbacksEnabled) {
uint8_t outputStreams[3] =
{ mPreviewStreamId, mRecordingStreamId, mCallbackStreamId };
- res = updateEntry(mRecordingRequest,
+ res = mRecordingRequest.update(
ANDROID_REQUEST_OUTPUT_STREAMS,
outputStreams, 3);
} else {
uint8_t outputStreams[2] = { mPreviewStreamId, mRecordingStreamId };
- res = updateEntry(mRecordingRequest,
+ res = mRecordingRequest.update(
ANDROID_REQUEST_OUTPUT_STREAMS,
outputStreams, 2);
}
@@ -840,7 +848,7 @@
__FUNCTION__, mCameraId, strerror(-res), res);
return res;
}
- res = sort_camera_metadata(mRecordingRequest);
+ res = mRecordingRequest.sort();
if (res != OK) {
ALOGE("%s: Camera %d: Error sorting recording request: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
@@ -1043,7 +1051,7 @@
return res;
}
- if (mCaptureRequest == NULL) {
+ if (mCaptureRequest.entryCount() == 0) {
res = updateCaptureRequest(k.mParameters);
if (res != OK) {
ALOGE("%s: Camera %d: Can't create still image capture request: "
@@ -1052,8 +1060,6 @@
}
}
- camera_metadata_entry_t outputStreams;
-
bool callbacksEnabled = k.mParameters.previewCallbackFlags &
CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK;
bool recordingEnabled = (k.mParameters.state == RECORD);
@@ -1063,29 +1069,29 @@
switch ( streamSwitch ) {
case 0: { // No recording, callbacks
uint8_t streamIds[2] = { mPreviewStreamId, mCaptureStreamId };
- res = updateEntry(mCaptureRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
- &streamIds, 2);
+ res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+ streamIds, 2);
break;
}
case 1: { // Recording
uint8_t streamIds[3] = { mPreviewStreamId, mRecordingStreamId,
mCaptureStreamId };
- res = updateEntry(mCaptureRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
- &streamIds, 3);
+ res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+ streamIds, 3);
break;
}
case 2: { // Callbacks
uint8_t streamIds[3] = { mPreviewStreamId, mCallbackStreamId,
mCaptureStreamId };
- res = updateEntry(mCaptureRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
- &streamIds, 3);
+ res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+ streamIds, 3);
break;
}
case 3: { // Both
uint8_t streamIds[4] = { mPreviewStreamId, mCallbackStreamId,
mRecordingStreamId, mCaptureStreamId };
- res = updateEntry(mCaptureRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
- &streamIds, 4);
+ res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+ streamIds, 4);
break;
}
};
@@ -1095,22 +1101,22 @@
__FUNCTION__, mCameraId, strerror(-res), res);
return res;
}
- res = sort_camera_metadata(mCaptureRequest);
+ res = mCaptureRequest.sort();
if (res != OK) {
ALOGE("%s: Camera %d: Unable to sort capture request: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
return res;
}
- camera_metadata_t *captureCopy = clone_camera_metadata(mCaptureRequest);
- if (captureCopy == NULL) {
+ CameraMetadata captureCopy = mCaptureRequest;
+ if (captureCopy.entryCount() == 0) {
ALOGE("%s: Camera %d: Unable to copy capture request for HAL device",
__FUNCTION__, mCameraId);
return NO_MEMORY;
}
if (k.mParameters.state == PREVIEW) {
- res = mDevice->setStreamingRequest(NULL);
+ res = mDevice->clearStreamingRequest();
if (res != OK) {
ALOGE("%s: Camera %d: Unable to stop preview for still capture: "
"%s (%d)",
@@ -1182,7 +1188,7 @@
previewWidth, previewHeight);
return BAD_VALUE;
}
- camera_metadata_entry_t availablePreviewSizes =
+ camera_metadata_ro_entry_t availablePreviewSizes =
staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
for (i = 0; i < availablePreviewSizes.count; i += 2 ) {
if (availablePreviewSizes.data.i32[i] == previewWidth &&
@@ -1203,7 +1209,7 @@
if (previewFpsRange[0] != k.mParameters.previewFpsRange[0] ||
previewFpsRange[1] != k.mParameters.previewFpsRange[1]) {
fpsRangeChanged = true;
- camera_metadata_entry_t availablePreviewFpsRanges =
+ camera_metadata_ro_entry_t availablePreviewFpsRanges =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
for (i = 0; i < availablePreviewFpsRanges.count; i += 2) {
if ((availablePreviewFpsRanges.data.i32[i] ==
@@ -1229,7 +1235,7 @@
"is active!", __FUNCTION__);
return BAD_VALUE;
}
- camera_metadata_entry_t availableFormats =
+ camera_metadata_ro_entry_t availableFormats =
staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS);
for (i = 0; i < availableFormats.count; i++) {
if (availableFormats.data.i32[i] == previewFormat) break;
@@ -1247,7 +1253,7 @@
if (!fpsRangeChanged) {
previewFps = newParams.getPreviewFrameRate();
if (previewFps != k.mParameters.previewFps) {
- camera_metadata_entry_t availableFrameRates =
+ camera_metadata_ro_entry_t availableFrameRates =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
for (i = 0; i < availableFrameRates.count; i+=2) {
if (availableFrameRates.data.i32[i] == previewFps) break;
@@ -1267,7 +1273,7 @@
newParams.getPictureSize(&pictureWidth, &pictureHeight);
if (pictureWidth == k.mParameters.pictureWidth ||
pictureHeight == k.mParameters.pictureHeight) {
- camera_metadata_entry_t availablePictureSizes =
+ camera_metadata_ro_entry_t availablePictureSizes =
staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
for (i = 0; i < availablePictureSizes.count; i+=2) {
if (availablePictureSizes.data.i32[i] == pictureWidth &&
@@ -1288,7 +1294,7 @@
newParams.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
if (jpegThumbSize[0] != k.mParameters.jpegThumbSize[0] ||
jpegThumbSize[1] != k.mParameters.jpegThumbSize[1]) {
- camera_metadata_entry_t availableJpegThumbSizes =
+ camera_metadata_ro_entry_t availableJpegThumbSizes =
staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES);
for (i = 0; i < availableJpegThumbSizes.count; i+=2) {
if (availableJpegThumbSizes.data.i32[i] == jpegThumbSize[0] &&
@@ -1392,7 +1398,7 @@
int wbMode = wbModeStringToEnum(
newParams.get(CameraParameters::KEY_WHITE_BALANCE) );
if (wbMode != k.mParameters.wbMode) {
- camera_metadata_entry_t availableWbModes =
+ camera_metadata_ro_entry_t availableWbModes =
staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES);
for (i = 0; i < availableWbModes.count; i++) {
if (wbMode == availableWbModes.data.u8[i]) break;
@@ -1409,7 +1415,7 @@
int effectMode = effectModeStringToEnum(
newParams.get(CameraParameters::KEY_EFFECT) );
if (effectMode != k.mParameters.effectMode) {
- camera_metadata_entry_t availableEffectModes =
+ camera_metadata_ro_entry_t availableEffectModes =
staticInfo(ANDROID_CONTROL_AVAILABLE_EFFECTS);
for (i = 0; i < availableEffectModes.count; i++) {
if (effectMode == availableEffectModes.data.u8[i]) break;
@@ -1426,7 +1432,7 @@
int antibandingMode = abModeStringToEnum(
newParams.get(CameraParameters::KEY_ANTIBANDING) );
if (antibandingMode != k.mParameters.antibandingMode) {
- camera_metadata_entry_t availableAbModes =
+ camera_metadata_ro_entry_t availableAbModes =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES);
for (i = 0; i < availableAbModes.count; i++) {
if (antibandingMode == availableAbModes.data.u8[i]) break;
@@ -1444,7 +1450,7 @@
newParams.get(CameraParameters::KEY_SCENE_MODE) );
if (sceneMode != k.mParameters.sceneMode &&
sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) {
- camera_metadata_entry_t availableSceneModes =
+ camera_metadata_ro_entry_t availableSceneModes =
staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES);
for (i = 0; i < availableSceneModes.count; i++) {
if (sceneMode == availableSceneModes.data.u8[i]) break;
@@ -1461,7 +1467,7 @@
Parameters::flashMode_t flashMode = flashModeStringToEnum(
newParams.get(CameraParameters::KEY_FLASH_MODE) );
if (flashMode != k.mParameters.flashMode) {
- camera_metadata_entry_t flashAvailable =
+ camera_metadata_ro_entry_t flashAvailable =
staticInfo(ANDROID_FLASH_AVAILABLE, 1, 1);
if (!flashAvailable.data.u8[0] &&
flashMode != Parameters::FLASH_MODE_OFF) {
@@ -1470,7 +1476,7 @@
newParams.get(CameraParameters::KEY_FLASH_MODE));
return BAD_VALUE;
} else if (flashMode == Parameters::FLASH_MODE_RED_EYE) {
- camera_metadata_entry_t availableAeModes =
+ camera_metadata_ro_entry_t availableAeModes =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_MODES);
for (i = 0; i < availableAeModes.count; i++) {
if (flashMode == availableAeModes.data.u8[i]) break;
@@ -1494,7 +1500,7 @@
newParams.get(CameraParameters::KEY_FOCUS_MODE));
if (focusMode != k.mParameters.focusMode) {
if (focusMode != Parameters::FOCUS_MODE_FIXED) {
- camera_metadata_entry_t minFocusDistance =
+ camera_metadata_ro_entry_t minFocusDistance =
staticInfo(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE);
if (minFocusDistance.data.f[0] == 0) {
ALOGE("%s: Requested focus mode \"%s\" is not available: "
@@ -1503,7 +1509,7 @@
newParams.get(CameraParameters::KEY_FOCUS_MODE));
return BAD_VALUE;
} else if (focusMode != Parameters::FOCUS_MODE_INFINITY) {
- camera_metadata_entry_t availableFocusModes =
+ camera_metadata_ro_entry_t availableFocusModes =
staticInfo(ANDROID_CONTROL_AF_AVAILABLE_MODES);
for (i = 0; i < availableFocusModes.count; i++) {
if (focusMode == availableFocusModes.data.u8[i]) break;
@@ -1534,7 +1540,7 @@
// EXPOSURE_COMPENSATION
int exposureCompensation =
newParams.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
- camera_metadata_entry_t exposureCompensationRange =
+ camera_metadata_ro_entry_t exposureCompensationRange =
staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_RANGE);
if (exposureCompensation < exposureCompensationRange.data.i32[0] ||
exposureCompensation > exposureCompensationRange.data.i32[1]) {
@@ -1555,7 +1561,7 @@
Vector<Parameters::Area> meteringAreas;
res = parseAreas(newParams.get(CameraParameters::KEY_METERING_AREAS),
&meteringAreas);
- if (res == OK) res = validateAreas(focusingAreas, max3aRegions);
+ if (res == OK) res = validateAreas(meteringAreas, max3aRegions);
if (res != OK) {
ALOGE("%s: Requested metering areas are malformed: %s",
__FUNCTION__,
@@ -1581,7 +1587,7 @@
__FUNCTION__);
return BAD_VALUE;
}
- camera_metadata_entry_t availableVideoSizes =
+ camera_metadata_ro_entry_t availableVideoSizes =
staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
for (i = 0; i < availableVideoSizes.count; i += 2 ) {
if (availableVideoSizes.data.i32[i] == videoWidth &&
@@ -1601,7 +1607,7 @@
// VIDEO_STABILIZATION
bool videoStabilization = boolFromString(
newParams.get(CameraParameters::KEY_VIDEO_STABILIZATION) );
- camera_metadata_entry_t availableVideoStabilizationModes =
+ camera_metadata_ro_entry_t availableVideoStabilizationModes =
staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
if (videoStabilization && availableVideoStabilizationModes.count == 1) {
ALOGE("%s: Video stabilization not supported", __FUNCTION__);
@@ -1990,106 +1996,130 @@
__FUNCTION__, newState, triggerId);
}
-void Camera2Client::onNewFrameAvailable() {
+Camera2Client::FrameProcessor::FrameProcessor(wp<Camera2Client> client):
+ Thread(false), mClient(client) {
+}
+
+Camera2Client::FrameProcessor::~FrameProcessor() {
+ ALOGV("%s: Exit", __FUNCTION__);
+}
+
+void Camera2Client::FrameProcessor::dump(int fd, const Vector<String16>& args) {
+ String8 result(" Latest received frame:\n");
+ write(fd, result.string(), result.size());
+ mLastFrame.dump(fd, 2, 6);
+}
+
+bool Camera2Client::FrameProcessor::threadLoop() {
status_t res;
- camera_metadata_t *frame = NULL;
- do {
- res = mDevice->getNextFrame(&frame);
- if (res != OK) {
- ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
- return;
- }
- if (frame != NULL) {
- camera_metadata_entry_t entry;
- res = find_camera_metadata_entry(frame, ANDROID_REQUEST_FRAME_COUNT,
- &entry);
- if (res != OK) {
- ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
- break;
- }
- res = processFrameFaceDetect(frame);
- if (res != OK) break;
-
- free_camera_metadata(frame);
- }
- } while (frame != NULL);
-
- if (frame != NULL) {
- free_camera_metadata(frame);
+ sp<Camera2Device> device;
+ {
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) return false;
+ device = client->mDevice;
}
+
+ res = device->waitForNextFrame(kWaitDuration);
+ if (res == OK) {
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) return false;
+ processNewFrames(client);
+ } else if (res != TIMED_OUT) {
+ ALOGE("Camera2Client::FrameProcessor: Error waiting for new "
+ "frames: %s (%d)", strerror(-res), res);
+ }
+
+ return true;
+}
+
+void Camera2Client::FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
+ status_t res;
+ CameraMetadata frame;
+ while ( (res = client->mDevice->getNextFrame(&frame)) == OK) {
+ camera_metadata_entry_t entry;
+ entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
+ if (entry.count == 0) {
+ ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
+ __FUNCTION__, client->mCameraId, strerror(-res), res);
+ break;
+ }
+
+ res = processFaceDetect(frame, client);
+ if (res != OK) break;
+
+ mLastFrame.acquire(frame);
+ }
+ if (res != NOT_ENOUGH_DATA) {
+ ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
+ __FUNCTION__, client->mCameraId, strerror(-res), res);
+ return;
+ }
+
return;
}
-status_t Camera2Client::processFrameFaceDetect(camera_metadata_t *frame) {
+status_t Camera2Client::FrameProcessor::processFaceDetect(
+ const CameraMetadata &frame, sp<Camera2Client> &client) {
status_t res;
- camera_metadata_entry_t entry;
+ camera_metadata_ro_entry_t entry;
bool enableFaceDetect;
{
- LockedParameters::Key k(mParameters);
+ LockedParameters::Key k(client->mParameters);
enableFaceDetect = k.mParameters.enableFaceDetect;
}
- res = find_camera_metadata_entry(frame, ANDROID_STATS_FACE_DETECT_MODE,
- &entry);
- // TODO: Remove this check once things are more compliant. For now, assume that
- // if we can't find the face detect mode, then it's probably not working.
- if (res == NAME_NOT_FOUND) {
+ entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE);
+
+ // TODO: This should be an error once implementations are compliant
+ if (entry.count == 0) {
return OK;
- } else if (res != OK) {
- ALOGE("%s: Camera %d: Error reading face mode: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
- return res;
}
+
uint8_t faceDetectMode = entry.data.u8[0];
if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) {
- res = find_camera_metadata_entry(frame, ANDROID_STATS_FACE_RECTANGLES,
- &entry);
- if (res != OK) {
- ALOGE("%s: Camera %d: Error reading face rectangles: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
+ entry = frame.find(ANDROID_STATS_FACE_RECTANGLES);
+ if (entry.count == 0) {
+ ALOGE("%s: Camera %d: Unable to read face rectangles",
+ __FUNCTION__, client->mCameraId);
return res;
}
camera_frame_metadata metadata;
metadata.number_of_faces = entry.count / 4;
if (metadata.number_of_faces >
- mDeviceInfo->maxFaces) {
+ client->mDeviceInfo->maxFaces) {
ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
- __FUNCTION__, mCameraId,
- metadata.number_of_faces, mDeviceInfo->maxFaces);
+ __FUNCTION__, client->mCameraId,
+ metadata.number_of_faces, client->mDeviceInfo->maxFaces);
return res;
}
- int32_t *faceRects = entry.data.i32;
+ const int32_t *faceRects = entry.data.i32;
- res = find_camera_metadata_entry(frame, ANDROID_STATS_FACE_SCORES,
- &entry);
- if (res != OK) {
- ALOGE("%s: Camera %d: Error reading face scores: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
+ entry = frame.find(ANDROID_STATS_FACE_SCORES);
+ if (entry.count == 0) {
+ ALOGE("%s: Camera %d: Unable to read face scores",
+ __FUNCTION__, client->mCameraId);
return res;
}
- uint8_t *faceScores = entry.data.u8;
+ const uint8_t *faceScores = entry.data.u8;
- int32_t *faceLandmarks = NULL;
- int32_t *faceIds = NULL;
+ const int32_t *faceLandmarks = NULL;
+ const int32_t *faceIds = NULL;
if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
- res = find_camera_metadata_entry(frame, ANDROID_STATS_FACE_LANDMARKS,
- &entry);
- if (res != OK) {
- ALOGE("%s: Camera %d: Error reading face landmarks: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
+ entry = frame.find(ANDROID_STATS_FACE_LANDMARKS);
+ if (entry.count == 0) {
+ ALOGE("%s: Camera %d: Unable to read face landmarks",
+ __FUNCTION__, client->mCameraId);
return res;
}
faceLandmarks = entry.data.i32;
- res = find_camera_metadata_entry(frame, ANDROID_STATS_FACE_IDS,
- &entry);
- if (res != OK) {
- ALOGE("%s: Camera %d: Error reading face IDs: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
+ entry = frame.find(ANDROID_STATS_FACE_IDS);
+
+ if (entry.count == 0) {
+ ALOGE("%s: Camera %d: Unable to read face IDs",
+ __FUNCTION__, client->mCameraId);
return res;
}
faceIds = entry.data.i32;
@@ -2101,20 +2131,26 @@
for (int i = 0; i < metadata.number_of_faces; i++) {
camera_face_t face;
- face.rect[0] = arrayXToNormalized(faceRects[i*4 + 0]);
- face.rect[1] = arrayYToNormalized(faceRects[i*4 + 1]);
- face.rect[2] = arrayXToNormalized(faceRects[i*4 + 2]);
- face.rect[3] = arrayYToNormalized(faceRects[i*4 + 3]);
+ face.rect[0] = client->arrayXToNormalized(faceRects[i*4 + 0]);
+ face.rect[1] = client->arrayYToNormalized(faceRects[i*4 + 1]);
+ face.rect[2] = client->arrayXToNormalized(faceRects[i*4 + 2]);
+ face.rect[3] = client->arrayYToNormalized(faceRects[i*4 + 3]);
face.score = faceScores[i];
if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
face.id = faceIds[i];
- face.left_eye[0] = arrayXToNormalized(faceLandmarks[i*6 + 0]);
- face.left_eye[1] = arrayYToNormalized(faceLandmarks[i*6 + 1]);
- face.right_eye[0] = arrayXToNormalized(faceLandmarks[i*6 + 2]);
- face.right_eye[1] = arrayYToNormalized(faceLandmarks[i*6 + 3]);
- face.mouth[0] = arrayXToNormalized(faceLandmarks[i*6 + 4]);
- face.mouth[1] = arrayYToNormalized(faceLandmarks[i*6 + 5]);
+ face.left_eye[0] =
+ client->arrayXToNormalized(faceLandmarks[i*6 + 0]);
+ face.left_eye[1] =
+ client->arrayYToNormalized(faceLandmarks[i*6 + 1]);
+ face.right_eye[0] =
+ client->arrayXToNormalized(faceLandmarks[i*6 + 2]);
+ face.right_eye[1] =
+ client->arrayYToNormalized(faceLandmarks[i*6 + 3]);
+ face.mouth[0] =
+ client->arrayXToNormalized(faceLandmarks[i*6 + 4]);
+ face.mouth[1] =
+ client->arrayYToNormalized(faceLandmarks[i*6 + 5]);
} else {
face.id = 0;
face.left_eye[0] = face.left_eye[1] = -2000;
@@ -2126,9 +2162,9 @@
metadata.faces = faces.editArray();
{
- Mutex::Autolock iccl(mICameraClientLock);
- if (mCameraClient != NULL) {
- mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
+ Mutex::Autolock iccl(client->mICameraClientLock);
+ if (client->mCameraClient != NULL) {
+ client->mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
NULL, &metadata);
}
}
@@ -2426,23 +2462,19 @@
}
}
-camera_metadata_entry_t Camera2Client::staticInfo(uint32_t tag,
- size_t minCount, size_t maxCount) {
+camera_metadata_ro_entry_t Camera2Client::staticInfo(uint32_t tag,
+ size_t minCount, size_t maxCount) const {
status_t res;
- camera_metadata_entry_t entry;
- res = find_camera_metadata_entry(mDevice->info(),
- tag,
- &entry);
- if (CC_UNLIKELY( res != OK )) {
+ camera_metadata_ro_entry_t entry = mDevice->info().find(tag);
+
+ if (CC_UNLIKELY( entry.count == 0 )) {
const char* tagSection = get_camera_metadata_section_name(tag);
if (tagSection == NULL) tagSection = "<unknown>";
const char* tagName = get_camera_metadata_tag_name(tag);
if (tagName == NULL) tagName = "<unknown>";
- ALOGE("Error finding static metadata entry '%s.%s' (%x): %s (%d)",
- tagSection, tagName, tag, strerror(-res), res);
- entry.count = 0;
- entry.data.u8 = NULL;
+ ALOGE("Error finding static metadata entry '%s.%s' (%x)",
+ tagSection, tagName, tag);
} else if (CC_UNLIKELY(
(minCount != 0 && entry.count < minCount) ||
(maxCount != 0 && entry.count > maxCount) ) ) {
@@ -2453,8 +2485,6 @@
ALOGE("Malformed static metadata entry '%s.%s' (%x):"
"Expected between %d and %d values, but got %d values",
tagSection, tagName, tag, minCount, maxCount, entry.count);
- entry.count = 0;
- entry.data.u8 = NULL;
}
return entry;
@@ -2469,13 +2499,13 @@
DeviceInfo *deviceInfo = new DeviceInfo;
mDeviceInfo = deviceInfo;
- camera_metadata_entry_t activeArraySize =
+ camera_metadata_ro_entry_t activeArraySize =
staticInfo(ANDROID_SENSOR_ACTIVE_ARRAY_SIZE, 2, 2);
if (!activeArraySize.count) return NO_INIT;
deviceInfo->arrayWidth = activeArraySize.data.i32[0];
deviceInfo->arrayHeight = activeArraySize.data.i32[1];
- camera_metadata_entry_t availableFaceDetectModes =
+ camera_metadata_ro_entry_t availableFaceDetectModes =
staticInfo(ANDROID_STATS_AVAILABLE_FACE_DETECT_MODES);
if (!availableFaceDetectModes.count) return NO_INIT;
@@ -2504,7 +2534,7 @@
}
}
- camera_metadata_entry_t maxFacesDetected =
+ camera_metadata_ro_entry_t maxFacesDetected =
staticInfo(ANDROID_STATS_MAX_FACE_COUNT, 1, 1);
if (!maxFacesDetected.count) return NO_INIT;
@@ -2520,7 +2550,7 @@
status_t res;
CameraParameters params;
- camera_metadata_entry_t availableProcessedSizes =
+ camera_metadata_ro_entry_t availableProcessedSizes =
staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 2);
if (!availableProcessedSizes.count) return NO_INIT;
@@ -2549,7 +2579,7 @@
supportedPreviewSizes);
}
- camera_metadata_entry_t availableFpsRanges =
+ camera_metadata_ro_entry_t availableFpsRanges =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
if (!availableFpsRanges.count) return NO_INIT;
@@ -2580,7 +2610,7 @@
k.mParameters.previewTransform = degToTransform(0,
mCameraFacing == CAMERA_FACING_FRONT);
- camera_metadata_entry_t availableFormats =
+ camera_metadata_ro_entry_t availableFormats =
staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS);
{
@@ -2648,7 +2678,7 @@
supportedPreviewFrameRates);
}
- camera_metadata_entry_t availableJpegSizes =
+ camera_metadata_ro_entry_t availableJpegSizes =
staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 2);
if (!availableJpegSizes.count) return NO_INIT;
@@ -2675,7 +2705,7 @@
params.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
CameraParameters::PIXEL_FORMAT_JPEG);
- camera_metadata_entry_t availableJpegThumbnailSizes =
+ camera_metadata_ro_entry_t availableJpegThumbnailSizes =
staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 2);
if (!availableJpegThumbnailSizes.count) return NO_INIT;
@@ -2718,7 +2748,7 @@
params.set(CameraParameters::KEY_WHITE_BALANCE,
CameraParameters::WHITE_BALANCE_AUTO);
- camera_metadata_entry_t availableWhiteBalanceModes =
+ camera_metadata_ro_entry_t availableWhiteBalanceModes =
staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES);
{
String8 supportedWhiteBalance;
@@ -2779,7 +2809,7 @@
params.set(CameraParameters::KEY_EFFECT,
CameraParameters::EFFECT_NONE);
- camera_metadata_entry_t availableEffects =
+ camera_metadata_ro_entry_t availableEffects =
staticInfo(ANDROID_CONTROL_AVAILABLE_EFFECTS);
if (!availableEffects.count) return NO_INIT;
{
@@ -2839,7 +2869,7 @@
params.set(CameraParameters::KEY_ANTIBANDING,
CameraParameters::ANTIBANDING_AUTO);
- camera_metadata_entry_t availableAntibandingModes =
+ camera_metadata_ro_entry_t availableAntibandingModes =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES);
if (!availableAntibandingModes.count) return NO_INIT;
{
@@ -2881,7 +2911,7 @@
params.set(CameraParameters::KEY_SCENE_MODE,
CameraParameters::SCENE_MODE_AUTO);
- camera_metadata_entry_t availableSceneModes =
+ camera_metadata_ro_entry_t availableSceneModes =
staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES);
if (!availableSceneModes.count) return NO_INIT;
{
@@ -2973,11 +3003,11 @@
}
}
- camera_metadata_entry_t flashAvailable =
+ camera_metadata_ro_entry_t flashAvailable =
staticInfo(ANDROID_FLASH_AVAILABLE, 1, 1);
if (!flashAvailable.count) return NO_INIT;
- camera_metadata_entry_t availableAeModes =
+ camera_metadata_ro_entry_t availableAeModes =
staticInfo(ANDROID_CONTROL_AE_AVAILABLE_MODES);
if (!availableAeModes.count) return NO_INIT;
@@ -3009,11 +3039,11 @@
CameraParameters::FLASH_MODE_OFF);
}
- camera_metadata_entry_t minFocusDistance =
+ camera_metadata_ro_entry_t minFocusDistance =
staticInfo(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE, 1, 1);
if (!minFocusDistance.count) return NO_INIT;
- camera_metadata_entry_t availableAfModes =
+ camera_metadata_ro_entry_t availableAfModes =
staticInfo(ANDROID_CONTROL_AF_AVAILABLE_MODES);
if (!availableAfModes.count) return NO_INIT;
@@ -3070,7 +3100,7 @@
supportedFocusModes);
}
- camera_metadata_entry_t max3aRegions =
+ camera_metadata_ro_entry_t max3aRegions =
staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1);
if (!max3aRegions.count) return NO_INIT;
@@ -3081,14 +3111,14 @@
k.mParameters.focusingAreas.clear();
k.mParameters.focusingAreas.add(Parameters::Area(0,0,0,0,0));
- camera_metadata_entry_t availableFocalLengths =
+ camera_metadata_ro_entry_t availableFocalLengths =
staticInfo(ANDROID_LENS_AVAILABLE_FOCAL_LENGTHS);
if (!availableFocalLengths.count) return NO_INIT;
float minFocalLength = availableFocalLengths.data.f[0];
params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, minFocalLength);
- camera_metadata_entry_t sensorSize =
+ camera_metadata_ro_entry_t sensorSize =
staticInfo(ANDROID_SENSOR_PHYSICAL_SIZE, 2, 2);
if (!sensorSize.count) return NO_INIT;
@@ -3104,7 +3134,7 @@
params.set(CameraParameters::KEY_EXPOSURE_COMPENSATION,
k.mParameters.exposureCompensation);
- camera_metadata_entry_t exposureCompensationRange =
+ camera_metadata_ro_entry_t exposureCompensationRange =
staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_RANGE, 2, 2);
if (!exposureCompensationRange.count) return NO_INIT;
@@ -3113,7 +3143,7 @@
params.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION,
exposureCompensationRange.data.i32[0]);
- camera_metadata_entry_t exposureCompensationStep =
+ camera_metadata_ro_entry_t exposureCompensationStep =
staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_STEP, 1, 1);
if (!exposureCompensationStep.count) return NO_INIT;
@@ -3143,7 +3173,7 @@
params.set(CameraParameters::KEY_ZOOM, k.mParameters.zoom);
params.set(CameraParameters::KEY_MAX_ZOOM, NUM_ZOOM_STEPS - 1);
- camera_metadata_entry_t maxDigitalZoom =
+ camera_metadata_ro_entry_t maxDigitalZoom =
staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM, 1, 1);
if (!maxDigitalZoom.count) return NO_INIT;
@@ -3187,7 +3217,7 @@
params.set(CameraParameters::KEY_VIDEO_STABILIZATION,
CameraParameters::FALSE);
- camera_metadata_entry_t availableVideoStabilizationModes =
+ camera_metadata_ro_entry_t availableVideoStabilizationModes =
staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
if (!availableVideoStabilizationModes.count) return NO_INIT;
@@ -3321,7 +3351,7 @@
status_t Camera2Client::updatePreviewRequest(const Parameters ¶ms) {
ATRACE_CALL();
status_t res;
- if (mPreviewRequest == NULL) {
+ if (mPreviewRequest.entryCount() == 0) {
res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
&mPreviewRequest);
if (res != OK) {
@@ -3331,7 +3361,7 @@
}
}
- res = updateRequestCommon(mPreviewRequest, params);
+ res = updateRequestCommon(&mPreviewRequest, params);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to update common entries of preview "
"request: %s (%d)", __FUNCTION__, mCameraId,
@@ -3404,7 +3434,7 @@
ATRACE_CALL();
status_t res;
// Find out buffer size for JPEG
- camera_metadata_entry_t maxJpegSize =
+ camera_metadata_ro_entry_t maxJpegSize =
staticInfo(ANDROID_JPEG_MAX_SIZE);
if (maxJpegSize.count == 0) {
ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
@@ -3471,7 +3501,7 @@
status_t Camera2Client::updateCaptureRequest(const Parameters ¶ms) {
ATRACE_CALL();
status_t res;
- if (mCaptureRequest == NULL) {
+ if (mCaptureRequest.entryCount() == 0) {
res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_STILL_CAPTURE,
&mCaptureRequest);
if (res != OK) {
@@ -3481,7 +3511,7 @@
}
}
- res = updateRequestCommon(mCaptureRequest, params);
+ res = updateRequestCommon(&mCaptureRequest, params);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to update common entries of capture "
"request: %s (%d)", __FUNCTION__, mCameraId,
@@ -3489,46 +3519,39 @@
return res;
}
- res = updateEntry(mCaptureRequest,
- ANDROID_JPEG_THUMBNAIL_SIZE,
+ res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_SIZE,
params.jpegThumbSize, 2);
if (res != OK) return res;
- res = updateEntry(mCaptureRequest,
- ANDROID_JPEG_THUMBNAIL_QUALITY,
+ res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
¶ms.jpegThumbQuality, 1);
if (res != OK) return res;
- res = updateEntry(mCaptureRequest,
- ANDROID_JPEG_QUALITY,
+ res = mCaptureRequest.update(ANDROID_JPEG_QUALITY,
¶ms.jpegQuality, 1);
if (res != OK) return res;
- res = updateEntry(mCaptureRequest,
+ res = mCaptureRequest.update(
ANDROID_JPEG_ORIENTATION,
¶ms.jpegRotation, 1);
if (res != OK) return res;
if (params.gpsEnabled) {
- res = updateEntry(mCaptureRequest,
+ res = mCaptureRequest.update(
ANDROID_JPEG_GPS_COORDINATES,
params.gpsCoordinates, 3);
if (res != OK) return res;
- res = updateEntry(mCaptureRequest,
+ res = mCaptureRequest.update(
ANDROID_JPEG_GPS_TIMESTAMP,
¶ms.gpsTimestamp, 1);
if (res != OK) return res;
- res = updateEntry(mCaptureRequest,
+ res = mCaptureRequest.update(
ANDROID_JPEG_GPS_PROCESSING_METHOD,
- params.gpsProcessingMethod.string(),
- params.gpsProcessingMethod.size());
+ params.gpsProcessingMethod);
if (res != OK) return res;
} else {
- res = deleteEntry(mCaptureRequest,
- ANDROID_JPEG_GPS_COORDINATES);
+ res = mCaptureRequest.erase(ANDROID_JPEG_GPS_COORDINATES);
if (res != OK) return res;
- res = deleteEntry(mCaptureRequest,
- ANDROID_JPEG_GPS_TIMESTAMP);
+ res = mCaptureRequest.erase(ANDROID_JPEG_GPS_TIMESTAMP);
if (res != OK) return res;
- res = deleteEntry(mCaptureRequest,
- ANDROID_JPEG_GPS_PROCESSING_METHOD);
+ res = mCaptureRequest.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
if (res != OK) return res;
}
@@ -3538,7 +3561,7 @@
status_t Camera2Client::updateRecordingRequest(const Parameters ¶ms) {
ATRACE_CALL();
status_t res;
- if (mRecordingRequest == NULL) {
+ if (mRecordingRequest.entryCount() == 0) {
res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_VIDEO_RECORD,
&mRecordingRequest);
if (res != OK) {
@@ -3548,7 +3571,7 @@
}
}
- res = updateRequestCommon(mRecordingRequest, params);
+ res = updateRequestCommon(&mRecordingRequest, params);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to update common entries of recording "
"request: %s (%d)", __FUNCTION__, mCameraId,
@@ -3616,36 +3639,34 @@
return OK;
}
-status_t Camera2Client::updateRequestCommon(camera_metadata_t *request,
+status_t Camera2Client::updateRequestCommon(CameraMetadata *request,
const Parameters ¶ms) {
ATRACE_CALL();
status_t res;
- res = updateEntry(request,
- ANDROID_CONTROL_AE_TARGET_FPS_RANGE, params.previewFpsRange, 2);
+ res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ params.previewFpsRange, 2);
if (res != OK) return res;
uint8_t wbMode = params.autoWhiteBalanceLock ?
- ANDROID_CONTROL_AWB_LOCKED : params.wbMode;
- res = updateEntry(request,
- ANDROID_CONTROL_AWB_MODE, &wbMode, 1);
+ (uint8_t)ANDROID_CONTROL_AWB_LOCKED : params.wbMode;
+ res = request->update(ANDROID_CONTROL_AWB_MODE,
+ &wbMode, 1);
if (res != OK) return res;
- res = updateEntry(request,
- ANDROID_CONTROL_EFFECT_MODE, ¶ms.effectMode, 1);
+ res = request->update(ANDROID_CONTROL_EFFECT_MODE,
+ ¶ms.effectMode, 1);
if (res != OK) return res;
- res = updateEntry(request,
- ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ res = request->update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
¶ms.antibandingMode, 1);
if (res != OK) return res;
uint8_t controlMode =
(params.sceneMode == ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) ?
ANDROID_CONTROL_AUTO : ANDROID_CONTROL_USE_SCENE_MODE;
- res = updateEntry(request,
- ANDROID_CONTROL_MODE, &controlMode, 1);
+ res = request->update(ANDROID_CONTROL_MODE,
+ &controlMode, 1);
if (res != OK) return res;
if (controlMode == ANDROID_CONTROL_USE_SCENE_MODE) {
- res = updateEntry(request,
- ANDROID_CONTROL_SCENE_MODE,
+ res = request->update(ANDROID_CONTROL_SCENE_MODE,
¶ms.sceneMode, 1);
if (res != OK) return res;
}
@@ -3672,11 +3693,11 @@
}
if (params.autoExposureLock) aeMode = ANDROID_CONTROL_AE_LOCKED;
- res = updateEntry(request,
- ANDROID_FLASH_MODE, &flashMode, 1);
+ res = request->update(ANDROID_FLASH_MODE,
+ &flashMode, 1);
if (res != OK) return res;
- res = updateEntry(request,
- ANDROID_CONTROL_AE_MODE, &aeMode, 1);
+ res = request->update(ANDROID_CONTROL_AE_MODE,
+ &aeMode, 1);
if (res != OK) return res;
float focusDistance = 0; // infinity focus in diopters
@@ -3698,54 +3719,71 @@
mCameraId, params.focusMode);
return BAD_VALUE;
}
- res = updateEntry(request,
- ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
+ res = request->update(ANDROID_LENS_FOCUS_DISTANCE,
+ &focusDistance, 1);
if (res != OK) return res;
- res = updateEntry(request,
- ANDROID_CONTROL_AF_MODE, &focusMode, 1);
+ res = request->update(ANDROID_CONTROL_AF_MODE,
+ &focusMode, 1);
if (res != OK) return res;
size_t focusingAreasSize = params.focusingAreas.size() * 5;
int32_t *focusingAreas = new int32_t[focusingAreasSize];
for (size_t i = 0; i < focusingAreasSize; i += 5) {
- focusingAreas[i + 0] = params.focusingAreas[i].left;
- focusingAreas[i + 1] = params.focusingAreas[i].top;
- focusingAreas[i + 2] = params.focusingAreas[i].right;
- focusingAreas[i + 3] = params.focusingAreas[i].bottom;
+ if (params.focusingAreas[i].weight != 0) {
+ focusingAreas[i + 0] = normalizedXToArray(params.focusingAreas[i].left);
+ focusingAreas[i + 1] = normalizedYToArray(params.focusingAreas[i].top);
+ focusingAreas[i + 2] = normalizedXToArray(params.focusingAreas[i].right);
+ focusingAreas[i + 3] = normalizedYToArray(params.focusingAreas[i].bottom);
+ } else {
+ focusingAreas[i + 0] = 0;
+ focusingAreas[i + 1] = 0;
+ focusingAreas[i + 2] = 0;
+ focusingAreas[i + 3] = 0;
+ }
focusingAreas[i + 4] = params.focusingAreas[i].weight;
}
- res = updateEntry(request,
- ANDROID_CONTROL_AF_REGIONS, focusingAreas,focusingAreasSize);
+ res = request->update(ANDROID_CONTROL_AF_REGIONS,
+ focusingAreas,focusingAreasSize);
if (res != OK) return res;
delete[] focusingAreas;
- res = updateEntry(request,
- ANDROID_CONTROL_AE_EXP_COMPENSATION,
+ res = request->update(ANDROID_CONTROL_AE_EXP_COMPENSATION,
¶ms.exposureCompensation, 1);
if (res != OK) return res;
size_t meteringAreasSize = params.meteringAreas.size() * 5;
int32_t *meteringAreas = new int32_t[meteringAreasSize];
for (size_t i = 0; i < meteringAreasSize; i += 5) {
- meteringAreas[i + 0] = params.meteringAreas[i].left;
- meteringAreas[i + 1] = params.meteringAreas[i].top;
- meteringAreas[i + 2] = params.meteringAreas[i].right;
- meteringAreas[i + 3] = params.meteringAreas[i].bottom;
+ if (params.meteringAreas[i].weight != 0) {
+ meteringAreas[i + 0] =
+ normalizedXToArray(params.meteringAreas[i].left);
+ meteringAreas[i + 1] =
+ normalizedYToArray(params.meteringAreas[i].top);
+ meteringAreas[i + 2] =
+ normalizedXToArray(params.meteringAreas[i].right);
+ meteringAreas[i + 3] =
+ normalizedYToArray(params.meteringAreas[i].bottom);
+ } else {
+ meteringAreas[i + 0] = 0;
+ meteringAreas[i + 1] = 0;
+ meteringAreas[i + 2] = 0;
+ meteringAreas[i + 3] = 0;
+ }
meteringAreas[i + 4] = params.meteringAreas[i].weight;
}
- res = updateEntry(request,
- ANDROID_CONTROL_AE_REGIONS, meteringAreas, meteringAreasSize);
+ res = request->update(ANDROID_CONTROL_AE_REGIONS,
+ meteringAreas, meteringAreasSize);
if (res != OK) return res;
- res = updateEntry(request,
- ANDROID_CONTROL_AWB_REGIONS, meteringAreas, meteringAreasSize);
+ res = request->update(ANDROID_CONTROL_AWB_REGIONS,
+ meteringAreas, meteringAreasSize);
if (res != OK) return res;
delete[] meteringAreas;
// Need to convert zoom index into a crop rectangle. The rectangle is
// chosen to maximize its area on the sensor
- camera_metadata_entry_t maxDigitalZoom =
+ camera_metadata_ro_entry_t maxDigitalZoom =
staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM);
float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) /
(NUM_ZOOM_STEPS-1);
@@ -3765,8 +3803,8 @@
zoomTop = (mDeviceInfo->arrayHeight - zoomHeight) / 2;
int32_t cropRegion[3] = { zoomLeft, zoomTop, zoomWidth };
- res = updateEntry(request,
- ANDROID_SCALER_CROP_REGION, cropRegion, 3);
+ res = request->update(ANDROID_SCALER_CROP_REGION,
+ cropRegion, 3);
if (res != OK) return res;
// TODO: Decide how to map recordingHint, or whether just to ignore it
@@ -3774,22 +3812,28 @@
uint8_t vstabMode = params.videoStabilization ?
ANDROID_CONTROL_VIDEO_STABILIZATION_ON :
ANDROID_CONTROL_VIDEO_STABILIZATION_OFF;
- res = updateEntry(request,
- ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
&vstabMode, 1);
if (res != OK) return res;
uint8_t faceDetectMode = params.enableFaceDetect ?
mDeviceInfo->bestFaceDetectMode :
(uint8_t)ANDROID_STATS_FACE_DETECTION_OFF;
- res = updateEntry(request,
- ANDROID_STATS_FACE_DETECT_MODE,
+ res = request->update(ANDROID_STATS_FACE_DETECT_MODE,
&faceDetectMode, 1);
if (res != OK) return res;
return OK;
}
+int Camera2Client::normalizedXToArray(int x) const {
+ return (x + 1000) * (mDeviceInfo->arrayWidth - 1) / 2000;
+}
+
+int Camera2Client::normalizedYToArray(int y) const {
+ return (y + 1000) * (mDeviceInfo->arrayHeight - 1) / 2000;
+}
+
int Camera2Client::arrayXToNormalized(int width) const {
return width * 2000 / (mDeviceInfo->arrayWidth - 1) - 1000;
}
@@ -3798,50 +3842,6 @@
return height * 2000 / (mDeviceInfo->arrayHeight - 1) - 1000;
}
-status_t Camera2Client::updateEntry(camera_metadata_t *buffer,
- uint32_t tag, const void *data, size_t data_count) {
- camera_metadata_entry_t entry;
- status_t res;
- res = find_camera_metadata_entry(buffer, tag, &entry);
- if (res == NAME_NOT_FOUND) {
- res = add_camera_metadata_entry(buffer,
- tag, data, data_count);
- } else if (res == OK) {
- res = update_camera_metadata_entry(buffer,
- entry.index, data, data_count, NULL);
- }
-
- if (res != OK) {
- ALOGE("%s: Unable to update metadata entry %s.%s (%x): %s (%d)",
- __FUNCTION__, get_camera_metadata_section_name(tag),
- get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
- }
- return res;
-}
-
-status_t Camera2Client::deleteEntry(camera_metadata_t *buffer, uint32_t tag) {
- camera_metadata_entry_t entry;
- status_t res;
- res = find_camera_metadata_entry(buffer, tag, &entry);
- if (res == NAME_NOT_FOUND) {
- return OK;
- } else if (res != OK) {
- ALOGE("%s: Error looking for entry %s.%s (%x): %s %d",
- __FUNCTION__,
- get_camera_metadata_section_name(tag),
- get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
- return res;
- }
- res = delete_camera_metadata_entry(buffer, entry.index);
- if (res != OK) {
- ALOGE("%s: Error deleting entry %s.%s (%x): %s %d",
- __FUNCTION__,
- get_camera_metadata_section_name(tag),
- get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
- }
- return res;
-}
-
int Camera2Client::formatStringToEnum(const char *format) {
return
!strcmp(format, CameraParameters::PIXEL_FORMAT_YUV422SP) ?
@@ -4125,7 +4125,7 @@
return width * height * 2;
case HAL_PIXEL_FORMAT_YV12: { // YV12
size_t ySize = stride * height;
- size_t uvStride = (stride / 2 + 0xF) & ~0x10;
+ size_t uvStride = (stride / 2 + 0xF) & ~0xF;
size_t uvSize = uvStride * height / 2;
return ySize + uvSize * 2;
}
diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h
index 028d458..a3ab128 100644
--- a/services/camera/libcameraservice/Camera2Client.h
+++ b/services/camera/libcameraservice/Camera2Client.h
@@ -33,8 +33,7 @@
*/
class Camera2Client :
public CameraService::Client,
- public Camera2Device::NotificationListener,
- public Camera2Device::FrameListener
+ public Camera2Device::NotificationListener
{
public:
// ICamera interface (see ICamera for details)
@@ -83,7 +82,6 @@
virtual void notifyAutoExposure(uint8_t newState, int triggerId);
virtual void notifyAutoWhitebalance(uint8_t newState, int triggerId);
- virtual void onNewFrameAvailable();
private:
enum State {
DISCONNECTED,
@@ -153,10 +151,10 @@
int64_t gpsTimestamp;
String8 gpsProcessingMethod;
- int wbMode;
- int effectMode;
- int antibandingMode;
- int sceneMode;
+ uint8_t wbMode;
+ uint8_t effectMode;
+ uint8_t antibandingMode;
+ uint8_t sceneMode;
enum flashMode_t {
FLASH_MODE_OFF = 0,
@@ -298,14 +296,34 @@
// Used with stream IDs
static const int NO_STREAM = -1;
- /* Output frame metadata processing methods */
+ /* Output frame metadata processing thread. This thread waits for new
+ * frames from the device, and analyzes them as necessary.
+ */
+ class FrameProcessor: public Thread {
+ public:
+ FrameProcessor(wp<Camera2Client> client);
+ ~FrameProcessor();
- status_t processFrameFaceDetect(camera_metadata_t *frame);
+ void dump(int fd, const Vector<String16>& args);
+ private:
+ static const nsecs_t kWaitDuration = 10000000; // 10 ms
+ wp<Camera2Client> mClient;
+
+ virtual bool threadLoop();
+
+ void processNewFrames(sp<Camera2Client> &client);
+ status_t processFaceDetect(const CameraMetadata &frame,
+ sp<Camera2Client> &client);
+
+ CameraMetadata mLastFrame;
+ };
+
+ sp<FrameProcessor> mFrameProcessor;
/* Preview related members */
int mPreviewStreamId;
- camera_metadata_t *mPreviewRequest;
+ CameraMetadata mPreviewRequest;
sp<IBinder> mPreviewSurface;
sp<ANativeWindow> mPreviewWindow;
@@ -351,7 +369,7 @@
Camera2Client *mParent;
};
sp<CaptureWaiter> mCaptureWaiter;
- camera_metadata_t *mCaptureRequest;
+ CameraMetadata mCaptureRequest;
sp<Camera2Heap> mCaptureHeap;
// Handle captured image buffers
void onCaptureAvailable();
@@ -375,7 +393,7 @@
Camera2Client *mParent;
};
sp<RecordingWaiter> mRecordingWaiter;
- camera_metadata_t *mRecordingRequest;
+ CameraMetadata mRecordingRequest;
sp<Camera2Heap> mRecordingHeap;
static const size_t kDefaultRecordingHeapCount = 8;
@@ -431,8 +449,8 @@
// checking the number of values in the entry. 0 for max/minCount means to
// do no bounds check in that direction. In case of error, the entry data
// pointer is null and the count is 0.
- camera_metadata_entry_t staticInfo(uint32_t tag,
- size_t minCount=0, size_t maxCount=0);
+ camera_metadata_ro_entry_t staticInfo(uint32_t tag,
+ size_t minCount=0, size_t maxCount=0) const;
// Extract frequently-used camera static information into mDeviceInfo
status_t buildDeviceInfo();
@@ -441,24 +459,16 @@
status_t buildDefaultParameters();
// Update parameters all requests use, based on mParameters
- status_t updateRequestCommon(camera_metadata_t *request, const Parameters ¶ms);
+ status_t updateRequestCommon(CameraMetadata *request, const Parameters ¶ms);
// Map from sensor active array pixel coordinates to normalized camera
// parameter coordinates. The former are (0,0)-(array width - 1, array height
// - 1), the latter from (-1000,-1000)-(1000,1000)
+ int normalizedXToArray(int x) const;
+ int normalizedYToArray(int y) const;
int arrayXToNormalized(int width) const;
int arrayYToNormalized(int height) const;
- // Update specific metadata entry with new values. Adds entry if it does not
- // exist, which will invalidate sorting
- static status_t updateEntry(camera_metadata_t *buffer,
- uint32_t tag, const void *data, size_t data_count);
-
- // Remove metadata entry. Will invalidate sorting. If entry does not exist,
- // does nothing.
- static status_t deleteEntry(camera_metadata_t *buffer,
- uint32_t tag);
-
// Convert camera1 preview format string to camera2 enum
static int formatStringToEnum(const char *format);
static const char *formatEnumToString(int format);
diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp
index 583701d..35c4e74 100644
--- a/services/camera/libcameraservice/Camera2Device.cpp
+++ b/services/camera/libcameraservice/Camera2Device.cpp
@@ -33,12 +33,12 @@
mId(id),
mDevice(NULL)
{
- ALOGV("%s: E", __FUNCTION__);
+ ALOGV("%s: Created device for camera %d", __FUNCTION__, id);
}
Camera2Device::~Camera2Device()
{
- ALOGV("%s: E", __FUNCTION__);
+ ALOGV("%s: Shutting down device for camera %d", __FUNCTION__, mId);
if (mDevice) {
status_t res;
res = mDevice->common.close(&mDevice->common);
@@ -49,11 +49,12 @@
}
mDevice = NULL;
}
+ ALOGV("%s: Shutdown complete", __FUNCTION__);
}
status_t Camera2Device::initialize(camera_module_t *module)
{
- ALOGV("%s: E", __FUNCTION__);
+ ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId);
status_t res;
char name[10];
@@ -155,25 +156,28 @@
return res;
}
-camera_metadata_t *Camera2Device::info() {
+const CameraMetadata& Camera2Device::info() const {
ALOGVV("%s: E", __FUNCTION__);
return mDeviceInfo;
}
-status_t Camera2Device::capture(camera_metadata_t* request) {
+status_t Camera2Device::capture(CameraMetadata &request) {
ALOGV("%s: E", __FUNCTION__);
- mRequestQueue.enqueue(request);
+ mRequestQueue.enqueue(request.release());
return OK;
}
-status_t Camera2Device::setStreamingRequest(camera_metadata_t* request) {
+status_t Camera2Device::setStreamingRequest(const CameraMetadata &request) {
ALOGV("%s: E", __FUNCTION__);
+ CameraMetadata streamRequest(request);
+ return mRequestQueue.setStreamSlot(streamRequest.release());
+}
- mRequestQueue.setStreamSlot(request);
- return OK;
+status_t Camera2Device::clearStreamingRequest() {
+ return mRequestQueue.setStreamSlot(NULL);
}
status_t Camera2Device::createStream(sp<ANativeWindow> consumer,
@@ -269,10 +273,14 @@
}
status_t Camera2Device::createDefaultRequest(int templateId,
- camera_metadata_t **request) {
+ CameraMetadata *request) {
+ status_t err;
ALOGV("%s: E", __FUNCTION__);
- return mDevice->ops->construct_default_request(
- mDevice, templateId, request);
+ camera_metadata_t *rawRequest;
+ err = mDevice->ops->construct_default_request(
+ mDevice, templateId, &rawRequest);
+ request->acquire(rawRequest);
+ return err;
}
status_t Camera2Device::waitUntilDrained() {
@@ -340,12 +348,20 @@
}
}
-status_t Camera2Device::setFrameListener(FrameListener *listener) {
- return mFrameQueue.setListener(listener);
+status_t Camera2Device::waitForNextFrame(nsecs_t timeout) {
+ return mFrameQueue.waitForBuffer(timeout);
}
-status_t Camera2Device::getNextFrame(camera_metadata_t **frame) {
- return mFrameQueue.dequeue(frame);
+status_t Camera2Device::getNextFrame(CameraMetadata *frame) {
+ status_t res;
+ camera_metadata_t *rawFrame;
+ res = mFrameQueue.dequeue(&rawFrame);
+ if (rawFrame == NULL) {
+ return NOT_ENOUGH_DATA;
+ } else if (res == OK) {
+ frame->acquire(rawFrame);
+ }
+ return res;
}
status_t Camera2Device::triggerAutofocus(uint32_t id) {
@@ -392,13 +408,6 @@
}
/**
- * Camera2Device::FrameListener
- */
-
-Camera2Device::FrameListener::~FrameListener() {
-}
-
-/**
* Camera2Device::MetadataQueue
*/
@@ -407,8 +416,7 @@
mFrameCount(0),
mCount(0),
mStreamSlotCount(0),
- mSignalConsumer(true),
- mListener(NULL)
+ mSignalConsumer(true)
{
camera2_request_queue_src_ops::dequeue_request = consumer_dequeue;
camera2_request_queue_src_ops::request_count = consumer_buffer_count;
@@ -526,12 +534,6 @@
return OK;
}
-status_t Camera2Device::MetadataQueue::setListener(FrameListener *listener) {
- Mutex::Autolock l(mMutex);
- mListener = listener;
- return OK;
-}
-
status_t Camera2Device::MetadataQueue::setStreamSlot(camera_metadata_t *buf)
{
ALOGV("%s: E", __FUNCTION__);
@@ -644,13 +646,6 @@
res = mDevice->ops->notify_request_queue_not_empty(mDevice);
mMutex.lock();
}
- if (mListener != NULL) {
- FrameListener *listener = mListener;
- mMutex.unlock();
- ALOGVV("%s: Signaling listener", __FUNCTION__);
- listener->onNewFrameAvailable();
- mMutex.lock();
- }
return res;
}
diff --git a/services/camera/libcameraservice/Camera2Device.h b/services/camera/libcameraservice/Camera2Device.h
index 790b946..223d77a 100644
--- a/services/camera/libcameraservice/Camera2Device.h
+++ b/services/camera/libcameraservice/Camera2Device.h
@@ -27,6 +27,7 @@
#include <utils/Vector.h>
#include "hardware/camera2.h"
+#include "CameraMetadata.h"
namespace android {
@@ -41,21 +42,26 @@
status_t dump(int fd, const Vector<String16>& args);
/**
- * Get a pointer to the device's static characteristics metadata buffer
+ * The device's static characteristics metadata buffer
*/
- camera_metadata_t* info();
+ const CameraMetadata& info() const;
/**
* Submit request for capture. The Camera2Device takes ownership of the
* passed-in buffer.
*/
- status_t capture(camera_metadata_t *request);
+ status_t capture(CameraMetadata &request);
/**
* Submit request for streaming. The Camera2Device makes a copy of the
* passed-in buffer and the caller retains ownership.
*/
- status_t setStreamingRequest(camera_metadata_t *request);
+ status_t setStreamingRequest(const CameraMetadata &request);
+
+ /**
+ * Clear the streaming request slot.
+ */
+ status_t clearStreamingRequest();
/**
* Create an output stream of the requested size and format.
@@ -92,8 +98,7 @@
* Create a metadata buffer with fields that the HAL device believes are
* best for the given use case
*/
- status_t createDefaultRequest(int templateId,
- camera_metadata_t **request);
+ status_t createDefaultRequest(int templateId, CameraMetadata *request);
/**
* Wait until all requests have been processed. Returns INVALID_OPERATION if
@@ -124,25 +129,16 @@
status_t setNotifyCallback(NotificationListener *listener);
/**
- * Abstract class for HAL frame available notifications
+ * Wait for a new frame to be produced, with timeout in nanoseconds.
+ * Returns TIMED_OUT when no frame produced within the specified duration
*/
- class FrameListener {
- public:
- virtual void onNewFrameAvailable() = 0;
- protected:
- virtual ~FrameListener();
- };
-
- /**
- * Set a frame listener to be notified about new frames.
- */
- status_t setFrameListener(FrameListener *listener);
+ status_t waitForNextFrame(nsecs_t timeout);
/**
* Get next metadata frame from the frame queue. Returns NULL if the queue
* is empty; caller takes ownership of the metadata buffer.
*/
- status_t getNextFrame(camera_metadata_t **frame);
+ status_t getNextFrame(CameraMetadata *frame);
/**
* Trigger auto-focus. The latest ID used in a trigger autofocus or cancel
@@ -170,7 +166,7 @@
const int mId;
camera2_device_t *mDevice;
- camera_metadata_t *mDeviceInfo;
+ CameraMetadata mDeviceInfo;
vendor_tag_query_ops_t *mVendorTagOps;
/**
@@ -201,7 +197,6 @@
status_t dequeue(camera_metadata_t **buf, bool incrementCount = true);
int getBufferCount();
status_t waitForBuffer(nsecs_t timeout);
- status_t setListener(FrameListener *listener);
// Set repeating buffer(s); if the queue is empty on a dequeue call, the
// queue copies the contents of the stream slot into the queue, and then
@@ -230,7 +225,6 @@
List<camera_metadata_t*> mStreamSlot;
bool mSignalConsumer;
- FrameListener *mListener;
static MetadataQueue* getInstance(
const camera2_frame_queue_dst_ops_t *q);
diff --git a/services/camera/libcameraservice/CameraMetadata.cpp b/services/camera/libcameraservice/CameraMetadata.cpp
new file mode 100644
index 0000000..b402115
--- /dev/null
+++ b/services/camera/libcameraservice/CameraMetadata.cpp
@@ -0,0 +1,290 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraMetadata"
+#include <utils/Log.h>
+#include <utils/Errors.h>
+
+#include "CameraMetadata.h"
+
+namespace android {
+
+CameraMetadata::CameraMetadata() :
+ mBuffer(NULL) {
+}
+
+CameraMetadata::CameraMetadata(size_t entryCapacity, size_t dataCapacity)
+{
+ mBuffer = allocate_camera_metadata(entryCapacity, dataCapacity);
+}
+
+CameraMetadata::CameraMetadata(const CameraMetadata &other) {
+ mBuffer = clone_camera_metadata(other.mBuffer);
+}
+
+CameraMetadata &CameraMetadata::operator=(const CameraMetadata &other) {
+ return operator=(other.mBuffer);
+}
+
+CameraMetadata &CameraMetadata::operator=(const camera_metadata_t *buffer) {
+ if (CC_LIKELY(buffer != mBuffer)) {
+ camera_metadata_t *newBuffer = clone_camera_metadata(buffer);
+ clear();
+ mBuffer = newBuffer;
+ }
+ return *this;
+}
+
+CameraMetadata::~CameraMetadata() {
+ clear();
+}
+
+camera_metadata_t* CameraMetadata::release() {
+ camera_metadata_t *released = mBuffer;
+ mBuffer = NULL;
+ return released;
+}
+
+void CameraMetadata::clear() {
+ if (mBuffer) {
+ free_camera_metadata(mBuffer);
+ mBuffer = NULL;
+ }
+}
+
+void CameraMetadata::acquire(camera_metadata_t *buffer) {
+ clear();
+ mBuffer = buffer;
+}
+
+void CameraMetadata::acquire(CameraMetadata &other) {
+ acquire(other.release());
+}
+
+status_t CameraMetadata::append(const CameraMetadata &other) {
+ return append_camera_metadata(mBuffer, other.mBuffer);
+}
+
+size_t CameraMetadata::entryCount() const {
+ return (mBuffer == NULL) ? 0 :
+ get_camera_metadata_entry_count(mBuffer);
+}
+
+status_t CameraMetadata::sort() {
+ return sort_camera_metadata(mBuffer);
+}
+
+status_t CameraMetadata::checkType(uint32_t tag, uint8_t expectedType) {
+ int tagType = get_camera_metadata_tag_type(tag);
+ if ( CC_UNLIKELY(tagType == -1)) {
+ ALOGE("Update metadata entry: Unknown tag %d", tag);
+ return INVALID_OPERATION;
+ }
+ if ( CC_UNLIKELY(tagType != expectedType) ) {
+ ALOGE("Mismatched tag type when updating entry %s (%d) of type %s; "
+ "got type %s data instead ",
+ get_camera_metadata_tag_name(tag), tag,
+ camera_metadata_type_names[tagType],
+ camera_metadata_type_names[expectedType]);
+ return INVALID_OPERATION;
+ }
+ return OK;
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+ const int32_t *data, size_t data_count) {
+ status_t res;
+ if ( (res = checkType(tag, TYPE_INT32)) != OK) {
+ return res;
+ }
+ return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+ const uint8_t *data, size_t data_count) {
+ status_t res;
+ if ( (res = checkType(tag, TYPE_BYTE)) != OK) {
+ return res;
+ }
+ return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+ const float *data, size_t data_count) {
+ status_t res;
+ if ( (res = checkType(tag, TYPE_FLOAT)) != OK) {
+ return res;
+ }
+ return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+ const int64_t *data, size_t data_count) {
+ status_t res;
+ if ( (res = checkType(tag, TYPE_INT64)) != OK) {
+ return res;
+ }
+ return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+ const double *data, size_t data_count) {
+ status_t res;
+ if ( (res = checkType(tag, TYPE_DOUBLE)) != OK) {
+ return res;
+ }
+ return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+ const camera_metadata_rational_t *data, size_t data_count) {
+ status_t res;
+ if ( (res = checkType(tag, TYPE_RATIONAL)) != OK) {
+ return res;
+ }
+ return update(tag, (const void*)data, data_count);
+}
+
+status_t CameraMetadata::update(uint32_t tag,
+ const String8 &string) {
+ status_t res;
+ if ( (res = checkType(tag, TYPE_BYTE)) != OK) {
+ return res;
+ }
+ return update(tag, (const void*)string.string(), string.size());
+}
+
+status_t CameraMetadata::update(uint32_t tag, const void *data,
+ size_t data_count) {
+ status_t res;
+ int type = get_camera_metadata_tag_type(tag);
+ if (type == -1) {
+ ALOGE("%s: Tag %d not found", __FUNCTION__, tag);
+ return BAD_VALUE;
+ }
+ size_t data_size = calculate_camera_metadata_entry_data_size(type,
+ data_count);
+
+ res = resizeIfNeeded(1, data_size);
+
+ if (res == OK) {
+ camera_metadata_entry_t entry;
+ res = find_camera_metadata_entry(mBuffer, tag, &entry);
+ if (res == NAME_NOT_FOUND) {
+ res = add_camera_metadata_entry(mBuffer,
+ tag, data, data_count);
+ } else if (res == OK) {
+ res = update_camera_metadata_entry(mBuffer,
+ entry.index, data, data_count, NULL);
+ }
+ }
+
+ if (res != OK) {
+ ALOGE("%s: Unable to update metadata entry %s.%s (%x): %s (%d)",
+ __FUNCTION__, get_camera_metadata_section_name(tag),
+ get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
+ }
+ return res;
+}
+
+camera_metadata_entry_t CameraMetadata::find(uint32_t tag) {
+ status_t res;
+ camera_metadata_entry entry;
+ res = find_camera_metadata_entry(mBuffer, tag, &entry);
+ if (CC_UNLIKELY( res != OK )) {
+ entry.count = 0;
+ entry.data.u8 = NULL;
+ }
+ return entry;
+}
+
+camera_metadata_ro_entry_t CameraMetadata::find(uint32_t tag) const {
+ status_t res;
+ camera_metadata_ro_entry entry;
+ res = find_camera_metadata_ro_entry(mBuffer, tag, &entry);
+ if (CC_UNLIKELY( res != OK )) {
+ entry.count = 0;
+ entry.data.u8 = NULL;
+ }
+ return entry;
+}
+
+status_t CameraMetadata::erase(uint32_t tag) {
+ camera_metadata_entry_t entry;
+ status_t res;
+ res = find_camera_metadata_entry(mBuffer, tag, &entry);
+ if (res == NAME_NOT_FOUND) {
+ return OK;
+ } else if (res != OK) {
+ ALOGE("%s: Error looking for entry %s.%s (%x): %s %d",
+ __FUNCTION__,
+ get_camera_metadata_section_name(tag),
+ get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
+ return res;
+ }
+ res = delete_camera_metadata_entry(mBuffer, entry.index);
+ if (res != OK) {
+ ALOGE("%s: Error deleting entry %s.%s (%x): %s %d",
+ __FUNCTION__,
+ get_camera_metadata_section_name(tag),
+ get_camera_metadata_tag_name(tag), tag, strerror(-res), res);
+ }
+ return res;
+}
+
+void CameraMetadata::dump(int fd, int verbosity, int indentation) const {
+ dump_indented_camera_metadata(mBuffer, fd, verbosity, indentation);
+}
+
+status_t CameraMetadata::resizeIfNeeded(size_t extraEntries, size_t extraData) {
+ if (mBuffer == NULL) {
+ mBuffer = allocate_camera_metadata(extraEntries * 2, extraData * 2);
+ if (mBuffer == NULL) {
+ ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__);
+ return NO_MEMORY;
+ }
+ } else {
+ size_t currentEntryCount = get_camera_metadata_entry_count(mBuffer);
+ size_t currentEntryCap = get_camera_metadata_entry_capacity(mBuffer);
+ size_t newEntryCount = currentEntryCount +
+ extraEntries;
+ newEntryCount = (newEntryCount > currentEntryCap) ?
+ newEntryCount * 2 : currentEntryCap;
+
+ size_t currentDataCount = get_camera_metadata_data_count(mBuffer);
+ size_t currentDataCap = get_camera_metadata_data_capacity(mBuffer);
+ size_t newDataCount = currentDataCount +
+ extraData;
+ newDataCount = (newDataCount > currentDataCap) ?
+ newDataCount * 2 : currentDataCap;
+
+ if (newEntryCount > currentEntryCap ||
+ newDataCount > currentDataCap) {
+ camera_metadata_t *oldBuffer = mBuffer;
+ mBuffer = allocate_camera_metadata(newEntryCount,
+ newDataCount);
+ if (mBuffer == NULL) {
+ ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__);
+ return NO_MEMORY;
+ }
+ append_camera_metadata(mBuffer, oldBuffer);
+ free_camera_metadata(oldBuffer);
+ }
+ }
+ return OK;
+}
+
+}; // namespace android
diff --git a/services/camera/libcameraservice/CameraMetadata.h b/services/camera/libcameraservice/CameraMetadata.h
new file mode 100644
index 0000000..afb8318
--- /dev/null
+++ b/services/camera/libcameraservice/CameraMetadata.h
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_METADATA_CPP
+#define ANDROID_SERVERS_CAMERA_METADATA_CPP
+
+#include "system/camera_metadata.h"
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+/**
+ * A convenience wrapper around the C-based camera_metadata_t library.
+ */
+class CameraMetadata {
+ public:
+ /** Creates an empty object; best used when expecting to acquire contents
+ * from elsewhere */
+ CameraMetadata();
+ /** Creates an object with space for entryCapacity entries, with
+ * dataCapacity extra storage */
+ CameraMetadata(size_t entryCapacity, size_t dataCapacity = 10);
+
+ ~CameraMetadata();
+
+ /** Takes ownership of passed-in buffer */
+ CameraMetadata(camera_metadata_t *buffer);
+ /** Clones the metadata */
+ CameraMetadata(const CameraMetadata &other);
+
+ /**
+ * Assignment clones metadata buffer.
+ */
+ CameraMetadata &operator=(const CameraMetadata &other);
+ CameraMetadata &operator=(const camera_metadata_t *buffer);
+
+ /**
+ * Release a raw metadata buffer to the caller. After this call,
+ * CameraMetadata no longer references the buffer, and the caller takes
+ * responsibility for freeing the raw metadata buffer (using
+ * free_camera_metadata()), or for handing it to another CameraMetadata
+ * instance.
+ */
+ camera_metadata_t* release();
+
+ /**
+ * Clear the metadata buffer and free all storage used by it
+ */
+ void clear();
+
+ /**
+ * Acquire a raw metadata buffer from the caller. After this call,
+ * the caller no longer owns the raw buffer, and must not free or manipulate it.
+ * If CameraMetadata already contains metadata, it is freed.
+ */
+ void acquire(camera_metadata_t* buffer);
+
+ /**
+ * Acquires raw buffer from other CameraMetadata object. After the call, the argument
+ * object no longer has any metadata.
+ */
+ void acquire(CameraMetadata &other);
+
+ /**
+ * Append metadata from another CameraMetadata object.
+ */
+ status_t append(const CameraMetadata &other);
+
+ /**
+ * Number of metadata entries.
+ */
+ size_t entryCount() const;
+
+ /**
+ * Sort metadata buffer for faster find
+ */
+ status_t sort();
+
+ /**
+ * Update metadata entry. Will create entry if it doesn't exist already, and
+ * will reallocate the buffer if insufficient space exists. Overloaded for
+ * the various types of valid data.
+ */
+ status_t update(uint32_t tag,
+ const uint8_t *data, size_t data_count);
+ status_t update(uint32_t tag,
+ const int32_t *data, size_t data_count);
+ status_t update(uint32_t tag,
+ const float *data, size_t data_count);
+ status_t update(uint32_t tag,
+ const int64_t *data, size_t data_count);
+ status_t update(uint32_t tag,
+ const double *data, size_t data_count);
+ status_t update(uint32_t tag,
+ const camera_metadata_rational_t *data, size_t data_count);
+ status_t update(uint32_t tag,
+ const String8 &string);
+
+ template<typename T>
+ status_t update(uint32_t tag, Vector<T> data) {
+ return update(tag, data.array(), data.size());
+ }
+
+ /**
+ * Get metadata entry by tag id
+ */
+ camera_metadata_entry find(uint32_t tag);
+
+ /**
+ * Get metadata entry by tag id, with no editing
+ */
+ camera_metadata_ro_entry find(uint32_t tag) const;
+
+ /**
+ * Delete metadata entry by tag
+ */
+ status_t erase(uint32_t tag);
+
+ /**
+ * Dump contents into FD for debugging. The verbosity levels are
+ * 0: Tag entry information only, no data values
+ * 1: Level 0 plus at most 16 data values per entry
+ * 2: All information
+ *
+ * The indentation parameter sets the number of spaces to add to the start
+ * of each line of output.
+ */
+ void dump(int fd, int verbosity = 1, int indentation = 0) const;
+
+ private:
+ camera_metadata_t *mBuffer;
+
+ /**
+ * Check if tag has a given type
+ */
+ status_t checkType(uint32_t tag, uint8_t expectedType);
+
+ /**
+ * Base update entry method
+ */
+ status_t update(uint32_t tag, const void *data, size_t data_count);
+
+ /**
+ * Resize metadata buffer if needed by reallocating it and copying it over.
+ */
+ status_t resizeIfNeeded(size_t extraEntries, size_t extraData);
+
+};
+
+}; // namespace android
+
+#endif