Merge "Added support for MPEG2 video in MPEG4Extractor" into mnc-dev
diff --git a/camera/Android.mk b/camera/Android.mk
index 4c4700b..471cb0d 100644
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -28,6 +28,7 @@
ICameraClient.cpp \
ICameraService.cpp \
ICameraServiceListener.cpp \
+ ICameraServiceProxy.cpp \
ICameraRecordingProxy.cpp \
ICameraRecordingProxyListener.cpp \
camera2/ICameraDeviceUser.cpp \
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 84e0d1c..9bf3134 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -71,13 +71,14 @@
// deadlock if we call any method of ICamera here.
}
-sp<Camera> Camera::connect(int cameraId, const String16& opPackageName, int clientUid)
+sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
+ int clientUid)
{
- return CameraBaseT::connect(cameraId, opPackageName, clientUid);
+ return CameraBaseT::connect(cameraId, clientPackageName, clientUid);
}
status_t Camera::connectLegacy(int cameraId, int halVersion,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid,
sp<Camera>& camera)
{
@@ -88,7 +89,7 @@
const sp<ICameraService>& cs = CameraBaseT::getCameraService();
if (cs != 0) {
- status = cs.get()->connectLegacy(cl, cameraId, halVersion, opPackageName,
+ status = cs.get()->connectLegacy(cl, cameraId, halVersion, clientPackageName,
clientUid, /*out*/c->mCamera);
}
if (status == OK && c->mCamera != 0) {
@@ -96,7 +97,8 @@
c->mStatus = NO_ERROR;
camera = c;
} else {
- ALOGW("An error occurred while connecting to camera: %d", cameraId);
+ ALOGW("An error occurred while connecting to camera %d: %d (%s)",
+ cameraId, status, strerror(-status));
c.clear();
}
return status;
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 0dc0276..5d50aa8 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -91,7 +91,7 @@
template <typename TCam, typename TCamTraits>
sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid)
{
ALOGV("%s: connect", __FUNCTION__);
@@ -102,7 +102,7 @@
if (cs != 0) {
TCamConnectService fnConnectService = TCamTraits::fnConnectService;
- status = (cs.get()->*fnConnectService)(cl, cameraId, opPackageName, clientUid,
+ status = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
/*out*/ c->mCamera);
}
if (status == OK && c->mCamera != 0) {
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index e216d26..b96a88f 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -583,7 +583,7 @@
*/
WritableBlob blob;
do {
- res = data.writeBlob(blobSize, &blob);
+ res = data.writeBlob(blobSize, false, &blob);
if (res != OK) {
break;
}
diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp
index 192e40d..7bb24ee 100644
--- a/camera/ICameraService.cpp
+++ b/camera/ICameraService.cpp
@@ -20,6 +20,7 @@
#include <utils/Errors.h>
#include <utils/String16.h>
+#include <inttypes.h>
#include <stdint.h>
#include <sys/types.h>
@@ -164,7 +165,7 @@
// connect to camera service (android.hardware.Camera)
virtual status_t connect(const sp<ICameraClient>& cameraClient, int cameraId,
- const String16& opPackageName, int clientUid,
+ const String16 &clientPackageName, int clientUid,
/*out*/
sp<ICamera>& device)
{
@@ -172,12 +173,15 @@
data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
data.writeStrongBinder(IInterface::asBinder(cameraClient));
data.writeInt32(cameraId);
- data.writeString16(opPackageName);
+ data.writeString16(clientPackageName);
data.writeInt32(clientUid);
- remote()->transact(BnCameraService::CONNECT, data, &reply);
+
+ status_t status;
+ status = remote()->transact(BnCameraService::CONNECT, data, &reply);
+ if (status != OK) return status;
if (readExceptionCode(reply)) return -EPROTO;
- status_t status = reply.readInt32();
+ status = reply.readInt32();
if (reply.readInt32() != 0) {
device = interface_cast<ICamera>(reply.readStrongBinder());
}
@@ -187,7 +191,7 @@
// connect to camera service (android.hardware.Camera)
virtual status_t connectLegacy(const sp<ICameraClient>& cameraClient, int cameraId,
int halVersion,
- const String16& opPackageName, int clientUid,
+ const String16 &clientPackageName, int clientUid,
/*out*/sp<ICamera>& device)
{
Parcel data, reply;
@@ -195,12 +199,15 @@
data.writeStrongBinder(IInterface::asBinder(cameraClient));
data.writeInt32(cameraId);
data.writeInt32(halVersion);
- data.writeString16(opPackageName);
+ data.writeString16(clientPackageName);
data.writeInt32(clientUid);
- remote()->transact(BnCameraService::CONNECT_LEGACY, data, &reply);
+
+ status_t status;
+ status = remote()->transact(BnCameraService::CONNECT_LEGACY, data, &reply);
+ if (status != OK) return status;
if (readExceptionCode(reply)) return -EPROTO;
- status_t status = reply.readInt32();
+ status = reply.readInt32();
if (reply.readInt32() != 0) {
device = interface_cast<ICamera>(reply.readStrongBinder());
}
@@ -225,7 +232,7 @@
virtual status_t connectDevice(
const sp<ICameraDeviceCallbacks>& cameraCb,
int cameraId,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid,
/*out*/
sp<ICameraDeviceUser>& device)
@@ -234,12 +241,15 @@
data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
data.writeStrongBinder(IInterface::asBinder(cameraCb));
data.writeInt32(cameraId);
- data.writeString16(opPackageName);
+ data.writeString16(clientPackageName);
data.writeInt32(clientUid);
- remote()->transact(BnCameraService::CONNECT_DEVICE, data, &reply);
+
+ status_t status;
+ status = remote()->transact(BnCameraService::CONNECT_DEVICE, data, &reply);
+ if (status != OK) return status;
if (readExceptionCode(reply)) return -EPROTO;
- status_t status = reply.readInt32();
+ status = reply.readInt32();
if (reply.readInt32() != 0) {
device = interface_cast<ICameraDeviceUser>(reply.readStrongBinder());
}
@@ -303,10 +313,10 @@
return res;
}
- virtual void notifySystemEvent(int eventId, int arg0) {
+ virtual void notifySystemEvent(int32_t eventId, const int32_t* args, size_t len) {
Parcel data, reply;
data.writeInt32(eventId);
- data.writeInt32(arg0);
+ data.writeInt32Array(len, args);
remote()->transact(BnCameraService::NOTIFY_SYSTEM_EVENT, data, &reply,
IBinder::FLAG_ONEWAY);
}
@@ -374,11 +384,11 @@
sp<ICameraClient> cameraClient =
interface_cast<ICameraClient>(data.readStrongBinder());
int32_t cameraId = data.readInt32();
- const String16 opPackageName = data.readString16();
+ const String16 clientName = data.readString16();
int32_t clientUid = data.readInt32();
sp<ICamera> camera;
status_t status = connect(cameraClient, cameraId,
- opPackageName, clientUid, /*out*/camera);
+ clientName, clientUid, /*out*/camera);
reply->writeNoException();
reply->writeInt32(status);
if (camera != NULL) {
@@ -394,11 +404,11 @@
sp<ICameraDeviceCallbacks> cameraClient =
interface_cast<ICameraDeviceCallbacks>(data.readStrongBinder());
int32_t cameraId = data.readInt32();
- const String16 opPackageName = data.readString16();
+ const String16 clientName = data.readString16();
int32_t clientUid = data.readInt32();
sp<ICameraDeviceUser> camera;
status_t status = connectDevice(cameraClient, cameraId,
- opPackageName, clientUid, /*out*/camera);
+ clientName, clientUid, /*out*/camera);
reply->writeNoException();
reply->writeInt32(status);
if (camera != NULL) {
@@ -454,11 +464,11 @@
interface_cast<ICameraClient>(data.readStrongBinder());
int32_t cameraId = data.readInt32();
int32_t halVersion = data.readInt32();
- const String16 opPackageName = data.readString16();
+ const String16 clientName = data.readString16();
int32_t clientUid = data.readInt32();
sp<ICamera> camera;
status_t status = connectLegacy(cameraClient, cameraId, halVersion,
- opPackageName, clientUid, /*out*/camera);
+ clientName, clientUid, /*out*/camera);
reply->writeNoException();
reply->writeInt32(status);
if (camera != NULL) {
@@ -481,9 +491,27 @@
} break;
case NOTIFY_SYSTEM_EVENT: {
CHECK_INTERFACE(ICameraService, data, reply);
- int eventId = data.readInt32();
- int arg0 = data.readInt32();
- notifySystemEvent(eventId, arg0);
+ int32_t eventId = data.readInt32();
+ int32_t len = data.readInt32();
+ if (len < 0) {
+ ALOGE("%s: Received poorly formatted length in binder request: notifySystemEvent.",
+ __FUNCTION__);
+ return FAILED_TRANSACTION;
+ }
+ if (len > 512) {
+ ALOGE("%s: Length %" PRIi32 " too long in binder request: notifySystemEvent.",
+ __FUNCTION__, len);
+ return FAILED_TRANSACTION;
+ }
+ int32_t events[len];
+ memset(events, 0, sizeof(int32_t) * len);
+ status_t status = data.read(events, sizeof(int32_t) * len);
+ if (status != NO_ERROR) {
+ ALOGE("%s: Received poorly formatted binder request: notifySystemEvent.",
+ __FUNCTION__);
+ return FAILED_TRANSACTION;
+ }
+ notifySystemEvent(eventId, events, len);
return NO_ERROR;
} break;
default:
diff --git a/camera/ICameraServiceProxy.cpp b/camera/ICameraServiceProxy.cpp
new file mode 100644
index 0000000..06a5afb
--- /dev/null
+++ b/camera/ICameraServiceProxy.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BpCameraServiceProxy"
+
+#include <stdint.h>
+
+#include <binder/Parcel.h>
+
+#include <camera/ICameraServiceProxy.h>
+
+namespace android {
+
+class BpCameraServiceProxy: public BpInterface<ICameraServiceProxy> {
+public:
+ BpCameraServiceProxy(const sp<IBinder>& impl) : BpInterface<ICameraServiceProxy>(impl) {}
+
+ virtual void pingForUserUpdate() {
+ Parcel data, reply;
+ data.writeInterfaceToken(ICameraServiceProxy::getInterfaceDescriptor());
+ remote()->transact(BnCameraServiceProxy::PING_FOR_USER_UPDATE, data, &reply,
+ IBinder::FLAG_ONEWAY);
+ }
+};
+
+
+IMPLEMENT_META_INTERFACE(CameraServiceProxy, "android.hardware.ICameraServiceProxy");
+
+status_t BnCameraServiceProxy::onTransact(uint32_t code, const Parcel& data, Parcel* reply,
+ uint32_t flags) {
+ switch(code) {
+ case PING_FOR_USER_UPDATE: {
+ CHECK_INTERFACE(ICameraServiceProxy, data, reply);
+ pingForUserUpdate();
+ return NO_ERROR;
+ } break;
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+}; // namespace android
+
diff --git a/camera/camera2/ICameraDeviceUser.cpp b/camera/camera2/ICameraDeviceUser.cpp
index 9700258..a7549f2 100644
--- a/camera/camera2/ICameraDeviceUser.cpp
+++ b/camera/camera2/ICameraDeviceUser.cpp
@@ -190,11 +190,13 @@
return reply.readInt32();
}
- virtual status_t endConfigure()
+ virtual status_t endConfigure(bool isConstrainedHighSpeed)
{
ALOGV("endConfigure");
Parcel data, reply;
data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor());
+ data.writeInt32(isConstrainedHighSpeed);
+
remote()->transact(END_CONFIGURE, data, &reply);
reply.readExceptionCode();
return reply.readInt32();
@@ -556,8 +558,9 @@
} break;
case END_CONFIGURE: {
CHECK_INTERFACE(ICameraDeviceUser, data, reply);
+ bool isConstrainedHighSpeed = data.readInt32();
reply->writeNoException();
- reply->writeInt32(endConfigure());
+ reply->writeInt32(endConfigure(isConstrainedHighSpeed));
return NO_ERROR;
} break;
case PREPARE: {
diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp
index 0029aec..36fa3b5 100644
--- a/cmds/stagefright/muxer.cpp
+++ b/cmds/stagefright/muxer.cpp
@@ -142,8 +142,13 @@
CHECK_EQ(err, (status_t)OK);
ssize_t newTrackIndex = muxer->addTrack(format);
- CHECK_GE(newTrackIndex, 0);
- trackIndexMap.add(i, newTrackIndex);
+ if (newTrackIndex < 0) {
+ fprintf(stderr, "%s track (%zu) unsupported by muxer\n",
+ isAudio ? "audio" : "video",
+ i);
+ } else {
+ trackIndexMap.add(i, newTrackIndex);
+ }
}
int64_t muxerStartTimeUs = ALooper::GetNowUs();
@@ -162,7 +167,12 @@
ALOGV("saw input eos, err %d", err);
sawInputEOS = true;
break;
+ } else if (trackIndexMap.indexOfKey(trackIndex) < 0) {
+ // ALOGV("skipping input from unsupported track %zu", trackIndex);
+ extractor->advance();
+ continue;
} else {
+ // ALOGV("reading sample from track index %zu\n", trackIndex);
err = extractor->readSampleData(newBuffer);
CHECK_EQ(err, (status_t)OK);
diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c b/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c
index 9d15835..6a0b3c0 100644
--- a/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c
+++ b/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c
@@ -19,6 +19,7 @@
#include <fcntl.h>
#include <limits.h>
#include <pthread.h>
+#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <unistd.h>
diff --git a/drm/mediadrm/plugins/clearkey/CryptoFactory.cpp b/drm/mediadrm/plugins/clearkey/CryptoFactory.cpp
index ee3189b..eeb64c3 100644
--- a/drm/mediadrm/plugins/clearkey/CryptoFactory.cpp
+++ b/drm/mediadrm/plugins/clearkey/CryptoFactory.cpp
@@ -43,10 +43,18 @@
return android::BAD_VALUE;
}
- android::sp<Session> session = SessionLibrary::get()->findSession(
- data, size);
- *plugin = new CryptoPlugin(session);
- return android::OK;
+ android::Vector<uint8_t> sessionId;
+ sessionId.appendArray(reinterpret_cast<const uint8_t*>(data), size);
+
+ CryptoPlugin *clearKeyPlugin = new CryptoPlugin(sessionId);
+ android::status_t result = clearKeyPlugin->getInitStatus();
+ if (result == android::OK) {
+ *plugin = clearKeyPlugin;
+ } else {
+ delete clearKeyPlugin;
+ *plugin = NULL;
+ }
+ return result;
}
} // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/CryptoPlugin.cpp
index adad136..53cbf80 100644
--- a/drm/mediadrm/plugins/clearkey/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/CryptoPlugin.cpp
@@ -19,9 +19,9 @@
#include <utils/Log.h>
#include <media/stagefright/MediaErrors.h>
-#include <utils/Errors.h>
#include "CryptoPlugin.h"
+#include "SessionLibrary.h"
namespace clearkeydrm {
@@ -80,4 +80,18 @@
}
}
+android::status_t CryptoPlugin::setMediaDrmSession(
+ const android::Vector<uint8_t>& sessionId) {
+ if (!sessionId.size()) {
+ mSession.clear();
+ } else {
+ mSession = SessionLibrary::get()->findSession(sessionId);
+ if (!mSession.get()) {
+ return android::ERROR_DRM_SESSION_NOT_OPENED;
+ }
+ }
+ return android::OK;
+}
+
+
} // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/CryptoPlugin.h b/drm/mediadrm/plugins/clearkey/CryptoPlugin.h
index 002d9e0..fd38f28 100644
--- a/drm/mediadrm/plugins/clearkey/CryptoPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/CryptoPlugin.h
@@ -31,7 +31,10 @@
class CryptoPlugin : public android::CryptoPlugin {
public:
- CryptoPlugin(const android::sp<Session>& session) : mSession(session) {}
+ CryptoPlugin(const android::Vector<uint8_t>& sessionId) {
+ mInitStatus = setMediaDrmSession(sessionId);
+ }
+
virtual ~CryptoPlugin() {}
virtual bool requiresSecureDecoderComponent(const char* mime) const {
@@ -45,10 +48,16 @@
const SubSample* subSamples, size_t numSubSamples,
void* dstPtr, android::AString* errorDetailMsg);
+ virtual android::status_t setMediaDrmSession(
+ const android::Vector<uint8_t>& sessionId);
+
+ android::status_t getInitStatus() const {return mInitStatus;}
+
private:
DISALLOW_EVIL_CONSTRUCTORS(CryptoPlugin);
android::sp<Session> mSession;
+ android::status_t mInitStatus;
};
} // namespace clearkeydrm
diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
index 6b8c772..e5ee403 100644
--- a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
@@ -37,7 +37,9 @@
status_t DrmPlugin::closeSession(const Vector<uint8_t>& sessionId) {
sp<Session> session = mSessionLibrary->findSession(sessionId);
- mSessionLibrary->destroySession(session);
+ if (session.get()) {
+ mSessionLibrary->destroySession(session);
+ }
return android::OK;
}
@@ -55,8 +57,11 @@
return android::ERROR_DRM_CANNOT_HANDLE;
}
*keyRequestType = DrmPlugin::kKeyRequestType_Initial;
- sp<Session> session = mSessionLibrary->findSession(scope);
defaultUrl.clear();
+ sp<Session> session = mSessionLibrary->findSession(scope);
+ if (!session.get()) {
+ return android::ERROR_DRM_SESSION_NOT_OPENED;
+ }
return session->getKeyRequest(initData, initDataType, &request);
}
@@ -65,6 +70,9 @@
const Vector<uint8_t>& response,
Vector<uint8_t>& keySetId) {
sp<Session> session = mSessionLibrary->findSession(scope);
+ if (!session.get()) {
+ return android::ERROR_DRM_SESSION_NOT_OPENED;
+ }
status_t res = session->provideKeyResponse(response);
if (res == android::OK) {
keySetId.clear();
diff --git a/drm/mediadrm/plugins/clearkey/SessionLibrary.cpp b/drm/mediadrm/plugins/clearkey/SessionLibrary.cpp
index d047c53..46d7f77 100644
--- a/drm/mediadrm/plugins/clearkey/SessionLibrary.cpp
+++ b/drm/mediadrm/plugins/clearkey/SessionLibrary.cpp
@@ -63,13 +63,6 @@
return mSessions.valueFor(sessionId);
}
-const sp<Session>& SessionLibrary::findSession(
- const void* data, size_t size) {
- Vector<uint8_t> sessionId;
- sessionId.appendArray(reinterpret_cast<const uint8_t*>(data), size);
- return findSession(sessionId);
-}
-
void SessionLibrary::destroySession(const sp<Session>& session) {
Mutex::Autolock lock(mSessionsLock);\
mSessions.removeItem(session->sessionId());
diff --git a/drm/mediadrm/plugins/clearkey/SessionLibrary.h b/drm/mediadrm/plugins/clearkey/SessionLibrary.h
index 56c8828..199ad64 100644
--- a/drm/mediadrm/plugins/clearkey/SessionLibrary.h
+++ b/drm/mediadrm/plugins/clearkey/SessionLibrary.h
@@ -36,8 +36,6 @@
const android::sp<Session>& findSession(
const android::Vector<uint8_t>& sessionId);
- const android::sp<Session>& findSession(const void* data, size_t size);
-
void destroySession(const android::sp<Session>& session);
private:
@@ -50,7 +48,7 @@
android::Mutex mSessionsLock;
uint32_t mNextSessionId;
- android::KeyedVector<android::Vector<uint8_t>, android::sp<Session> >
+ android::DefaultKeyedVector<android::Vector<uint8_t>, android::sp<Session> >
mSessions;
};
diff --git a/include/camera/Camera.h b/include/camera/Camera.h
index 25d75f7..2b60842 100644
--- a/include/camera/Camera.h
+++ b/include/camera/Camera.h
@@ -71,11 +71,11 @@
// construct a camera client from an existing remote
static sp<Camera> create(const sp<ICamera>& camera);
static sp<Camera> connect(int cameraId,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid);
static status_t connectLegacy(int cameraId, int halVersion,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid, sp<Camera>& camera);
virtual ~Camera();
diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h
index 38bff3e..5f85635 100644
--- a/include/camera/ICameraService.h
+++ b/include/camera/ICameraService.h
@@ -109,7 +109,7 @@
*/
virtual status_t connect(const sp<ICameraClient>& cameraClient,
int cameraId,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid,
/*out*/
sp<ICamera>& device) = 0;
@@ -117,7 +117,7 @@
virtual status_t connectDevice(
const sp<ICameraDeviceCallbacks>& cameraCb,
int cameraId,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid,
/*out*/
sp<ICameraDeviceUser>& device) = 0;
@@ -141,7 +141,7 @@
*/
virtual status_t connectLegacy(const sp<ICameraClient>& cameraClient,
int cameraId, int halVersion,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid,
/*out*/
sp<ICamera>& device) = 0;
@@ -164,7 +164,7 @@
/**
* Notify the camera service of a system event. Should only be called from system_server.
*/
- virtual void notifySystemEvent(int eventId, int arg0) = 0;
+ virtual void notifySystemEvent(int32_t eventId, const int32_t* args, size_t length) = 0;
};
// ----------------------------------------------------------------------------
diff --git a/include/camera/ICameraServiceProxy.h b/include/camera/ICameraServiceProxy.h
new file mode 100644
index 0000000..12a555f
--- /dev/null
+++ b/include/camera/ICameraServiceProxy.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_ICAMERASERVICEPROXY_H
+#define ANDROID_HARDWARE_ICAMERASERVICEPROXY_H
+
+#include <utils/RefBase.h>
+#include <binder/IInterface.h>
+#include <binder/Parcel.h>
+
+namespace android {
+
+class ICameraServiceProxy : public IInterface {
+public:
+ enum {
+ PING_FOR_USER_UPDATE = IBinder::FIRST_CALL_TRANSACTION,
+ };
+
+ DECLARE_META_INTERFACE(CameraServiceProxy);
+
+ virtual void pingForUserUpdate() = 0;
+};
+
+class BnCameraServiceProxy: public BnInterface<ICameraServiceProxy>
+{
+public:
+ virtual status_t onTransact( uint32_t code,
+ const Parcel& data,
+ Parcel* reply,
+ uint32_t flags = 0);
+};
+
+
+
+}; // namespace android
+
+#endif // ANDROID_HARDWARE_ICAMERASERVICEPROXY_H
+
+
diff --git a/include/camera/camera2/ICameraDeviceUser.h b/include/camera/camera2/ICameraDeviceUser.h
index 619b161..b3dd140 100644
--- a/include/camera/camera2/ICameraDeviceUser.h
+++ b/include/camera/camera2/ICameraDeviceUser.h
@@ -97,7 +97,7 @@
* must be called before any requests can be submitted.
* <p>
*/
- virtual status_t endConfigure() = 0;
+ virtual status_t endConfigure(bool isConstrainedHighSpeed = false) = 0;
virtual status_t deleteStream(int streamId) = 0;
diff --git a/include/media/AudioEffect.h b/include/media/AudioEffect.h
index 61da4f2..5af6c10 100644
--- a/include/media/AudioEffect.h
+++ b/include/media/AudioEffect.h
@@ -429,7 +429,8 @@
private:
// Implements the IEffectClient interface
- class EffectClient : public android::BnEffectClient, public android::IBinder::DeathRecipient
+ class EffectClient :
+ public android::BnEffectClient, public android::IBinder::DeathRecipient
{
public:
@@ -437,24 +438,39 @@
// IEffectClient
virtual void controlStatusChanged(bool controlGranted) {
- mEffect->controlStatusChanged(controlGranted);
+ sp<AudioEffect> effect = mEffect.promote();
+ if (effect != 0) {
+ effect->controlStatusChanged(controlGranted);
+ }
}
virtual void enableStatusChanged(bool enabled) {
- mEffect->enableStatusChanged(enabled);
+ sp<AudioEffect> effect = mEffect.promote();
+ if (effect != 0) {
+ effect->enableStatusChanged(enabled);
+ }
}
virtual void commandExecuted(uint32_t cmdCode,
uint32_t cmdSize,
void *pCmdData,
uint32_t replySize,
void *pReplyData) {
- mEffect->commandExecuted(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+ sp<AudioEffect> effect = mEffect.promote();
+ if (effect != 0) {
+ effect->commandExecuted(
+ cmdCode, cmdSize, pCmdData, replySize, pReplyData);
+ }
}
// IBinder::DeathRecipient
- virtual void binderDied(const wp<IBinder>& who) {mEffect->binderDied();}
+ virtual void binderDied(const wp<IBinder>& who) {
+ sp<AudioEffect> effect = mEffect.promote();
+ if (effect != 0) {
+ effect->binderDied();
+ }
+ }
private:
- AudioEffect *mEffect;
+ wp<AudioEffect> mEffect;
};
void binderDied();
diff --git a/include/media/AudioIoDescriptor.h b/include/media/AudioIoDescriptor.h
index 2437901..c94b738 100644
--- a/include/media/AudioIoDescriptor.h
+++ b/include/media/AudioIoDescriptor.h
@@ -33,12 +33,31 @@
class AudioIoDescriptor : public RefBase {
public:
AudioIoDescriptor() :
+ mIoHandle(AUDIO_IO_HANDLE_NONE),
mSamplingRate(0), mFormat(AUDIO_FORMAT_DEFAULT), mChannelMask(AUDIO_CHANNEL_NONE),
- mFrameCount(0), mLatency(0) {}
+ mFrameCount(0), mLatency(0)
+ {
+ memset(&mPatch, 0, sizeof(struct audio_patch));
+ }
virtual ~AudioIoDescriptor() {}
+ audio_port_handle_t getDeviceId() {
+ if (mPatch.num_sources != 0 && mPatch.num_sinks != 0) {
+ if (mPatch.sources[0].type == AUDIO_PORT_TYPE_MIX) {
+ // this is an output mix
+ // FIXME: the API only returns the first device in case of multiple device selection
+ return mPatch.sinks[0].id;
+ } else {
+ // this is an input mix
+ return mPatch.sources[0].id;
+ }
+ }
+ return AUDIO_PORT_HANDLE_NONE;
+ }
+
audio_io_handle_t mIoHandle;
+ struct audio_patch mPatch;
uint32_t mSamplingRate;
audio_format_t mFormat;
audio_channel_mask_t mChannelMask;
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 4d8bd32..c4c7b0e 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -394,6 +394,39 @@
* TODO Document this method.
*/
audio_port_handle_t getInputDevice();
+
+ /* Returns the ID of the audio device actually used by the input to which this AudioRecord
+ * is attached.
+ * A value of AUDIO_PORT_HANDLE_NONE indicates the AudioRecord is not attached to any input.
+ *
+ * Parameters:
+ * none.
+ */
+ audio_port_handle_t getRoutedDeviceId();
+
+ /* Add an AudioDeviceCallback. The caller will be notified when the audio device
+ * to which this AudioRecord is routed is updated.
+ * Replaces any previously installed callback.
+ * Parameters:
+ * callback: The callback interface
+ * Returns NO_ERROR if successful.
+ * INVALID_OPERATION if the same callback is already installed.
+ * NO_INIT or PREMISSION_DENIED if AudioFlinger service is not reachable
+ * BAD_VALUE if the callback is NULL
+ */
+ status_t addAudioDeviceCallback(
+ const sp<AudioSystem::AudioDeviceCallback>& callback);
+
+ /* remove an AudioDeviceCallback.
+ * Parameters:
+ * callback: The callback interface
+ * Returns NO_ERROR if successful.
+ * INVALID_OPERATION if the callback is not installed
+ * BAD_VALUE if the callback is NULL
+ */
+ status_t removeAudioDeviceCallback(
+ const sp<AudioSystem::AudioDeviceCallback>& callback);
+
private:
/* If nonContig is non-NULL, it is an output parameter that will be set to the number of
* additional non-contiguous frames that are predicted to be available immediately,
@@ -588,6 +621,7 @@
// For Device Selection API
// a value of AUDIO_PORT_HANDLE_NONE indicated default (AudioPolicyManager) routing.
audio_port_handle_t mSelectedDeviceId;
+ sp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
};
}; // namespace android
diff --git a/include/media/AudioResamplerPublic.h b/include/media/AudioResamplerPublic.h
index 53b8c13..6cf2ca9 100644
--- a/include/media/AudioResamplerPublic.h
+++ b/include/media/AudioResamplerPublic.h
@@ -143,6 +143,16 @@
return required * (double)speed + 1 + 1; // accounting for rounding dependencies
}
+// Identifies sample rates that we associate with music
+// and thus eligible for better resampling and fast capture.
+// This is somewhat less than 44100 to allow for pitch correction
+// involving resampling as well as asynchronous resampling.
+#define AUDIO_PROCESSING_MUSIC_RATE 40000
+
+static inline bool isMusicRate(uint32_t sampleRate) {
+ return sampleRate >= AUDIO_PROCESSING_MUSIC_RATE;
+}
+
} // namespace android
// ---------------------------------------------------------------------------
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 0cbcdb1..26cffa6 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -158,6 +158,9 @@
// or no HW sync source is used.
static audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId);
+ // Indicate JAVA services are ready (scheduling, power management ...)
+ static status_t systemReady();
+
// Events used to synchronize actions between audio sessions.
// For instance SYNC_EVENT_PRESENTATION_COMPLETE can be used to delay recording start until
// playback is complete on another audio session.
@@ -332,8 +335,26 @@
};
- static status_t addAudioPortCallback(const sp<AudioPortCallback>& callBack);
- static status_t removeAudioPortCallback(const sp<AudioPortCallback>& callBack);
+ static status_t addAudioPortCallback(const sp<AudioPortCallback>& callback);
+ static status_t removeAudioPortCallback(const sp<AudioPortCallback>& callback);
+
+ class AudioDeviceCallback : public RefBase
+ {
+ public:
+
+ AudioDeviceCallback() {}
+ virtual ~AudioDeviceCallback() {}
+
+ virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
+ audio_port_handle_t deviceId) = 0;
+ };
+
+ static status_t addAudioDeviceCallback(const sp<AudioDeviceCallback>& callback,
+ audio_io_handle_t audioIo);
+ static status_t removeAudioDeviceCallback(const sp<AudioDeviceCallback>& callback,
+ audio_io_handle_t audioIo);
+
+ static audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
private:
@@ -359,10 +380,20 @@
// values for output/input parameters up-to-date in client process
virtual void ioConfigChanged(audio_io_config_event event,
const sp<AudioIoDescriptor>& ioDesc);
+
+
+ status_t addAudioDeviceCallback(const sp<AudioDeviceCallback>& callback,
+ audio_io_handle_t audioIo);
+ status_t removeAudioDeviceCallback(const sp<AudioDeviceCallback>& callback,
+ audio_io_handle_t audioIo);
+
+ audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
+
private:
Mutex mLock;
- DefaultKeyedVector<audio_io_handle_t, sp<AudioIoDescriptor> > mIoDescriptors;
-
+ DefaultKeyedVector<audio_io_handle_t, sp<AudioIoDescriptor> > mIoDescriptors;
+ DefaultKeyedVector<audio_io_handle_t, Vector < sp<AudioDeviceCallback> > >
+ mAudioDeviceCallbacks;
// cached values for recording getInputBufferSize() queries
size_t mInBuffSize; // zero indicates cache is invalid
uint32_t mInSamplingRate;
@@ -377,8 +408,8 @@
AudioPolicyServiceClient() {
}
- status_t addAudioPortCallback(const sp<AudioPortCallback>& callBack);
- status_t removeAudioPortCallback(const sp<AudioPortCallback>& callBack);
+ status_t addAudioPortCallback(const sp<AudioPortCallback>& callback);
+ status_t removeAudioPortCallback(const sp<AudioPortCallback>& callback);
// DeathRecipient
virtual void binderDied(const wp<IBinder>& who);
@@ -393,6 +424,9 @@
Vector <sp <AudioPortCallback> > mAudioPortCallbacks;
};
+ static const sp<AudioFlingerClient> getAudioFlingerClient();
+ static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
+
static sp<AudioFlingerClient> gAudioFlingerClient;
static sp<AudioPolicyServiceClient> gAudioPolicyServiceClient;
friend class AudioFlingerClient;
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index 0ccd19e..c0bc516 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -43,25 +43,35 @@
*/
enum event_type {
EVENT_MORE_DATA = 0, // Request to write more data to buffer.
+ // This event only occurs for TRANSFER_CALLBACK.
// If this event is delivered but the callback handler
- // does not want to write more data, the handler must explicitly
+ // does not want to write more data, the handler must
// ignore the event by setting frameCount to zero.
- EVENT_UNDERRUN = 1, // Buffer underrun occurred.
+ // This might occur, for example, if the application is
+ // waiting for source data or is at the end of stream.
+ //
+ // For data filling, it is preferred that the callback
+ // does not block and instead returns a short count on
+ // the amount of data actually delivered
+ // (or 0, if no data is currently available).
+ EVENT_UNDERRUN = 1, // Buffer underrun occurred. This will not occur for
+ // static tracks.
EVENT_LOOP_END = 2, // Sample loop end was reached; playback restarted from
- // loop start if loop count was not 0.
+ // loop start if loop count was not 0 for a static track.
EVENT_MARKER = 3, // Playback head is at the specified marker position
// (See setMarkerPosition()).
EVENT_NEW_POS = 4, // Playback head is at a new position
// (See setPositionUpdatePeriod()).
- EVENT_BUFFER_END = 5, // Playback head is at the end of the buffer.
- // Not currently used by android.media.AudioTrack.
+ EVENT_BUFFER_END = 5, // Playback has completed for a static track.
EVENT_NEW_IAUDIOTRACK = 6, // IAudioTrack was re-created, either due to re-routing and
// voluntary invalidation by mediaserver, or mediaserver crash.
EVENT_STREAM_END = 7, // Sent after all the buffers queued in AF and HW are played
- // back (after stop is called)
+ // back (after stop is called) for an offloaded track.
+#if 0 // FIXME not yet implemented
EVENT_NEW_TIMESTAMP = 8, // Delivered periodically and when there's a significant change
// in the mapping from frame position to presentation time.
// See AudioTimestamp for the information included with event.
+#endif
};
/* Client should declare a Buffer and pass the address to obtainBuffer()
@@ -183,6 +193,10 @@
* pid: Process ID of the app which initially requested this AudioTrack
* for power management tracking, or -1 for current process ID.
* pAttributes: If not NULL, supersedes streamType for use case selection.
+ * doNotReconnect: If set to true, AudioTrack won't automatically recreate the IAudioTrack
+ binder to AudioFlinger.
+ It will return an error instead. The application will recreate
+ the track based on offloading or different channel configuration, etc.
* threadCanCallJava: Not present in parameter list, and so is fixed at false.
*/
@@ -200,7 +214,8 @@
const audio_offload_info_t *offloadInfo = NULL,
int uid = -1,
pid_t pid = -1,
- const audio_attributes_t* pAttributes = NULL);
+ const audio_attributes_t* pAttributes = NULL,
+ bool doNotReconnect = false);
/* Creates an audio track and registers it with AudioFlinger.
* With this constructor, the track is configured for static buffer mode.
@@ -228,7 +243,8 @@
const audio_offload_info_t *offloadInfo = NULL,
int uid = -1,
pid_t pid = -1,
- const audio_attributes_t* pAttributes = NULL);
+ const audio_attributes_t* pAttributes = NULL,
+ bool doNotReconnect = false);
/* Terminates the AudioTrack and unregisters it from AudioFlinger.
* Also destroys all resources associated with the AudioTrack.
@@ -272,7 +288,8 @@
const audio_offload_info_t *offloadInfo = NULL,
int uid = -1,
pid_t pid = -1,
- const audio_attributes_t* pAttributes = NULL);
+ const audio_attributes_t* pAttributes = NULL,
+ bool doNotReconnect = false);
/* Result of constructing the AudioTrack. This must be checked for successful initialization
* before using any AudioTrack API (except for set()), because using
@@ -510,7 +527,7 @@
*/
status_t setOutputDevice(audio_port_handle_t deviceId);
- /* Returns the ID of the audio device used for output of this AudioTrack.
+ /* Returns the ID of the audio device selected for this AudioTrack.
* A value of AUDIO_PORT_HANDLE_NONE indicates default (AudioPolicyManager) routing.
*
* Parameters:
@@ -518,6 +535,15 @@
*/
audio_port_handle_t getOutputDevice();
+ /* Returns the ID of the audio device actually used by the output to which this AudioTrack is
+ * attached.
+ * A value of AUDIO_PORT_HANDLE_NONE indicates the audio track is not attached to any output.
+ *
+ * Parameters:
+ * none.
+ */
+ audio_port_handle_t getRoutedDeviceId();
+
/* Returns the unique session ID associated with this track.
*
* Parameters:
@@ -664,6 +690,28 @@
*/
status_t getTimestamp(AudioTimestamp& timestamp);
+ /* Add an AudioDeviceCallback. The caller will be notified when the audio device to which this
+ * AudioTrack is routed is updated.
+ * Replaces any previously installed callback.
+ * Parameters:
+ * callback: The callback interface
+ * Returns NO_ERROR if successful.
+ * INVALID_OPERATION if the same callback is already installed.
+ * NO_INIT or PERMISSION_DENIED if AudioFlinger service is not reachable
+ * BAD_VALUE if the callback is NULL
+ */
+ status_t addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
+
+ /* remove an AudioDeviceCallback.
+ * Parameters:
+ * callback: The callback interface
+ * Returns NO_ERROR if successful.
+ * INVALID_OPERATION if the callback is not installed
+ * BAD_VALUE if the callback is NULL
+ */
+ status_t removeAudioDeviceCallback(
+ const sp<AudioSystem::AudioDeviceCallback>& callback);
+
protected:
/* copying audio tracks is not allowed */
AudioTrack(const AudioTrack& other);
@@ -761,6 +809,13 @@
size_t mReqFrameCount; // frame count to request the first or next time
// a new IAudioTrack is needed, non-decreasing
+ // The following AudioFlinger server-side values are cached in createAudioTrack_l().
+ // These values can be used for informational purposes until the track is invalidated,
+ // whereupon restoreTrack_l() calls createTrack_l() to update the values.
+ uint32_t mAfLatency; // AudioFlinger latency in ms
+ size_t mAfFrameCount; // AudioFlinger frame count
+ uint32_t mAfSampleRate; // AudioFlinger sample rate
+
// constant after constructor or set()
audio_format_t mFormat; // as requested by client, not forced to 16-bit
audio_stream_type_t mStreamType; // mStreamType == AUDIO_STREAM_DEFAULT implies
@@ -839,6 +894,7 @@
// only used for offloaded and direct tracks.
bool mPreviousTimestampValid;// true if mPreviousTimestamp is valid
+ bool mTimestampStartupGlitchReported; // reduce log spam
bool mRetrogradeMotionReported; // reduce log spam
AudioTimestamp mPreviousTimestamp; // used to detect retrograde motion
@@ -846,6 +902,8 @@
// const after set(), except for bits AUDIO_OUTPUT_FLAG_FAST and AUDIO_OUTPUT_FLAG_OFFLOAD.
// mLock must be held to read or write those bits reliably.
+ bool mDoNotReconnect;
+
int mSessionId;
int mAuxEffectId;
@@ -885,6 +943,8 @@
uint32_t mSequence; // incremented for each new IAudioTrack attempt
int mClientUid;
pid_t mClientPid;
+
+ sp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
};
class TimedAudioTrack : public AudioTrack
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index 3f7fd09..5051aff 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -243,6 +243,9 @@
/* Get the HW synchronization source used for an audio session */
virtual audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId) = 0;
+
+ /* Indicate JAVA services are ready (scheduling, power management ...) */
+ virtual status_t systemReady() = 0;
};
diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h
index 47de0ca..77ed5d3 100644
--- a/include/media/IMediaRecorder.h
+++ b/include/media/IMediaRecorder.h
@@ -56,7 +56,7 @@
virtual status_t init() = 0;
virtual status_t close() = 0;
virtual status_t release() = 0;
- virtual status_t usePersistentSurface(const sp<IGraphicBufferConsumer>& surface) = 0;
+ virtual status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface) = 0;
virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() = 0;
};
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index df3aeca..3d29e4a 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -25,6 +25,10 @@
#include <utils/List.h>
#include <utils/String8.h>
+#include <list>
+
+#include <media/hardware/MetadataBufferType.h>
+
#include <OMX_Core.h>
#include <OMX_Video.h>
@@ -81,14 +85,16 @@
virtual status_t getState(
node_id node, OMX_STATETYPE* state) = 0;
+ // This will set *type to previous metadata buffer type on OMX error (not on binder error), and
+ // new metadata buffer type on success.
virtual status_t storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
+ node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type = NULL) = 0;
virtual status_t prepareForAdaptivePlayback(
node_id node, OMX_U32 portIndex, OMX_BOOL enable,
OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) = 0;
- virtual status_t configureVideoTunnelMode(
+ virtual status_t configureVideoTunnelMode(
node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
OMX_U32 audioHwSync, native_handle_t **sidebandHandle) = 0;
@@ -98,9 +104,10 @@
virtual status_t getGraphicBufferUsage(
node_id node, OMX_U32 port_index, OMX_U32* usage) = 0;
+ // Use |params| as an OMX buffer, but limit the size of the OMX buffer to |allottedSize|.
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer) = 0;
+ buffer_id *buffer, OMX_U32 allottedSize) = 0;
virtual status_t useGraphicBuffer(
node_id node, OMX_U32 port_index,
@@ -110,17 +117,23 @@
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) = 0;
+ // This will set *type to resulting metadata buffer type on OMX error (not on binder error) as
+ // well as on success.
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
- sp<IGraphicBufferProducer> *bufferProducer) = 0;
+ sp<IGraphicBufferProducer> *bufferProducer,
+ MetadataBufferType *type = NULL) = 0;
virtual status_t createPersistentInputSurface(
sp<IGraphicBufferProducer> *bufferProducer,
sp<IGraphicBufferConsumer> *bufferConsumer) = 0;
- virtual status_t usePersistentInputSurface(
+ // This will set *type to resulting metadata buffer type on OMX error (not on binder error) as
+ // well as on success.
+ virtual status_t setInputSurface(
node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer) = 0;
+ const sp<IGraphicBufferConsumer> &bufferConsumer,
+ MetadataBufferType *type) = 0;
virtual status_t signalEndOfInputStream(node_id node) = 0;
@@ -132,20 +145,32 @@
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) = 0;
+ // Allocate an OMX buffer of size |allottedSize|. Use |params| as the backup buffer, which
+ // may be larger.
virtual status_t allocateBufferWithBackup(
node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer) = 0;
+ buffer_id *buffer, OMX_U32 allottedSize) = 0;
virtual status_t freeBuffer(
node_id node, OMX_U32 port_index, buffer_id buffer) = 0;
- virtual status_t fillBuffer(node_id node, buffer_id buffer) = 0;
+ enum {
+ kFenceTimeoutMs = 1000
+ };
+ // Calls OMX_FillBuffer on buffer, and passes |fenceFd| to component if it supports
+ // fences. Otherwise, it waits on |fenceFd| before calling OMX_FillBuffer.
+ // Takes ownership of |fenceFd| even if this call fails.
+ virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd = -1) = 0;
+ // Calls OMX_EmptyBuffer on buffer (after updating buffer header with |range_offset|,
+ // |range_length|, |flags| and |timestamp|). Passes |fenceFd| to component if it
+ // supports fences. Otherwise, it waits on |fenceFd| before calling OMX_EmptyBuffer.
+ // Takes ownership of |fenceFd| even if this call fails.
virtual status_t emptyBuffer(
node_id node,
buffer_id buffer,
OMX_U32 range_offset, OMX_U32 range_length,
- OMX_U32 flags, OMX_TICKS timestamp) = 0;
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd = -1) = 0;
virtual status_t getExtensionIndex(
node_id node,
@@ -173,10 +198,11 @@
EVENT,
EMPTY_BUFFER_DONE,
FILL_BUFFER_DONE,
-
+ FRAME_RENDERED,
} type;
IOMX::node_id node;
+ int fenceFd; // used for EMPTY_BUFFER_DONE and FILL_BUFFER_DONE; client must close this
union {
// if type == EVENT
@@ -200,6 +226,11 @@
OMX_TICKS timestamp;
} extended_buffer_data;
+ // if type == FRAME_RENDERED
+ struct {
+ OMX_TICKS timestamp;
+ OMX_S64 nanoTime;
+ } render_data;
} u;
};
@@ -207,7 +238,8 @@
public:
DECLARE_META_INTERFACE(OMXObserver);
- virtual void onMessage(const omx_message &msg) = 0;
+ // Handle (list of) messages.
+ virtual void onMessages(const std::list<omx_message> &messages) = 0;
};
////////////////////////////////////////////////////////////////////////////////
@@ -233,4 +265,15 @@
} // namespace android
+inline static const char *asString(android::MetadataBufferType i, const char *def = "??") {
+ using namespace android;
+ switch (i) {
+ case kMetadataBufferTypeCameraSource: return "CameraSource";
+ case kMetadataBufferTypeGrallocSource: return "GrallocSource";
+ case kMetadataBufferTypeANWBuffer: return "ANWBuffer";
+ case kMetadataBufferTypeInvalid: return "Invalid";
+ default: return def;
+ }
+}
+
#endif // ANDROID_IOMX_H_
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index fa917f9..de82554 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -113,7 +113,9 @@
AudioCallback cb = NULL,
void *cookie = NULL,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
- const audio_offload_info_t *offloadInfo = NULL) = 0;
+ const audio_offload_info_t *offloadInfo = NULL,
+ bool doNotReconnect = false,
+ uint32_t suggestedFrameCount = 0) = 0;
virtual status_t start() = 0;
diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h
index f061d22..e02918f 100644
--- a/include/media/MediaProfiles.h
+++ b/include/media/MediaProfiles.h
@@ -58,24 +58,6 @@
CAMCORDER_QUALITY_HIGH_SPEED_LIST_END = 2005,
};
-/**
- * Set CIF as default maximum import and export resolution of video editor.
- * The maximum import and export resolutions are platform specific,
- * which should be defined in media_profiles.xml.
- * Set default maximum prefetch YUV frames to 6, which means video editor can
- * queue up to 6 YUV frames in the video encoder source.
- * This value is used to limit the amount of memory used by video editor
- * engine when the encoder consumes YUV frames at a lower speed
- * than video editor engine produces.
- */
-enum videoeditor_capability {
- VIDEOEDITOR_DEFAULT_MAX_INPUT_FRAME_WIDTH = 352,
- VIDEOEDITOR_DEFUALT_MAX_INPUT_FRAME_HEIGHT = 288,
- VIDEOEDITOR_DEFAULT_MAX_OUTPUT_FRAME_WIDTH = 352,
- VIDEOEDITOR_DEFUALT_MAX_OUTPUT_FRAME_HEIGHT = 288,
- VIDEOEDITOR_DEFAULT_MAX_PREFETCH_YUV_FRAMES = 6
-};
-
enum video_decoder {
VIDEO_DECODER_WMV,
};
@@ -148,32 +130,6 @@
int getVideoEncoderParamByName(const char *name, video_encoder codec) const;
/**
- * Returns the value for the given param name for the video editor cap
- * param or -1 if error.
- * Supported param name are:
- * videoeditor.input.width.max - max input video frame width
- * videoeditor.input.height.max - max input video frame height
- * videoeditor.output.width.max - max output video frame width
- * videoeditor.output.height.max - max output video frame height
- * maxPrefetchYUVFrames - max prefetch YUV frames in video editor engine. This value is used
- * to limit the memory consumption.
- */
- int getVideoEditorCapParamByName(const char *name) const;
-
- /**
- * Returns the value for the given param name for the video editor export codec format
- * param or -1 if error.
- * Supported param name are:
- * videoeditor.export.profile - export video profile
- * videoeditor.export.level - export video level
- * Supported param codec are:
- * 1 for h263
- * 2 for h264
- * 3 for mpeg4
- */
- int getVideoEditorExportParamByName(const char *name, int codec) const;
-
- /**
* Returns the audio encoders supported.
*/
Vector<audio_encoder> getAudioEncoders() const;
@@ -221,7 +177,7 @@
MediaProfiles& operator=(const MediaProfiles&); // Don't call me
MediaProfiles(const MediaProfiles&); // Don't call me
- MediaProfiles() { mVideoEditorCap = NULL; } // Dummy default constructor
+ MediaProfiles() {} // Dummy default constructor
~MediaProfiles(); // Don't delete me
struct VideoCodec {
@@ -366,31 +322,6 @@
int mCameraId;
Vector<int> mLevels;
};
- struct ExportVideoProfile {
- ExportVideoProfile(int codec, int profile, int level)
- :mCodec(codec),mProfile(profile),mLevel(level) {}
- ~ExportVideoProfile() {}
- int mCodec;
- int mProfile;
- int mLevel;
- };
- struct VideoEditorCap {
- VideoEditorCap(int inFrameWidth, int inFrameHeight,
- int outFrameWidth, int outFrameHeight, int frames)
- : mMaxInputFrameWidth(inFrameWidth),
- mMaxInputFrameHeight(inFrameHeight),
- mMaxOutputFrameWidth(outFrameWidth),
- mMaxOutputFrameHeight(outFrameHeight),
- mMaxPrefetchYUVFrames(frames) {}
-
- ~VideoEditorCap() {}
-
- int mMaxInputFrameWidth;
- int mMaxInputFrameHeight;
- int mMaxOutputFrameWidth;
- int mMaxOutputFrameHeight;
- int mMaxPrefetchYUVFrames;
- };
int getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const;
void initRequiredProfileRefs(const Vector<int>& cameraIds);
@@ -403,7 +334,6 @@
static void logAudioEncoderCap(const AudioEncoderCap& cap);
static void logVideoDecoderCap(const VideoDecoderCap& cap);
static void logAudioDecoderCap(const AudioDecoderCap& cap);
- static void logVideoEditorCap(const VideoEditorCap& cap);
// If the xml configuration file does exist, use the settings
// from the xml
@@ -415,9 +345,6 @@
static VideoDecoderCap* createVideoDecoderCap(const char **atts);
static VideoEncoderCap* createVideoEncoderCap(const char **atts);
static AudioEncoderCap* createAudioEncoderCap(const char **atts);
- static VideoEditorCap* createVideoEditorCap(
- const char **atts, MediaProfiles *profiles);
- static ExportVideoProfile* createExportVideoProfile(const char **atts);
static CamcorderProfile* createCamcorderProfile(
int cameraId, const char **atts, Vector<int>& cameraIds);
@@ -461,8 +388,6 @@
static void createDefaultEncoderOutputFileFormats(MediaProfiles *profiles);
static void createDefaultImageEncodingQualityLevels(MediaProfiles *profiles);
static void createDefaultImageDecodingMaxMemory(MediaProfiles *profiles);
- static void createDefaultVideoEditorCap(MediaProfiles *profiles);
- static void createDefaultExportVideoProfiles(MediaProfiles *profiles);
static VideoEncoderCap* createDefaultH263VideoEncoderCap();
static VideoEncoderCap* createDefaultM4vVideoEncoderCap();
@@ -520,8 +445,6 @@
RequiredProfiles *mRequiredProfileRefs;
Vector<int> mCameraIds;
- VideoEditorCap* mVideoEditorCap;
- Vector<ExportVideoProfile*> mVideoEditorExportProfiles;
};
}; // namespace android
diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h
index 9947309..d6cc4bb 100644
--- a/include/media/MediaRecorderBase.h
+++ b/include/media/MediaRecorderBase.h
@@ -57,7 +57,7 @@
virtual status_t reset() = 0;
virtual status_t getMaxAmplitude(int *max) = 0;
virtual status_t dump(int fd, const Vector<String16>& args) const = 0;
- virtual status_t usePersistentSurface(const sp<IGraphicBufferConsumer>& surface) = 0;
+ virtual status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface) = 0;
virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const = 0;
diff --git a/include/media/MediaResourcePolicy.h b/include/media/MediaResourcePolicy.h
index 1e1c341..9bc2eec 100644
--- a/include/media/MediaResourcePolicy.h
+++ b/include/media/MediaResourcePolicy.h
@@ -29,7 +29,7 @@
class MediaResourcePolicy {
public:
MediaResourcePolicy();
- MediaResourcePolicy(String8 type, uint64_t value);
+ MediaResourcePolicy(String8 type, String8 value);
void readFromParcel(const Parcel &parcel);
void writeToParcel(Parcel *parcel) const;
@@ -37,7 +37,7 @@
String8 toString() const;
String8 mType;
- uint64_t mValue;
+ String8 mValue;
};
}; // namespace android
diff --git a/services/camera/libcameraservice/utils/RingBuffer.h b/include/media/RingBuffer.h
similarity index 100%
rename from services/camera/libcameraservice/utils/RingBuffer.h
rename to include/media/RingBuffer.h
diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h
index 9210feb..15ff82d 100644
--- a/include/media/mediarecorder.h
+++ b/include/media/mediarecorder.h
@@ -237,7 +237,7 @@
status_t close();
status_t release();
void notify(int msg, int ext1, int ext2);
- status_t usePersistentSurface(const sp<PersistentSurface>& surface);
+ status_t setInputSurface(const sp<PersistentSurface>& surface);
sp<IGraphicBufferProducer> querySurfaceMediaSourceFromMediaServer();
private:
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index cdb923d..f9ea38e 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -20,9 +20,11 @@
#include <stdint.h>
#include <android/native_window.h>
+#include <media/hardware/MetadataBufferType.h>
#include <media/IOMX.h>
#include <media/stagefright/foundation/AHierarchicalStateMachine.h>
#include <media/stagefright/CodecBase.h>
+#include <media/stagefright/FrameRenderTracker.h>
#include <media/stagefright/SkipCutBuffer.h>
#include <OMX_Audio.h>
@@ -44,8 +46,7 @@
virtual void initiateAllocateComponent(const sp<AMessage> &msg);
virtual void initiateConfigureComponent(const sp<AMessage> &msg);
virtual void initiateCreateInputSurface();
- virtual void initiateUsePersistentInputSurface(
- const sp<PersistentSurface> &surface);
+ virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface);
virtual void initiateStart();
virtual void initiateShutdown(bool keepComponentAllocated = false);
@@ -109,6 +110,7 @@
enum {
kWhatSetup = 'setu',
kWhatOMXMessage = 'omx ',
+ kWhatOMXMessageList = 'omxL',
kWhatInputBufferFilled = 'inpF',
kWhatOutputBufferDrained = 'outD',
kWhatShutdown = 'shut',
@@ -119,12 +121,12 @@
kWhatConfigureComponent = 'conf',
kWhatSetSurface = 'setS',
kWhatCreateInputSurface = 'cisf',
- kWhatUsePersistentInputSurface = 'pisf',
+ kWhatSetInputSurface = 'sisf',
kWhatSignalEndOfInputStream = 'eois',
kWhatStart = 'star',
kWhatRequestIDRFrame = 'ridr',
kWhatSetParameters = 'setP',
- kWhatSubmitOutputMetaDataBufferIfEOS = 'subm',
+ kWhatSubmitOutputMetadataBufferIfEOS = 'subm',
kWhatOMXDied = 'OMXd',
kWhatReleaseCodecInstance = 'relC',
};
@@ -147,16 +149,39 @@
OWNED_BY_UPSTREAM,
OWNED_BY_DOWNSTREAM,
OWNED_BY_NATIVE_WINDOW,
+ UNRECOGNIZED, // not a tracked buffer
};
+ static inline Status getSafeStatus(BufferInfo *info) {
+ return info == NULL ? UNRECOGNIZED : info->mStatus;
+ }
+
IOMX::buffer_id mBufferID;
Status mStatus;
unsigned mDequeuedAt;
sp<ABuffer> mData;
sp<GraphicBuffer> mGraphicBuffer;
+ int mFenceFd;
+ FrameRenderTracker::Info *mRenderInfo;
+
+ // The following field and 4 methods are used for debugging only
+ bool mIsReadFence;
+ // Store |fenceFd| and set read/write flag. Log error, if there is already a fence stored.
+ void setReadFence(int fenceFd, const char *dbg);
+ void setWriteFence(int fenceFd, const char *dbg);
+ // Log error, if the current fence is not a read/write fence.
+ void checkReadFence(const char *dbg);
+ void checkWriteFence(const char *dbg);
};
+ static const char *_asString(BufferInfo::Status s);
+ void dumpBuffers(OMX_U32 portIndex);
+
+ // If |fd| is non-negative, waits for fence with |fd| and logs an error if it fails. Returns
+ // the error code or OK on success. If |fd| is negative, it returns OK
+ status_t waitForFence(int fd, const char *dbg);
+
#if TRACK_BUFFER_TIMING
struct BufferStats {
int64_t mEmptyBufferTimeUs;
@@ -191,6 +216,7 @@
sp<AMessage> mOutputFormat;
sp<AMessage> mBaseOutputFormat;
+ FrameRenderTracker mRenderTracker; // render information for buffers rendered by ACodec
Vector<BufferInfo> mBuffers[2];
bool mPortEOS[2];
status_t mInputEOSResult;
@@ -200,7 +226,6 @@
bool mSentFormat;
bool mIsVideo;
bool mIsEncoder;
- bool mUseMetadataOnEncoderOutput;
bool mShutdownInProgress;
bool mExplicitShutdown;
@@ -215,8 +240,10 @@
bool mChannelMaskPresent;
int32_t mChannelMask;
unsigned mDequeueCounter;
- bool mStoreMetaDataInOutputBuffers;
- int32_t mMetaDataBuffersToSubmit;
+ MetadataBufferType mInputMetadataType;
+ MetadataBufferType mOutputMetadataType;
+ bool mLegacyAdaptiveExperiment;
+ int32_t mMetadataBuffersToSubmit;
size_t mNumUndequeuedBuffers;
int64_t mRepeatFrameDelayUs;
@@ -236,22 +263,27 @@
status_t freeBuffer(OMX_U32 portIndex, size_t i);
status_t handleSetSurface(const sp<Surface> &surface);
- status_t setNativeWindowSizeFormatAndUsage(
- ANativeWindow *nativeWindow /* nonnull */,
- int width, int height, int format, int rotation, int usage);
status_t setupNativeWindowSizeFormatAndUsage(ANativeWindow *nativeWindow /* nonnull */);
status_t configureOutputBuffersFromNativeWindow(
OMX_U32 *nBufferCount, OMX_U32 *nBufferSize,
OMX_U32 *nMinUndequeuedBuffers);
- status_t allocateOutputMetaDataBuffers();
- status_t submitOutputMetaDataBuffer();
- void signalSubmitOutputMetaDataBufferIfEOS_workaround();
+ status_t allocateOutputMetadataBuffers();
+ status_t submitOutputMetadataBuffer();
+ void signalSubmitOutputMetadataBufferIfEOS_workaround();
status_t allocateOutputBuffersFromNativeWindow();
status_t cancelBufferToNativeWindow(BufferInfo *info);
status_t freeOutputBuffersNotOwnedByComponent();
BufferInfo *dequeueBufferFromNativeWindow();
+ inline bool storingMetadataInDecodedBuffers() {
+ return mOutputMetadataType >= 0 && !mIsEncoder;
+ }
+
+ inline bool usingMetadataOnEncoderOutput() {
+ return mOutputMetadataType >= 0 && mIsEncoder;
+ }
+
BufferInfo *findBufferByID(
uint32_t portIndex, IOMX::buffer_id bufferID,
ssize_t *index = NULL);
@@ -332,8 +364,6 @@
status_t initNativeWindow();
- status_t pushBlankBuffersToNativeWindow();
-
// Returns true iff all buffers on the given port have status
// OWNED_BY_US or OWNED_BY_NATIVE_WINDOW.
bool allYourBuffersAreBelongToUs(OMX_U32 portIndex);
@@ -348,6 +378,23 @@
void deferMessage(const sp<AMessage> &msg);
void processDeferredMessages();
+ void onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
+ // called when we have dequeued a buffer |buf| from the native window to track render info.
+ // |fenceFd| is the dequeue fence, and |info| points to the buffer info where this buffer is
+ // stored.
+ void updateRenderInfoForDequeuedBuffer(
+ ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info);
+
+ // Checks to see if any frames have rendered up until |until|, and to notify client
+ // (MediaCodec) of rendered frames up-until the frame pointed to by |until| or the first
+ // unrendered frame. These frames are removed from the render queue.
+ // If |dropIncomplete| is true, unrendered frames up-until |until| will be dropped from the
+ // queue, allowing all rendered frames up till then to be notified of.
+ // (This will effectively clear the render queue up-until (and including) |until|.)
+ // If |until| is NULL, or is not in the rendered queue, this method will check all frames.
+ void notifyOfRenderedFrames(
+ bool dropIncomplete = false, FrameRenderTracker::Info *until = NULL);
+
void sendFormatChange(const sp<AMessage> &reply);
status_t getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify);
diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h
index 96dfd7e..069e897 100644
--- a/include/media/stagefright/CameraSource.h
+++ b/include/media/stagefright/CameraSource.h
@@ -83,7 +83,7 @@
Size videoSize,
int32_t frameRate,
const sp<IGraphicBufferProducer>& surface,
- bool storeMetaDataInVideoBuffers = false);
+ bool storeMetaDataInVideoBuffers = true);
virtual ~CameraSource();
@@ -149,6 +149,8 @@
int32_t mNumInputBuffers;
int32_t mVideoFrameRate;
int32_t mColorFormat;
+ int32_t mEncoderFormat;
+ int32_t mEncoderDataSpace;
status_t mInitCheck;
sp<Camera> mCamera;
diff --git a/include/media/stagefright/CodecBase.h b/include/media/stagefright/CodecBase.h
index 51213b6..bb36052 100644
--- a/include/media/stagefright/CodecBase.h
+++ b/include/media/stagefright/CodecBase.h
@@ -43,6 +43,7 @@
kWhatInputSurfaceAccepted = 'isfa',
kWhatSignaledInputEOS = 'seos',
kWhatBuffersAllocated = 'allc',
+ kWhatOutputFramesRendered = 'outR',
};
virtual void setNotificationMessage(const sp<AMessage> &msg) = 0;
@@ -50,7 +51,7 @@
virtual void initiateAllocateComponent(const sp<AMessage> &msg) = 0;
virtual void initiateConfigureComponent(const sp<AMessage> &msg) = 0;
virtual void initiateCreateInputSurface() = 0;
- virtual void initiateUsePersistentInputSurface(
+ virtual void initiateSetInputSurface(
const sp<PersistentSurface> &surface) = 0;
virtual void initiateStart() = 0;
virtual void initiateShutdown(bool keepComponentAllocated = false) = 0;
diff --git a/include/media/stagefright/FrameRenderTracker.h b/include/media/stagefright/FrameRenderTracker.h
new file mode 100644
index 0000000..3b0db5a
--- /dev/null
+++ b/include/media/stagefright/FrameRenderTracker.h
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAME_RENDER_TRACKER_H_
+
+#define FRAME_RENDER_TRACKER_H_
+
+#include <utils/RefBase.h>
+#include <utils/Timers.h>
+#include <system/window.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AString.h>
+
+#include <list>
+
+namespace android {
+
+class Fence;
+class GraphicBuffer;
+
+struct FrameRenderTracker : public RefBase {
+ // Tracks the render information about a frame. Frames go through several states while
+ // the render information is tracked:
+ //
+ // 1. queued frame: mMediaTime and mGraphicBuffer are set for the frame. mFence is the
+ // queue fence (read fence). mIndex is negative, and mRenderTimeNs is invalid.
+ // Key characteristics: mFence is not NULL and mIndex is negative.
+ //
+ // 2. dequeued frame: mFence is updated with the dequeue fence (write fence). mIndex is set.
+ // Key characteristics: mFence is not NULL and mIndex is non-negative. mRenderTimeNs is still
+ // invalid.
+ //
+ // 3. rendered frame: mFence is cleared, mRenderTimeNs is set.
+ // Key characteristics: mFence is NULL.
+ //
+ struct Info {
+ // set by client during onFrameQueued or onFrameRendered
+ int64_t getMediaTimeUs() const { return mMediaTimeUs; }
+
+ // -1 if frame is not yet rendered
+ nsecs_t getRenderTimeNs() const { return mRenderTimeNs; }
+
+ // set by client during updateRenderInfoForDequeuedBuffer; -1 otherwise
+ ssize_t getIndex() const { return mIndex; }
+
+ // creates information for a queued frame
+ Info(int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer, const sp<Fence> &fence)
+ : mMediaTimeUs(mediaTimeUs),
+ mRenderTimeNs(-1),
+ mIndex(-1),
+ mGraphicBuffer(graphicBuffer),
+ mFence(fence) {
+ }
+
+ // creates information for a frame rendered on a tunneled surface
+ Info(int64_t mediaTimeUs, nsecs_t renderTimeNs)
+ : mMediaTimeUs(mediaTimeUs),
+ mRenderTimeNs(renderTimeNs),
+ mIndex(-1),
+ mGraphicBuffer(NULL),
+ mFence(NULL) {
+ }
+
+ private:
+ int64_t mMediaTimeUs;
+ nsecs_t mRenderTimeNs;
+ ssize_t mIndex; // to be used by client
+ sp<GraphicBuffer> mGraphicBuffer;
+ sp<Fence> mFence;
+
+ friend class FrameRenderTracker;
+ };
+
+ FrameRenderTracker();
+
+ void setComponentName(const AString &componentName);
+
+ // clears all tracked frames, and resets last render time
+ void clear(nsecs_t lastRenderTimeNs);
+
+ // called when |graphicBuffer| corresponding to |mediaTimeUs| is
+ // queued to the output surface using |fence|.
+ void onFrameQueued(
+ int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer, const sp<Fence> &fence);
+
+ // Called when we have dequeued a buffer |buf| from the native window to track render info.
+ // |fenceFd| is the dequeue fence, and |index| is a positive buffer ID to be usable by the
+ // client to track this render info among the dequeued buffers.
+ // Returns pointer to the tracked info, or NULL if buffer is not tracked or if |index|
+ // is negative.
+ Info *updateInfoForDequeuedBuffer(ANativeWindowBuffer *buf, int fenceFd, int index);
+
+ // called when tunneled codec signals frame rendered event
+ // returns BAD_VALUE if systemNano is not monotonic. Otherwise, returns OK.
+ status_t onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
+
+ // Checks to see if any frames have rendered up until |until|. If |until| is NULL or not a
+ // tracked info, this method searches the entire render queue.
+ // Returns list of rendered frames up-until the frame pointed to by |until| or the first
+ // unrendered frame, as well as any dropped frames (those with invalid fence) up-until |until|.
+ // These frames are removed from the render queue.
+ // If |dropIncomplete| is true, unrendered frames up-until |until| will also be dropped from the
+ // queue, allowing all rendered frames up till then to be notified of.
+ // (This will effectively clear the render queue up-until (and including) |until|.)
+ std::list<Info> checkFencesAndGetRenderedFrames(const Info *until, bool dropIncomplete);
+
+ // Stop tracking a queued frame (e.g. if the frame has been discarded). If |info| is NULL or is
+ // not tracked, this method is a no-op.
+ void untrackFrame(const Info *info);
+
+ void dumpRenderQueue() const;
+
+ virtual ~FrameRenderTracker();
+
+private:
+
+ // Render information for buffers. Regular surface buffers are queued in the order of
+ // rendering. Tunneled buffers are queued in the order of receipt.
+ std::list<Info> mRenderQueue;
+ nsecs_t mLastRenderTimeNs;
+ AString mComponentName;
+
+ DISALLOW_EVIL_CONSTRUCTORS(FrameRenderTracker);
+};
+
+} // namespace android
+
+#endif // FRAME_RENDER_TRACKER_H_
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index f5d523d..09cbe8f 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -22,6 +22,7 @@
#include <media/hardware/CryptoAPI.h>
#include <media/MediaResource.h>
#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/FrameRenderTracker.h>
#include <utils/Vector.h>
namespace android {
@@ -60,8 +61,6 @@
CB_RESOURCE_RECLAIMED = 5,
};
- struct BatteryNotifier;
-
static sp<MediaCodec> CreateByType(
const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err = NULL);
@@ -78,9 +77,11 @@
status_t setCallback(const sp<AMessage> &callback);
+ status_t setOnFrameRenderedNotification(const sp<AMessage> &notify);
+
status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
- status_t usePersistentInputSurface(const sp<PersistentSurface> &surface);
+ status_t setInputSurface(const sp<PersistentSurface> &surface);
status_t start();
@@ -159,11 +160,22 @@
status_t setParameters(const sp<AMessage> &params);
+ // Create a MediaCodec notification message from a list of rendered or dropped render infos
+ // by adding rendered frame information to a base notification message. Returns the number
+ // of frames that were rendered.
+ static size_t CreateFramesRenderedMessage(
+ std::list<FrameRenderTracker::Info> done, sp<AMessage> &msg);
+
protected:
virtual ~MediaCodec();
virtual void onMessageReceived(const sp<AMessage> &msg);
private:
+ // used by ResourceManagerClient
+ status_t reclaim();
+ friend struct ResourceManagerClient;
+
+private:
enum State {
UNINITIALIZED,
INITIALIZING,
@@ -188,7 +200,7 @@
kWhatConfigure = 'conf',
kWhatSetSurface = 'sSur',
kWhatCreateInputSurface = 'cisf',
- kWhatUsePersistentInputSurface = 'pisf',
+ kWhatSetInputSurface = 'sisf',
kWhatStart = 'strt',
kWhatStop = 'stop',
kWhatRelease = 'rele',
@@ -209,6 +221,7 @@
kWhatGetName = 'getN',
kWhatSetParameters = 'setP',
kWhatSetCallback = 'setC',
+ kWhatSetNotification = 'setN',
};
enum {
@@ -224,6 +237,7 @@
kFlagGatherCodecSpecificData = 512,
kFlagIsAsync = 1024,
kFlagIsComponentAllocated = 2048,
+ kFlagPushBlankBuffersOnShutdown = 4096,
};
struct BufferInfo {
@@ -261,6 +275,7 @@
};
State mState;
+ bool mReleasedByResourceManager;
sp<ALooper> mLooper;
sp<ALooper> mCodecLooper;
sp<CodecBase> mCodec;
@@ -270,9 +285,11 @@
status_t mStickyError;
sp<Surface> mSurface;
SoftwareRenderer *mSoftRenderer;
+
sp<AMessage> mOutputFormat;
sp<AMessage> mInputFormat;
sp<AMessage> mCallback;
+ sp<AMessage> mOnFrameRenderedNotification;
sp<MemoryDealer> mDealer;
sp<IResourceManagerClient> mResourceManagerClient;
@@ -320,7 +337,7 @@
static status_t PostAndAwaitResponse(
const sp<AMessage> &msg, sp<AMessage> *response);
- static void PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err);
+ void PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err);
status_t init(const AString &name, bool nameIsType, bool encoder);
diff --git a/include/media/stagefright/MediaCodecList.h b/include/media/stagefright/MediaCodecList.h
index 9d1d675..3aaa032 100644
--- a/include/media/stagefright/MediaCodecList.h
+++ b/include/media/stagefright/MediaCodecList.h
@@ -32,6 +32,8 @@
namespace android {
+extern const char *kMaxEncoderInputBuffers;
+
struct AMessage;
struct MediaCodecList : public BnMediaCodecList {
@@ -53,8 +55,11 @@
// to be used by MediaPlayerService alone
static sp<IMediaCodecList> getLocalInstance();
- // only to be used in getLocalInstance
- void updateDetailsForMultipleCodecs(const KeyedVector<AString, CodecSettings>& updates);
+ // only to be used by getLocalInstance
+ static void *profilerThreadWrapper(void * /*arg*/);
+
+ // only to be used by MediaPlayerService
+ void parseTopLevelXMLFile(const char *path, bool ignore_errors = false);
private:
class BinderDeathObserver : public IBinder::DeathRecipient {
@@ -97,7 +102,6 @@
status_t initCheck() const;
void parseXMLFile(const char *path);
- void parseTopLevelXMLFile(const char *path, bool ignore_errors = false);
static void StartElementHandlerWrapper(
void *me, const char *name, const char **attrs);
diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h
index 3b58122..21eb04a 100644
--- a/include/media/stagefright/MediaDefs.h
+++ b/include/media/stagefright/MediaDefs.h
@@ -64,7 +64,7 @@
extern const char *MEDIA_MIMETYPE_TEXT_SUBRIP;
extern const char *MEDIA_MIMETYPE_TEXT_VTT;
extern const char *MEDIA_MIMETYPE_TEXT_CEA_608;
-extern const char *MEDIA_MIMETYPE_DATA_METADATA;
+extern const char *MEDIA_MIMETYPE_DATA_TIMED_ID3;
} // namespace android
diff --git a/include/media/stagefright/MediaFilter.h b/include/media/stagefright/MediaFilter.h
index fdd2a34..d0a572c 100644
--- a/include/media/stagefright/MediaFilter.h
+++ b/include/media/stagefright/MediaFilter.h
@@ -34,8 +34,7 @@
virtual void initiateAllocateComponent(const sp<AMessage> &msg);
virtual void initiateConfigureComponent(const sp<AMessage> &msg);
virtual void initiateCreateInputSurface();
- virtual void initiateUsePersistentInputSurface(
- const sp<PersistentSurface> &surface);
+ virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface);
virtual void initiateStart();
virtual void initiateShutdown(bool keepComponentAllocated = false);
diff --git a/include/media/stagefright/MediaSync.h b/include/media/stagefright/MediaSync.h
index d1d634d..1eef211 100644
--- a/include/media/stagefright/MediaSync.h
+++ b/include/media/stagefright/MediaSync.h
@@ -169,7 +169,7 @@
class OutputListener : public BnProducerListener,
public IBinder::DeathRecipient {
public:
- OutputListener(const sp<MediaSync> &sync);
+ OutputListener(const sp<MediaSync> &sync, const sp<IGraphicBufferProducer> &output);
virtual ~OutputListener();
// From IProducerListener
@@ -180,6 +180,7 @@
private:
sp<MediaSync> mSync;
+ sp<IGraphicBufferProducer> mOutput;
};
// mIsAbandoned is set to true when the input or output dies.
@@ -192,6 +193,7 @@
size_t mNumOutstandingBuffers;
sp<IGraphicBufferConsumer> mInput;
sp<IGraphicBufferProducer> mOutput;
+ int mUsageFlagsFromOutput;
sp<AudioTrack> mAudioTrack;
uint32_t mNativeSampleRateInHz;
@@ -207,6 +209,12 @@
// and that could cause problem if the producer of |mInput| only
// supports pre-registered buffers.
KeyedVector<uint64_t, sp<GraphicBuffer> > mBuffersFromInput;
+
+ // Keep track of buffers sent to |mOutput|. When a new output surface comes
+ // in, those buffers will be returned to input and old output surface will
+ // be disconnected immediately.
+ KeyedVector<uint64_t, sp<GraphicBuffer> > mBuffersSentToOutput;
+
sp<ALooper> mLooper;
float mPlaybackRate;
@@ -241,7 +249,7 @@
// It gets called from an OutputListener.
// During this callback, we detach the buffer from the output, and release
// it to the input. A blocked onFrameAvailable call will be allowed to proceed.
- void onBufferReleasedByOutput();
+ void onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output);
// Return |buffer| back to the input.
void returnBufferToInput_l(const sp<GraphicBuffer> &buffer, const sp<Fence> &fence);
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index 84b1b1a..7fabcb3 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -298,7 +298,6 @@
status_t queueBufferToNativeWindow(BufferInfo *info);
status_t cancelBufferToNativeWindow(BufferInfo *info);
BufferInfo* dequeueBufferFromNativeWindow();
- status_t pushBlankBuffersToNativeWindow();
status_t freeBuffersOnPort(
OMX_U32 portIndex, bool onlyThoseWeOwn = false);
@@ -347,7 +346,6 @@
status_t configureCodec(const sp<MetaData> &meta);
- status_t applyRotation();
status_t waitForBufferFilled_l();
int64_t getDecodingTimeUs();
diff --git a/include/media/stagefright/SurfaceUtils.h b/include/media/stagefright/SurfaceUtils.h
new file mode 100644
index 0000000..c1a9c0a
--- /dev/null
+++ b/include/media/stagefright/SurfaceUtils.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SURFACE_UTILS_H_
+
+#define SURFACE_UTILS_H_
+
+#include <utils/Errors.h>
+
+struct ANativeWindow;
+
+namespace android {
+
+status_t setNativeWindowSizeFormatAndUsage(
+ ANativeWindow *nativeWindow /* nonnull */,
+ int width, int height, int format, int rotation, int usage);
+status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */);
+
+} // namespace android
+
+#endif // SURFACE_UTILS_H_
diff --git a/include/media/stagefright/foundation/ADebug.h b/include/media/stagefright/foundation/ADebug.h
index 1d0e2cb..24df85a 100644
--- a/include/media/stagefright/foundation/ADebug.h
+++ b/include/media/stagefright/foundation/ADebug.h
@@ -24,6 +24,31 @@
#include <media/stagefright/foundation/AString.h>
#include <utils/Log.h>
+inline static const char *asString(android::status_t i, const char *def = "??") {
+ using namespace android;
+ switch (i) {
+ case NO_ERROR: return "NO_ERROR";
+ case UNKNOWN_ERROR: return "UNKNOWN_ERROR";
+ case NO_MEMORY: return "NO_MEMORY";
+ case INVALID_OPERATION: return "INVALID_OPERATION";
+ case BAD_VALUE: return "BAD_VALUE";
+ case BAD_TYPE: return "BAD_TYPE";
+ case NAME_NOT_FOUND: return "NAME_NOT_FOUND";
+ case PERMISSION_DENIED: return "PERMISSION_DENIED";
+ case NO_INIT: return "NO_INIT";
+ case ALREADY_EXISTS: return "ALREADY_EXISTS";
+ case DEAD_OBJECT: return "DEAD_OBJECT";
+ case FAILED_TRANSACTION: return "FAILED_TRANSACTION";
+ case BAD_INDEX: return "BAD_INDEX";
+ case NOT_ENOUGH_DATA: return "NOT_ENOUGH_DATA";
+ case WOULD_BLOCK: return "WOULD_BLOCK";
+ case TIMED_OUT: return "TIMED_OUT";
+ case UNKNOWN_TRANSACTION: return "UNKNOWN_TRANSACTION";
+ case FDS_NOT_ALLOWED: return "FDS_NOT_ALLOWED";
+ default: return def;
+ }
+}
+
namespace android {
#define LITERAL_TO_STRING_INTERNAL(x) #x
@@ -108,6 +133,26 @@
// remove redundant segments of a codec name, and return a newly allocated
// string suitable for debugging
static char *GetDebugName(const char *name);
+
+ inline static bool isExperimentEnabled(
+ const char *name __unused /* nonnull */, bool allow __unused = true) {
+#ifdef ENABLE_STAGEFRIGHT_EXPERIMENTS
+ if (!strcmp(name, "legacy-adaptive")) {
+ return getExperimentFlag(allow, name, 2, 1); // every other day
+ } else if (!strcmp(name, "legacy-setsurface")) {
+ return getExperimentFlag(allow, name, 3, 1); // every third day
+ } else {
+ ALOGE("unknown experiment '%s' (disabled)", name);
+ }
+#endif
+ return false;
+ }
+
+private:
+ // pass in allow, so we can print in the log if the experiment is disabled
+ static bool getExperimentFlag(
+ bool allow, const char *name, uint64_t modulo, uint64_t limit,
+ uint64_t plus = 0, uint64_t timeDivisor = 24 * 60 * 60 /* 1 day */);
};
} // namespace android
diff --git a/include/media/stagefright/foundation/AUtils.h b/include/media/stagefright/foundation/AUtils.h
index d7ecf50..47444c1 100644
--- a/include/media/stagefright/foundation/AUtils.h
+++ b/include/media/stagefright/foundation/AUtils.h
@@ -61,6 +61,28 @@
return a > b ? a : b;
}
+template<class T>
+void ENSURE_UNSIGNED_TYPE() {
+ T TYPE_MUST_BE_UNSIGNED[(T)-1 < 0 ? -1 : 0] __unused;
+}
+
+// needle is in range [hayStart, hayStart + haySize)
+template<class T, class U>
+inline static bool isInRange(const T &hayStart, const U &haySize, const T &needle) {
+ ENSURE_UNSIGNED_TYPE<U>();
+ return (T)(hayStart + haySize) >= hayStart && needle >= hayStart && (U)(needle - hayStart) < haySize;
+}
+
+// [needleStart, needleStart + needleSize) is in range [hayStart, hayStart + haySize)
+template<class T, class U>
+inline static bool isInRange(
+ const T &hayStart, const U &haySize, const T &needleStart, const U &needleSize) {
+ ENSURE_UNSIGNED_TYPE<U>();
+ return isInRange(hayStart, haySize, needleStart)
+ && (T)(needleStart + needleSize) >= needleStart
+ && (U)(needleStart + needleSize - hayStart) <= haySize;
+}
+
/* T must be integer type, period must be positive */
template<class T>
inline static T periodicError(const T &val, const T &period) {
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 6aeb919..d904ab6 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -27,6 +27,7 @@
#include <cutils/log.h>
#include "EffectBundle.h"
+#include "math.h"
// effect_handle_t interface implementation for bass boost
@@ -830,32 +831,69 @@
int gainCorrection = 0;
//Count the energy contribution per band for EQ and BassBoost only if they are active.
float energyContribution = 0;
+ float energyCross = 0;
+ float energyBassBoost = 0;
+ float crossCorrection = 0;
//EQ contribution
if (pContext->pBundledContext->bEqualizerEnabled == LVM_TRUE) {
for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
- float bandEnergy = (pContext->pBundledContext->bandGaindB[i] *
- LimitLevel_bandEnergyContribution[i])/15.0;
+ float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0;
+ float bandCoefficient = LimitLevel_bandEnergyCoefficient[i];
+ float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
if (bandEnergy > 0)
energyContribution += bandEnergy;
}
+
+ //cross EQ coefficients
+ float bandFactorSum = 0;
+ for (int i = 0; i < FIVEBAND_NUMBANDS-1; i++) {
+ float bandFactor1 = pContext->pBundledContext->bandGaindB[i]/15.0;
+ float bandFactor2 = pContext->pBundledContext->bandGaindB[i+1]/15.0;
+
+ if (bandFactor1 > 0 && bandFactor2 > 0) {
+ float crossEnergy = bandFactor1 * bandFactor2 *
+ LimitLevel_bandEnergyCrossCoefficient[i];
+ bandFactorSum += bandFactor1 * bandFactor2;
+
+ if (crossEnergy > 0)
+ energyCross += crossEnergy;
+ }
+ }
+ bandFactorSum -= 1.0;
+ if (bandFactorSum > 0)
+ crossCorrection = bandFactorSum * 0.7;
}
//BassBoost contribution
if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
- float bandEnergy = (pContext->pBundledContext->BassStrengthSaved *
- LimitLevel_bassBoostEnergyContribution)/1000.0;
- if (bandEnergy > 0)
- energyContribution += bandEnergy;
+ float boostFactor = (pContext->pBundledContext->BassStrengthSaved)/1000.0;
+ float boostCoefficient = LimitLevel_bassBoostEnergyCoefficient;
+
+ energyContribution += boostFactor * boostCoefficient * boostCoefficient;
+
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+ float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0;
+ float bandCrossCoefficient = LimitLevel_bassBoostEnergyCrossCoefficient[i];
+ float bandEnergy = boostFactor * bandFactor *
+ bandCrossCoefficient;
+ if (bandEnergy > 0)
+ energyBassBoost += bandEnergy;
+ }
}
//Virtualizer contribution
if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
- energyContribution += LimitLevel_virtualizerContribution;
- }
+ energyContribution += LimitLevel_virtualizerContribution *
+ LimitLevel_virtualizerContribution;
+ }
+
+ double totalEnergyEstimation = sqrt(energyContribution + energyCross + energyBassBoost) -
+ crossCorrection;
+ ALOGV(" TOTAL energy estimation: %0.2f", totalEnergyEstimation);
//roundoff
- int maxLevelRound = (int)(energyContribution + 0.99);
+ int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
if (maxLevelRound + pContext->pBundledContext->volume > 0) {
gainCorrection = maxLevelRound + pContext->pBundledContext->volume;
}
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
index b3071f4..9459b87 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
@@ -200,10 +200,16 @@
* updated.
*/
-static const float LimitLevel_bandEnergyContribution[FIVEBAND_NUMBANDS] = {
- 5.0, 6.5, 6.45, 4.8, 1.7 };
+static const float LimitLevel_bandEnergyCoefficient[FIVEBAND_NUMBANDS] = {
+ 7.56, 9.69, 9.59, 7.37, 2.88};
-static const float LimitLevel_bassBoostEnergyContribution = 6.7;
+static const float LimitLevel_bandEnergyCrossCoefficient[FIVEBAND_NUMBANDS-1] = {
+ 126.0, 115.0, 125.0, 104.0 };
+
+static const float LimitLevel_bassBoostEnergyCrossCoefficient[FIVEBAND_NUMBANDS] = {
+ 221.21, 208.10, 28.16, 0.0, 0.0 };
+
+static const float LimitLevel_bassBoostEnergyCoefficient = 7.12;
static const float LimitLevel_virtualizerContribution = 1.9;
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 0c18828..a3c3d3c 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -7,7 +7,7 @@
LOCAL_MODULE:= libmedia_helper
LOCAL_MODULE_TAGS := optional
-LOCAL_C_FLAGS += -Werror -Wall
+LOCAL_C_FLAGS += -Werror -Wno-error=deprecated-declarations -Wall
LOCAL_CLANG := true
include $(BUILD_STATIC_LIBRARY)
@@ -87,7 +87,7 @@
$(call include-path-for, audio-effects) \
$(call include-path-for, audio-utils)
-LOCAL_CFLAGS += -Werror -Wall
+LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall
LOCAL_CLANG := true
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp
index bbeb854..ff82544 100644
--- a/media/libmedia/AudioEffect.cpp
+++ b/media/libmedia/AudioEffect.cpp
@@ -134,12 +134,14 @@
if (iEffect == 0 || (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS)) {
ALOGE("set(): AudioFlinger could not create effect, status: %d", mStatus);
+ if (iEffect == 0) {
+ mStatus = NO_INIT;
+ }
return mStatus;
}
mEnabled = (volatile int32_t)enabled;
- mIEffect = iEffect;
cblk = iEffect->getCblk();
if (cblk == 0) {
mStatus = NO_INIT;
@@ -147,6 +149,7 @@
return mStatus;
}
+ mIEffect = iEffect;
mCblkMemory = cblk;
mCblk = static_cast<effect_param_cblk_t*>(cblk->pointer());
int bufOffset = ((sizeof(effect_param_cblk_t) - 1) / sizeof(int) + 1) * sizeof(int);
@@ -177,11 +180,11 @@
mIEffect->disconnect();
IInterface::asBinder(mIEffect)->unlinkToDeath(mIEffectClient);
}
+ mIEffect.clear();
+ mCblkMemory.clear();
+ mIEffectClient.clear();
IPCThreadState::self()->flushCommands();
}
- mIEffect.clear();
- mIEffectClient.clear();
- mCblkMemory.clear();
}
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index a4d6c7d..3868f13 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -114,6 +114,10 @@
mAudioRecordThread->requestExitAndWait();
mAudioRecordThread.clear();
}
+ // No lock here: worst case we remove a NULL callback which will be a nop
+ if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
+ AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mInput);
+ }
IInterface::asBinder(mAudioRecord)->unlinkToDeath(mDeathNotifier, this);
mAudioRecord.clear();
mCblkMemory.clear();
@@ -304,6 +308,8 @@
mNewPosition = mProxy->getPosition() + mUpdatePeriod;
int32_t flags = android_atomic_acquire_load(&mCblk->mFlags);
+ mActive = true;
+
status_t status = NO_ERROR;
if (!(flags & CBLK_INVALID)) {
status = mAudioRecord->start(event, triggerSession);
@@ -316,9 +322,9 @@
}
if (status != NO_ERROR) {
+ mActive = false;
ALOGE("start() status %d", status);
} else {
- mActive = true;
sp<AudioRecordThread> t = mAudioRecordThread;
if (t != 0) {
t->resume();
@@ -443,6 +449,11 @@
AutoMutex lock(mLock);
if (mSelectedDeviceId != deviceId) {
mSelectedDeviceId = deviceId;
+ // stop capture so that audio policy manager does not reject the new instance start request
+ // as only one capture can be active at a time.
+ if (mAudioRecord != 0 && mActive) {
+ mAudioRecord->stop();
+ }
android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
}
return NO_ERROR;
@@ -453,6 +464,14 @@
return mSelectedDeviceId;
}
+audio_port_handle_t AudioRecord::getRoutedDeviceId() {
+ AutoMutex lock(mLock);
+ if (mInput == AUDIO_IO_HANDLE_NONE) {
+ return AUDIO_PORT_HANDLE_NONE;
+ }
+ return AudioSystem::getDeviceIdForIo(mInput);
+}
+
// -------------------------------------------------------------------------
// must be called with mLock held
@@ -496,6 +515,10 @@
}
}
+ if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
+ AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mInput);
+ }
+
audio_io_handle_t input;
status_t status = AudioSystem::getInputForAttr(&mAttributes, &input,
(audio_session_t)mSessionId,
@@ -628,6 +651,10 @@
mDeathNotifier = new DeathNotifier(this);
IInterface::asBinder(mAudioRecord)->linkToDeath(mDeathNotifier, this);
+ if (mDeviceCallback != 0) {
+ AudioSystem::addAudioDeviceCallback(mDeviceCallback, mInput);
+ }
+
return NO_ERROR;
}
@@ -1073,6 +1100,48 @@
return result;
}
+status_t AudioRecord::addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback)
+{
+ if (callback == 0) {
+ ALOGW("%s adding NULL callback!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ AutoMutex lock(mLock);
+ if (mDeviceCallback == callback) {
+ ALOGW("%s adding same callback!", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+ status_t status = NO_ERROR;
+ if (mInput != AUDIO_IO_HANDLE_NONE) {
+ if (mDeviceCallback != 0) {
+ ALOGW("%s callback already present!", __FUNCTION__);
+ AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mInput);
+ }
+ status = AudioSystem::addAudioDeviceCallback(callback, mInput);
+ }
+ mDeviceCallback = callback;
+ return status;
+}
+
+status_t AudioRecord::removeAudioDeviceCallback(
+ const sp<AudioSystem::AudioDeviceCallback>& callback)
+{
+ if (callback == 0) {
+ ALOGW("%s removing NULL callback!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ AutoMutex lock(mLock);
+ if (mDeviceCallback != callback) {
+ ALOGW("%s removing different callback!", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+ if (mInput != AUDIO_IO_HANDLE_NONE) {
+ AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mInput);
+ }
+ mDeviceCallback = 0;
+ return NO_ERROR;
+}
+
// =========================================================================
void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 85ed2b1..6c2c226 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -76,6 +76,25 @@
return af;
}
+const sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient()
+{
+ // calling get_audio_flinger() will initialize gAudioFlingerClient if needed
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return 0;
+ Mutex::Autolock _l(gLock);
+ return gAudioFlingerClient;
+}
+
+sp<AudioIoDescriptor> AudioSystem::getIoDescriptor(audio_io_handle_t ioHandle)
+{
+ sp<AudioIoDescriptor> desc;
+ const sp<AudioFlingerClient> afc = getAudioFlingerClient();
+ if (afc != 0) {
+ desc = afc->getIoDescriptor(ioHandle);
+ }
+ return desc;
+}
+
/* static */ status_t AudioSystem::checkAudioFlinger()
{
if (defaultServiceManager()->checkService(String16("media.audio_flinger")) != 0) {
@@ -249,9 +268,7 @@
{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
-
- LOG_ALWAYS_FATAL_IF(gAudioFlingerClient == 0);
- sp<AudioIoDescriptor> outputDesc = gAudioFlingerClient->getIoDescriptor(output);
+ sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output);
if (outputDesc == 0) {
ALOGV("getOutputSamplingRate() no output descriptor for output %d in gOutputs", output);
*samplingRate = af->sampleRate(output);
@@ -290,9 +307,7 @@
{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
-
- LOG_ALWAYS_FATAL_IF(gAudioFlingerClient == 0);
- sp<AudioIoDescriptor> outputDesc = gAudioFlingerClient->getIoDescriptor(output);
+ sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output);
if (outputDesc == 0) {
*frameCount = af->frameCount(output);
} else {
@@ -329,9 +344,7 @@
{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
-
- LOG_ALWAYS_FATAL_IF(gAudioFlingerClient == 0);
- sp<AudioIoDescriptor> outputDesc = gAudioFlingerClient->getIoDescriptor(output);
+ sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output);
if (outputDesc == 0) {
*latency = af->latency(output);
} else {
@@ -346,10 +359,11 @@
status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, audio_format_t format,
audio_channel_mask_t channelMask, size_t* buffSize)
{
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
- LOG_ALWAYS_FATAL_IF(gAudioFlingerClient == 0);
- return gAudioFlingerClient->getInputBufferSize(sampleRate, format, channelMask, buffSize);
+ const sp<AudioFlingerClient> afc = getAudioFlingerClient();
+ if (afc == 0) {
+ return NO_INIT;
+ }
+ return afc->getInputBufferSize(sampleRate, format, channelMask, buffSize);
}
status_t AudioSystem::setVoiceVolume(float value)
@@ -409,6 +423,13 @@
return af->getAudioHwSyncForSession(sessionId);
}
+status_t AudioSystem::systemReady()
+{
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return NO_INIT;
+ return af->systemReady();
+}
+
// ---------------------------------------------------------------------------
@@ -446,47 +467,77 @@
if (ioDesc == 0 || ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return;
- Mutex::Autolock _l(mLock);
+ audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
+ Vector < sp<AudioDeviceCallback> > callbacks;
- switch (event) {
- case AUDIO_OUTPUT_OPENED:
- case AUDIO_INPUT_OPENED: {
- if (getIoDescriptor(ioDesc->mIoHandle) != 0) {
- ALOGV("ioConfigChanged() opening already existing output! %d", ioDesc->mIoHandle);
- break;
- }
- mIoDescriptors.add(ioDesc->mIoHandle, ioDesc);
- ALOGV("ioConfigChanged() new %s opened %d samplingRate %u, format %#x channel mask %#x "
- "frameCount %zu", event == AUDIO_OUTPUT_OPENED ? "output" : "input",
- ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat, ioDesc->mChannelMask,
- ioDesc->mFrameCount);
- } break;
- case AUDIO_OUTPUT_CLOSED:
- case AUDIO_INPUT_CLOSED: {
- if (getIoDescriptor(ioDesc->mIoHandle) == 0) {
- ALOGW("ioConfigChanged() closing unknown %s %d",
+ {
+ Mutex::Autolock _l(mLock);
+
+ switch (event) {
+ case AUDIO_OUTPUT_OPENED:
+ case AUDIO_INPUT_OPENED: {
+ if (getIoDescriptor(ioDesc->mIoHandle) != 0) {
+ ALOGV("ioConfigChanged() opening already existing output! %d", ioDesc->mIoHandle);
+ break;
+ }
+ mIoDescriptors.add(ioDesc->mIoHandle, ioDesc);
+
+ if (ioDesc->getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
+ deviceId = ioDesc->getDeviceId();
+ ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(ioDesc->mIoHandle);
+ if (ioIndex >= 0) {
+ callbacks = mAudioDeviceCallbacks.valueAt(ioIndex);
+ }
+ }
+ ALOGV("ioConfigChanged() new %s opened %d samplingRate %u, format %#x channel mask %#x "
+ "frameCount %zu deviceId %d", event == AUDIO_OUTPUT_OPENED ? "output" : "input",
+ ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat, ioDesc->mChannelMask,
+ ioDesc->mFrameCount, ioDesc->getDeviceId());
+ } break;
+ case AUDIO_OUTPUT_CLOSED:
+ case AUDIO_INPUT_CLOSED: {
+ if (getIoDescriptor(ioDesc->mIoHandle) == 0) {
+ ALOGW("ioConfigChanged() closing unknown %s %d",
+ event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle);
+ break;
+ }
+ ALOGV("ioConfigChanged() %s %d closed",
event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle);
- break;
- }
- ALOGV("ioConfigChanged() %s %d closed", event == AUDIO_OUTPUT_CLOSED ? "output" : "input",
- ioDesc->mIoHandle);
- mIoDescriptors.removeItem(ioDesc->mIoHandle);
+ mIoDescriptors.removeItem(ioDesc->mIoHandle);
+ mAudioDeviceCallbacks.removeItem(ioDesc->mIoHandle);
+ } break;
+
+ case AUDIO_OUTPUT_CONFIG_CHANGED:
+ case AUDIO_INPUT_CONFIG_CHANGED: {
+ sp<AudioIoDescriptor> oldDesc = getIoDescriptor(ioDesc->mIoHandle);
+ if (oldDesc == 0) {
+ ALOGW("ioConfigChanged() modifying unknown output! %d", ioDesc->mIoHandle);
+ break;
+ }
+
+ deviceId = oldDesc->getDeviceId();
+ mIoDescriptors.replaceValueFor(ioDesc->mIoHandle, ioDesc);
+
+ if (deviceId != ioDesc->getDeviceId()) {
+ deviceId = ioDesc->getDeviceId();
+ ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(ioDesc->mIoHandle);
+ if (ioIndex >= 0) {
+ callbacks = mAudioDeviceCallbacks.valueAt(ioIndex);
+ }
+ }
+ ALOGV("ioConfigChanged() new config for %s %d samplingRate %u, format %#x "
+ "channel mask %#x frameCount %zu deviceId %d",
+ event == AUDIO_OUTPUT_CONFIG_CHANGED ? "output" : "input",
+ ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat,
+ ioDesc->mChannelMask, ioDesc->mFrameCount, ioDesc->getDeviceId());
+
} break;
-
- case AUDIO_OUTPUT_CONFIG_CHANGED:
- case AUDIO_INPUT_CONFIG_CHANGED: {
- if (getIoDescriptor(ioDesc->mIoHandle) == 0) {
- ALOGW("ioConfigChanged() modifying unknown output! %d", ioDesc->mIoHandle);
- break;
}
- mIoDescriptors.replaceValueFor(ioDesc->mIoHandle, ioDesc);
- ALOGV("ioConfigChanged() new config for %s %d samplingRate %u, format %#x "
- "channel mask %#x frameCount %zu",
- event == AUDIO_OUTPUT_CONFIG_CHANGED ? "output" : "input",
- ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat,
- ioDesc->mChannelMask, ioDesc->mFrameCount);
- } break;
+ }
+ // callbacks.size() != 0 => ioDesc->mIoHandle and deviceId are valid
+ for (size_t i = 0; i < callbacks.size(); i++) {
+ callbacks[i]->onAudioDeviceUpdate(ioDesc->mIoHandle, deviceId);
}
}
@@ -532,7 +583,56 @@
return desc;
}
-/*static*/ void AudioSystem::setErrorCallback(audio_error_callback cb)
+status_t AudioSystem::AudioFlingerClient::addAudioDeviceCallback(
+ const sp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)
+{
+ Mutex::Autolock _l(mLock);
+ Vector < sp<AudioDeviceCallback> > callbacks;
+ ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(audioIo);
+ if (ioIndex >= 0) {
+ callbacks = mAudioDeviceCallbacks.valueAt(ioIndex);
+ }
+
+ for (size_t cbIndex = 0; cbIndex < callbacks.size(); cbIndex++) {
+ if (callbacks[cbIndex] == callback) {
+ return INVALID_OPERATION;
+ }
+ }
+ callbacks.add(callback);
+
+ mAudioDeviceCallbacks.replaceValueFor(audioIo, callbacks);
+ return NO_ERROR;
+}
+
+status_t AudioSystem::AudioFlingerClient::removeAudioDeviceCallback(
+ const sp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)
+{
+ Mutex::Autolock _l(mLock);
+ ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(audioIo);
+ if (ioIndex < 0) {
+ return INVALID_OPERATION;
+ }
+ Vector < sp<AudioDeviceCallback> > callbacks = mAudioDeviceCallbacks.valueAt(ioIndex);
+
+ size_t cbIndex;
+ for (cbIndex = 0; cbIndex < callbacks.size(); cbIndex++) {
+ if (callbacks[cbIndex] == callback) {
+ break;
+ }
+ }
+ if (cbIndex == callbacks.size()) {
+ return INVALID_OPERATION;
+ }
+ callbacks.removeAt(cbIndex);
+ if (callbacks.size() != 0) {
+ mAudioDeviceCallbacks.replaceValueFor(audioIo, callbacks);
+ } else {
+ mAudioDeviceCallbacks.removeItem(audioIo);
+ }
+ return NO_ERROR;
+}
+
+/* static */ void AudioSystem::setErrorCallback(audio_error_callback cb)
{
Mutex::Autolock _l(gLock);
gAudioErrorCallback = cb;
@@ -864,11 +964,11 @@
{
// called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances
ALOGV("clearAudioConfigCache()");
- if (gAudioFlingerClient != 0) {
- gAudioFlingerClient->clearIoCache();
- }
{
Mutex::Autolock _l(gLock);
+ if (gAudioFlingerClient != 0) {
+ gAudioFlingerClient->clearIoCache();
+ }
gAudioFlinger.clear();
}
{
@@ -934,7 +1034,7 @@
return aps->setAudioPortConfig(config);
}
-status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callBack)
+status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callback)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
@@ -943,11 +1043,11 @@
if (gAudioPolicyServiceClient == 0) {
return NO_INIT;
}
- return gAudioPolicyServiceClient->addAudioPortCallback(callBack);
+ return gAudioPolicyServiceClient->addAudioPortCallback(callback);
}
/*static*/
-status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callBack)
+status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callback)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
@@ -956,7 +1056,38 @@
if (gAudioPolicyServiceClient == 0) {
return NO_INIT;
}
- return gAudioPolicyServiceClient->removeAudioPortCallback(callBack);
+ return gAudioPolicyServiceClient->removeAudioPortCallback(callback);
+}
+
+status_t AudioSystem::addAudioDeviceCallback(
+ const sp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)
+{
+ const sp<AudioFlingerClient> afc = getAudioFlingerClient();
+ if (afc == 0) {
+ return NO_INIT;
+ }
+ return afc->addAudioDeviceCallback(callback, audioIo);
+}
+
+status_t AudioSystem::removeAudioDeviceCallback(
+ const sp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)
+{
+ const sp<AudioFlingerClient> afc = getAudioFlingerClient();
+ if (afc == 0) {
+ return NO_INIT;
+ }
+ return afc->removeAudioDeviceCallback(callback, audioIo);
+}
+
+audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo)
+{
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
+ const sp<AudioIoDescriptor> desc = getIoDescriptor(audioIo);
+ if (desc == 0) {
+ return AUDIO_PORT_HANDLE_NONE;
+ }
+ return desc->getDeviceId();
}
status_t AudioSystem::acquireSoundTriggerSession(audio_session_t *session,
@@ -1008,25 +1139,25 @@
// ---------------------------------------------------------------------------
status_t AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
- const sp<AudioPortCallback>& callBack)
+ const sp<AudioPortCallback>& callback)
{
Mutex::Autolock _l(mLock);
for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
- if (mAudioPortCallbacks[i] == callBack) {
+ if (mAudioPortCallbacks[i] == callback) {
return INVALID_OPERATION;
}
}
- mAudioPortCallbacks.add(callBack);
+ mAudioPortCallbacks.add(callback);
return NO_ERROR;
}
status_t AudioSystem::AudioPolicyServiceClient::removeAudioPortCallback(
- const sp<AudioPortCallback>& callBack)
+ const sp<AudioPortCallback>& callback)
{
Mutex::Autolock _l(mLock);
size_t i;
for (i = 0; i < mAudioPortCallbacks.size(); i++) {
- if (mAudioPortCallbacks[i] == callBack) {
+ if (mAudioPortCallbacks[i] == callback) {
break;
}
}
@@ -1037,6 +1168,7 @@
return NO_ERROR;
}
+
void AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate()
{
Mutex::Autolock _l(mLock);
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index bb47d3e..81ae6d7 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -38,11 +38,23 @@
namespace android {
// ---------------------------------------------------------------------------
+// TODO: Move to a separate .h
+
template <typename T>
-const T &min(const T &x, const T &y) {
+static inline const T &min(const T &x, const T &y) {
return x < y ? x : y;
}
+template <typename T>
+static inline const T &max(const T &x, const T &y) {
+ return x > y ? x : y;
+}
+
+static inline nsecs_t framesToNanoseconds(ssize_t frames, uint32_t sampleRate, float speed)
+{
+ return ((double)frames * 1000000000) / ((double)sampleRate * speed);
+}
+
static int64_t convertTimespecToUs(const struct timespec &tv)
{
return tv.tv_sec * 1000000ll + tv.tv_nsec / 1000;
@@ -178,7 +190,8 @@
const audio_offload_info_t *offloadInfo,
int uid,
pid_t pid,
- const audio_attributes_t* pAttributes)
+ const audio_attributes_t* pAttributes,
+ bool doNotReconnect)
: mStatus(NO_INIT),
mIsTimed(false),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -189,7 +202,7 @@
mStatus = set(streamType, sampleRate, format, channelMask,
frameCount, flags, cbf, user, notificationFrames,
0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType,
- offloadInfo, uid, pid, pAttributes);
+ offloadInfo, uid, pid, pAttributes, doNotReconnect);
}
AudioTrack::AudioTrack(
@@ -207,7 +220,8 @@
const audio_offload_info_t *offloadInfo,
int uid,
pid_t pid,
- const audio_attributes_t* pAttributes)
+ const audio_attributes_t* pAttributes,
+ bool doNotReconnect)
: mStatus(NO_INIT),
mIsTimed(false),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -218,7 +232,7 @@
mStatus = set(streamType, sampleRate, format, channelMask,
0 /*frameCount*/, flags, cbf, user, notificationFrames,
sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
- uid, pid, pAttributes);
+ uid, pid, pAttributes, doNotReconnect);
}
AudioTrack::~AudioTrack()
@@ -234,6 +248,10 @@
mAudioTrackThread->requestExitAndWait();
mAudioTrackThread.clear();
}
+ // No lock here: worst case we remove a NULL callback which will be a nop
+ if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
+ AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mOutput);
+ }
IInterface::asBinder(mAudioTrack)->unlinkToDeath(mDeathNotifier, this);
mAudioTrack.clear();
mCblkMemory.clear();
@@ -262,7 +280,8 @@
const audio_offload_info_t *offloadInfo,
int uid,
pid_t pid,
- const audio_attributes_t* pAttributes)
+ const audio_attributes_t* pAttributes,
+ bool doNotReconnect)
{
ALOGV("set(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
"flags #%x, notificationFrames %u, sessionId %d, transferType %d, uid %d, pid %d",
@@ -304,6 +323,7 @@
}
mSharedBuffer = sharedBuffer;
mTransfer = transferType;
+ mDoNotReconnect = doNotReconnect;
ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(),
sharedBuffer->size());
@@ -472,6 +492,8 @@
mObservedSequence = mSequence;
mInUnderrun = false;
mPreviousTimestampValid = false;
+ mTimestampStartupGlitchReported = false;
+ mRetrogradeMotionReported = false;
return NO_ERROR;
}
@@ -499,6 +521,8 @@
// reset current position as seen by client to 0
mPosition = 0;
mPreviousTimestampValid = false;
+ mTimestampStartupGlitchReported = false;
+ mRetrogradeMotionReported = false;
// For offloaded tracks, we don't know if the hardware counters are really zero here,
// since the flush is asynchronous and stop may not fully drain.
@@ -1042,6 +1066,14 @@
return mSelectedDeviceId;
}
+audio_port_handle_t AudioTrack::getRoutedDeviceId() {
+ AutoMutex lock(mLock);
+ if (mOutput == AUDIO_IO_HANDLE_NONE) {
+ return AUDIO_PORT_HANDLE_NONE;
+ }
+ return AudioSystem::getDeviceIdForIo(mOutput);
+}
+
status_t AudioTrack::attachAuxEffect(int effectId)
{
AutoMutex lock(mLock);
@@ -1071,6 +1103,9 @@
return NO_INIT;
}
+ if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
+ AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mOutput);
+ }
audio_io_handle_t output;
audio_stream_type_t streamType = mStreamType;
audio_attributes_t *attr = (mStreamType == AUDIO_STREAM_DEFAULT) ? &mAttributes : NULL;
@@ -1092,31 +1127,27 @@
// we must release it ourselves if anything goes wrong.
// Not all of these values are needed under all conditions, but it is easier to get them all
-
- uint32_t afLatency;
- status = AudioSystem::getLatency(output, &afLatency);
+ status = AudioSystem::getLatency(output, &mAfLatency);
if (status != NO_ERROR) {
ALOGE("getLatency(%d) failed status %d", output, status);
goto release;
}
- ALOGV("createTrack_l() output %d afLatency %u", output, afLatency);
+ ALOGV("createTrack_l() output %d afLatency %u", output, mAfLatency);
- size_t afFrameCount;
- status = AudioSystem::getFrameCount(output, &afFrameCount);
+ status = AudioSystem::getFrameCount(output, &mAfFrameCount);
if (status != NO_ERROR) {
ALOGE("getFrameCount(output=%d) status %d", output, status);
goto release;
}
- uint32_t afSampleRate;
- status = AudioSystem::getSamplingRate(output, &afSampleRate);
+ status = AudioSystem::getSamplingRate(output, &mAfSampleRate);
if (status != NO_ERROR) {
ALOGE("getSamplingRate(output=%d) status %d", output, status);
goto release;
}
if (mSampleRate == 0) {
- mSampleRate = afSampleRate;
- mOriginalSampleRate = afSampleRate;
+ mSampleRate = mAfSampleRate;
+ mOriginalSampleRate = mAfSampleRate;
}
// Client decides whether the track is TIMED (see below), but can only express a preference
// for FAST. Server will perform additional tests.
@@ -1129,9 +1160,9 @@
// use case 3: obtain/release mode
(mTransfer == TRANSFER_OBTAIN)) &&
// matching sample rate
- (mSampleRate == afSampleRate))) {
+ (mSampleRate == mAfSampleRate))) {
ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client; transfer %d, track %u Hz, output %u Hz",
- mTransfer, mSampleRate, afSampleRate);
+ mTransfer, mSampleRate, mAfSampleRate);
// once denied, do not request again if IAudioTrack is re-created
mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
}
@@ -1152,7 +1183,7 @@
// Same comment as below about ignoring frameCount parameter for set()
frameCount = mSharedBuffer->size();
} else if (frameCount == 0) {
- frameCount = afFrameCount;
+ frameCount = mAfFrameCount;
}
if (mNotificationFramesAct != frameCount) {
mNotificationFramesAct = frameCount;
@@ -1188,7 +1219,7 @@
if ((mFlags & AUDIO_OUTPUT_FLAG_FAST) == 0) {
// for normal tracks precompute the frame count based on speed.
const size_t minFrameCount = calculateMinFrameCount(
- afLatency, afFrameCount, afSampleRate, mSampleRate,
+ mAfLatency, mAfFrameCount, mAfSampleRate, mSampleRate,
mPlaybackRate.mSpeed);
if (frameCount < minFrameCount) {
frameCount = minFrameCount;
@@ -1338,7 +1369,7 @@
mAudioTrack->attachAuxEffect(mAuxEffectId);
// FIXME doesn't take into account speed or future sample rate changes (until restoreTrack)
// FIXME don't believe this lie
- mLatency = afLatency + (1000*frameCount) / mSampleRate;
+ mLatency = mAfLatency + (1000*frameCount) / mSampleRate;
mFrameCount = frameCount;
// If IAudioTrack is re-created, don't let the requested frameCount
@@ -1375,6 +1406,10 @@
mDeathNotifier = new DeathNotifier(this);
IInterface::asBinder(mAudioTrack)->linkToDeath(mDeathNotifier, this);
+ if (mDeviceCallback != 0) {
+ AudioSystem::addAudioDeviceCallback(mDeviceCallback, mOutput);
+ }
+
return NO_ERROR;
}
@@ -1740,7 +1775,7 @@
// Cache other fields that will be needed soon
uint32_t sampleRate = mSampleRate;
float speed = mPlaybackRate.mSpeed;
- uint32_t notificationFrames = mNotificationFramesAct;
+ const uint32_t notificationFrames = mNotificationFramesAct;
if (mRefreshRemaining) {
mRefreshRemaining = false;
mRemainingFrames = notificationFrames;
@@ -1778,7 +1813,14 @@
mLock.unlock();
+ // get anchor time to account for callbacks.
+ const nsecs_t timeBeforeCallbacks = systemTime();
+
if (waitStreamEnd) {
+ // FIXME: Instead of blocking in proxy->waitStreamEndDone(), Callback thread
+ // should wait on proxy futex and handle CBLK_STREAM_END_DONE within this function
+ // (and make sure we don't callback for more data while we're stopping).
+ // This helps with position, marker notifications, and track invalidation.
struct timespec timeout;
timeout.tv_sec = WAIT_STREAM_END_TIMEOUT_SEC;
timeout.tv_nsec = 0;
@@ -1863,12 +1905,17 @@
minFrames = kPoll * notificationFrames;
}
+ // This "fudge factor" avoids soaking CPU, and compensates for late progress by server
+ static const nsecs_t kWaitPeriodNs = WAIT_PERIOD_MS * 1000000LL;
+ const nsecs_t timeAfterCallbacks = systemTime();
+
// Convert frame units to time units
nsecs_t ns = NS_WHENEVER;
if (minFrames != (uint32_t) ~0) {
- // This "fudge factor" avoids soaking CPU, and compensates for late progress by server
- static const nsecs_t kFudgeNs = 10000000LL; // 10 ms
- ns = ((double)minFrames * 1000000000) / ((double)sampleRate * speed) + kFudgeNs;
+ ns = framesToNanoseconds(minFrames, sampleRate, speed) + kWaitPeriodNs;
+ ns -= (timeAfterCallbacks - timeBeforeCallbacks); // account for callback time
+ // TODO: Should we warn if the callback time is too long?
+ if (ns < 0) ns = 0;
}
// If not supplying data by EVENT_MORE_DATA, then we're done
@@ -1876,6 +1923,13 @@
return ns;
}
+ // EVENT_MORE_DATA callback handling.
+ // Timing for linear pcm audio data formats can be derived directly from the
+ // buffer fill level.
+ // Timing for compressed data is not directly available from the buffer fill level,
+ // rather indirectly from waiting for blocking mode callbacks or waiting for obtain()
+ // to return a certain fill level.
+
struct timespec timeout;
const struct timespec *requested = &ClientProxy::kForever;
if (ns != NS_WHENEVER) {
@@ -1906,12 +1960,15 @@
return NS_NEVER;
}
- if (mRetryOnPartialBuffer && !isOffloaded()) {
+ if (mRetryOnPartialBuffer && audio_is_linear_pcm(mFormat)) {
mRetryOnPartialBuffer = false;
if (avail < mRemainingFrames) {
- int64_t myns = ((double)(mRemainingFrames - avail) * 1100000000)
- / ((double)sampleRate * speed);
- if (ns < 0 || myns < ns) {
+ if (ns > 0) { // account for obtain time
+ const nsecs_t timeNow = systemTime();
+ ns = max((nsecs_t)0, ns - (timeNow - timeAfterCallbacks));
+ }
+ nsecs_t myns = framesToNanoseconds(mRemainingFrames - avail, sampleRate, speed);
+ if (ns < 0 /* NS_WHENEVER */ || myns < ns) {
ns = myns;
}
return ns;
@@ -1934,7 +1991,42 @@
// Keep this thread going to handle timed events and
// still try to get more data in intervals of WAIT_PERIOD_MS
// but don't just loop and block the CPU, so wait
- return WAIT_PERIOD_MS * 1000000LL;
+
+ // mCbf(EVENT_MORE_DATA, ...) might either
+ // (1) Block until it can fill the buffer, returning 0 size on EOS.
+ // (2) Block until it can fill the buffer, returning 0 data (silence) on EOS.
+ // (3) Return 0 size when no data is available, does not wait for more data.
+ //
+ // (1) and (2) occur with AudioPlayer/AwesomePlayer; (3) occurs with NuPlayer.
+ // We try to compute the wait time to avoid a tight sleep-wait cycle,
+ // especially for case (3).
+ //
+ // The decision to support (1) and (2) affects the sizing of mRemainingFrames
+ // and this loop; whereas for case (3) we could simply check once with the full
+ // buffer size and skip the loop entirely.
+
+ nsecs_t myns;
+ if (audio_is_linear_pcm(mFormat)) {
+ // time to wait based on buffer occupancy
+ const nsecs_t datans = mRemainingFrames <= avail ? 0 :
+ framesToNanoseconds(mRemainingFrames - avail, sampleRate, speed);
+ // audio flinger thread buffer size (TODO: adjust for fast tracks)
+ const nsecs_t afns = framesToNanoseconds(mAfFrameCount, mAfSampleRate, speed);
+ // add half the AudioFlinger buffer time to avoid soaking CPU if datans is 0.
+ myns = datans + (afns / 2);
+ } else {
+ // FIXME: This could ping quite a bit if the buffer isn't full.
+ // Note that when mState is stopping we waitStreamEnd, so it never gets here.
+ myns = kWaitPeriodNs;
+ }
+ if (ns > 0) { // account for obtain and callback time
+ const nsecs_t timeNow = systemTime();
+ ns = max((nsecs_t)0, ns - (timeNow - timeAfterCallbacks));
+ }
+ if (ns < 0 /* NS_WHENEVER */ || myns < ns) {
+ ns = myns;
+ }
+ return ns;
}
size_t releasedFrames = writtenSize / mFrameSize;
@@ -1987,7 +2079,7 @@
// output parameters and new IAudioFlinger in createTrack_l()
AudioSystem::clearAudioConfigCache();
- if (isOffloadedOrDirect_l()) {
+ if (isOffloadedOrDirect_l() || mDoNotReconnect) {
// FIXME re-creation of offloaded tracks is not yet implemented
return DEAD_OBJECT;
}
@@ -2066,30 +2158,8 @@
if (mStaticProxy != 0) {
return true; // static tracks do not have issues with buffer sizing.
}
- status_t status;
- uint32_t afLatency;
- status = AudioSystem::getLatency(mOutput, &afLatency);
- if (status != NO_ERROR) {
- ALOGE("getLatency(%d) failed status %d", mOutput, status);
- return false;
- }
-
- size_t afFrameCount;
- status = AudioSystem::getFrameCount(mOutput, &afFrameCount);
- if (status != NO_ERROR) {
- ALOGE("getFrameCount(output=%d) status %d", mOutput, status);
- return false;
- }
-
- uint32_t afSampleRate;
- status = AudioSystem::getSamplingRate(mOutput, &afSampleRate);
- if (status != NO_ERROR) {
- ALOGE("getSamplingRate(output=%d) status %d", mOutput, status);
- return false;
- }
-
const size_t minFrameCount =
- calculateMinFrameCount(afLatency, afFrameCount, afSampleRate, sampleRate, speed);
+ calculateMinFrameCount(mAfLatency, mAfFrameCount, mAfSampleRate, sampleRate, speed);
ALOGV("isSampleRateSpeedAllowed_l mFrameCount %zu minFrameCount %zu",
mFrameCount, minFrameCount);
return mFrameCount >= minFrameCount;
@@ -2152,7 +2222,12 @@
}
// Check whether a pending flush or stop has completed, as those commands may
- // be asynchronous or return near finish.
+ // be asynchronous or return near finish or exhibit glitchy behavior.
+ //
+ // Originally this showed up as the first timestamp being a continuation of
+ // the previous song under gapless playback.
+ // However, we sometimes see zero timestamps, then a glitch of
+ // the previous song's position, and then correct timestamps afterwards.
if (mStartUs != 0 && mSampleRate != 0) {
static const int kTimeJitterUs = 100000; // 100 ms
static const int k1SecUs = 1000000;
@@ -2170,16 +2245,29 @@
if (deltaPositionByUs > deltaTimeUs + kTimeJitterUs) {
// Verify that the counter can't count faster than the sample rate
- // since the start time. If greater, then that means we have failed
+ // since the start time. If greater, then that means we may have failed
// to completely flush or stop the previous playing track.
- ALOGW("incomplete flush or stop:"
+ ALOGW_IF(!mTimestampStartupGlitchReported,
+ "getTimestamp startup glitch detected"
" deltaTimeUs(%lld) deltaPositionUs(%lld) tsmPosition(%u)",
(long long)deltaTimeUs, (long long)deltaPositionByUs,
timestamp.mPosition);
+ mTimestampStartupGlitchReported = true;
+ if (previousTimestampValid
+ && mPreviousTimestamp.mPosition == 0 /* should be true if valid */) {
+ timestamp = mPreviousTimestamp;
+ mPreviousTimestampValid = true;
+ return NO_ERROR;
+ }
return WOULD_BLOCK;
}
+ if (deltaPositionByUs != 0) {
+ mStartUs = 0; // don't check again, we got valid nonzero position.
+ }
+ } else {
+ mStartUs = 0; // don't check again, start time expired.
}
- mStartUs = 0; // no need to check again, start timestamp has either expired or unneeded.
+ mTimestampStartupGlitchReported = false;
}
} else {
// Update the mapping between local consumed (mPosition) and server consumed (mServer)
@@ -2308,6 +2396,48 @@
return mProxy->getUnderrunFrames();
}
+status_t AudioTrack::addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback)
+{
+ if (callback == 0) {
+ ALOGW("%s adding NULL callback!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ AutoMutex lock(mLock);
+ if (mDeviceCallback == callback) {
+ ALOGW("%s adding same callback!", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+ status_t status = NO_ERROR;
+ if (mOutput != AUDIO_IO_HANDLE_NONE) {
+ if (mDeviceCallback != 0) {
+ ALOGW("%s callback already present!", __FUNCTION__);
+ AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mOutput);
+ }
+ status = AudioSystem::addAudioDeviceCallback(callback, mOutput);
+ }
+ mDeviceCallback = callback;
+ return status;
+}
+
+status_t AudioTrack::removeAudioDeviceCallback(
+ const sp<AudioSystem::AudioDeviceCallback>& callback)
+{
+ if (callback == 0) {
+ ALOGW("%s removing NULL callback!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ AutoMutex lock(mLock);
+ if (mDeviceCallback != callback) {
+ ALOGW("%s removing different callback!", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+ if (mOutput != AUDIO_IO_HANDLE_NONE) {
+ AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mOutput);
+ }
+ mDeviceCallback = 0;
+ return NO_ERROR;
+}
+
// =========================================================================
void AudioTrack::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index d722fe9..a3f014b 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -80,7 +80,8 @@
RELEASE_AUDIO_PATCH,
LIST_AUDIO_PATCHES,
SET_AUDIO_PORT_CONFIG,
- GET_AUDIO_HW_SYNC
+ GET_AUDIO_HW_SYNC,
+ SYSTEM_READY
};
#define MAX_ITEMS_PER_LIST 1024
@@ -903,6 +904,12 @@
}
return (audio_hw_sync_t)reply.readInt32();
}
+ virtual status_t systemReady()
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ return remote()->transact(SYSTEM_READY, data, &reply, IBinder::FLAG_ONEWAY);
+ }
};
IMPLEMENT_META_INTERFACE(AudioFlinger, "android.media.IAudioFlinger");
@@ -1396,6 +1403,11 @@
reply->writeInt32(getAudioHwSyncForSession((audio_session_t)data.readInt32()));
return NO_ERROR;
} break;
+ case SYSTEM_READY: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ systemReady();
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp
index a622241..3429d36 100644
--- a/media/libmedia/IAudioFlingerClient.cpp
+++ b/media/libmedia/IAudioFlingerClient.cpp
@@ -45,6 +45,7 @@
data.writeInterfaceToken(IAudioFlingerClient::getInterfaceDescriptor());
data.writeInt32(event);
data.writeInt32((int32_t)ioDesc->mIoHandle);
+ data.write(&ioDesc->mPatch, sizeof(struct audio_patch));
data.writeInt32(ioDesc->mSamplingRate);
data.writeInt32(ioDesc->mFormat);
data.writeInt32(ioDesc->mChannelMask);
@@ -67,6 +68,7 @@
audio_io_config_event event = (audio_io_config_event)data.readInt32();
sp<AudioIoDescriptor> ioDesc = new AudioIoDescriptor();
ioDesc->mIoHandle = (audio_io_handle_t) data.readInt32();
+ data.read(&ioDesc->mPatch, sizeof(struct audio_patch));
ioDesc->mSamplingRate = data.readInt32();
ioDesc->mFormat = (audio_format_t) data.readInt32();
ioDesc->mChannelMask = (audio_channel_mask_t) data.readInt32();
diff --git a/media/libmedia/ICrypto.cpp b/media/libmedia/ICrypto.cpp
index 9246a7c..2f440fe 100644
--- a/media/libmedia/ICrypto.cpp
+++ b/media/libmedia/ICrypto.cpp
@@ -142,7 +142,7 @@
ssize_t result = reply.readInt32();
- if (result >= ERROR_DRM_VENDOR_MIN && result <= ERROR_DRM_VENDOR_MAX) {
+ if (isCryptoError(result)) {
errorDetailMsg->setTo(reply.readCString());
}
@@ -319,8 +319,7 @@
reply->writeInt32(result);
- if (result >= ERROR_DRM_VENDOR_MIN
- && result <= ERROR_DRM_VENDOR_MAX) {
+ if (isCryptoError(result)) {
reply->writeCString(errorDetailMsg.c_str());
}
diff --git a/media/libmedia/IMediaHTTPConnection.cpp b/media/libmedia/IMediaHTTPConnection.cpp
index 2ff7658..09137ef 100644
--- a/media/libmedia/IMediaHTTPConnection.cpp
+++ b/media/libmedia/IMediaHTTPConnection.cpp
@@ -24,6 +24,7 @@
#include <binder/Parcel.h>
#include <utils/String8.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaErrors.h>
namespace android {
@@ -106,11 +107,18 @@
return UNKNOWN_ERROR;
}
- int32_t len = reply.readInt32();
+ size_t len = reply.readInt32();
- if (len > 0) {
- memcpy(buffer, mMemory->pointer(), len);
+ if (len > size) {
+ ALOGE("requested %zu, got %zu", size, len);
+ return ERROR_OUT_OF_RANGE;
}
+ if (len > mMemory->size()) {
+ ALOGE("got %zu, but memory has %zu", len, mMemory->size());
+ return ERROR_OUT_OF_RANGE;
+ }
+
+ memcpy(buffer, mMemory->pointer(), len);
return len;
}
diff --git a/media/libmedia/IMediaHTTPService.cpp b/media/libmedia/IMediaHTTPService.cpp
index f30d0f3..0c16a2b 100644
--- a/media/libmedia/IMediaHTTPService.cpp
+++ b/media/libmedia/IMediaHTTPService.cpp
@@ -44,6 +44,7 @@
status_t err = reply.readInt32();
if (err != OK) {
+ ALOGE("Unable to make HTTP connection (err = %d)", err);
return NULL;
}
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index c7a1394..ee3b584 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -35,7 +35,7 @@
RELEASE = IBinder::FIRST_CALL_TRANSACTION,
INIT,
CLOSE,
- USE_PERSISTENT_SURFACE,
+ SET_INPUT_SURFACE,
QUERY_SURFACE_MEDIASOURCE,
RESET,
STOP,
@@ -76,13 +76,13 @@
return reply.readInt32();
}
- status_t usePersistentSurface(const sp<IGraphicBufferConsumer>& surface)
+ status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface)
{
- ALOGV("usePersistentSurface(%p)", surface.get());
+ ALOGV("setInputSurface(%p)", surface.get());
Parcel data, reply;
data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
data.writeStrongBinder(IInterface::asBinder(surface));
- remote()->transact(USE_PERSISTENT_SURFACE, data, &reply);
+ remote()->transact(SET_INPUT_SURFACE, data, &reply);
return reply.readInt32();
}
@@ -453,12 +453,12 @@
reply->writeInt32(setCamera(camera, proxy));
return NO_ERROR;
} break;
- case USE_PERSISTENT_SURFACE: {
- ALOGV("USE_PERSISTENT_SURFACE");
+ case SET_INPUT_SURFACE: {
+ ALOGV("SET_INPUT_SURFACE");
CHECK_INTERFACE(IMediaRecorder, data, reply);
sp<IGraphicBufferConsumer> surface = interface_cast<IGraphicBufferConsumer>(
data.readStrongBinder());
- reply->writeInt32(usePersistentSurface(surface));
+ reply->writeInt32(setInputSurface(surface));
return NO_ERROR;
} break;
case QUERY_SURFACE_MEDIASOURCE: {
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index 39b135b..16da65e 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -42,7 +42,7 @@
USE_GRAPHIC_BUFFER,
CREATE_INPUT_SURFACE,
CREATE_PERSISTENT_INPUT_SURFACE,
- USE_PERSISTENT_INPUT_SURFACE,
+ SET_INPUT_SURFACE,
SIGNAL_END_OF_INPUT_STREAM,
STORE_META_DATA_IN_BUFFERS,
PREPARE_FOR_ADAPTIVE_PLAYBACK,
@@ -245,12 +245,13 @@
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer) {
+ buffer_id *buffer, OMX_U32 allottedSize) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeInt32((int32_t)node);
data.writeInt32(port_index);
data.writeStrongBinder(IInterface::asBinder(params));
+ data.writeInt32(allottedSize);
remote()->transact(USE_BUFFER, data, &reply);
status_t err = reply.readInt32();
@@ -305,7 +306,7 @@
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
- sp<IGraphicBufferProducer> *bufferProducer) {
+ sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
Parcel data, reply;
status_t err;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
@@ -317,6 +318,12 @@
return err;
}
+ // read type even if createInputSurface failed
+ int negotiatedType = reply.readInt32();
+ if (type != NULL) {
+ *type = (MetadataBufferType)negotiatedType;
+ }
+
err = reply.readInt32();
if (err != OK) {
return err;
@@ -353,9 +360,9 @@
return err;
}
- virtual status_t usePersistentInputSurface(
+ virtual status_t setInputSurface(
node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer) {
+ const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
status_t err;
@@ -363,16 +370,22 @@
data.writeInt32(port_index);
data.writeStrongBinder(IInterface::asBinder(bufferConsumer));
- err = remote()->transact(USE_PERSISTENT_INPUT_SURFACE, data, &reply);
+ err = remote()->transact(SET_INPUT_SURFACE, data, &reply);
if (err != OK) {
ALOGW("binder transaction failed: %d", err);
return err;
}
+
+ // read type even if setInputSurface failed
+ int negotiatedType = reply.readInt32();
+ if (type != NULL) {
+ *type = (MetadataBufferType)negotiatedType;
+ }
+
return reply.readInt32();
}
-
virtual status_t signalEndOfInputStream(node_id node) {
Parcel data, reply;
status_t err;
@@ -388,7 +401,7 @@
}
virtual status_t storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeInt32((int32_t)node);
@@ -396,8 +409,13 @@
data.writeInt32((uint32_t)enable);
remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
- status_t err = reply.readInt32();
- return err;
+ // read type even if storeMetaDataInBuffers failed
+ int negotiatedType = reply.readInt32();
+ if (type != NULL) {
+ *type = (MetadataBufferType)negotiatedType;
+ }
+
+ return reply.readInt32();
}
virtual status_t prepareForAdaptivePlayback(
@@ -460,12 +478,13 @@
virtual status_t allocateBufferWithBackup(
node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer) {
+ buffer_id *buffer, OMX_U32 allottedSize) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeInt32((int32_t)node);
data.writeInt32(port_index);
data.writeStrongBinder(IInterface::asBinder(params));
+ data.writeInt32(allottedSize);
remote()->transact(ALLOC_BUFFER_WITH_BACKUP, data, &reply);
status_t err = reply.readInt32();
@@ -492,11 +511,15 @@
return reply.readInt32();
}
- virtual status_t fillBuffer(node_id node, buffer_id buffer) {
+ virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeInt32((int32_t)node);
data.writeInt32((int32_t)buffer);
+ data.writeInt32(fenceFd >= 0);
+ if (fenceFd >= 0) {
+ data.writeFileDescriptor(fenceFd, true /* takeOwnership */);
+ }
remote()->transact(FILL_BUFFER, data, &reply);
return reply.readInt32();
@@ -506,7 +529,7 @@
node_id node,
buffer_id buffer,
OMX_U32 range_offset, OMX_U32 range_length,
- OMX_U32 flags, OMX_TICKS timestamp) {
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeInt32((int32_t)node);
@@ -515,6 +538,10 @@
data.writeInt32(range_length);
data.writeInt32(flags);
data.writeInt64(timestamp);
+ data.writeInt32(fenceFd >= 0);
+ if (fenceFd >= 0) {
+ data.writeFileDescriptor(fenceFd, true /* takeOwnership */);
+ }
remote()->transact(EMPTY_BUFFER, data, &reply);
return reply.readInt32();
@@ -758,9 +785,10 @@
OMX_U32 port_index = data.readInt32();
sp<IMemory> params =
interface_cast<IMemory>(data.readStrongBinder());
+ OMX_U32 allottedSize = data.readInt32();
buffer_id buffer;
- status_t err = useBuffer(node, port_index, params, &buffer);
+ status_t err = useBuffer(node, port_index, params, &buffer, allottedSize);
reply->writeInt32(err);
if (err == OK) {
@@ -816,9 +844,10 @@
OMX_U32 port_index = data.readInt32();
sp<IGraphicBufferProducer> bufferProducer;
- status_t err = createInputSurface(node, port_index,
- &bufferProducer);
+ MetadataBufferType type;
+ status_t err = createInputSurface(node, port_index, &bufferProducer, &type);
+ reply->writeInt32(type);
reply->writeInt32(err);
if (err == OK) {
@@ -847,7 +876,7 @@
return NO_ERROR;
}
- case USE_PERSISTENT_INPUT_SURFACE:
+ case SET_INPUT_SURFACE:
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
@@ -857,9 +886,10 @@
sp<IGraphicBufferConsumer> bufferConsumer =
interface_cast<IGraphicBufferConsumer>(data.readStrongBinder());
- status_t err = usePersistentInputSurface(
- node, port_index, bufferConsumer);
+ MetadataBufferType type;
+ status_t err = setInputSurface(node, port_index, bufferConsumer, &type);
+ reply->writeInt32(type);
reply->writeInt32(err);
return NO_ERROR;
}
@@ -884,7 +914,9 @@
OMX_U32 port_index = data.readInt32();
OMX_BOOL enable = (OMX_BOOL)data.readInt32();
- status_t err = storeMetaDataInBuffers(node, port_index, enable);
+ MetadataBufferType type;
+ status_t err = storeMetaDataInBuffers(node, port_index, enable, &type);
+ reply->writeInt32(type);
reply->writeInt32(err);
return NO_ERROR;
@@ -955,10 +987,11 @@
OMX_U32 port_index = data.readInt32();
sp<IMemory> params =
interface_cast<IMemory>(data.readStrongBinder());
+ OMX_U32 allottedSize = data.readInt32();
buffer_id buffer;
status_t err = allocateBufferWithBackup(
- node, port_index, params, &buffer);
+ node, port_index, params, &buffer, allottedSize);
reply->writeInt32(err);
@@ -987,7 +1020,9 @@
node_id node = (node_id)data.readInt32();
buffer_id buffer = (buffer_id)data.readInt32();
- reply->writeInt32(fillBuffer(node, buffer));
+ bool haveFence = data.readInt32();
+ int fenceFd = haveFence ? ::dup(data.readFileDescriptor()) : -1;
+ reply->writeInt32(fillBuffer(node, buffer, fenceFd));
return NO_ERROR;
}
@@ -1002,11 +1037,10 @@
OMX_U32 range_length = data.readInt32();
OMX_U32 flags = data.readInt32();
OMX_TICKS timestamp = data.readInt64();
-
- reply->writeInt32(
- emptyBuffer(
- node, buffer, range_offset, range_length,
- flags, timestamp));
+ bool haveFence = data.readInt32();
+ int fenceFd = haveFence ? ::dup(data.readFileDescriptor()) : -1;
+ reply->writeInt32(emptyBuffer(
+ node, buffer, range_offset, range_length, flags, timestamp, fenceFd));
return NO_ERROR;
}
@@ -1043,14 +1077,29 @@
: BpInterface<IOMXObserver>(impl) {
}
- virtual void onMessage(const omx_message &msg) {
+ virtual void onMessages(const std::list<omx_message> &messages) {
Parcel data, reply;
- data.writeInterfaceToken(IOMXObserver::getInterfaceDescriptor());
- data.write(&msg, sizeof(msg));
-
- ALOGV("onMessage writing message %d, size %zu", msg.type, sizeof(msg));
-
- remote()->transact(OBSERVER_ON_MSG, data, &reply, IBinder::FLAG_ONEWAY);
+ std::list<omx_message>::const_iterator it = messages.cbegin();
+ bool first = true;
+ while (it != messages.cend()) {
+ const omx_message &msg = *it++;
+ if (first) {
+ data.writeInterfaceToken(IOMXObserver::getInterfaceDescriptor());
+ data.writeInt32(msg.node);
+ first = false;
+ }
+ data.writeInt32(msg.fenceFd >= 0);
+ if (msg.fenceFd >= 0) {
+ data.writeFileDescriptor(msg.fenceFd, true /* takeOwnership */);
+ }
+ data.writeInt32(msg.type);
+ data.write(&msg.u, sizeof(msg.u));
+ ALOGV("onMessage writing message %d, size %zu", msg.type, sizeof(msg));
+ }
+ if (!first) {
+ data.writeInt32(-1); // mark end
+ remote()->transact(OBSERVER_ON_MSG, data, &reply, IBinder::FLAG_ONEWAY);
+ }
}
};
@@ -1062,16 +1111,28 @@
case OBSERVER_ON_MSG:
{
CHECK_OMX_INTERFACE(IOMXObserver, data, reply);
+ IOMX::node_id node = data.readInt32();
+ std::list<omx_message> messages;
+ status_t err = FAILED_TRANSACTION; // must receive at least one message
+ do {
+ int haveFence = data.readInt32();
+ if (haveFence < 0) { // we use -1 to mark end of messages
+ break;
+ }
+ omx_message msg;
+ msg.node = node;
+ msg.fenceFd = haveFence ? ::dup(data.readFileDescriptor()) : -1;
+ msg.type = (typeof(msg.type))data.readInt32();
+ err = data.read(&msg.u, sizeof(msg.u));
+ ALOGV("onTransact reading message %d, size %zu", msg.type, sizeof(msg));
+ messages.push_back(msg);
+ } while (err == OK);
- omx_message msg;
- data.read(&msg, sizeof(msg));
+ if (err == OK) {
+ onMessages(messages);
+ }
- ALOGV("onTransact reading message %d, size %zu", msg.type, sizeof(msg));
-
- // XXX Could use readInplace maybe?
- onMessage(msg);
-
- return NO_ERROR;
+ return err;
}
default:
diff --git a/media/libmedia/IResourceManagerService.cpp b/media/libmedia/IResourceManagerService.cpp
index 7ae946d..6902e99 100644
--- a/media/libmedia/IResourceManagerService.cpp
+++ b/media/libmedia/IResourceManagerService.cpp
@@ -48,7 +48,7 @@
template <typename T>
static void readFromParcel(const Parcel &data, Vector<T> *items) {
size_t size = (size_t)data.readUint32();
- for (size_t i = 0; i < size; i++) {
+ for (size_t i = 0; i < size && data.dataAvail() > 0; i++) {
T item;
item.readFromParcel(data);
items->add(item);
@@ -119,8 +119,6 @@
switch (code) {
case CONFIG: {
CHECK_INTERFACE(IResourceManagerService, data, reply);
- sp<IResourceManagerClient> client(
- interface_cast<IResourceManagerClient>(data.readStrongBinder()));
Vector<MediaResourcePolicy> policies;
readFromParcel(data, &policies);
config(policies);
diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp
index 271be0c..34deb59 100644
--- a/media/libmedia/JetPlayer.cpp
+++ b/media/libmedia/JetPlayer.cpp
@@ -85,12 +85,18 @@
// create the output AudioTrack
mAudioTrack = new AudioTrack();
- mAudioTrack->set(AUDIO_STREAM_MUSIC, //TODO parameterize this
+ status_t status = mAudioTrack->set(AUDIO_STREAM_MUSIC, //TODO parameterize this
pLibConfig->sampleRate,
AUDIO_FORMAT_PCM_16_BIT,
audio_channel_out_mask_from_count(pLibConfig->numChannels),
(size_t) mTrackBufferSize,
AUDIO_OUTPUT_FLAG_NONE);
+ if (status != OK) {
+ ALOGE("JetPlayer::init(): Error initializing JET library; AudioTrack error %d", status);
+ mAudioTrack.clear();
+ mState = EAS_STATE_ERROR;
+ return EAS_FAILURE;
+ }
// create render and playback thread
{
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index ae0061f..c5790fb 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -152,16 +152,6 @@
ALOGV("codec = %d", cap.mCodec);
}
-/*static*/ void
-MediaProfiles::logVideoEditorCap(const MediaProfiles::VideoEditorCap& cap UNUSED)
-{
- ALOGV("videoeditor cap:");
- ALOGV("mMaxInputFrameWidth = %d", cap.mMaxInputFrameWidth);
- ALOGV("mMaxInputFrameHeight = %d", cap.mMaxInputFrameHeight);
- ALOGV("mMaxOutputFrameWidth = %d", cap.mMaxOutputFrameWidth);
- ALOGV("mMaxOutputFrameHeight = %d", cap.mMaxOutputFrameHeight);
-}
-
/*static*/ int
MediaProfiles::findTagForName(const MediaProfiles::NameToTagMap *map, size_t nMappings,
const char *name)
@@ -398,42 +388,6 @@
ALOGV("%s: cameraId=%d, offset=%d ms", __func__, cameraId, offsetTimeMs);
mStartTimeOffsets.replaceValueFor(cameraId, offsetTimeMs);
}
-/*static*/ MediaProfiles::ExportVideoProfile*
-MediaProfiles::createExportVideoProfile(const char **atts)
-{
- CHECK(!strcmp("name", atts[0]) &&
- !strcmp("profile", atts[2]) &&
- !strcmp("level", atts[4]));
-
- const size_t nMappings =
- sizeof(sVideoEncoderNameMap)/sizeof(sVideoEncoderNameMap[0]);
- const int codec = findTagForName(sVideoEncoderNameMap, nMappings, atts[1]);
- CHECK(codec != -1);
-
- MediaProfiles::ExportVideoProfile *profile =
- new MediaProfiles::ExportVideoProfile(
- codec, atoi(atts[3]), atoi(atts[5]));
-
- return profile;
-}
-/*static*/ MediaProfiles::VideoEditorCap*
-MediaProfiles::createVideoEditorCap(const char **atts, MediaProfiles *profiles)
-{
- CHECK(!strcmp("maxInputFrameWidth", atts[0]) &&
- !strcmp("maxInputFrameHeight", atts[2]) &&
- !strcmp("maxOutputFrameWidth", atts[4]) &&
- !strcmp("maxOutputFrameHeight", atts[6]) &&
- !strcmp("maxPrefetchYUVFrames", atts[8]));
-
- MediaProfiles::VideoEditorCap *pVideoEditorCap =
- new MediaProfiles::VideoEditorCap(atoi(atts[1]), atoi(atts[3]),
- atoi(atts[5]), atoi(atts[7]), atoi(atts[9]));
-
- logVideoEditorCap(*pVideoEditorCap);
- profiles->mVideoEditorCap = pVideoEditorCap;
-
- return pVideoEditorCap;
-}
/*static*/ void
MediaProfiles::startElementHandler(void *userData, const char *name, const char **atts)
@@ -465,10 +419,6 @@
createCamcorderProfile(profiles->mCurrentCameraId, atts, profiles->mCameraIds));
} else if (strcmp("ImageEncoding", name) == 0) {
profiles->addImageEncodingQualityLevel(profiles->mCurrentCameraId, atts);
- } else if (strcmp("VideoEditorCap", name) == 0) {
- createVideoEditorCap(atts, profiles);
- } else if (strcmp("ExportVideoProfile", name) == 0) {
- profiles->mVideoEditorExportProfiles.add(createExportVideoProfile(atts));
}
}
@@ -873,32 +823,6 @@
profiles->mImageEncodingQualityLevels.add(levels);
}
-/*static*/ void
-MediaProfiles::createDefaultVideoEditorCap(MediaProfiles *profiles)
-{
- profiles->mVideoEditorCap =
- new MediaProfiles::VideoEditorCap(
- VIDEOEDITOR_DEFAULT_MAX_INPUT_FRAME_WIDTH,
- VIDEOEDITOR_DEFUALT_MAX_INPUT_FRAME_HEIGHT,
- VIDEOEDITOR_DEFAULT_MAX_OUTPUT_FRAME_WIDTH,
- VIDEOEDITOR_DEFUALT_MAX_OUTPUT_FRAME_HEIGHT,
- VIDEOEDITOR_DEFAULT_MAX_PREFETCH_YUV_FRAMES);
-}
-/*static*/ void
-MediaProfiles::createDefaultExportVideoProfiles(MediaProfiles *profiles)
-{
- // Create default video export profiles
- profiles->mVideoEditorExportProfiles.add(
- new ExportVideoProfile(VIDEO_ENCODER_H263,
- OMX_VIDEO_H263ProfileBaseline, OMX_VIDEO_H263Level10));
- profiles->mVideoEditorExportProfiles.add(
- new ExportVideoProfile(VIDEO_ENCODER_MPEG_4_SP,
- OMX_VIDEO_MPEG4ProfileSimple, OMX_VIDEO_MPEG4Level1));
- profiles->mVideoEditorExportProfiles.add(
- new ExportVideoProfile(VIDEO_ENCODER_H264,
- OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13));
-}
-
/*static*/ MediaProfiles*
MediaProfiles::createDefaultInstance()
{
@@ -910,8 +834,6 @@
createDefaultAudioDecoders(profiles);
createDefaultEncoderOutputFileFormats(profiles);
createDefaultImageEncodingQualityLevels(profiles);
- createDefaultVideoEditorCap(profiles);
- createDefaultExportVideoProfiles(profiles);
return profiles;
}
@@ -1009,54 +931,6 @@
ALOGE("The given video encoder param name %s is not found", name);
return -1;
}
-int MediaProfiles::getVideoEditorExportParamByName(
- const char *name, int codec) const
-{
- ALOGV("getVideoEditorExportParamByName: name %s codec %d", name, codec);
- ExportVideoProfile *exportProfile = NULL;
- int index = -1;
- for (size_t i =0; i < mVideoEditorExportProfiles.size(); i++) {
- exportProfile = mVideoEditorExportProfiles[i];
- if (exportProfile->mCodec == codec) {
- index = i;
- break;
- }
- }
- if (index == -1) {
- ALOGE("The given video decoder %d is not found", codec);
- return -1;
- }
- if (!strcmp("videoeditor.export.profile", name))
- return exportProfile->mProfile;
- if (!strcmp("videoeditor.export.level", name))
- return exportProfile->mLevel;
-
- ALOGE("The given video editor export param name %s is not found", name);
- return -1;
-}
-int MediaProfiles::getVideoEditorCapParamByName(const char *name) const
-{
- ALOGV("getVideoEditorCapParamByName: %s", name);
-
- if (mVideoEditorCap == NULL) {
- ALOGE("The mVideoEditorCap is not created, then create default cap.");
- createDefaultVideoEditorCap(sInstance);
- }
-
- if (!strcmp("videoeditor.input.width.max", name))
- return mVideoEditorCap->mMaxInputFrameWidth;
- if (!strcmp("videoeditor.input.height.max", name))
- return mVideoEditorCap->mMaxInputFrameHeight;
- if (!strcmp("videoeditor.output.width.max", name))
- return mVideoEditorCap->mMaxOutputFrameWidth;
- if (!strcmp("videoeditor.output.height.max", name))
- return mVideoEditorCap->mMaxOutputFrameHeight;
- if (!strcmp("maxPrefetchYUVFrames", name))
- return mVideoEditorCap->mMaxPrefetchYUVFrames;
-
- ALOGE("The given video editor param name %s is not found", name);
- return -1;
-}
Vector<audio_encoder> MediaProfiles::getAudioEncoders() const
{
diff --git a/media/libmedia/MediaResourcePolicy.cpp b/media/libmedia/MediaResourcePolicy.cpp
index 139a38c..5210825 100644
--- a/media/libmedia/MediaResourcePolicy.cpp
+++ b/media/libmedia/MediaResourcePolicy.cpp
@@ -24,25 +24,25 @@
const char kPolicySupportsMultipleSecureCodecs[] = "supports-multiple-secure-codecs";
const char kPolicySupportsSecureWithNonSecureCodec[] = "supports-secure-with-non-secure-codec";
-MediaResourcePolicy::MediaResourcePolicy() : mValue(0) {}
+MediaResourcePolicy::MediaResourcePolicy() {}
-MediaResourcePolicy::MediaResourcePolicy(String8 type, uint64_t value)
+MediaResourcePolicy::MediaResourcePolicy(String8 type, String8 value)
: mType(type),
mValue(value) {}
void MediaResourcePolicy::readFromParcel(const Parcel &parcel) {
mType = parcel.readString8();
- mValue = parcel.readUint64();
+ mValue = parcel.readString8();
}
void MediaResourcePolicy::writeToParcel(Parcel *parcel) const {
parcel->writeString8(mType);
- parcel->writeUint64(mValue);
+ parcel->writeString8(mValue);
}
String8 MediaResourcePolicy::toString() const {
String8 str;
- str.appendFormat("%s:%llu", mType.string(), (unsigned long long)mValue);
+ str.appendFormat("%s:%s", mType.string(), mValue.string());
return str;
}
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 1f8b1d3..8bbd8f1 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -345,9 +345,9 @@
-status_t MediaRecorder::usePersistentSurface(const sp<PersistentSurface>& surface)
+status_t MediaRecorder::setInputSurface(const sp<PersistentSurface>& surface)
{
- ALOGV("usePersistentSurface");
+ ALOGV("setInputSurface");
if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
@@ -356,11 +356,11 @@
(MEDIA_RECORDER_PREPARED |
MEDIA_RECORDER_RECORDING));
if (isInvalidState) {
- ALOGE("usePersistentSurface is called in an invalid state: %d", mCurrentState);
+ ALOGE("setInputSurface is called in an invalid state: %d", mCurrentState);
return INVALID_OPERATION;
}
- return mMediaRecorder->usePersistentSurface(surface->getBufferConsumer());
+ return mMediaRecorder->setInputSurface(surface->getBufferConsumer());
}
status_t MediaRecorder::setVideoFrameRate(int frames_per_second)
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 2c4e719..7f0cca2 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -33,6 +33,7 @@
libdl \
libgui \
libmedia \
+ libmediautils \
libsonivox \
libstagefright \
libstagefright_foundation \
@@ -54,7 +55,7 @@
$(TOP)/frameworks/native/include/media/openmax \
$(TOP)/external/tremolo/Tremolo \
-LOCAL_CFLAGS += -Werror -Wall
+LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall
LOCAL_CLANG := true
LOCAL_MODULE:= libmediaplayerservice
diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp
index d55482d..a7f6f8b 100644
--- a/media/libmediaplayerservice/Drm.cpp
+++ b/media/libmediaplayerservice/Drm.cpp
@@ -358,7 +358,18 @@
status_t err = mPlugin->openSession(sessionId);
if (err == ERROR_DRM_RESOURCE_BUSY) {
bool retry = false;
+ mLock.unlock();
+ // reclaimSession may call back to closeSession, since mLock is shared between Drm
+ // instances, we should unlock here to avoid deadlock.
retry = DrmSessionManager::Instance()->reclaimSession(getCallingPid());
+ mLock.lock();
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ if (mPlugin == NULL) {
+ return -EINVAL;
+ }
if (retry) {
err = mPlugin->openSession(sessionId);
}
diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp
index ca33aed..e8d495b 100644
--- a/media/libmediaplayerservice/MediaPlayerFactory.cpp
+++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp
@@ -70,12 +70,6 @@
return STAGEFRIGHT_PLAYER;
}
- // TODO: remove this EXPERIMENTAL developer settings property
- if (property_get("persist.sys.media.use-awesome", value, NULL)
- && !strcasecmp("true", value)) {
- return STAGEFRIGHT_PLAYER;
- }
-
return NU_PLAYER;
}
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 891a9e9..ae869d6 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -34,7 +34,6 @@
#include <utils/misc.h>
-#include <binder/IBatteryStats.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryHeapBase.h>
@@ -60,6 +59,7 @@
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooperRoster.h>
+#include <mediautils/BatteryNotifier.h>
#include <system/audio.h>
@@ -287,17 +287,9 @@
// reset battery stats
// if the mediaserver has crashed, battery stats could be left
// in bad state, reset the state upon service start.
- const sp<IServiceManager> sm(defaultServiceManager());
- if (sm != NULL) {
- const String16 name("batterystats");
- // use checkService() to avoid blocking if service is not up yet
- sp<IBatteryStats> batteryStats =
- interface_cast<IBatteryStats>(sm->checkService(name));
- if (batteryStats != NULL) {
- batteryStats->noteResetVideo();
- batteryStats->noteResetAudio();
- }
- }
+ BatteryNotifier& notifier(BatteryNotifier::getInstance());
+ notifier.noteResetVideo();
+ notifier.noteResetAudio();
MediaPlayerFactory::registerBuiltinFactories();
}
@@ -1489,20 +1481,12 @@
audio_format_t format, int bufferCount,
AudioCallback cb, void *cookie,
audio_output_flags_t flags,
- const audio_offload_info_t *offloadInfo)
+ const audio_offload_info_t *offloadInfo,
+ bool doNotReconnect,
+ uint32_t suggestedFrameCount)
{
- mCallback = cb;
- mCallbackCookie = cookie;
-
- // Check argument "bufferCount" against the mininum buffer count
- if (bufferCount < mMinBufferCount) {
- ALOGD("bufferCount (%d) is too small and increased to %d", bufferCount, mMinBufferCount);
- bufferCount = mMinBufferCount;
-
- }
ALOGV("open(%u, %d, 0x%x, 0x%x, %d, %d 0x%x)", sampleRate, channelCount, channelMask,
format, bufferCount, mSessionId, flags);
- size_t frameCount;
// offloading is only supported in callback mode for now.
// offloadInfo must be present if offload flag is set
@@ -1511,20 +1495,36 @@
return BAD_VALUE;
}
+ // compute frame count for the AudioTrack internal buffer
+ size_t frameCount;
if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
frameCount = 0; // AudioTrack will get frame count from AudioFlinger
} else {
+ // try to estimate the buffer processing fetch size from AudioFlinger.
+ // framesPerBuffer is approximate and generally correct, except when it's not :-).
uint32_t afSampleRate;
size_t afFrameCount;
-
if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) != NO_ERROR) {
return NO_INIT;
}
if (AudioSystem::getOutputSamplingRate(&afSampleRate, mStreamType) != NO_ERROR) {
return NO_INIT;
}
+ const size_t framesPerBuffer =
+ (unsigned long long)sampleRate * afFrameCount / afSampleRate;
- frameCount = (sampleRate*afFrameCount*bufferCount)/afSampleRate;
+ if (bufferCount == 0) {
+ // use suggestedFrameCount
+ bufferCount = (suggestedFrameCount + framesPerBuffer - 1) / framesPerBuffer;
+ }
+ // Check argument bufferCount against the mininum buffer count
+ if (bufferCount != 0 && bufferCount < mMinBufferCount) {
+ ALOGV("bufferCount (%d) increased to %d", bufferCount, mMinBufferCount);
+ bufferCount = mMinBufferCount;
+ }
+ // if frameCount is 0, then AudioTrack will get frame count from AudioFlinger
+ // which will be the minimum size permitted.
+ frameCount = bufferCount * framesPerBuffer;
}
if (channelMask == CHANNEL_MASK_USE_CHANNEL_ORDER) {
@@ -1535,6 +1535,9 @@
}
}
+ mCallback = cb;
+ mCallbackCookie = cookie;
+
// Check whether we can recycle the track
bool reuse = false;
bool bothOffloaded = false;
@@ -1605,7 +1608,8 @@
offloadInfo,
mUid,
mPid,
- mAttributes);
+ mAttributes,
+ doNotReconnect);
} else {
t = new AudioTrack(
mStreamType,
@@ -1622,12 +1626,14 @@
NULL, // offload info
mUid,
mPid,
- mAttributes);
+ mAttributes,
+ doNotReconnect);
}
if ((t == 0) || (t->initCheck() != NO_ERROR)) {
ALOGE("Unable to create audio track");
delete newcbd;
+ // t goes out of scope, so reference count drops to zero
return NO_INIT;
} else {
// successful AudioTrack initialization implies a legacy stream type was generated
@@ -1673,7 +1679,7 @@
t->setVolume(mLeftVolume, mRightVolume);
mSampleRateHz = sampleRate;
- mFlags = flags;
+ mFlags = t->getFlags(); // we suggest the flags above, but new AudioTrack() may not grant it.
mMsecsPerFrame = 1E3f / (mPlaybackRate.mSpeed * sampleRate);
uint32_t pos;
if (t->getPosition(&pos) == OK) {
@@ -1682,7 +1688,9 @@
mTrack = t;
status_t res = NO_ERROR;
- if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0) {
+ // Note some output devices may give us a direct track even though we don't specify it.
+ // Example: Line application b/17459982.
+ if ((mFlags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) {
res = t->setPlaybackRate(mPlaybackRate);
if (res == NO_ERROR) {
t->setAuxEffectSendLevel(mSendLevel);
@@ -1748,12 +1756,14 @@
void MediaPlayerService::AudioOutput::stop()
{
ALOGV("stop");
+ mBytesWritten = 0;
if (mTrack != 0) mTrack->stop();
}
void MediaPlayerService::AudioOutput::flush()
{
ALOGV("flush");
+ mBytesWritten = 0;
if (mTrack != 0) mTrack->flush();
}
@@ -1855,20 +1865,23 @@
me, buffer->raw, buffer->size, me->mCallbackCookie,
CB_EVENT_FILL_BUFFER);
- if ((me->mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0 &&
- actualSize == 0 && buffer->size > 0 && me->mNextOutput == NULL) {
- // We've reached EOS but the audio track is not stopped yet,
- // keep playing silence.
+ // Log when no data is returned from the callback.
+ // (1) We may have no data (especially with network streaming sources).
+ // (2) We may have reached the EOS and the audio track is not stopped yet.
+ // Note that AwesomePlayer/AudioPlayer will only return zero size when it reaches the EOS.
+ // NuPlayerRenderer will return zero when it doesn't have data (it doesn't block to fill).
+ //
+ // This is a benign busy-wait, with the next data request generated 10 ms or more later;
+ // nevertheless for power reasons, we don't want to see too many of these.
- memset(buffer->raw, 0, buffer->size);
- actualSize = buffer->size;
- }
+ ALOGV_IF(actualSize == 0 && buffer->size > 0, "callbackwrapper: empty buffer returned");
+ me->mBytesWritten += actualSize; // benign race with reader.
buffer->size = actualSize;
} break;
-
case AudioTrack::EVENT_STREAM_END:
+ // currently only occurs for offloaded callbacks
ALOGV("callbackwrapper: deliver EVENT_STREAM_END");
(*me->mCallback)(me, NULL /* buffer */, 0 /* size */,
me->mCallbackCookie, CB_EVENT_STREAM_END);
@@ -1880,6 +1893,19 @@
me->mCallbackCookie, CB_EVENT_TEAR_DOWN);
break;
+ case AudioTrack::EVENT_UNDERRUN:
+ // This occurs when there is no data available, typically
+ // when there is a failure to supply data to the AudioTrack. It can also
+ // occur in non-offloaded mode when the audio device comes out of standby.
+ //
+ // If an AudioTrack underruns it outputs silence. Since this happens suddenly
+ // it may sound like an audible pop or glitch.
+ //
+ // The underrun event is sent once per track underrun; the condition is reset
+ // when more data is sent to the AudioTrack.
+ ALOGI("callbackwrapper: EVENT_UNDERRUN (discarded)");
+ break;
+
default:
ALOGE("received unknown event type: %d inside CallbackWrapper !", event);
}
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 5103841..7527506 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -97,7 +97,9 @@
audio_format_t format, int bufferCount,
AudioCallback cb, void *cookie,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
- const audio_offload_info_t *offloadInfo = NULL);
+ const audio_offload_info_t *offloadInfo = NULL,
+ bool doNotReconnect = false,
+ uint32_t suggestedFrameCount = 0);
virtual status_t start();
virtual ssize_t write(const void* buffer, size_t size, bool blocking = true);
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index ed442e3..f761dec 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -55,15 +55,15 @@
return ok;
}
-status_t MediaRecorderClient::usePersistentSurface(const sp<IGraphicBufferConsumer>& surface)
+status_t MediaRecorderClient::setInputSurface(const sp<IGraphicBufferConsumer>& surface)
{
- ALOGV("usePersistentSurface");
+ ALOGV("setInputSurface");
Mutex::Autolock lock(mLock);
if (mRecorder == NULL) {
ALOGE("recorder is not initialized");
return NO_INIT;
}
- return mRecorder->usePersistentSurface(surface);
+ return mRecorder->setInputSurface(surface);
}
sp<IGraphicBufferProducer> MediaRecorderClient::querySurfaceMediaSource()
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 7ac88cb..05130d4 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -55,7 +55,7 @@
virtual status_t close();
virtual status_t release();
virtual status_t dump(int fd, const Vector<String16>& args);
- virtual status_t usePersistentSurface(const sp<IGraphicBufferConsumer>& surface);
+ virtual status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface);
virtual sp<IGraphicBufferProducer> querySurfaceMediaSource();
private:
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 509a592..98abe9c 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -243,7 +243,7 @@
return OK;
}
-status_t StagefrightRecorder::usePersistentSurface(
+status_t StagefrightRecorder::setInputSurface(
const sp<IGraphicBufferConsumer>& surface) {
mPersistentSurface = surface;
@@ -1194,8 +1194,7 @@
}
}
-status_t StagefrightRecorder::checkVideoEncoderCapabilities(
- bool *supportsCameraSourceMetaDataMode) {
+status_t StagefrightRecorder::checkVideoEncoderCapabilities() {
/* hardware codecs must support camera source meta data mode */
Vector<CodecCapabilities> codecs;
OMXClient client;
@@ -1207,9 +1206,6 @@
mVideoEncoder == VIDEO_ENCODER_VP8 ? MEDIA_MIMETYPE_VIDEO_VP8 :
mVideoEncoder == VIDEO_ENCODER_H264 ? MEDIA_MIMETYPE_VIDEO_AVC : ""),
false /* decoder */, true /* hwCodec */, &codecs);
- *supportsCameraSourceMetaDataMode = codecs.size() > 0;
- ALOGV("encoder %s camera source meta-data mode",
- *supportsCameraSourceMetaDataMode ? "supports" : "DOES NOT SUPPORT");
if (!mCaptureTimeLapse) {
// Dont clip for time lapse capture as encoder will have enough
@@ -1418,9 +1414,7 @@
status_t StagefrightRecorder::setupCameraSource(
sp<CameraSource> *cameraSource) {
status_t err = OK;
- bool encoderSupportsCameraSourceMetaDataMode;
- if ((err = checkVideoEncoderCapabilities(
- &encoderSupportsCameraSourceMetaDataMode)) != OK) {
+ if ((err = checkVideoEncoderCapabilities()) != OK) {
return err;
}
Size videoSize;
@@ -1436,14 +1430,13 @@
mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,
videoSize, mFrameRate, mPreviewSurface,
- mTimeBetweenTimeLapseFrameCaptureUs,
- encoderSupportsCameraSourceMetaDataMode);
+ mTimeBetweenTimeLapseFrameCaptureUs);
*cameraSource = mCameraSourceTimeLapse;
} else {
*cameraSource = CameraSource::CreateFromCamera(
mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,
videoSize, mFrameRate,
- mPreviewSurface, encoderSupportsCameraSourceMetaDataMode);
+ mPreviewSurface);
}
mCamera.clear();
mCameraProxy.clear();
@@ -1752,6 +1745,7 @@
}
mGraphicBufferProducer.clear();
+ mPersistentSurface.clear();
if (mOutputFd >= 0) {
::close(mOutputFd);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 1a7b720..8af9278 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -54,7 +54,7 @@
virtual status_t setVideoFrameRate(int frames_per_second);
virtual status_t setCamera(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy);
virtual status_t setPreviewSurface(const sp<IGraphicBufferProducer>& surface);
- virtual status_t usePersistentSurface(const sp<IGraphicBufferConsumer>& surface);
+ virtual status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface);
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
virtual status_t setParameters(const String8& params);
virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
@@ -141,8 +141,7 @@
status_t setupRTPRecording();
status_t setupMPEG2TSRecording();
sp<MediaSource> createAudioSource();
- status_t checkVideoEncoderCapabilities(
- bool *supportsCameraSourceMetaDataMode);
+ status_t checkVideoEncoderCapabilities();
status_t checkAudioEncoderCapabilities();
// Generic MediaSource set-up. Returns the appropriate
// source (CameraSource or SurfaceMediaSource)
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 5e7b644..64d172e 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -56,7 +56,7 @@
mVideoLastDequeueTimeUs(0),
mFetchSubtitleDataGeneration(0),
mFetchTimedTextDataGeneration(0),
- mDurationUs(0ll),
+ mDurationUs(-1ll),
mAudioIsVorbis(false),
mIsWidevine(false),
mIsSecure(false),
@@ -324,6 +324,10 @@
return INVALID_OPERATION;
}
+bool NuPlayer::GenericSource::isStreaming() const {
+ return mIsStreaming;
+}
+
NuPlayer::GenericSource::~GenericSource() {
if (mLooper != NULL) {
mLooper->unregisterHandler(id());
@@ -1510,17 +1514,7 @@
mVideoTimeUs = timeUs;
}
- // formatChange && seeking: track whose source is changed during selection
- // formatChange && !seeking: track whose source is not changed during selection
- // !formatChange: normal seek
- if ((seeking || formatChange)
- && (trackType == MEDIA_TRACK_TYPE_AUDIO
- || trackType == MEDIA_TRACK_TYPE_VIDEO)) {
- ATSParser::DiscontinuityType type = (formatChange && seeking)
- ? ATSParser::DISCONTINUITY_FORMATCHANGE
- : ATSParser::DISCONTINUITY_NONE;
- track->mPackets->queueDiscontinuity( type, NULL, true /* discard */);
- }
+ queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
sp<ABuffer> buffer = mediaBufferToABuffer(
mbuf, trackType, seekTimeUs, actualTimeUs);
@@ -1538,10 +1532,26 @@
false /* discard */);
#endif
} else {
+ queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
track->mPackets->signalEOS(err);
break;
}
}
}
+void NuPlayer::GenericSource::queueDiscontinuityIfNeeded(
+ bool seeking, bool formatChange, media_track_type trackType, Track *track) {
+ // formatChange && seeking: track whose source is changed during selection
+ // formatChange && !seeking: track whose source is not changed during selection
+ // !formatChange: normal seek
+ if ((seeking || formatChange)
+ && (trackType == MEDIA_TRACK_TYPE_AUDIO
+ || trackType == MEDIA_TRACK_TYPE_VIDEO)) {
+ ATSParser::DiscontinuityType type = (formatChange && seeking)
+ ? ATSParser::DISCONTINUITY_FORMATCHANGE
+ : ATSParser::DISCONTINUITY_NONE;
+ track->mPackets->queueDiscontinuity(type, NULL /* extra */, true /* discard */);
+ }
+}
+
} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index 7fab051..dc85d2d 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -75,6 +75,8 @@
virtual status_t setBuffers(bool audio, Vector<MediaBuffer *> &buffers);
+ virtual bool isStreaming() const;
+
protected:
virtual ~GenericSource();
@@ -200,6 +202,9 @@
media_track_type trackType,
int64_t seekTimeUs = -1ll, int64_t *actualTimeUs = NULL, bool formatChange = false);
+ void queueDiscontinuityIfNeeded(
+ bool seeking, bool formatChange, media_track_type trackType, Track *track);
+
void schedulePollBuffering();
void cancelPollBuffering();
void restartPollBuffering();
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index 39b8d09..126625a 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -161,24 +161,22 @@
status_t err = INVALID_OPERATION;
bool postFetchMsg = false, isSub = false;
- if (trackIndex != mLiveSession->getTrackCount() - 1) {
+ if (!mHasMetadata || trackIndex != mLiveSession->getTrackCount() - 1) {
err = mLiveSession->selectTrack(trackIndex, select);
postFetchMsg = select;
isSub = true;
} else {
- // metadata track
- if (mHasMetadata) {
- if (mMetadataSelected && !select) {
- err = OK;
- } else if (!mMetadataSelected && select) {
- postFetchMsg = true;
- err = OK;
- } else {
- err = BAD_VALUE; // behave as LiveSession::selectTrack
- }
-
- mMetadataSelected = select;
+ // metadata track; i.e. (mHasMetadata && trackIndex == mLiveSession->getTrackCount() - 1)
+ if (mMetadataSelected && !select) {
+ err = OK;
+ } else if (!mMetadataSelected && select) {
+ postFetchMsg = true;
+ err = OK;
+ } else {
+ err = BAD_VALUE; // behave as LiveSession::selectTrack
}
+
+ mMetadataSelected = select;
}
if (err == OK) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 4f64426..8760cbb 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -69,18 +69,16 @@
};
struct NuPlayer::SeekAction : public Action {
- SeekAction(int64_t seekTimeUs, bool needNotify)
- : mSeekTimeUs(seekTimeUs),
- mNeedNotify(needNotify) {
+ SeekAction(int64_t seekTimeUs)
+ : mSeekTimeUs(seekTimeUs) {
}
virtual void execute(NuPlayer *player) {
- player->performSeek(mSeekTimeUs, mNeedNotify);
+ player->performSeek(mSeekTimeUs);
}
private:
int64_t mSeekTimeUs;
- bool mNeedNotify;
DISALLOW_EVIL_CONSTRUCTORS(SeekAction);
};
@@ -189,7 +187,8 @@
mVideoFpsHint(-1.f),
mStarted(false),
mPaused(false),
- mPausedByClient(false) {
+ mPausedByClient(false),
+ mPausedForBuffering(false) {
clearFlushComplete();
}
@@ -423,7 +422,19 @@
CHECK(format->findInt32("type", &trackType));
AString mime;
- CHECK(format->findString("mime", &mime));
+ if (!format->findString("mime", &mime)) {
+ // Java MediaPlayer only uses mimetype for subtitle and timedtext tracks.
+ // If we can't find the mimetype here it means that we wouldn't be needing
+ // the mimetype on the Java end. We still write a placeholder mime to keep the
+ // (de)serialization logic simple.
+ if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+ mime = "audio/";
+ } else if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
+ mime = "video/";
+ } else {
+ TRESPASS();
+ }
+ }
AString lang;
CHECK(format->findString("language", &lang));
@@ -636,7 +647,7 @@
int64_t currentPositionUs = 0;
if (getCurrentPosition(&currentPositionUs) == OK) {
mDeferredActions.push_back(
- new SeekAction(currentPositionUs, false /* needNotify */));
+ new SeekAction(currentPositionUs));
}
}
@@ -671,7 +682,10 @@
{
ALOGV("kWhatStart");
if (mStarted) {
- onResume();
+ // do not resume yet if the source is still buffering
+ if (!mPausedForBuffering) {
+ onResume();
+ }
} else {
onStart();
}
@@ -820,7 +834,7 @@
audio_stream_type_t streamType = mAudioSink->getAudioStreamType();
const bool hasVideo = (videoFormat != NULL);
const bool canOffload = canOffloadStream(
- audioMeta, hasVideo, true /* is_streaming */, streamType);
+ audioMeta, hasVideo, mSource->isStreaming(), streamType);
if (canOffload) {
if (!mOffloadAudio) {
mRenderer->signalEnableOffloadAudio();
@@ -1069,12 +1083,12 @@
} else if (what == Renderer::kWhatMediaRenderingStart) {
ALOGV("media rendering started");
notifyListener(MEDIA_STARTED, 0, 0);
- } else if (what == Renderer::kWhatAudioOffloadTearDown) {
- ALOGV("Tear down audio offload, fall back to s/w path if due to error.");
+ } else if (what == Renderer::kWhatAudioTearDown) {
int64_t positionUs;
CHECK(msg->findInt64("positionUs", &positionUs));
int32_t reason;
CHECK(msg->findInt32("reason", &reason));
+ ALOGV("Tear down audio with reason %d.", reason);
closeAudioSink();
mAudioDecoder.clear();
++mAudioDecoderGeneration;
@@ -1085,10 +1099,23 @@
false /* audio */, false /* notifyComplete */);
}
- performSeek(positionUs, false /* needNotify */);
+ performSeek(positionUs);
+
if (reason == Renderer::kDueToError) {
- mRenderer->signalDisableOffloadAudio();
- mOffloadAudio = false;
+ sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+ sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
+ audio_stream_type_t streamType = mAudioSink->getAudioStreamType();
+ const bool hasVideo = (videoFormat != NULL);
+ const bool canOffload = canOffloadStream(
+ audioMeta, hasVideo, mSource->isStreaming(), streamType);
+ if (canOffload) {
+ mRenderer->signalEnableOffloadAudio();
+ sp<AMessage> format = mSource->getFormat(true /*audio*/);
+ tryOpenAudioSinkForOffload(format, hasVideo);
+ } else {
+ mRenderer->signalDisableOffloadAudio();
+ mOffloadAudio = false;
+ }
instantiateDecoder(true /* audio */, &mAudioDecoder);
}
}
@@ -1126,12 +1153,28 @@
ALOGV("kWhatSeek seekTimeUs=%lld us, needNotify=%d",
(long long)seekTimeUs, needNotify);
+ if (!mStarted) {
+ // Seek before the player is started. In order to preview video,
+ // need to start the player and pause it. This branch is called
+ // only once if needed. After the player is started, any seek
+ // operation will go through normal path.
+ // All cases, including audio-only, are handled in the same way
+ // for the sake of simplicity.
+ onStart(seekTimeUs);
+ onPause();
+ mPausedByClient = true;
+ if (needNotify) {
+ notifyDriverSeekComplete();
+ }
+ break;
+ }
+
mDeferredActions.push_back(
new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,
FLUSH_CMD_FLUSH /* video */));
mDeferredActions.push_back(
- new SeekAction(seekTimeUs, needNotify));
+ new SeekAction(seekTimeUs));
// After a flush without shutdown, decoder is paused.
// Don't resume it until source seek is done, otherwise it could
@@ -1219,13 +1262,16 @@
return OK;
}
-void NuPlayer::onStart() {
+void NuPlayer::onStart(int64_t startPositionUs) {
mOffloadAudio = false;
mAudioEOS = false;
mVideoEOS = false;
mStarted = true;
mSource->start();
+ if (startPositionUs > 0) {
+ performSeek(startPositionUs);
+ }
uint32_t flags = 0;
@@ -1242,8 +1288,7 @@
sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
mOffloadAudio =
- canOffloadStream(audioMeta, (videoFormat != NULL),
- true /* is_streaming */, streamType);
+ canOffloadStream(audioMeta, (videoFormat != NULL), mSource->isStreaming(), streamType);
if (mOffloadAudio) {
flags |= Renderer::FLAG_OFFLOAD_AUDIO;
}
@@ -1753,11 +1798,10 @@
}
}
-void NuPlayer::performSeek(int64_t seekTimeUs, bool needNotify) {
- ALOGV("performSeek seekTimeUs=%lld us (%.2f secs), needNotify(%d)",
+void NuPlayer::performSeek(int64_t seekTimeUs) {
+ ALOGV("performSeek seekTimeUs=%lld us (%.2f secs)",
(long long)seekTimeUs,
- seekTimeUs / 1E6,
- needNotify);
+ seekTimeUs / 1E6);
if (mSource == NULL) {
// This happens when reset occurs right before the loop mode
@@ -1882,11 +1926,15 @@
void NuPlayer::finishResume() {
if (mResumePending) {
mResumePending = false;
- if (mDriver != NULL) {
- sp<NuPlayerDriver> driver = mDriver.promote();
- if (driver != NULL) {
- driver->notifySeekComplete();
- }
+ notifyDriverSeekComplete();
+ }
+}
+
+void NuPlayer::notifyDriverSeekComplete() {
+ if (mDriver != NULL) {
+ sp<NuPlayerDriver> driver = mDriver.promote();
+ if (driver != NULL) {
+ driver->notifySeekComplete();
}
}
}
@@ -1995,9 +2043,10 @@
case Source::kWhatPauseOnBufferingStart:
{
// ignore if not playing
- if (mStarted && !mPausedByClient) {
+ if (mStarted) {
ALOGI("buffer low, pausing...");
+ mPausedForBuffering = true;
onPause();
}
// fall-thru
@@ -2012,10 +2061,15 @@
case Source::kWhatResumeOnBufferingEnd:
{
// ignore if not playing
- if (mStarted && !mPausedByClient) {
+ if (mStarted) {
ALOGI("buffer ready, resuming...");
- onResume();
+ mPausedForBuffering = false;
+
+ // do not resume yet if client didn't unpause
+ if (!mPausedByClient) {
+ onResume();
+ }
}
// fall-thru
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 6b7d71e..d7aa830 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -203,6 +203,9 @@
// still become true, when we pause internally due to buffering.
bool mPausedByClient;
+ // Pause state as requested by source (internally) due to buffering
+ bool mPausedForBuffering;
+
inline const sp<DecoderBase> &getDecoder(bool audio) {
return audio ? mAudioDecoder : mVideoDecoder;
}
@@ -230,7 +233,7 @@
void handleFlushComplete(bool audio, bool isDecoder);
void finishFlushIfPossible();
- void onStart();
+ void onStart(int64_t startPositionUs = -1);
void onResume();
void onPause();
@@ -239,6 +242,7 @@
void flushDecoder(bool audio, bool needShutdown);
void finishResume();
+ void notifyDriverSeekComplete();
void postScanSources();
@@ -247,7 +251,7 @@
void processDeferredActions();
- void performSeek(int64_t seekTimeUs, bool needNotify);
+ void performSeek(int64_t seekTimeUs);
void performDecoderFlush(FlushCommand audio, FlushCommand video);
void performReset();
void performScanSources();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 376c93a..c649c62 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -24,6 +24,7 @@
#include "NuPlayerRenderer.h"
#include "NuPlayerSource.h"
+#include <cutils/properties.h>
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -40,6 +41,10 @@
namespace android {
+static inline bool getAudioDeepBufferSetting() {
+ return property_get_bool("media.stagefright.audio.deep", false /* default_value */);
+}
+
NuPlayer::Decoder::Decoder(
const sp<AMessage> &notify,
const sp<Source> &source,
@@ -68,6 +73,7 @@
}
NuPlayer::Decoder::~Decoder() {
+ mCodec->release();
releaseAndResetMediaBuffers();
}
@@ -414,6 +420,11 @@
sp<ABuffer> buffer;
mCodec->getInputBuffer(index, &buffer);
+ if (buffer == NULL) {
+ handleError(UNKNOWN_ERROR);
+ return false;
+ }
+
if (index >= mInputBuffers.size()) {
for (size_t i = mInputBuffers.size(); i <= index; ++i) {
mInputBuffers.add();
@@ -533,9 +544,10 @@
uint32_t flags;
int64_t durationUs;
bool hasVideo = (mSource->getFormat(false /* audio */) != NULL);
- if (!hasVideo &&
- mSource->getDuration(&durationUs) == OK &&
- durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
+ if (getAudioDeepBufferSetting() // override regardless of source duration
+ || (!hasVideo
+ && mSource->getDuration(&durationUs) == OK
+ && durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US)) {
flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
} else {
flags = AUDIO_OUTPUT_FLAG_NONE;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 231f2e1..84ae25e 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -397,23 +397,13 @@
switch (mState) {
case STATE_PREPARED:
case STATE_STOPPED_AND_PREPARED:
- {
- mStartupSeekTimeUs = seekTimeUs;
- // pretend that the seek completed. It will actually happen when starting playback.
- // TODO: actually perform the seek here, so the player is ready to go at the new
- // location
- notifySeekComplete_l();
- break;
- }
-
- case STATE_RUNNING:
case STATE_PAUSED:
+ mStartupSeekTimeUs = seekTimeUs;
+ // fall through.
+ case STATE_RUNNING:
{
mAtEOS = false;
mSeekInProgress = true;
- if (mState == STATE_PAUSED) {
- mStartupSeekTimeUs = seekTimeUs;
- }
// seeks can take a while, so we essentially paused
notifyListener_l(MEDIA_PAUSED);
mPlayer->seekToAsync(seekTimeUs, true /* needNotify */);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 6b8f99c..fb2e767 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -19,7 +19,7 @@
#include <utils/Log.h>
#include "NuPlayerRenderer.h"
-
+#include <cutils/properties.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -36,6 +36,36 @@
namespace android {
+/*
+ * Example of common configuration settings in shell script form
+
+ #Turn offload audio off (use PCM for Play Music) -- AudioPolicyManager
+ adb shell setprop audio.offload.disable 1
+
+ #Allow offload audio with video (requires offloading to be enabled) -- AudioPolicyManager
+ adb shell setprop audio.offload.video 1
+
+ #Use audio callbacks for PCM data
+ adb shell setprop media.stagefright.audio.cbk 1
+
+ #Use deep buffer for PCM data with video (it is generally enabled for audio-only)
+ adb shell setprop media.stagefright.audio.deep 1
+
+ #Set size of buffers for pcm audio sink in msec (example: 1000 msec)
+ adb shell setprop media.stagefright.audio.sink 1000
+
+ * These configurations take effect for the next track played (not the current track).
+ */
+
+static inline bool getUseAudioCallbackSetting() {
+ return property_get_bool("media.stagefright.audio.cbk", false /* default_value */);
+}
+
+static inline int32_t getAudioSinkPcmMsSetting() {
+ return property_get_int32(
+ "media.stagefright.audio.sink", 500 /* default_value */);
+}
+
// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
// is closed to allow the audio DSP to power down.
static const int64_t kOffloadPauseMaxUs = 10000000ll;
@@ -82,11 +112,12 @@
mVideoRenderingStartGeneration(0),
mAudioRenderingStartGeneration(0),
mAudioOffloadPauseTimeoutGeneration(0),
- mAudioOffloadTornDown(false),
+ mAudioTornDown(false),
mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
mTotalBuffersQueued(0),
mLastAudioBufferDrained(0),
+ mUseAudioCallback(false),
mWakeLock(new AWakeLock()) {
mMediaClock = new MediaClock;
mPlaybackRate = mPlaybackSettings.mSpeed;
@@ -146,7 +177,7 @@
return OK;
}
- if (mAudioSink != NULL) {
+ if (mAudioSink != NULL && mAudioSink->ready()) {
status_t err = mAudioSink->setPlaybackRate(rate);
if (err != OK) {
return err;
@@ -172,7 +203,7 @@
}
status_t NuPlayer::Renderer::onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
- if (mAudioSink != NULL) {
+ if (mAudioSink != NULL && mAudioSink->ready()) {
status_t err = mAudioSink->getPlaybackRate(rate);
if (err == OK) {
if (!isAudioPlaybackRateEqual(*rate, mPlaybackSettings)) {
@@ -394,14 +425,14 @@
case kWhatDrainAudioQueue:
{
+ mDrainAudioQueuePending = false;
+
int32_t generation;
CHECK(msg->findInt32("drainGeneration", &generation));
if (generation != getDrainGeneration(true /* audio */)) {
break;
}
- mDrainAudioQueuePending = false;
-
if (onDrainAudioQueue()) {
uint32_t numFramesPlayed;
CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
@@ -566,9 +597,9 @@
break;
}
- case kWhatAudioOffloadTearDown:
+ case kWhatAudioTearDown:
{
- onAudioOffloadTearDown(kDueToError);
+ onAudioTearDown(kDueToError);
break;
}
@@ -580,7 +611,7 @@
break;
}
ALOGV("Audio Offload tear down due to pause timeout.");
- onAudioOffloadTearDown(kDueToTimeout);
+ onAudioTearDown(kDueToTimeout);
mWakeLock->release();
break;
}
@@ -592,8 +623,7 @@
}
void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
- if (mDrainAudioQueuePending || mSyncQueues || mPaused
- || offloadingAudio()) {
+ if (mDrainAudioQueuePending || mSyncQueues || mUseAudioCallback) {
return;
}
@@ -642,13 +672,15 @@
case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
{
+ ALOGV("AudioSink::CB_EVENT_STREAM_END");
me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
break;
}
case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
{
- me->notifyAudioOffloadTearDown();
+ ALOGV("AudioSink::CB_EVENT_TEAR_DOWN");
+ me->notifyAudioTearDown();
break;
}
}
@@ -659,7 +691,7 @@
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
Mutex::Autolock autoLock(mLock);
- if (!offloadingAudio() || mPaused) {
+ if (!mUseAudioCallback) {
return 0;
}
@@ -667,13 +699,13 @@
size_t sizeCopied = 0;
bool firstEntry = true;
+ QueueEntry *entry; // will be valid after while loop if hasEOS is set.
while (sizeCopied < size && !mAudioQueue.empty()) {
- QueueEntry *entry = &*mAudioQueue.begin();
+ entry = &*mAudioQueue.begin();
if (entry->mBuffer == NULL) { // EOS
hasEOS = true;
mAudioQueue.erase(mAudioQueue.begin());
- entry = NULL;
break;
}
@@ -681,7 +713,7 @@
firstEntry = false;
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
- ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
+ ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
}
@@ -714,10 +746,28 @@
mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);
}
- if (hasEOS) {
- (new AMessage(kWhatStopAudioSink, this))->post();
+ // for non-offloaded audio, we need to compute the frames written because
+ // there is no EVENT_STREAM_END notification. The frames written gives
+ // an estimate on the pending played out duration.
+ if (!offloadingAudio()) {
+ mNumFramesWritten += sizeCopied / mAudioSink->frameSize();
}
+ if (hasEOS) {
+ (new AMessage(kWhatStopAudioSink, this))->post();
+ // As there is currently no EVENT_STREAM_END callback notification for
+ // non-offloaded audio tracks, we need to post the EOS ourselves.
+ if (!offloadingAudio()) {
+ int64_t postEOSDelayUs = 0;
+ if (mAudioSink->needsTrailingPadding()) {
+ postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
+ }
+ ALOGV("fillAudioBuffer: notifyEOS "
+ "mNumFramesWritten:%u finalResult:%d postEOSDelay:%lld",
+ mNumFramesWritten, entry->mFinalResult, (long long)postEOSDelayUs);
+ notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
+ }
+ }
return sizeCopied;
}
@@ -749,6 +799,7 @@
}
#endif
+ uint32_t prevFramesWritten = mNumFramesWritten;
while (!mAudioQueue.empty()) {
QueueEntry *entry = &*mAudioQueue.begin();
@@ -778,7 +829,8 @@
if (entry->mOffset == 0 && entry->mBuffer->size() > 0) {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
- ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
+ ALOGV("onDrainAudioQueue: rendering audio at media time %.2f secs",
+ mediaTimeUs / 1E6);
onNewAudioMediaTime(mediaTimeUs);
}
@@ -789,9 +841,10 @@
if (written < 0) {
// An error in AudioSink write. Perhaps the AudioSink was not properly opened.
if (written == WOULD_BLOCK) {
- ALOGW("AudioSink write would block when writing %zu bytes", copy);
+ ALOGV("AudioSink write would block when writing %zu bytes", copy);
} else {
ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+ notifyAudioTearDown();
}
break;
}
@@ -845,7 +898,13 @@
}
mMediaClock->updateMaxTimeMedia(maxTimeMedia);
- return !mAudioQueue.empty();
+ // calculate whether we need to reschedule another write.
+ bool reschedule = !mAudioQueue.empty()
+ && (!mPaused
+ || prevFramesWritten != mNumFramesWritten); // permit pause to fill buffers
+ //ALOGD("reschedule:%d empty:%d mPaused:%d prevFramesWritten:%u mNumFramesWritten:%u",
+ // reschedule, mAudioQueue.empty(), mPaused, prevFramesWritten, mNumFramesWritten);
+ return reschedule;
}
int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
@@ -1060,8 +1119,8 @@
notify->post(delayUs);
}
-void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
- (new AMessage(kWhatAudioOffloadTearDown, this))->post();
+void NuPlayer::Renderer::notifyAudioTearDown() {
+ (new AMessage(kWhatAudioTearDown, this))->post();
}
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
@@ -1229,9 +1288,8 @@
++mAudioDrainGeneration;
prepareForMediaRenderingStart_l();
- if (offloadingAudio()) {
- clearAudioFirstAnchorTime_l();
- }
+ // the frame count will be reset after flush.
+ clearAudioFirstAnchorTime_l();
}
mDrainAudioQueuePending = false;
@@ -1239,7 +1297,9 @@
if (offloadingAudio()) {
mAudioSink->pause();
mAudioSink->flush();
- mAudioSink->start();
+ if (!mPaused) {
+ mAudioSink->start();
+ }
} else {
mAudioSink->pause();
mAudioSink->flush();
@@ -1344,7 +1404,7 @@
{
Mutex::Autolock autoLock(mLock);
- ++mAudioDrainGeneration;
+ // we do not increment audio drain generation so that we fill audio buffer during pause.
++mVideoDrainGeneration;
prepareForMediaRenderingStart_l();
mPaused = true;
@@ -1378,7 +1438,7 @@
mPaused = false;
// configure audiosink as we did not do it when pausing
- if (mAudioSink != NULL) {
+ if (mAudioSink != NULL && mAudioSink->ready()) {
mAudioSink->setPlaybackRate(mPlaybackSettings);
}
@@ -1480,11 +1540,11 @@
return durationUs;
}
-void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
- if (mAudioOffloadTornDown) {
+void NuPlayer::Renderer::onAudioTearDown(AudioTearDownReason reason) {
+ if (mAudioTornDown) {
return;
}
- mAudioOffloadTornDown = true;
+ mAudioTornDown = true;
int64_t currentPositionUs;
if (getCurrentPosition(&currentPositionUs) != OK) {
@@ -1495,7 +1555,7 @@
mAudioSink->flush();
sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatAudioOffloadTearDown);
+ notify->setInt32("what", kWhatAudioTearDown);
notify->setInt64("positionUs", currentPositionUs);
notify->setInt32("reason", reason);
notify->post();
@@ -1589,12 +1649,13 @@
offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
audioSinkChanged = true;
mAudioSink->close();
+
err = mAudioSink->open(
sampleRate,
numChannels,
(audio_channel_mask_t)channelMask,
audioFormat,
- 8 /* bufferCount */,
+ 0 /* bufferCount - unused */,
&NuPlayer::Renderer::AudioSinkCallback,
this,
(audio_output_flags_t)offloadFlags,
@@ -1612,7 +1673,9 @@
// before reaching the hardware.
// TODO
mCurrentOffloadInfo = offloadInfo;
- err = mAudioSink->start();
+ if (!mPaused) { // for preview mode, don't start if paused
+ err = mAudioSink->start();
+ }
ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
}
if (err != OK) {
@@ -1621,6 +1684,9 @@
onDisableOffloadAudio();
mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
ALOGV("openAudioSink: offload failed");
+ } else {
+ mUseAudioCallback = true; // offload mode transfers data through callback
+ ++mAudioDrainGeneration; // discard pending kWhatDrainAudioQueue message.
}
}
}
@@ -1645,15 +1711,30 @@
audioSinkChanged = true;
mAudioSink->close();
mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+ // Note: It is possible to set up the callback, but not use it to send audio data.
+ // This requires a fix in AudioSink to explicitly specify the transfer mode.
+ mUseAudioCallback = getUseAudioCallbackSetting();
+ if (mUseAudioCallback) {
+ ++mAudioDrainGeneration; // discard pending kWhatDrainAudioQueue message.
+ }
+
+ // Compute the desired buffer size.
+ // For callback mode, the amount of time before wakeup is about half the buffer size.
+ const uint32_t frameCount =
+ (unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000;
+
status_t err = mAudioSink->open(
sampleRate,
numChannels,
(audio_channel_mask_t)channelMask,
AUDIO_FORMAT_PCM_16_BIT,
- 8 /* bufferCount */,
+ 0 /* bufferCount - unused */,
+ mUseAudioCallback ? &NuPlayer::Renderer::AudioSinkCallback : NULL,
+ mUseAudioCallback ? this : NULL,
+ (audio_output_flags_t)pcmFlags,
NULL,
- NULL,
- (audio_output_flags_t)pcmFlags);
+ true /* doNotReconnect */,
+ frameCount);
if (err == OK) {
err = mAudioSink->setPlaybackRate(mPlaybackSettings);
}
@@ -1663,14 +1744,14 @@
return err;
}
mCurrentPcmInfo = info;
- mAudioSink->start();
+ if (!mPaused) { // for preview mode, don't start if paused
+ mAudioSink->start();
+ }
}
if (audioSinkChanged) {
onAudioSinkChanged();
}
- if (offloadingAudio()) {
- mAudioOffloadTornDown = false;
- }
+ mAudioTornDown = false;
return OK;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index 928b71b..c2fea40 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -82,16 +82,16 @@
void closeAudioSink();
enum {
- kWhatEOS = 'eos ',
- kWhatFlushComplete = 'fluC',
- kWhatPosition = 'posi',
- kWhatVideoRenderingStart = 'vdrd',
- kWhatMediaRenderingStart = 'mdrd',
- kWhatAudioOffloadTearDown = 'aOTD',
+ kWhatEOS = 'eos ',
+ kWhatFlushComplete = 'fluC',
+ kWhatPosition = 'posi',
+ kWhatVideoRenderingStart = 'vdrd',
+ kWhatMediaRenderingStart = 'mdrd',
+ kWhatAudioTearDown = 'adTD',
kWhatAudioOffloadPauseTimeout = 'aOPT',
};
- enum AudioOffloadTearDownReason {
+ enum AudioTearDownReason {
kDueToError = 0,
kDueToTimeout,
};
@@ -179,7 +179,7 @@
int64_t mLastPositionUpdateUs;
int32_t mAudioOffloadPauseTimeoutGeneration;
- bool mAudioOffloadTornDown;
+ bool mAudioTornDown;
audio_offload_info_t mCurrentOffloadInfo;
struct PcmInfo {
@@ -194,6 +194,7 @@
int32_t mTotalBuffersQueued;
int32_t mLastAudioBufferDrained;
+ bool mUseAudioCallback;
sp<AWakeLock> mWakeLock;
@@ -242,7 +243,7 @@
int32_t getQueueGeneration(bool audio);
int32_t getDrainGeneration(bool audio);
bool getSyncQueues();
- void onAudioOffloadTearDown(AudioOffloadTearDownReason reason);
+ void onAudioTearDown(AudioTearDownReason reason);
status_t onOpenAudioSink(
const sp<AMessage> &format,
bool offloadOnly,
@@ -255,7 +256,7 @@
void notifyPosition();
void notifyVideoLateBy(int64_t lateByUs);
void notifyVideoRenderingStart();
- void notifyAudioOffloadTearDown();
+ void notifyAudioTearDown();
void flushQueue(List<QueueEntry> *queue);
bool dropBufferIfStale(bool audio, const sp<AMessage> &msg);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index ef1ba13..11a6a9f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -118,6 +118,10 @@
return false;
}
+ virtual bool isStreaming() const {
+ return true;
+ }
+
protected:
virtual ~Source() {}
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index b7798d2..cf37eba 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -37,17 +37,20 @@
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/SurfaceUtils.h>
#include <media/hardware/HardwareAPI.h>
#include <OMX_AudioExt.h>
#include <OMX_VideoExt.h>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
+#include <OMX_AsString.h>
#include "include/avc_utils.h"
@@ -104,6 +107,18 @@
params->nVersion.s.nStep = 0;
}
+struct MessageList : public RefBase {
+ MessageList() {
+ }
+ virtual ~MessageList() {
+ }
+ std::list<sp<AMessage> > &getList() { return mList; }
+private:
+ std::list<sp<AMessage> > mList;
+
+ DISALLOW_EVIL_CONSTRUCTORS(MessageList);
+};
+
struct CodecObserver : public BnOMXObserver {
CodecObserver() {}
@@ -112,52 +127,78 @@
}
// from IOMXObserver
- virtual void onMessage(const omx_message &omx_msg) {
- sp<AMessage> msg = mNotify->dup();
-
- msg->setInt32("type", omx_msg.type);
- msg->setInt32("node", omx_msg.node);
-
- switch (omx_msg.type) {
- case omx_message::EVENT:
- {
- msg->setInt32("event", omx_msg.u.event_data.event);
- msg->setInt32("data1", omx_msg.u.event_data.data1);
- msg->setInt32("data2", omx_msg.u.event_data.data2);
- break;
- }
-
- case omx_message::EMPTY_BUFFER_DONE:
- {
- msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
- break;
- }
-
- case omx_message::FILL_BUFFER_DONE:
- {
- msg->setInt32(
- "buffer", omx_msg.u.extended_buffer_data.buffer);
- msg->setInt32(
- "range_offset",
- omx_msg.u.extended_buffer_data.range_offset);
- msg->setInt32(
- "range_length",
- omx_msg.u.extended_buffer_data.range_length);
- msg->setInt32(
- "flags",
- omx_msg.u.extended_buffer_data.flags);
- msg->setInt64(
- "timestamp",
- omx_msg.u.extended_buffer_data.timestamp);
- break;
- }
-
- default:
- TRESPASS();
- break;
+ virtual void onMessages(const std::list<omx_message> &messages) {
+ if (messages.empty()) {
+ return;
}
- msg->post();
+ sp<AMessage> notify = mNotify->dup();
+ bool first = true;
+ sp<MessageList> msgList = new MessageList();
+ for (std::list<omx_message>::const_iterator it = messages.cbegin();
+ it != messages.cend(); ++it) {
+ const omx_message &omx_msg = *it;
+ if (first) {
+ notify->setInt32("node", omx_msg.node);
+ first = false;
+ }
+
+ sp<AMessage> msg = new AMessage;
+ msg->setInt32("type", omx_msg.type);
+ switch (omx_msg.type) {
+ case omx_message::EVENT:
+ {
+ msg->setInt32("event", omx_msg.u.event_data.event);
+ msg->setInt32("data1", omx_msg.u.event_data.data1);
+ msg->setInt32("data2", omx_msg.u.event_data.data2);
+ break;
+ }
+
+ case omx_message::EMPTY_BUFFER_DONE:
+ {
+ msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
+ msg->setInt32("fence_fd", omx_msg.fenceFd);
+ break;
+ }
+
+ case omx_message::FILL_BUFFER_DONE:
+ {
+ msg->setInt32(
+ "buffer", omx_msg.u.extended_buffer_data.buffer);
+ msg->setInt32(
+ "range_offset",
+ omx_msg.u.extended_buffer_data.range_offset);
+ msg->setInt32(
+ "range_length",
+ omx_msg.u.extended_buffer_data.range_length);
+ msg->setInt32(
+ "flags",
+ omx_msg.u.extended_buffer_data.flags);
+ msg->setInt64(
+ "timestamp",
+ omx_msg.u.extended_buffer_data.timestamp);
+ msg->setInt32(
+ "fence_fd", omx_msg.fenceFd);
+ break;
+ }
+
+ case omx_message::FRAME_RENDERED:
+ {
+ msg->setInt64(
+ "media_time_us", omx_msg.u.render_data.timestamp);
+ msg->setInt64(
+ "system_nano", omx_msg.u.render_data.nanoTime);
+ break;
+ }
+
+ default:
+ ALOGE("Unrecognized message type: %d", omx_msg.type);
+ break;
+ }
+ msgList->getList().push_back(msg);
+ }
+ notify->setObject("messages", msgList);
+ notify->post();
}
protected:
@@ -195,15 +236,25 @@
void postFillThisBuffer(BufferInfo *info);
private:
+ // Handles an OMX message. Returns true iff message was handled.
bool onOMXMessage(const sp<AMessage> &msg);
- bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID);
+ // Handles a list of messages. Returns true iff messages were handled.
+ bool onOMXMessageList(const sp<AMessage> &msg);
+
+ // returns true iff this message is for this component and the component is alive
+ bool checkOMXMessage(const sp<AMessage> &msg);
+
+ bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);
bool onOMXFillBufferDone(
IOMX::buffer_id bufferID,
size_t rangeOffset, size_t rangeLength,
OMX_U32 flags,
- int64_t timeUs);
+ int64_t timeUs,
+ int fenceFd);
+
+ virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
void getMoreInputDataIfPossible();
@@ -260,7 +311,7 @@
bool onConfigureComponent(const sp<AMessage> &msg);
void onCreateInputSurface(const sp<AMessage> &msg);
- void onUsePersistentInputSurface(const sp<AMessage> &msg);
+ void onSetInputSurface(const sp<AMessage> &msg);
void onStart();
void onShutdown(bool keepComponentAllocated);
@@ -321,6 +372,7 @@
virtual void stateEntered();
virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+ virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
private:
bool mActive;
@@ -339,6 +391,7 @@
virtual void stateEntered();
virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+ virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
private:
DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
@@ -405,13 +458,44 @@
////////////////////////////////////////////////////////////////////////////////
+void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
+ if (mFenceFd >= 0) {
+ ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
+ mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
+ }
+ mFenceFd = fenceFd;
+ mIsReadFence = false;
+}
+
+void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
+ if (mFenceFd >= 0) {
+ ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
+ mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
+ }
+ mFenceFd = fenceFd;
+ mIsReadFence = true;
+}
+
+void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
+ if (mFenceFd >= 0 && mIsReadFence) {
+ ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
+ }
+}
+
+void ACodec::BufferInfo::checkReadFence(const char *dbg) {
+ if (mFenceFd >= 0 && !mIsReadFence) {
+ ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
ACodec::ACodec()
: mQuirks(0),
mNode(0),
mSentFormat(false),
mIsVideo(false),
mIsEncoder(false),
- mUseMetadataOnEncoderOutput(false),
mShutdownInProgress(false),
mExplicitShutdown(false),
mEncoderDelay(0),
@@ -420,8 +504,10 @@
mChannelMaskPresent(false),
mChannelMask(0),
mDequeueCounter(0),
- mStoreMetaDataInOutputBuffers(false),
- mMetaDataBuffersToSubmit(0),
+ mInputMetadataType(kMetadataBufferTypeInvalid),
+ mOutputMetadataType(kMetadataBufferTypeInvalid),
+ mLegacyAdaptiveExperiment(false),
+ mMetadataBuffersToSubmit(0),
mRepeatFrameDelayUs(-1ll),
mMaxPtsGapUs(-1ll),
mMaxFps(-1),
@@ -496,9 +582,9 @@
(new AMessage(kWhatCreateInputSurface, this))->post();
}
-void ACodec::initiateUsePersistentInputSurface(
+void ACodec::initiateSetInputSurface(
const sp<PersistentSurface> &surface) {
- sp<AMessage> msg = new AMessage(kWhatUsePersistentInputSurface, this);
+ sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
msg->setObject("input-surface", surface);
msg->post();
}
@@ -539,10 +625,10 @@
// This causes a halt if we already signaled an EOS on the input
// port. For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
-void ACodec::signalSubmitOutputMetaDataBufferIfEOS_workaround() {
+void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
- mMetaDataBuffersToSubmit > 0) {
- (new AMessage(kWhatSubmitOutputMetaDataBufferIfEOS, this))->post();
+ mMetadataBuffersToSubmit > 0) {
+ (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
}
}
@@ -609,12 +695,16 @@
return err;
}
+ // need to enable allocation when attaching
+ surface->getIGraphicBufferProducer()->allowAllocation(true);
+
// for meta data mode, we move dequeud buffers to the new surface.
// for non-meta mode, we must move all registered buffers
for (size_t i = 0; i < buffers.size(); ++i) {
const BufferInfo &info = buffers[i];
// skip undequeued buffers for meta data mode
- if (mStoreMetaDataInOutputBuffers
+ if (storingMetadataInDecodedBuffers()
+ && !mLegacyAdaptiveExperiment
&& info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
ALOGV("skipping buffer %p", info.mGraphicBuffer->getNativeBuffer());
continue;
@@ -631,13 +721,14 @@
}
// cancel undequeued buffers to new surface
- if (!mStoreMetaDataInOutputBuffers) {
+ if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
for (size_t i = 0; i < buffers.size(); ++i) {
- const BufferInfo &info = buffers[i];
+ BufferInfo &info = buffers.editItemAt(i);
if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
err = nativeWindow->cancelBuffer(
- nativeWindow, info.mGraphicBuffer->getNativeBuffer(), -1);
+ nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
+ info.mFenceFd = -1;
if (err != OK) {
ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
info.mGraphicBuffer->getNativeBuffer(),
@@ -650,6 +741,11 @@
(void)surface->getIGraphicBufferProducer()->allowAllocation(false);
}
+ // push blank buffers to previous window if requested
+ if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
+ pushBlankBuffersToNativeWindow(mNativeWindow.get());
+ }
+
mNativeWindow = nativeWindow;
return OK;
}
@@ -662,8 +758,8 @@
status_t err;
if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
- if (mStoreMetaDataInOutputBuffers) {
- err = allocateOutputMetaDataBuffers();
+ if (storingMetadataInDecodedBuffers()) {
+ err = allocateOutputMetadataBuffers();
} else {
err = allocateOutputBuffersFromNativeWindow();
}
@@ -676,20 +772,44 @@
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err == OK) {
- ALOGV("[%s] Allocating %u buffers of size %u on %s port",
+ MetadataBufferType type =
+ portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
+ int32_t bufSize = def.nBufferSize;
+ if (type == kMetadataBufferTypeGrallocSource) {
+ bufSize = sizeof(VideoGrallocMetadata);
+ } else if (type == kMetadataBufferTypeANWBuffer) {
+ bufSize = sizeof(VideoNativeMetadata);
+ }
+
+ // If using gralloc or native source input metadata buffers, allocate largest
+ // metadata size as we prefer to generate native source metadata, but component
+ // may require gralloc source. For camera source, allocate at least enough
+ // size for native metadata buffers.
+ int32_t allottedSize = bufSize;
+ if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) {
+ bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
+ } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
+ bufSize = max(bufSize, (int32_t)sizeof(VideoNativeMetadata));
+ }
+
+ ALOGV("[%s] Allocating %u buffers of size %d/%d (from %u using %s) on %s port",
mComponentName.c_str(),
- def.nBufferCountActual, def.nBufferSize,
+ def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
portIndex == kPortIndexInput ? "input" : "output");
- size_t totalSize = def.nBufferCountActual * def.nBufferSize;
+ size_t totalSize = def.nBufferCountActual * bufSize;
mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");
- for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
- sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
- CHECK(mem.get() != NULL);
+ for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
+ sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
+ if (mem == NULL || mem->pointer() == NULL) {
+ return NO_MEMORY;
+ }
BufferInfo info;
info.mStatus = BufferInfo::OWNED_BY_US;
+ info.mFenceFd = -1;
+ info.mRenderInfo = NULL;
uint32_t requiresAllocateBufferBit =
(portIndex == kPortIndexInput)
@@ -697,27 +817,27 @@
: OMXCodec::kRequiresAllocateBufferOnOutputPorts;
if ((portIndex == kPortIndexInput && (mFlags & kFlagIsSecure))
- || mUseMetadataOnEncoderOutput) {
+ || (portIndex == kPortIndexOutput && usingMetadataOnEncoderOutput())) {
mem.clear();
void *ptr;
err = mOMX->allocateBuffer(
- mNode, portIndex, def.nBufferSize, &info.mBufferID,
+ mNode, portIndex, bufSize, &info.mBufferID,
&ptr);
- int32_t bufSize = mUseMetadataOnEncoderOutput ?
- (4 + sizeof(buffer_handle_t)) : def.nBufferSize;
-
info.mData = new ABuffer(ptr, bufSize);
} else if (mQuirks & requiresAllocateBufferBit) {
err = mOMX->allocateBufferWithBackup(
- mNode, portIndex, mem, &info.mBufferID);
+ mNode, portIndex, mem, &info.mBufferID, allottedSize);
} else {
- err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID);
+ err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
}
if (mem != NULL) {
- info.mData = new ABuffer(mem->pointer(), def.nBufferSize);
+ info.mData = new ABuffer(mem->pointer(), bufSize);
+ if (type == kMetadataBufferTypeANWBuffer) {
+ ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
+ }
}
mBuffers[portIndex].push(info);
@@ -748,82 +868,6 @@
return OK;
}
-status_t ACodec::setNativeWindowSizeFormatAndUsage(
- ANativeWindow *nativeWindow /* nonnull */,
- int width, int height, int format, int rotation, int usage) {
- status_t err = native_window_set_buffers_dimensions(nativeWindow, width, height);
- if (err != 0) {
- ALOGE("native_window_set_buffers_dimensions failed: %s (%d)", strerror(-err), -err);
- return err;
- }
-
- err = native_window_set_buffers_format(nativeWindow, format);
- if (err != 0) {
- ALOGE("native_window_set_buffers_format failed: %s (%d)", strerror(-err), -err);
- return err;
- }
-
- int transform = 0;
- if ((rotation % 90) == 0) {
- switch ((rotation / 90) & 3) {
- case 1: transform = HAL_TRANSFORM_ROT_90; break;
- case 2: transform = HAL_TRANSFORM_ROT_180; break;
- case 3: transform = HAL_TRANSFORM_ROT_270; break;
- default: transform = 0; break;
- }
- }
-
- err = native_window_set_buffers_transform(nativeWindow, transform);
- if (err != 0) {
- ALOGE("native_window_set_buffers_transform failed: %s (%d)", strerror(-err), -err);
- return err;
- }
-
- // Make sure to check whether either Stagefright or the video decoder
- // requested protected buffers.
- if (usage & GRALLOC_USAGE_PROTECTED) {
- // Verify that the ANativeWindow sends images directly to
- // SurfaceFlinger.
- int queuesToNativeWindow = 0;
- err = nativeWindow->query(
- nativeWindow, NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &queuesToNativeWindow);
- if (err != 0) {
- ALOGE("error authenticating native window: %s (%d)", strerror(-err), -err);
- return err;
- }
- if (queuesToNativeWindow != 1) {
- ALOGE("native window could not be authenticated");
- return PERMISSION_DENIED;
- }
- }
-
- int consumerUsage = 0;
- err = nativeWindow->query(nativeWindow, NATIVE_WINDOW_CONSUMER_USAGE_BITS, &consumerUsage);
- if (err != 0) {
- ALOGW("failed to get consumer usage bits. ignoring");
- err = 0;
- }
-
- int finalUsage = usage | consumerUsage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP;
- ALOGV("gralloc usage: %#x(ACodec) + %#x(Consumer) = %#x", usage, consumerUsage, finalUsage);
- err = native_window_set_usage(nativeWindow, finalUsage);
- if (err != 0) {
- ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
- return err;
- }
-
- err = native_window_set_scaling_mode(
- nativeWindow, NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
- if (err != 0) {
- ALOGE("native_window_set_scaling_mode failed: %s (%d)", strerror(-err), -err);
- return err;
- }
-
- ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage %#x",
- nativeWindow, width, height, format, rotation, finalUsage);
- return OK;
-}
-
status_t ACodec::setupNativeWindowSizeFormatAndUsage(ANativeWindow *nativeWindow /* nonnull */) {
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
@@ -849,6 +893,8 @@
usage |= GRALLOC_USAGE_PROTECTED;
}
+ usage |= GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP;
+
ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
return setNativeWindowSizeFormatAndUsage(
nativeWindow,
@@ -956,7 +1002,7 @@
return err;
mNumUndequeuedBuffers = minUndequeuedBuffers;
- if (!mStoreMetaDataInOutputBuffers) {
+ if (!storingMetadataInDecodedBuffers()) {
static_cast<Surface*>(mNativeWindow.get())
->getIGraphicBufferProducer()->allowAllocation(true);
}
@@ -968,7 +1014,8 @@
// Dequeue buffers and send them to OMX
for (OMX_U32 i = 0; i < bufferCount; i++) {
ANativeWindowBuffer *buf;
- err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf);
+ int fenceFd;
+ err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
if (err != 0) {
ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
break;
@@ -977,6 +1024,9 @@
sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
BufferInfo info;
info.mStatus = BufferInfo::OWNED_BY_US;
+ info.mFenceFd = fenceFd;
+ info.mIsReadFence = false;
+ info.mRenderInfo = NULL;
info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
info.mGraphicBuffer = graphicBuffer;
mBuffers[kPortIndexOutput].push(info);
@@ -1019,7 +1069,7 @@
}
}
- if (!mStoreMetaDataInOutputBuffers) {
+ if (!storingMetadataInDecodedBuffers()) {
static_cast<Surface*>(mNativeWindow.get())
->getIGraphicBufferProducer()->allowAllocation(false);
}
@@ -1027,7 +1077,7 @@
return err;
}
-status_t ACodec::allocateOutputMetaDataBuffers() {
+status_t ACodec::allocateOutputMetadataBuffers() {
OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
status_t err = configureOutputBuffersFromNativeWindow(
&bufferCount, &bufferSize, &minUndequeuedBuffers);
@@ -1038,24 +1088,32 @@
ALOGV("[%s] Allocating %u meta buffers on output port",
mComponentName.c_str(), bufferCount);
- size_t totalSize = bufferCount * 8;
+ size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
+ sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
+ size_t totalSize = bufferCount * bufSize;
mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");
// Dequeue buffers and send them to OMX
for (OMX_U32 i = 0; i < bufferCount; i++) {
BufferInfo info;
info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
+ info.mFenceFd = -1;
+ info.mRenderInfo = NULL;
info.mGraphicBuffer = NULL;
info.mDequeuedAt = mDequeueCounter;
- sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(
- sizeof(struct VideoDecoderOutputMetaData));
- CHECK(mem.get() != NULL);
+ sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
+ if (mem == NULL || mem->pointer() == NULL) {
+ return NO_MEMORY;
+ }
+ if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
+ ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
+ }
info.mData = new ABuffer(mem->pointer(), mem->size());
// we use useBuffer for metadata regardless of quirks
err = mOMX->useBuffer(
- mNode, kPortIndexOutput, mem, &info.mBufferID);
+ mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
mBuffers[kPortIndexOutput].push(info);
@@ -1063,28 +1121,109 @@
mComponentName.c_str(), info.mBufferID, mem->pointer());
}
- mMetaDataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
+ if (mLegacyAdaptiveExperiment) {
+ // preallocate and preregister buffers
+ static_cast<Surface *>(mNativeWindow.get())
+ ->getIGraphicBufferProducer()->allowAllocation(true);
+
+ ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
+ "output port",
+ mComponentName.c_str(), bufferCount, bufferSize);
+
+ // Dequeue buffers then cancel them all
+ for (OMX_U32 i = 0; i < bufferCount; i++) {
+ BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+
+ ANativeWindowBuffer *buf;
+ int fenceFd;
+ err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
+ if (err != 0) {
+ ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ break;
+ }
+
+ sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+ mOMX->updateGraphicBufferInMeta(
+ mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
+ info->mGraphicBuffer = graphicBuffer;
+ }
+
+ for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
+ BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+ status_t error = cancelBufferToNativeWindow(info);
+ if (err == OK) {
+ err = error;
+ }
+ }
+
+ static_cast<Surface*>(mNativeWindow.get())
+ ->getIGraphicBufferProducer()->allowAllocation(false);
+ }
+
+ mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
return err;
}
-status_t ACodec::submitOutputMetaDataBuffer() {
- CHECK(mStoreMetaDataInOutputBuffers);
- if (mMetaDataBuffersToSubmit == 0)
+status_t ACodec::submitOutputMetadataBuffer() {
+ CHECK(storingMetadataInDecodedBuffers());
+ if (mMetadataBuffersToSubmit == 0)
return OK;
BufferInfo *info = dequeueBufferFromNativeWindow();
- if (info == NULL)
+ if (info == NULL) {
return ERROR_IO;
+ }
ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());
- --mMetaDataBuffersToSubmit;
- CHECK_EQ(mOMX->fillBuffer(mNode, info->mBufferID),
- (status_t)OK);
+ --mMetadataBuffersToSubmit;
+ info->checkWriteFence("submitOutputMetadataBuffer");
+ status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err == OK) {
+ info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ }
- info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
- return OK;
+ return err;
+}
+
+status_t ACodec::waitForFence(int fd, const char *dbg ) {
+ status_t res = OK;
+ if (fd >= 0) {
+ sp<Fence> fence = new Fence(fd);
+ res = fence->wait(IOMX::kFenceTimeoutMs);
+ ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
+ }
+ return res;
+}
+
+// static
+const char *ACodec::_asString(BufferInfo::Status s) {
+ switch (s) {
+ case BufferInfo::OWNED_BY_US: return "OUR";
+ case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
+ case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
+ case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
+ case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
+ case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
+ default: return "?";
+ }
+}
+
+void ACodec::dumpBuffers(OMX_U32 portIndex) {
+ CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+ ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
+ portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
+ for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+ const BufferInfo &info = mBuffers[portIndex][i];
+ ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
+ i, info.mBufferID, info.mGraphicBuffer.get(),
+ info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
+ _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
+ }
}
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
@@ -1093,17 +1232,55 @@
ALOGV("[%s] Calling cancelBuffer on buffer %u",
mComponentName.c_str(), info->mBufferID);
+ info->checkWriteFence("cancelBufferToNativeWindow");
int err = mNativeWindow->cancelBuffer(
- mNativeWindow.get(), info->mGraphicBuffer.get(), -1);
+ mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
+ info->mFenceFd = -1;
ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
mComponentName.c_str(), info->mBufferID);
-
+ // change ownership even if cancelBuffer fails
info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
return err;
}
+void ACodec::updateRenderInfoForDequeuedBuffer(
+ ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {
+
+ info->mRenderInfo =
+ mRenderTracker.updateInfoForDequeuedBuffer(
+ buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);
+
+ // check for any fences already signaled
+ notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
+}
+
+void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
+ if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
+ mRenderTracker.dumpRenderQueue();
+ }
+}
+
+void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
+ std::list<FrameRenderTracker::Info> done =
+ mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);
+
+ // unlink untracked frames
+ for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
+ it != done.cend(); ++it) {
+ if (it->getIndex() >= 0) {
+ mBuffers[kPortIndexOutput].editItemAt(it->getIndex()).mRenderInfo = NULL;
+ }
+ }
+
+ if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
+ msg->post();
+ }
+}
+
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
ANativeWindowBuffer *buf;
CHECK(mNativeWindow.get() != NULL);
@@ -1114,26 +1291,61 @@
return NULL;
}
- if (native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf) != 0) {
- ALOGE("dequeueBuffer failed.");
- return NULL;
- }
+ int fenceFd = -1;
+ do {
+ status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
+ if (err != 0) {
+ ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
+ return NULL;
+ }
+ bool stale = false;
+ for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
+ BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+
+ if (info->mGraphicBuffer != NULL &&
+ info->mGraphicBuffer->handle == buf->handle) {
+ // Since consumers can attach buffers to BufferQueues, it is possible
+ // that a known yet stale buffer can return from a surface that we
+ // once used. We can simply ignore this as we have already dequeued
+ // this buffer properly. NOTE: this does not eliminate all cases,
+ // e.g. it is possible that we have queued the valid buffer to the
+ // NW, and a stale copy of the same buffer gets dequeued - which will
+ // be treated as the valid buffer by ACodec.
+ if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ ALOGI("dequeued stale buffer %p. discarding", buf);
+ stale = true;
+ break;
+ }
+
+ ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
+ updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
+ return info;
+ }
+ }
+
+ // It is also possible to receive a previously unregistered buffer
+ // in non-meta mode. These should be treated as stale buffers. The
+ // same is possible in meta mode, in which case, it will be treated
+ // as a normal buffer, which is not desirable.
+ // TODO: fix this.
+ if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
+ ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
+ stale = true;
+ }
+ if (stale) {
+ // TODO: detach stale buffer, but there is no API yet to do it.
+ buf = NULL;
+ }
+ } while (buf == NULL);
+
+ // get oldest undequeued buffer
BufferInfo *oldest = NULL;
for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
BufferInfo *info =
&mBuffers[kPortIndexOutput].editItemAt(i);
-
- if (info->mGraphicBuffer != NULL &&
- info->mGraphicBuffer->handle == buf->handle) {
- CHECK_EQ((int)info->mStatus,
- (int)BufferInfo::OWNED_BY_NATIVE_WINDOW);
-
- info->mStatus = BufferInfo::OWNED_BY_US;
-
- return info;
- }
-
if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
(oldest == NULL ||
// avoid potential issues from counter rolling over
@@ -1143,47 +1355,61 @@
}
}
- if (oldest) {
- CHECK(mStoreMetaDataInOutputBuffers);
+ // it is impossible dequeue a buffer when there are no buffers with ANW
+ CHECK(oldest != NULL);
+ // it is impossible to dequeue an unknown buffer in non-meta mode, as the
+ // while loop above does not complete
+ CHECK(storingMetadataInDecodedBuffers());
- // discard buffer in LRU info and replace with new buffer
- oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
- oldest->mStatus = BufferInfo::OWNED_BY_US;
+ // discard buffer in LRU info and replace with new buffer
+ oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
+ oldest->mStatus = BufferInfo::OWNED_BY_US;
+ oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
+ mRenderTracker.untrackFrame(oldest->mRenderInfo);
+ oldest->mRenderInfo = NULL;
- mOMX->updateGraphicBufferInMeta(
- mNode, kPortIndexOutput, oldest->mGraphicBuffer,
- oldest->mBufferID);
+ mOMX->updateGraphicBufferInMeta(
+ mNode, kPortIndexOutput, oldest->mGraphicBuffer,
+ oldest->mBufferID);
- VideoDecoderOutputMetaData *metaData =
- reinterpret_cast<VideoDecoderOutputMetaData *>(
- oldest->mData->base());
- CHECK_EQ(metaData->eType, kMetadataBufferTypeGrallocSource);
-
+ if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
+ VideoGrallocMetadata *grallocMeta =
+ reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
(unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
mDequeueCounter - oldest->mDequeuedAt,
- metaData->pHandle,
+ grallocMeta->pHandle,
oldest->mGraphicBuffer->handle, oldest->mData->base());
-
- return oldest;
+ } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
+ VideoNativeMetadata *nativeMeta =
+ reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
+ ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
+ (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
+ mDequeueCounter - oldest->mDequeuedAt,
+ nativeMeta->pBuffer,
+ oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
}
- TRESPASS();
-
- return NULL;
+ updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
+ return oldest;
}
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
+ status_t err = OK;
for (size_t i = mBuffers[portIndex].size(); i-- > 0;) {
- CHECK_EQ((status_t)OK, freeBuffer(portIndex, i));
+ status_t err2 = freeBuffer(portIndex, i);
+ if (err == OK) {
+ err = err2;
+ }
}
+ // clear mDealer even on an error
mDealer[portIndex].clear();
-
- return OK;
+ return err;
}
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
+ status_t err = OK;
for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
BufferInfo *info =
&mBuffers[kPortIndexOutput].editItemAt(i);
@@ -1192,36 +1418,64 @@
// or being drained.
if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
- CHECK_EQ((status_t)OK, freeBuffer(kPortIndexOutput, i));
+ status_t err2 = freeBuffer(kPortIndexOutput, i);
+ if (err == OK) {
+ err = err2;
+ }
}
}
- return OK;
+ return err;
}
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+ status_t err = OK;
- CHECK(info->mStatus == BufferInfo::OWNED_BY_US
- || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW);
-
- if (portIndex == kPortIndexOutput && mNativeWindow != NULL
- && info->mStatus == BufferInfo::OWNED_BY_US) {
- cancelBufferToNativeWindow(info);
+ // there should not be any fences in the metadata
+ MetadataBufferType type =
+ portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
+ if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
+ && info->mData->size() >= sizeof(VideoNativeMetadata)) {
+ int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
+ if (fenceFd >= 0) {
+ ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
+ fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
+ }
}
- CHECK_EQ(mOMX->freeBuffer(
- mNode, portIndex, info->mBufferID),
- (status_t)OK);
+ switch (info->mStatus) {
+ case BufferInfo::OWNED_BY_US:
+ if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
+ (void)cancelBufferToNativeWindow(info);
+ }
+ // fall through
+ case BufferInfo::OWNED_BY_NATIVE_WINDOW:
+ err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
+ break;
+
+ default:
+ ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
+ err = FAILED_TRANSACTION;
+ break;
+ }
+
+ if (info->mFenceFd >= 0) {
+ ::close(info->mFenceFd);
+ }
+
+ mRenderTracker.untrackFrame(info->mRenderInfo);
+ info->mRenderInfo = NULL;
+
+ // remove buffer even if mOMX->freeBuffer fails
mBuffers[portIndex].removeAt(i);
- return OK;
+ return err;
}
ACodec::BufferInfo *ACodec::findBufferByID(
- uint32_t portIndex, IOMX::buffer_id bufferID,
- ssize_t *index) {
+ uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
@@ -1233,8 +1487,7 @@
}
}
- TRESPASS();
-
+ ALOGE("Could not find buffer with ID %u", bufferID);
return NULL;
}
@@ -1347,6 +1600,8 @@
mIsEncoder = encoder;
+ mInputMetadataType = kMetadataBufferTypeInvalid;
+ mOutputMetadataType = kMetadataBufferTypeInvalid;
status_t err = setComponentRole(encoder /* isEncoder */, mime);
@@ -1365,15 +1620,19 @@
if (encoder
&& msg->findInt32("store-metadata-in-buffers", &storeMeta)
&& storeMeta != 0) {
- err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE);
-
+ err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
if (err != OK) {
- ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
+ ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
mComponentName.c_str(), err);
- return err;
- }
- }
+ return err;
+ }
+ // For this specific case we could be using camera source even if storeMetaDataInBuffers
+ // returns Gralloc source. Pretend that we are; this will force us to use nBufferSize.
+ if (mInputMetadataType == kMetadataBufferTypeGrallocSource) {
+ mInputMetadataType = kMetadataBufferTypeCameraSource;
+ }
+ }
int32_t prependSPSPPS = 0;
if (encoder
@@ -1412,14 +1671,10 @@
&& msg->findInt32("store-metadata-in-buffers-output", &storeMeta)
&& storeMeta != 0);
- err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable);
-
+ err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType);
if (err != OK) {
ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d",
mComponentName.c_str(), err);
- mUseMetadataOnEncoderOutput = 0;
- } else {
- mUseMetadataOnEncoderOutput = enable;
}
if (!msg->findInt64(
@@ -1451,7 +1706,7 @@
sp<RefBase> obj;
bool haveNativeWindow = msg->findObject("native-window", &obj)
&& obj != NULL && video && !encoder;
- mStoreMetaDataInOutputBuffers = false;
+ mLegacyAdaptiveExperiment = false;
if (video && !encoder) {
inputFormat->setInt32("adaptive-playback", false);
@@ -1536,7 +1791,7 @@
// Always try to enable dynamic output buffers on native surface
err = mOMX->storeMetaDataInBuffers(
- mNode, kPortIndexOutput, OMX_TRUE);
+ mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType);
if (err != OK) {
ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
mComponentName.c_str(), err);
@@ -1588,7 +1843,10 @@
} else {
ALOGV("[%s] storeMetaDataInBuffers succeeded",
mComponentName.c_str());
- mStoreMetaDataInOutputBuffers = true;
+ CHECK(storingMetadataInDecodedBuffers());
+ mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled(
+ "legacy-adaptive", !msg->contains("no-experiments"));
+
inputFormat->setInt32("adaptive-playback", true);
}
@@ -1627,22 +1885,30 @@
if (haveNativeWindow) {
mNativeWindow = static_cast<Surface *>(obj.get());
- CHECK(mNativeWindow != NULL);
}
// initialize native window now to get actual output format
// TODO: this is needed for some encoders even though they don't use native window
- CHECK_EQ((status_t)OK, initNativeWindow());
+ err = initNativeWindow();
+ if (err != OK) {
+ return err;
+ }
// fallback for devices that do not handle flex-YUV for native buffers
if (haveNativeWindow) {
int32_t requestedColorFormat = OMX_COLOR_FormatUnused;
if (msg->findInt32("color-format", &requestedColorFormat) &&
requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) {
- CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK);
+ status_t err = getPortFormat(kPortIndexOutput, outputFormat);
+ if (err != OK) {
+ return err;
+ }
int32_t colorFormat = OMX_COLOR_FormatUnused;
OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
- CHECK(outputFormat->findInt32("color-format", &colorFormat));
+ if (!outputFormat->findInt32("color-format", &colorFormat)) {
+ ALOGE("ouptut port did not have a color format (wrong domain?)");
+ return BAD_VALUE;
+ }
ALOGD("[%s] Requested output format %#x and got %#x.",
mComponentName.c_str(), requestedColorFormat, colorFormat);
if (!isFlexibleColorFormat(
@@ -1654,9 +1920,10 @@
mNativeWindow.clear();
haveNativeWindow = false;
usingSwRenderer = true;
- if (mStoreMetaDataInOutputBuffers) {
- err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_FALSE);
- mStoreMetaDataInOutputBuffers = false;
+ if (storingMetadataInDecodedBuffers()) {
+ err = mOMX->storeMetaDataInBuffers(
+ mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType);
+ mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case
// TODO: implement adaptive-playback support for bytebuffer mode.
// This is done by SW codecs, but most HW codecs don't support it.
inputFormat->setInt32("adaptive-playback", false);
@@ -1863,11 +2130,14 @@
mBaseOutputFormat = outputFormat;
- CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK);
- CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK);
- mInputFormat = inputFormat;
- mOutputFormat = outputFormat;
-
+ err = getPortFormat(kPortIndexInput, inputFormat);
+ if (err == OK) {
+ err = getPortFormat(kPortIndexOutput, outputFormat);
+ if (err == OK) {
+ mInputFormat = inputFormat;
+ mOutputFormat = outputFormat;
+ }
+ }
return err;
}
@@ -1947,7 +2217,10 @@
return err;
}
- CHECK(def.nBufferSize >= size);
+ if (def.nBufferSize < size) {
+ ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
+ return FAILED_TRANSACTION;
+ }
return OK;
}
@@ -2275,7 +2548,9 @@
}
status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
- CHECK(!encoder); // XXX TODO
+ if (encoder) {
+ return INVALID_OPERATION;
+ }
return setupRawAudioFormat(
kPortIndexInput, sampleRate, numChannels);
@@ -2786,7 +3061,9 @@
break;
}
- ALOGI("setupVideoEncoder succeeded");
+ if (err == OK) {
+ ALOGI("setupVideoEncoder succeeded");
+ }
return err;
}
@@ -3384,8 +3661,9 @@
status_t err = mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-
- CHECK_EQ(err, (status_t)OK);
+ if (err != OK) {
+ return err;
+ }
if (portIndex == kPortIndexInput) {
// XXX Need a (much) better heuristic to compute input buffer sizes.
@@ -3395,7 +3673,10 @@
}
}
- CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
+ if (def.eDomain != OMX_PortDomainVideo) {
+ ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
+ return FAILED_TRANSACTION;
+ }
video_def->nFrameWidth = width;
video_def->nFrameHeight = height;
@@ -3459,8 +3740,8 @@
while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
&& dequeueBufferFromNativeWindow() != NULL) {
// these buffers will be submitted as regular buffers; account for this
- if (mStoreMetaDataInOutputBuffers && mMetaDataBuffersToSubmit > 0) {
- --mMetaDataBuffersToSubmit;
+ if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
+ --mMetadataBuffersToSubmit;
}
}
}
@@ -3672,17 +3953,20 @@
}
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
- // TODO: catch errors an return them instead of using CHECK
+ const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
def.nPortIndex = portIndex;
- CHECK_EQ(mOMX->getParameter(
- mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)),
- (status_t)OK);
+ status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
- CHECK_EQ((int)def.eDir,
- (int)(portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput));
+ if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) {
+ ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
+ return BAD_VALUE;
+ }
switch (def.eDomain) {
case OMX_PortDomainVideo:
@@ -3745,12 +4029,16 @@
rect.nHeight = videoDef->nFrameHeight;
}
- CHECK_GE(rect.nLeft, 0);
- CHECK_GE(rect.nTop, 0);
- CHECK_GE(rect.nWidth, 0u);
- CHECK_GE(rect.nHeight, 0u);
- CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
- CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
+ if (rect.nLeft < 0 ||
+ rect.nTop < 0 ||
+ rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
+ rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
+ ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
+ rect.nLeft, rect.nTop,
+ rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
+ videoDef->nFrameWidth, videoDef->nFrameHeight);
+ return BAD_VALUE;
+ }
notify->setRect(
"crop",
@@ -3807,7 +4095,13 @@
default:
{
- CHECK(mIsEncoder ^ (portIndex == kPortIndexInput));
+ if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
+ // should be CodingUnused
+ ALOGE("Raw port video compression format is %s(%d)",
+ asString(videoDef->eCompressionFormat),
+ videoDef->eCompressionFormat);
+ return BAD_VALUE;
+ }
AString mime;
if (GetMimeTypeForVideoCoding(
videoDef->eCompressionFormat, &mime) != OK) {
@@ -3838,20 +4132,25 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- CHECK_EQ(mOMX->getParameter(
- mNode, OMX_IndexParamAudioPcm,
- &params, sizeof(params)),
- (status_t)OK);
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
- CHECK_GT(params.nChannels, 0);
- CHECK(params.nChannels == 1 || params.bInterleaved);
- CHECK_EQ(params.nBitPerSample, 16u);
-
- CHECK_EQ((int)params.eNumData,
- (int)OMX_NumericalDataSigned);
-
- CHECK_EQ((int)params.ePCMMode,
- (int)OMX_AUDIO_PCMModeLinear);
+ if (params.nChannels <= 0
+ || (params.nChannels != 1 && !params.bInterleaved)
+ || params.nBitPerSample != 16u
+ || params.eNumData != OMX_NumericalDataSigned
+ || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
+ ALOGE("unsupported PCM port: %u channels%s, %u-bit, %s(%d), %s(%d) mode ",
+ params.nChannels,
+ params.bInterleaved ? " interleaved" : "",
+ params.nBitPerSample,
+ asString(params.eNumData), params.eNumData,
+ asString(params.ePCMMode), params.ePCMMode);
+ return FAILED_TRANSACTION;
+ }
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
notify->setInt32("channel-count", params.nChannels);
@@ -3869,10 +4168,11 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- CHECK_EQ(mOMX->getParameter(
- mNode, OMX_IndexParamAudioAac,
- &params, sizeof(params)),
- (status_t)OK);
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
notify->setInt32("channel-count", params.nChannels);
@@ -3886,21 +4186,18 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- CHECK_EQ(mOMX->getParameter(
- mNode, OMX_IndexParamAudioAmr,
- &params, sizeof(params)),
- (status_t)OK);
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
notify->setInt32("channel-count", 1);
if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
- notify->setString(
- "mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
-
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
notify->setInt32("sample-rate", 16000);
} else {
- notify->setString(
- "mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
-
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
notify->setInt32("sample-rate", 8000);
}
break;
@@ -3912,10 +4209,11 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- CHECK_EQ(mOMX->getParameter(
- mNode, OMX_IndexParamAudioFlac,
- &params, sizeof(params)),
- (status_t)OK);
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
notify->setInt32("channel-count", params.nChannels);
@@ -3929,10 +4227,11 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- CHECK_EQ(mOMX->getParameter(
- mNode, OMX_IndexParamAudioMp3,
- &params, sizeof(params)),
- (status_t)OK);
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
notify->setInt32("channel-count", params.nChannels);
@@ -3946,10 +4245,11 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- CHECK_EQ(mOMX->getParameter(
- mNode, OMX_IndexParamAudioVorbis,
- &params, sizeof(params)),
- (status_t)OK);
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
notify->setInt32("channel-count", params.nChannels);
@@ -3963,11 +4263,12 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- CHECK_EQ((status_t)OK, mOMX->getParameter(
- mNode,
- (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
- &params,
- sizeof(params)));
+ err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+ &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
notify->setInt32("channel-count", params.nChannels);
@@ -3981,11 +4282,12 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- CHECK_EQ((status_t)OK, mOMX->getParameter(
- mNode,
- (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
- &params,
- sizeof(params)));
+ err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
+ &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
notify->setInt32("channel-count", params.nChannels);
@@ -3999,11 +4301,12 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- CHECK_EQ((status_t)OK, mOMX->getParameter(
- mNode,
- (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
- &params,
- sizeof(params)));
+ err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
+ &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
notify->setInt32("channel-count", params.nChannels);
@@ -4017,11 +4320,11 @@
InitOMXParams(&params);
params.nPortIndex = portIndex;
- CHECK_EQ((status_t)OK, mOMX->getParameter(
- mNode,
- (OMX_INDEXTYPE)OMX_IndexParamAudioPcm,
- &params,
- sizeof(params)));
+ err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
const char *mime = NULL;
if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
@@ -4039,30 +4342,33 @@
case OMX_AUDIO_CodingGSMFR:
{
- OMX_AUDIO_PARAM_MP3TYPE params;
+ OMX_AUDIO_PARAM_PCMMODETYPE params;
InitOMXParams(&params);
params.nPortIndex = portIndex;
- CHECK_EQ(mOMX->getParameter(
- mNode, OMX_IndexParamAudioPcm,
- &params, sizeof(params)),
- (status_t)OK);
+ err = mOMX->getParameter(
+ mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
+ if (err != OK) {
+ return err;
+ }
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM);
notify->setInt32("channel-count", params.nChannels);
- notify->setInt32("sample-rate", params.nSampleRate);
+ notify->setInt32("sample-rate", params.nSamplingRate);
break;
}
default:
- ALOGE("UNKNOWN AUDIO CODING: %d\n", audioDef->eEncoding);
- TRESPASS();
+ ALOGE("Unsupported audio coding: %s(%d)\n",
+ asString(audioDef->eEncoding), audioDef->eEncoding);
+ return BAD_TYPE;
}
break;
}
default:
- TRESPASS();
+ ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain);
+ return BAD_TYPE;
}
return OK;
@@ -4072,7 +4378,10 @@
sp<AMessage> notify = mBaseOutputFormat->dup();
notify->setInt32("what", kWhatOutputFormatChanged);
- CHECK_EQ(getPortFormat(kPortIndexOutput, notify), (status_t)OK);
+ if (getPortFormat(kPortIndexOutput, notify) != OK) {
+ ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str());
+ return;
+ }
AString mime;
CHECK(notify->findString("mime", &mime));
@@ -4123,134 +4432,6 @@
notify->post();
}
-status_t ACodec::pushBlankBuffersToNativeWindow() {
- status_t err = NO_ERROR;
- ANativeWindowBuffer* anb = NULL;
- int numBufs = 0;
- int minUndequeuedBufs = 0;
-
- // We need to reconnect to the ANativeWindow as a CPU client to ensure that
- // no frames get dropped by SurfaceFlinger assuming that these are video
- // frames.
- err = native_window_api_disconnect(mNativeWindow.get(),
- NATIVE_WINDOW_API_MEDIA);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)",
- strerror(-err), -err);
- return err;
- }
-
- err = native_window_api_connect(mNativeWindow.get(),
- NATIVE_WINDOW_API_CPU);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: api_connect failed: %s (%d)",
- strerror(-err), -err);
- return err;
- }
-
- err = setNativeWindowSizeFormatAndUsage(
- mNativeWindow.get(), 1, 1, HAL_PIXEL_FORMAT_RGBX_8888, 0, GRALLOC_USAGE_SW_WRITE_OFTEN);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: set format failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- err = mNativeWindow->query(mNativeWindow.get(),
- NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: MIN_UNDEQUEUED_BUFFERS query "
- "failed: %s (%d)", strerror(-err), -err);
- goto error;
- }
-
- numBufs = minUndequeuedBufs + 1;
- err = native_window_set_buffer_count(mNativeWindow.get(), numBufs);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: set_buffer_count failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- // We push numBufs + 1 buffers to ensure that we've drawn into the same
- // buffer twice. This should guarantee that the buffer has been displayed
- // on the screen and then been replaced, so an previous video frames are
- // guaranteed NOT to be currently displayed.
- for (int i = 0; i < numBufs + 1; i++) {
- err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &anb);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: dequeueBuffer failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- sp<GraphicBuffer> buf(new GraphicBuffer(anb, false));
-
- // Fill the buffer with the a 1x1 checkerboard pattern ;)
- uint32_t* img = NULL;
- err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: lock failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- *img = 0;
-
- err = buf->unlock();
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: unlock failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- err = mNativeWindow->queueBuffer(mNativeWindow.get(),
- buf->getNativeBuffer(), -1);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: queueBuffer failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- anb = NULL;
- }
-
-error:
-
- if (err != NO_ERROR) {
- // Clean up after an error.
- if (anb != NULL) {
- mNativeWindow->cancelBuffer(mNativeWindow.get(), anb, -1);
- }
-
- native_window_api_disconnect(mNativeWindow.get(),
- NATIVE_WINDOW_API_CPU);
- native_window_api_connect(mNativeWindow.get(),
- NATIVE_WINDOW_API_MEDIA);
-
- return err;
- } else {
- // Clean up after success.
- err = native_window_api_disconnect(mNativeWindow.get(),
- NATIVE_WINDOW_API_CPU);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)",
- strerror(-err), -err);
- return err;
- }
-
- err = native_window_api_connect(mNativeWindow.get(),
- NATIVE_WINDOW_API_MEDIA);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: api_connect failed: %s (%d)",
- strerror(-err), -err);
- return err;
- }
-
- return NO_ERROR;
- }
-}
-
////////////////////////////////////////////////////////////////////////////////
ACodec::PortDescription::PortDescription() {
@@ -4318,9 +4499,14 @@
break;
}
+ case ACodec::kWhatOMXMessageList:
+ {
+ return checkOMXMessage(msg) ? onOMXMessageList(msg) : true;
+ }
+
case ACodec::kWhatOMXMessage:
{
- return onOMXMessage(msg);
+ return checkOMXMessage(msg) ? onOMXMessage(msg) : true;
}
case ACodec::kWhatSetSurface:
@@ -4331,7 +4517,9 @@
sp<RefBase> obj;
CHECK(msg->findObject("surface", &obj));
- status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
+ status_t err =
+ ADebug::isExperimentEnabled("legacy-setsurface") ? BAD_VALUE :
+ mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
sp<AMessage> response = new AMessage;
response->setInt32("err", err);
@@ -4340,7 +4528,7 @@
}
case ACodec::kWhatCreateInputSurface:
- case ACodec::kWhatUsePersistentInputSurface:
+ case ACodec::kWhatSetInputSurface:
case ACodec::kWhatSignalEndOfInputStream:
{
// This may result in an app illegal state exception.
@@ -4377,21 +4565,50 @@
return true;
}
-bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
- int32_t type;
- CHECK(msg->findInt32("type", &type));
-
+bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) {
// there is a possibility that this is an outstanding message for a
// codec that we have already destroyed
if (mCodec->mNode == 0) {
ALOGI("ignoring message as already freed component: %s",
msg->debugString().c_str());
- return true;
+ return false;
}
IOMX::node_id nodeID;
CHECK(msg->findInt32("node", (int32_t*)&nodeID));
- CHECK_EQ(nodeID, mCodec->mNode);
+ if (nodeID != mCodec->mNode) {
+ ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode);
+ return false;
+ }
+ return true;
+}
+
+bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("messages", &obj));
+ sp<MessageList> msgList = static_cast<MessageList *>(obj.get());
+
+ bool receivedRenderedEvents = false;
+ for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin();
+ it != msgList->getList().cend(); ++it) {
+ onOMXMessage(*it);
+ int32_t type;
+ CHECK((*it)->findInt32("type", &type));
+ if (type == omx_message::FRAME_RENDERED) {
+ receivedRenderedEvents = true;
+ }
+ }
+
+ if (receivedRenderedEvents) {
+ // NOTE: all buffers are rendered in this case
+ mCodec->notifyOfRenderedFrames();
+ }
+ return true;
+}
+
+bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
+ int32_t type;
+ CHECK(msg->findInt32("type", &type));
switch (type) {
case omx_message::EVENT:
@@ -4421,9 +4638,12 @@
case omx_message::EMPTY_BUFFER_DONE:
{
IOMX::buffer_id bufferID;
- CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
+ int32_t fenceFd;
- return onOMXEmptyBufferDone(bufferID);
+ CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
+ CHECK(msg->findInt32("fence_fd", &fenceFd));
+
+ return onOMXEmptyBufferDone(bufferID, fenceFd);
}
case omx_message::FILL_BUFFER_DONE:
@@ -4431,27 +4651,46 @@
IOMX::buffer_id bufferID;
CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
- int32_t rangeOffset, rangeLength, flags;
+ int32_t rangeOffset, rangeLength, flags, fenceFd;
int64_t timeUs;
CHECK(msg->findInt32("range_offset", &rangeOffset));
CHECK(msg->findInt32("range_length", &rangeLength));
CHECK(msg->findInt32("flags", &flags));
CHECK(msg->findInt64("timestamp", &timeUs));
+ CHECK(msg->findInt32("fence_fd", &fenceFd));
return onOMXFillBufferDone(
bufferID,
(size_t)rangeOffset, (size_t)rangeLength,
(OMX_U32)flags,
- timeUs);
+ timeUs,
+ fenceFd);
+ }
+
+ case omx_message::FRAME_RENDERED:
+ {
+ int64_t mediaTimeUs, systemNano;
+
+ CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
+ CHECK(msg->findInt64("system_nano", &systemNano));
+
+ return onOMXFrameRendered(
+ mediaTimeUs, systemNano);
}
default:
- TRESPASS();
- break;
+ ALOGE("Unexpected message type: %d", type);
+ return false;
}
}
+bool ACodec::BaseState::onOMXFrameRendered(
+ int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
+ // ignore outside of Executing and PortSettingsChanged states
+ return true;
+}
+
bool ACodec::BaseState::onOMXEvent(
OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
if (event != OMX_EventError) {
@@ -4474,16 +4713,29 @@
return true;
}
-bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) {
+bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
ALOGV("[%s] onOMXEmptyBufferDone %u",
mCodec->mComponentName.c_str(), bufferID);
- BufferInfo *info =
- mCodec->findBufferByID(kPortIndexInput, bufferID);
-
- CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
+ BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
+ BufferInfo::Status status = BufferInfo::getSafeStatus(info);
+ if (status != BufferInfo::OWNED_BY_COMPONENT) {
+ ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
+ mCodec->dumpBuffers(kPortIndexInput);
+ if (fenceFd >= 0) {
+ ::close(fenceFd);
+ }
+ return false;
+ }
info->mStatus = BufferInfo::OWNED_BY_US;
+ // input buffers cannot take fences, so wait for any fence now
+ (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
+ fenceFd = -1;
+
+ // still save fence for completeness
+ info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");
+
// We're in "store-metadata-in-buffers" mode, the underlying
// OMX component had access to data that's implicitly refcounted
// by this "MediaBuffer" object. Now that the OMX component has
@@ -4501,12 +4753,10 @@
postFillThisBuffer(info);
break;
+ case FREE_BUFFERS:
default:
- {
- CHECK_EQ((int)mode, (int)FREE_BUFFERS);
- TRESPASS(); // Not currently used
- break;
- }
+ ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
+ return false;
}
return true;
@@ -4567,7 +4817,13 @@
}
BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
- CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_UPSTREAM);
+ BufferInfo::Status status = BufferInfo::getSafeStatus(info);
+ if (status != BufferInfo::OWNED_BY_UPSTREAM) {
+ ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
+ mCodec->dumpBuffers(kPortIndexInput);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return;
+ }
info->mStatus = BufferInfo::OWNED_BY_US;
@@ -4606,7 +4862,13 @@
bufferID,
buffer.get(), info->mData.get());
- CHECK_LE(buffer->size(), info->mData->capacity());
+ if (buffer->size() > info->mData->capacity()) {
+ ALOGE("data size (%zu) is greated than buffer capacity (%zu)",
+ buffer->size(), // this is the data received
+ info->mData->capacity()); // this is out buffer size
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return;
+ }
memcpy(info->mData->data(), buffer->data(), buffer->size());
}
@@ -4633,43 +4895,46 @@
mCodec->mBufferStats.add(timeUs, stats);
#endif
- if (mCodec->mStoreMetaDataInOutputBuffers) {
+ if (mCodec->storingMetadataInDecodedBuffers()) {
// try to submit an output buffer for each input buffer
PortMode outputMode = getPortMode(kPortIndexOutput);
- ALOGV("MetaDataBuffersToSubmit=%u portMode=%s",
- mCodec->mMetaDataBuffersToSubmit,
+ ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
+ mCodec->mMetadataBuffersToSubmit,
(outputMode == FREE_BUFFERS ? "FREE" :
outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
if (outputMode == RESUBMIT_BUFFERS) {
- mCodec->submitOutputMetaDataBuffer();
+ mCodec->submitOutputMetadataBuffer();
}
}
-
- CHECK_EQ(mCodec->mOMX->emptyBuffer(
- mCodec->mNode,
- bufferID,
- 0,
- buffer->size(),
- flags,
- timeUs),
- (status_t)OK);
-
+ info->checkReadFence("onInputBufferFilled");
+ status_t err2 = mCodec->mOMX->emptyBuffer(
+ mCodec->mNode,
+ bufferID,
+ 0,
+ buffer->size(),
+ flags,
+ timeUs,
+ info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err2 != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
+ return;
+ }
info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
- if (!eos) {
+ if (!eos && err == OK) {
getMoreInputDataIfPossible();
} else {
- ALOGV("[%s] Signalled EOS on the input port",
- mCodec->mComponentName.c_str());
+ ALOGV("[%s] Signalled EOS (%d) on the input port",
+ mCodec->mComponentName.c_str(), err);
mCodec->mPortEOS[kPortIndexInput] = true;
mCodec->mInputEOSResult = err;
}
} else if (!mCodec->mPortEOS[kPortIndexInput]) {
- if (err != ERROR_END_OF_STREAM) {
- ALOGV("[%s] Signalling EOS on the input port "
- "due to error %d",
+ if (err != OK && err != ERROR_END_OF_STREAM) {
+ ALOGV("[%s] Signalling EOS on the input port due to error %d",
mCodec->mComponentName.c_str(), err);
} else {
ALOGV("[%s] Signalling EOS on the input port",
@@ -4679,15 +4944,20 @@
ALOGV("[%s] calling emptyBuffer %u signalling EOS",
mCodec->mComponentName.c_str(), bufferID);
- CHECK_EQ(mCodec->mOMX->emptyBuffer(
- mCodec->mNode,
- bufferID,
- 0,
- 0,
- OMX_BUFFERFLAG_EOS,
- 0),
- (status_t)OK);
-
+ info->checkReadFence("onInputBufferFilled");
+ status_t err2 = mCodec->mOMX->emptyBuffer(
+ mCodec->mNode,
+ bufferID,
+ 0,
+ 0,
+ OMX_BUFFERFLAG_EOS,
+ 0,
+ info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err2 != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
+ return;
+ }
info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
mCodec->mPortEOS[kPortIndexInput] = true;
@@ -4696,8 +4966,11 @@
break;
}
+ case FREE_BUFFERS:
+ break;
+
default:
- CHECK_EQ((int)mode, (int)FREE_BUFFERS);
+ ALOGE("invalid port mode: %d", mode);
break;
}
}
@@ -4735,11 +5008,13 @@
IOMX::buffer_id bufferID,
size_t rangeOffset, size_t rangeLength,
OMX_U32 flags,
- int64_t timeUs) {
+ int64_t timeUs,
+ int fenceFd) {
ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
mCodec->mComponentName.c_str(), bufferID, timeUs, flags);
ssize_t index;
+ status_t err= OK;
#if TRACK_BUFFER_TIMING
index = mCodec->mBufferStats.indexOfKey(timeUs);
@@ -4758,12 +5033,36 @@
BufferInfo *info =
mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
-
- CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
+ BufferInfo::Status status = BufferInfo::getSafeStatus(info);
+ if (status != BufferInfo::OWNED_BY_COMPONENT) {
+ ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
+ mCodec->dumpBuffers(kPortIndexOutput);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ if (fenceFd >= 0) {
+ ::close(fenceFd);
+ }
+ return true;
+ }
info->mDequeuedAt = ++mCodec->mDequeueCounter;
info->mStatus = BufferInfo::OWNED_BY_US;
+ if (info->mRenderInfo != NULL) {
+ // The fence for an emptied buffer must have signaled, but there still could be queued
+ // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
+ // as we will soon requeue this buffer to the surface. While in theory we could still keep
+ // track of buffers that are requeued to the surface, it is better to add support to the
+ // buffer-queue to notify us of released buffers and their fences (in the future).
+ mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
+ }
+
+ // byte buffers cannot take fences, so wait for any fence now
+ if (mCodec->mNativeWindow == NULL) {
+ (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
+ fenceFd = -1;
+ }
+ info->setReadFence(fenceFd, "onOMXFillBufferDone");
+
PortMode mode = getPortMode(kPortIndexOutput);
switch (mode) {
@@ -4777,9 +5076,12 @@
ALOGV("[%s] calling fillBuffer %u",
mCodec->mComponentName.c_str(), info->mBufferID);
- CHECK_EQ(mCodec->mOMX->fillBuffer(
- mCodec->mNode, info->mBufferID),
- (status_t)OK);
+ err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ return true;
+ }
info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
break;
@@ -4791,10 +5093,17 @@
if (!mCodec->mSentFormat && rangeLength > 0) {
mCodec->sendFormatChange(reply);
}
-
- if (mCodec->mUseMetadataOnEncoderOutput) {
- native_handle_t* handle =
- *(native_handle_t**)(info->mData->data() + 4);
+ if (mCodec->usingMetadataOnEncoderOutput()) {
+ native_handle_t *handle = NULL;
+ VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)info->mData->data();
+ VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)info->mData->data();
+ if (info->mData->size() >= sizeof(grallocMeta)
+ && grallocMeta.eType == kMetadataBufferTypeGrallocSource) {
+ handle = (native_handle_t *)grallocMeta.pHandle;
+ } else if (info->mData->size() >= sizeof(nativeMeta)
+ && nativeMeta.eType == kMetadataBufferTypeANWBuffer) {
+ handle = (native_handle_t *)nativeMeta.pBuffer->handle;
+ }
info->mData->meta()->setPointer("handle", handle);
info->mData->meta()->setInt32("rangeOffset", rangeOffset);
info->mData->meta()->setInt32("rangeLength", rangeLength);
@@ -4841,14 +5150,17 @@
break;
}
- default:
- {
- CHECK_EQ((int)mode, (int)FREE_BUFFERS);
-
- CHECK_EQ((status_t)OK,
- mCodec->freeBuffer(kPortIndexOutput, index));
+ case FREE_BUFFERS:
+ err = mCodec->freeBuffer(kPortIndexOutput, index);
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ return true;
+ }
break;
- }
+
+ default:
+ ALOGE("Invalid port mode: %d", mode);
+ return false;
}
return true;
@@ -4858,15 +5170,19 @@
IOMX::buffer_id bufferID;
CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
ssize_t index;
- BufferInfo *info =
- mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
- CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM);
+ BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
+ BufferInfo::Status status = BufferInfo::getSafeStatus(info);
+ if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
+ ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
+ mCodec->dumpBuffers(kPortIndexOutput);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return;
+ }
android_native_rect_t crop;
- if (msg->findRect("crop",
- &crop.left, &crop.top, &crop.right, &crop.bottom)) {
- CHECK_EQ(0, native_window_set_crop(
- mCodec->mNativeWindow.get(), &crop));
+ if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) {
+ status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
+ ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
}
int32_t render;
@@ -4876,6 +5192,14 @@
ATRACE_NAME("render");
// The client wants this buffer to be rendered.
+ // save buffers sent to the surface so we can get render time when they return
+ int64_t mediaTimeUs = -1;
+ info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
+ if (mediaTimeUs >= 0) {
+ mCodec->mRenderTracker.onFrameQueued(
+ mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
+ }
+
int64_t timestampNs = 0;
if (!msg->findInt64("timestampNs", &timestampNs)) {
// TODO: it seems like we should use the timestamp
@@ -4892,21 +5216,25 @@
status_t err;
err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
- if (err != OK) {
- ALOGW("failed to set buffer timestamp: %d", err);
- }
+ ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);
- if ((err = mCodec->mNativeWindow->queueBuffer(
- mCodec->mNativeWindow.get(),
- info->mGraphicBuffer.get(), -1)) == OK) {
+ info->checkReadFence("onOutputBufferDrained before queueBuffer");
+ err = mCodec->mNativeWindow->queueBuffer(
+ mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err == OK) {
info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
} else {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
info->mStatus = BufferInfo::OWNED_BY_US;
+ // keeping read fence as write fence to avoid clobbering
+ info->mIsReadFence = false;
}
} else {
if (mCodec->mNativeWindow != NULL &&
(info->mData == NULL || info->mData->size() != 0)) {
+ // move read fence into write fence to avoid clobbering
+ info->mIsReadFence = false;
ATRACE_NAME("frame-drop");
}
info->mStatus = BufferInfo::OWNED_BY_US;
@@ -4941,24 +5269,32 @@
if (info != NULL) {
ALOGV("[%s] calling fillBuffer %u",
mCodec->mComponentName.c_str(), info->mBufferID);
-
- CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
- (status_t)OK);
-
- info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
+ status_t err = mCodec->mOMX->fillBuffer(
+ mCodec->mNode, info->mBufferID, info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err == OK) {
+ info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+ } else {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ }
}
}
break;
}
- default:
+ case FREE_BUFFERS:
{
- CHECK_EQ((int)mode, (int)FREE_BUFFERS);
-
- CHECK_EQ((status_t)OK,
- mCodec->freeBuffer(kPortIndexOutput, index));
+ status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ }
break;
}
+
+ default:
+ ALOGE("Invalid port mode: %d", mode);
+ return;
}
}
@@ -4981,7 +5317,8 @@
mCodec->mOMX.clear();
mCodec->mQuirks = 0;
mCodec->mFlags = 0;
- mCodec->mUseMetadataOnEncoderOutput = 0;
+ mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
+ mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
mCodec->mComponentName.clear();
}
@@ -5108,7 +5445,7 @@
sp<CodecObserver> observer = new CodecObserver;
IOMX::node_id node = 0;
- status_t err = OMX_ErrorComponentNotFound;
+ status_t err = NAME_NOT_FOUND;
for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
++matchIndex) {
componentName = matchingCodecs.itemAt(matchIndex).mName.string();
@@ -5141,10 +5478,11 @@
return false;
}
- notify = new AMessage(kWhatOMXMessage, mCodec);
+ notify = new AMessage(kWhatOMXMessageList, mCodec);
observer->setNotificationMessage(notify);
mCodec->mComponentName = componentName;
+ mCodec->mRenderTracker.setComponentName(componentName);
mCodec->mFlags = 0;
if (componentName.endsWith(".secure")) {
@@ -5184,7 +5522,7 @@
mCodec->mInputEOSResult = OK;
mCodec->mDequeueCounter = 0;
- mCodec->mMetaDataBuffersToSubmit = 0;
+ mCodec->mMetadataBuffersToSubmit = 0;
mCodec->mRepeatFrameDelayUs = -1ll;
mCodec->mInputFormat.clear();
mCodec->mOutputFormat.clear();
@@ -5205,7 +5543,7 @@
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
if (!keepComponentAllocated) {
- CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK);
+ (void)mCodec->mOMX->freeNode(mCodec->mNode);
mCodec->changeState(mCodec->mUninitializedState);
}
@@ -5236,9 +5574,9 @@
break;
}
- case ACodec::kWhatUsePersistentInputSurface:
+ case ACodec::kWhatSetInputSurface:
{
- onUsePersistentInputSurface(msg);
+ onSetInputSurface(msg);
handled = true;
break;
}
@@ -5286,11 +5624,13 @@
CHECK(mCodec->mNode != 0);
+ status_t err = OK;
AString mime;
- CHECK(msg->findString("mime", &mime));
-
- status_t err = mCodec->configureCodec(mime.c_str(), msg);
-
+ if (!msg->findString("mime", &mime)) {
+ err = BAD_VALUE;
+ } else {
+ err = mCodec->configureCodec(mime.c_str(), msg);
+ }
if (err != OK) {
ALOGE("[%s] configureCodec returning error %d",
mCodec->mComponentName.c_str(), err);
@@ -5411,7 +5751,7 @@
sp<IGraphicBufferProducer> bufferProducer;
status_t err = mCodec->mOMX->createInputSurface(
- mCodec->mNode, kPortIndexInput, &bufferProducer);
+ mCodec->mNode, kPortIndexInput, &bufferProducer, &mCodec->mInputMetadataType);
if (err == OK) {
err = setupInputSurface();
@@ -5431,9 +5771,9 @@
notify->post();
}
-void ACodec::LoadedState::onUsePersistentInputSurface(
+void ACodec::LoadedState::onSetInputSurface(
const sp<AMessage> &msg) {
- ALOGV("onUsePersistentInputSurface");
+ ALOGV("onSetInputSurface");
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);
@@ -5442,8 +5782,9 @@
CHECK(msg->findObject("input-surface", &obj));
sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());
- status_t err = mCodec->mOMX->usePersistentInputSurface(
- mCodec->mNode, kPortIndexInput, surface->getBufferConsumer());
+ status_t err = mCodec->mOMX->setInputSurface(
+ mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
+ &mCodec->mInputMetadataType);
if (err == OK) {
err = setupInputSurface();
@@ -5453,7 +5794,7 @@
// Can't use mCodec->signalError() here -- MediaCodec won't forward
// the error through because it's in the "configured" state. We
// send a kWhatInputSurfaceAccepted with an error value instead.
- ALOGE("[%s] onUsePersistentInputSurface returning error %d",
+ ALOGE("[%s] onSetInputSurface returning error %d",
mCodec->mComponentName.c_str(), err);
notify->setInt32("err", err);
}
@@ -5463,11 +5804,12 @@
void ACodec::LoadedState::onStart() {
ALOGV("onStart");
- CHECK_EQ(mCodec->mOMX->sendCommand(
- mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle),
- (status_t)OK);
-
- mCodec->changeState(mCodec->mLoadedToIdleState);
+ status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ } else {
+ mCodec->changeState(mCodec->mLoadedToIdleState);
+ }
}
////////////////////////////////////////////////////////////////////////////////
@@ -5541,14 +5883,25 @@
switch (event) {
case OMX_EventCmdComplete:
{
- CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
- CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);
+ status_t err = OK;
+ if (data1 != (OMX_U32)OMX_CommandStateSet
+ || data2 != (OMX_U32)OMX_StateIdle) {
+ ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
+ asString((OMX_COMMANDTYPE)data1), data1,
+ asString((OMX_STATETYPE)data2), data2);
+ err = FAILED_TRANSACTION;
+ }
- CHECK_EQ(mCodec->mOMX->sendCommand(
- mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting),
- (status_t)OK);
+ if (err == OK) {
+ err = mCodec->mOMX->sendCommand(
+ mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
+ }
- mCodec->changeState(mCodec->mIdleToExecutingState);
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
+ } else {
+ mCodec->changeState(mCodec->mIdleToExecutingState);
+ }
return true;
}
@@ -5609,8 +5962,14 @@
switch (event) {
case OMX_EventCmdComplete:
{
- CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
- CHECK_EQ(data2, (OMX_U32)OMX_StateExecuting);
+ if (data1 != (OMX_U32)OMX_CommandStateSet
+ || data2 != (OMX_U32)OMX_StateExecuting) {
+ ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
+ asString((OMX_COMMANDTYPE)data1), data1,
+ asString((OMX_STATETYPE)data2), data2);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return true;
+ }
mCodec->mExecutingState->resume();
mCodec->changeState(mCodec->mExecutingState);
@@ -5642,59 +6001,77 @@
BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
- if (mCodec->submitOutputMetaDataBuffer() != OK)
+ if (mCodec->submitOutputMetadataBuffer() != OK)
break;
}
}
// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
- mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround();
+ mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}
void ACodec::ExecutingState::submitRegularOutputBuffers() {
+ bool failed = false;
for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);
if (mCodec->mNativeWindow != NULL) {
- CHECK(info->mStatus == BufferInfo::OWNED_BY_US
- || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW);
+ if (info->mStatus != BufferInfo::OWNED_BY_US
+ && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ ALOGE("buffers should be owned by us or the surface");
+ failed = true;
+ break;
+ }
if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
continue;
}
} else {
- CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+ if (info->mStatus != BufferInfo::OWNED_BY_US) {
+ ALOGE("buffers should be owned by us");
+ failed = true;
+ break;
+ }
}
- ALOGV("[%s] calling fillBuffer %u",
- mCodec->mComponentName.c_str(), info->mBufferID);
+ ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);
- CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
- (status_t)OK);
+ info->checkWriteFence("submitRegularOutputBuffers");
+ status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
+ info->mFenceFd = -1;
+ if (err != OK) {
+ failed = true;
+ break;
+ }
info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
}
+
+ if (failed) {
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ }
}
void ACodec::ExecutingState::submitOutputBuffers() {
submitRegularOutputBuffers();
- if (mCodec->mStoreMetaDataInOutputBuffers) {
+ if (mCodec->storingMetadataInDecodedBuffers()) {
submitOutputMetaBuffers();
}
}
void ACodec::ExecutingState::resume() {
if (mActive) {
- ALOGV("[%s] We're already active, no need to resume.",
- mCodec->mComponentName.c_str());
-
+ ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
return;
}
submitOutputBuffers();
// Post all available input buffers
- CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u);
+ if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
+ ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
+ }
+
for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
if (info->mStatus == BufferInfo::OWNED_BY_US) {
@@ -5708,6 +6085,7 @@
void ACodec::ExecutingState::stateEntered() {
ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());
+ mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
mCodec->processDeferredMessages();
}
@@ -5727,11 +6105,16 @@
mActive = false;
- CHECK_EQ(mCodec->mOMX->sendCommand(
- mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle),
- (status_t)OK);
-
- mCodec->changeState(mCodec->mExecutingToIdleState);
+ status_t err = mCodec->mOMX->sendCommand(
+ mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
+ if (err != OK) {
+ if (keepComponentAllocated) {
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ }
+ // TODO: do some recovery here.
+ } else {
+ mCodec->changeState(mCodec->mExecutingToIdleState);
+ }
handled = true;
break;
@@ -5749,11 +6132,13 @@
mActive = false;
- CHECK_EQ(mCodec->mOMX->sendCommand(
- mCodec->mNode, OMX_CommandFlush, OMX_ALL),
- (status_t)OK);
+ status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ } else {
+ mCodec->changeState(mCodec->mFlushingState);
+ }
- mCodec->changeState(mCodec->mFlushingState);
handled = true;
break;
}
@@ -5802,13 +6187,13 @@
}
// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
- case kWhatSubmitOutputMetaDataBufferIfEOS:
+ case kWhatSubmitOutputMetadataBufferIfEOS:
{
if (mCodec->mPortEOS[kPortIndexInput] &&
!mCodec->mPortEOS[kPortIndexOutput]) {
- status_t err = mCodec->submitOutputMetaDataBuffer();
+ status_t err = mCodec->submitOutputMetadataBuffer();
if (err == OK) {
- mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround();
+ mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}
}
return true;
@@ -5911,6 +6296,11 @@
notify->post();
}
+bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
+ mCodec->onFrameRendered(mediaTimeUs, systemNano);
+ return true;
+}
+
bool ACodec::ExecutingState::onOMXEvent(
OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
switch (event) {
@@ -5919,7 +6309,7 @@
CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);
if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
- mCodec->mMetaDataBuffersToSubmit = 0;
+ mCodec->mMetadataBuffersToSubmit = 0;
CHECK_EQ(mCodec->mOMX->sendCommand(
mCodec->mNode,
OMX_CommandPortDisable, kPortIndexOutput),
@@ -5998,31 +6388,46 @@
mCodec->mComponentName.c_str());
}
+bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
+ int64_t mediaTimeUs, nsecs_t systemNano) {
+ mCodec->onFrameRendered(mediaTimeUs, systemNano);
+ return true;
+}
+
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
switch (event) {
case OMX_EventCmdComplete:
{
if (data1 == (OMX_U32)OMX_CommandPortDisable) {
- CHECK_EQ(data2, (OMX_U32)kPortIndexOutput);
+ if (data2 != (OMX_U32)kPortIndexOutput) {
+ ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
+ return false;
+ }
- ALOGV("[%s] Output port now disabled.",
- mCodec->mComponentName.c_str());
+ ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());
- CHECK(mCodec->mBuffers[kPortIndexOutput].isEmpty());
- mCodec->mDealer[kPortIndexOutput].clear();
+ status_t err = OK;
+ if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
+ ALOGE("disabled port should be empty, but has %zu buffers",
+ mCodec->mBuffers[kPortIndexOutput].size());
+ err = FAILED_TRANSACTION;
+ } else {
+ mCodec->mDealer[kPortIndexOutput].clear();
+ }
- CHECK_EQ(mCodec->mOMX->sendCommand(
- mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput),
- (status_t)OK);
+ if (err == OK) {
+ err = mCodec->mOMX->sendCommand(
+ mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
+ }
- status_t err;
- if ((err = mCodec->allocateBuffersOnPort(
- kPortIndexOutput)) != OK) {
- ALOGE("Failed to allocate output port buffers after "
- "port reconfiguration (error 0x%08x)",
- err);
+ if (err == OK) {
+ err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
+ ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
+ "reconfiguration: (%d)", err);
+ }
+ if (err != OK) {
mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
// This is technically not correct, but appears to be
@@ -6037,12 +6442,14 @@
return true;
} else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
- CHECK_EQ(data2, (OMX_U32)kPortIndexOutput);
+ if (data2 != (OMX_U32)kPortIndexOutput) {
+ ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
+ return false;
+ }
mCodec->mSentFormat = false;
- ALOGV("[%s] Output port now reenabled.",
- mCodec->mComponentName.c_str());
+ ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());
if (mCodec->mExecutingState->active()) {
mCodec->mExecutingState->submitOutputBuffers();
@@ -6076,7 +6483,7 @@
{
// Don't send me a flush request if you previously wanted me
// to shutdown.
- TRESPASS();
+ ALOGW("Ignoring flush request in ExecutingToIdleState");
break;
}
@@ -6108,8 +6515,14 @@
switch (event) {
case OMX_EventCmdComplete:
{
- CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
- CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);
+ if (data1 != (OMX_U32)OMX_CommandStateSet
+ || data2 != (OMX_U32)OMX_StateIdle) {
+ ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
+ asString((OMX_COMMANDTYPE)data1), data1,
+ asString((OMX_STATETYPE)data2), data2);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return true;
+ }
mComponentNowIdle = true;
@@ -6132,12 +6545,15 @@
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
- CHECK_EQ(mCodec->mOMX->sendCommand(
- mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded),
- (status_t)OK);
-
- CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexInput), (status_t)OK);
- CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexOutput), (status_t)OK);
+ status_t err = mCodec->mOMX->sendCommand(
+ mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
+ if (err == OK) {
+ err = mCodec->freeBuffersOnPort(kPortIndexInput);
+ status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
+ if (err == OK) {
+ err = err2;
+ }
+ }
if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
&& mCodec->mNativeWindow != NULL) {
@@ -6145,7 +6561,12 @@
// them has made it to the display. This allows the OMX
// component teardown to zero out any protected buffers
// without the risk of scanning out one of those buffers.
- mCodec->pushBlankBuffersToNativeWindow();
+ pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
+ }
+
+ if (err != OK) {
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return;
}
mCodec->changeState(mCodec->mIdleToLoadedState);
@@ -6188,7 +6609,7 @@
{
// Don't send me a flush request if you previously wanted me
// to shutdown.
- TRESPASS();
+ ALOGE("Got flush request in IdleToLoadedState");
break;
}
@@ -6209,8 +6630,14 @@
switch (event) {
case OMX_EventCmdComplete:
{
- CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
- CHECK_EQ(data2, (OMX_U32)OMX_StateLoaded);
+ if (data1 != (OMX_U32)OMX_CommandStateSet
+ || data2 != (OMX_U32)OMX_StateLoaded) {
+ ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
+ asString((OMX_COMMANDTYPE)data1), data1,
+ asString((OMX_STATETYPE)data2), data2);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return true;
+ }
mCodec->changeState(mCodec->mLoadedState);
@@ -6267,22 +6694,35 @@
switch (event) {
case OMX_EventCmdComplete:
{
- CHECK_EQ(data1, (OMX_U32)OMX_CommandFlush);
+ if (data1 != (OMX_U32)OMX_CommandFlush) {
+ ALOGE("unexpected EventCmdComplete %s(%u) data2:%u in FlushingState",
+ asString((OMX_COMMANDTYPE)data1), data1, data2);
+ mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
+ return true;
+ }
if (data2 == kPortIndexInput || data2 == kPortIndexOutput) {
- CHECK(!mFlushComplete[data2]);
+ if (mFlushComplete[data2]) {
+ ALOGW("Flush already completed for %s port",
+ data2 == kPortIndexInput ? "input" : "output");
+ return true;
+ }
mFlushComplete[data2] = true;
- if (mFlushComplete[kPortIndexInput]
- && mFlushComplete[kPortIndexOutput]) {
+ if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) {
changeStateIfWeOwnAllBuffers();
}
- } else {
- CHECK_EQ(data2, OMX_ALL);
- CHECK(mFlushComplete[kPortIndexInput]);
- CHECK(mFlushComplete[kPortIndexOutput]);
+ } else if (data2 == OMX_ALL) {
+ if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) {
+ ALOGW("received flush complete event for OMX_ALL before ports have been "
+ "flushed (%d/%d)",
+ mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]);
+ return false;
+ }
changeStateIfWeOwnAllBuffers();
+ } else {
+ ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2);
}
return true;
@@ -6332,6 +6772,8 @@
// the native window for rendering. Let's get those back as well.
mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();
+ mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
+
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", CodecBase::kWhatFlushCompleted);
notify->post();
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 45581f3..69128bd 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -23,6 +23,7 @@
ESDS.cpp \
FileSource.cpp \
FLACExtractor.cpp \
+ FrameRenderTracker.cpp \
HTTPBase.cpp \
JPEGSource.cpp \
MP3Extractor.cpp \
@@ -57,6 +58,7 @@
StagefrightMediaScanner.cpp \
StagefrightMetadataRetriever.cpp \
SurfaceMediaSource.cpp \
+ SurfaceUtils.cpp \
ThrottledSource.cpp \
TimeSource.cpp \
TimedEventQueue.cpp \
@@ -89,6 +91,7 @@
libicuuc \
liblog \
libmedia \
+ libmediautils \
libnetd_client \
libopus \
libsonivox \
@@ -123,7 +126,9 @@
libdl \
libRScpp \
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
+LOCAL_CFLAGS += -Wno-multichar -Werror -Wno-error=deprecated-declarations -Wall \
+ -DENABLE_STAGEFRIGHT_EXPERIMENTS
+
LOCAL_CLANG := true
LOCAL_MODULE:= libstagefright
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index e5a6a9b..34f0148 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -85,6 +85,9 @@
this,
frameCount /*notificationFrames*/);
mInitCheck = mRecord->initCheck();
+ if (mInitCheck != OK) {
+ mRecord.clear();
+ }
} else {
mInitCheck = status;
}
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index df01e7c..4e6c2a6 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -113,11 +113,11 @@
CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
render((const uint8_t *)buffer->data() + buffer->range_offset(),
- buffer->range_length(), timeUs * 1000);
+ buffer->range_length(), timeUs, timeUs * 1000);
}
- void render(const void *data, size_t size, int64_t timestampNs) {
- mTarget->render(data, size, timestampNs, NULL, mFormat);
+ void render(const void *data, size_t size, int64_t mediaTimeUs, nsecs_t renderTimeNs) {
+ (void)mTarget->render(data, size, mediaTimeUs, renderTimeNs, NULL, mFormat);
}
protected:
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 1b788f3..2606e44 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -608,6 +608,16 @@
}
}
+ err = mCamera->sendCommand(
+ CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);
+
+ // This could happen for CameraHAL1 clients; thus the failure is
+ // not a fatal error
+ if (err != OK) {
+ ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
+ mEncoderFormat, mEncoderDataSpace, err);
+ }
+
err = OK;
if (mCameraFlags & FLAGS_HOT_CAMERA) {
mCamera->unlock();
@@ -645,6 +655,9 @@
mStartTimeUs = 0;
mNumInputBuffers = 0;
+ mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ mEncoderDataSpace = HAL_DATASPACE_BT709;
+
if (meta) {
int64_t startTimeUs;
if (meta->findInt64(kKeyTime, &startTimeUs)) {
@@ -656,6 +669,10 @@
CHECK_GT(nBuffers, 0);
mNumInputBuffers = nBuffers;
}
+
+ // TODO: Read in format/dataspace from somewhere
+ // Uncomment to test SW encoders until TODO is resolved
+ // mEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
}
status_t err;
diff --git a/media/libstagefright/FrameRenderTracker.cpp b/media/libstagefright/FrameRenderTracker.cpp
new file mode 100644
index 0000000..ebd2197
--- /dev/null
+++ b/media/libstagefright/FrameRenderTracker.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FrameRenderTracker"
+
+#include <inttypes.h>
+#include <gui/Surface.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/FrameRenderTracker.h>
+
+namespace android {
+
+FrameRenderTracker::FrameRenderTracker()
+ : mLastRenderTimeNs(-1),
+ mComponentName("unknown component") {
+}
+
+FrameRenderTracker::~FrameRenderTracker() {
+}
+
+void FrameRenderTracker::setComponentName(const AString &componentName) {
+ mComponentName = componentName;
+}
+
+void FrameRenderTracker::clear(nsecs_t lastRenderTimeNs) {
+ mRenderQueue.clear();
+ mLastRenderTimeNs = lastRenderTimeNs;
+}
+
+void FrameRenderTracker::onFrameQueued(
+ int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer, const sp<Fence> &fence) {
+ mRenderQueue.emplace_back(mediaTimeUs, graphicBuffer, fence);
+}
+
+FrameRenderTracker::Info *FrameRenderTracker::updateInfoForDequeuedBuffer(
+ ANativeWindowBuffer *buf, int fenceFd, int index) {
+ if (index < 0) {
+ return NULL;
+ }
+
+ // see if this is a buffer that was to be rendered
+ std::list<Info>::iterator renderInfo = mRenderQueue.end();
+ for (std::list<Info>::iterator it = mRenderQueue.begin();
+ it != mRenderQueue.end(); ++it) {
+ if (it->mGraphicBuffer->handle == buf->handle) {
+ renderInfo = it;
+ break;
+ }
+ }
+ if (renderInfo == mRenderQueue.end()) {
+ // could have been canceled after fence has signaled
+ return NULL;
+ }
+
+ if (renderInfo->mIndex >= 0) {
+ // buffer has been dequeued before, so there is nothing to do
+ return NULL;
+ }
+
+ // was this frame dropped (we could also infer this if the fence is invalid or a dup of
+ // the queued fence; however, there is no way to figure that out.)
+ if (fenceFd < 0) {
+ // frame is new or was dropped
+ mRenderQueue.erase(renderInfo);
+ return NULL;
+ }
+
+ // store dequeue fence and buffer index
+ renderInfo->mFence = new Fence(::dup(fenceFd));
+ renderInfo->mIndex = index;
+ return &*renderInfo;
+}
+
+status_t FrameRenderTracker::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
+ // ensure monotonic timestamps
+ if (mLastRenderTimeNs >= systemNano) {
+ ALOGW("[%s] Ignoring out of order/stale system nano %lld for media time %lld from codec.",
+ mComponentName.c_str(), (long long)systemNano, (long long)mediaTimeUs);
+ return BAD_VALUE;
+ }
+
+ nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
+ if (systemNano > now) {
+ ALOGW("[%s] Ignoring system nano %lld in the future for media time %lld from codec.",
+ mComponentName.c_str(), (long long)systemNano, (long long)mediaTimeUs);
+ return OK;
+ }
+
+ mRenderQueue.emplace_back(mediaTimeUs, systemNano);
+ mLastRenderTimeNs = systemNano;
+ return OK;
+}
+
+std::list<FrameRenderTracker::Info> FrameRenderTracker::checkFencesAndGetRenderedFrames(
+ const FrameRenderTracker::Info *until, bool dropIncomplete) {
+ std::list<Info> done;
+
+ // complete any frames queued prior to this and drop any incomplete ones if requested
+ for (std::list<Info>::iterator it = mRenderQueue.begin();
+ it != mRenderQueue.end(); ) {
+ bool drop = false; // whether to drop each frame
+ if (it->mIndex < 0) {
+ // frame not yet dequeued (or already rendered on a tunneled surface)
+ drop = dropIncomplete;
+ } else if (it->mFence != NULL) {
+ // check if fence signaled
+ nsecs_t signalTime = it->mFence->getSignalTime();
+ if (signalTime < 0) { // invalid fence
+ drop = true;
+ } else if (signalTime == INT64_MAX) { // unsignaled fence
+ drop = dropIncomplete;
+ } else { // signaled
+ // save render time
+ it->mFence.clear();
+ it->mRenderTimeNs = signalTime;
+ }
+ }
+ bool foundFrame = (Info *)&*it == until;
+
+ // Return frames with signaled fences at the start of the queue, as they are
+ // in submit order, and we don't have to wait for any in-between frames.
+ // Also return any dropped frames.
+ if (drop || (it->mFence == NULL && it == mRenderQueue.begin())) {
+ // (unrendered) dropped frames have their mRenderTimeNs still set to -1
+ done.splice(done.end(), mRenderQueue, it++);
+ } else {
+ ++it;
+ }
+ if (foundFrame) {
+ break;
+ }
+ }
+
+ return done;
+}
+
+void FrameRenderTracker::untrackFrame(const FrameRenderTracker::Info *info) {
+ if (info != NULL) {
+ for (std::list<Info>::iterator it = mRenderQueue.begin();
+ it != mRenderQueue.end(); ++it) {
+ if (&*it == info) {
+ mRenderQueue.erase(it);
+ return;
+ }
+ }
+ }
+}
+
+void FrameRenderTracker::dumpRenderQueue() const {
+ ALOGI("[%s] Render Queue: (last render time: %lldns)",
+ mComponentName.c_str(), (long long)mLastRenderTimeNs);
+ for (std::list<Info>::const_iterator it = mRenderQueue.cbegin();
+ it != mRenderQueue.cend(); ++it) {
+ if (it->mFence == NULL) {
+ ALOGI(" RENDERED: handle: %p, media time: %lldus, index: %zd, render time: %lldns",
+ it->mGraphicBuffer == NULL ? NULL : it->mGraphicBuffer->handle,
+ (long long)it->mMediaTimeUs, it->mIndex, (long long)it->mRenderTimeNs);
+ } else if (it->mIndex < 0) {
+ ALOGI(" QUEUED: handle: %p, media time: %lldus, fence: %s",
+ it->mGraphicBuffer->handle, (long long)it->mMediaTimeUs,
+ it->mFence->isValid() ? "YES" : "NO");
+ } else {
+ ALOGI(" DEQUEUED: handle: %p, media time: %lldus, index: %zd",
+ it->mGraphicBuffer->handle, (long long)it->mMediaTimeUs, it->mIndex);
+ }
+ }
+}
+
+} // namespace android
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 36b7e94..8bf47b1 100755
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -33,6 +33,7 @@
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDefs.h>
@@ -221,8 +222,7 @@
ssize_t MPEG4DataSource::readAt(off64_t offset, void *data, size_t size) {
Mutex::Autolock autoLock(mLock);
- if (offset >= mCachedOffset
- && offset + size <= mCachedOffset + mCachedSize) {
+ if (isInRange(mCachedOffset, mCachedSize, offset, size)) {
memcpy(data, &mCache[offset - mCachedOffset], size);
return size;
}
@@ -1586,13 +1586,13 @@
break;
}
- // ©xyz
+ // \xA9xyz
case FOURCC(0xA9, 'x', 'y', 'z'):
{
*offset += chunk_size;
- // Best case the total data length inside "©xyz" box
- // would be 8, for instance "©xyz" + "\x00\x04\x15\xc7" + "0+0/",
+ // Best case the total data length inside "\xA9xyz" box
+ // would be 8, for instance "\xA9xyz" + "\x00\x04\x15\xc7" + "0+0/",
// where "\x00\x04" is the text string length with value = 4,
// "\0x15\xc7" is the language code = en, and "0+0" is a
// location (string) value with longitude = 0 and latitude = 0.
@@ -1992,6 +1992,10 @@
size = 0;
}
+ if (SIZE_MAX - chunk_size <= size) {
+ return ERROR_MALFORMED;
+ }
+
uint8_t *buffer = new (std::nothrow) uint8_t[size + chunk_size];
if (buffer == NULL) {
return ERROR_MALFORMED;
@@ -2025,14 +2029,22 @@
*offset += chunk_size;
if (mFileMetaData != NULL) {
- ALOGV("chunk_data_size = %lld and data_offset = %lld",
- (long long)chunk_data_size, (long long)data_offset);
+ ALOGV("chunk_data_size = %" PRId64 " and data_offset = %" PRId64,
+ chunk_data_size, data_offset);
+
+ if (chunk_data_size < 0 || static_cast<uint64_t>(chunk_data_size) >= SIZE_MAX - 1) {
+ return ERROR_MALFORMED;
+ }
sp<ABuffer> buffer = new ABuffer(chunk_data_size + 1);
if (mDataSource->readAt(
data_offset, buffer->data(), chunk_data_size) != (ssize_t)chunk_data_size) {
return ERROR_IO;
}
const int kSkipBytesOfDataBox = 16;
+ if (chunk_data_size <= kSkipBytesOfDataBox) {
+ return ERROR_MALFORMED;
+ }
+
mFileMetaData->setData(
kKeyAlbumArt, MetaData::TYPE_NONE,
buffer->data() + kSkipBytesOfDataBox, chunk_data_size - kSkipBytesOfDataBox);
@@ -2616,11 +2628,11 @@
}
status_t MPEG4Extractor::parse3GPPMetaData(off64_t offset, size_t size, int depth) {
- if (size < 4) {
+ if (size < 4 || size == SIZE_MAX) {
return ERROR_MALFORMED;
}
- uint8_t *buffer = new (std::nothrow) uint8_t[size];
+ uint8_t *buffer = new (std::nothrow) uint8_t[size + 1];
if (buffer == NULL) {
return ERROR_MALFORMED;
}
@@ -2689,6 +2701,10 @@
int len16 = 0; // Number of UTF-16 characters
// smallest possible valid UTF-16 string w BOM: 0xfe 0xff 0x00 0x00
+ if (size < 6) {
+ return ERROR_MALFORMED;
+ }
+
if (size - 6 >= 4) {
len16 = ((size - 6) / 2) - 1; // don't include 0x0000 terminator
framedata = (char16_t *)(buffer + 6);
@@ -2712,6 +2728,7 @@
}
if (isUTF8) {
+ buffer[size] = 0;
mFileMetaData->setCString(metadataKey, (const char *)buffer + 6);
} else {
// Convert from UTF-16 string to UTF-8 string.
@@ -3289,16 +3306,24 @@
mWantsNALFragments = false;
}
+ int32_t tmp;
+ CHECK(mFormat->findInt32(kKeyMaxInputSize, &tmp));
+ size_t max_size = tmp;
+
+ // A somewhat arbitrary limit that should be sufficient for 8k video frames
+ // If you see the message below for a valid input stream: increase the limit
+ if (max_size > 64 * 1024 * 1024) {
+ ALOGE("bogus max input size: %zu", max_size);
+ return ERROR_MALFORMED;
+ }
mGroup = new MediaBufferGroup;
-
- int32_t max_size;
- CHECK(mFormat->findInt32(kKeyMaxInputSize, &max_size));
-
mGroup->add_buffer(new MediaBuffer(max_size));
mSrcBuffer = new (std::nothrow) uint8_t[max_size];
if (mSrcBuffer == NULL) {
// file probably specified a bad max size
+ delete mGroup;
+ mGroup = NULL;
return ERROR_MALFORMED;
}
@@ -4115,12 +4140,12 @@
size_t dstOffset = 0;
while (srcOffset < size) {
- bool isMalFormed = (srcOffset + mNALLengthSize > size);
+ bool isMalFormed = !isInRange((size_t)0u, size, srcOffset, mNALLengthSize);
size_t nalLength = 0;
if (!isMalFormed) {
nalLength = parseNALSize(&mSrcBuffer[srcOffset]);
srcOffset += mNALLengthSize;
- isMalFormed = srcOffset + nalLength > size;
+ isMalFormed = !isInRange((size_t)0u, size, srcOffset, nalLength);
}
if (isMalFormed) {
@@ -4392,12 +4417,12 @@
size_t dstOffset = 0;
while (srcOffset < size) {
- bool isMalFormed = (srcOffset + mNALLengthSize > size);
+ bool isMalFormed = !isInRange((size_t)0u, size, srcOffset, mNALLengthSize);
size_t nalLength = 0;
if (!isMalFormed) {
nalLength = parseNALSize(&mSrcBuffer[srcOffset]);
srcOffset += mNALLengthSize;
- isMalFormed = srcOffset + nalLength > size;
+ isMalFormed = !isInRange((size_t)0u, size, srcOffset, nalLength);
}
if (isMalFormed) {
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 3bc22f2..47f114a 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -113,6 +113,8 @@
mCurrTableEntriesElement(NULL) {
CHECK_GT(mElementCapacity, 0);
CHECK_GT(mEntryCapacity, 0);
+ // Ensure no integer overflow on allocation in add().
+ CHECK_LT(mEntryCapacity, UINT32_MAX / mElementCapacity);
}
// Free the allocated memory.
@@ -369,15 +371,29 @@
mWriterThreadStarted(false),
mOffset(0),
mMdatOffset(0),
+ mMoovBoxBuffer(NULL),
+ mMoovBoxBufferOffset(0),
+ mWriteMoovBoxToMemory(false),
+ mFreeBoxOffset(0),
+ mStreamableFile(false),
mEstimatedMoovBoxSize(0),
mMoovExtraSize(0),
mInterleaveDurationUs(1000000),
+ mTimeScale(-1),
+ mStartTimestampUs(-1ll),
mLatitudex10000(0),
mLongitudex10000(0),
mAreGeoTagsAvailable(false),
mStartTimeOffsetMs(-1),
mMetaKeys(new AMessage()) {
addDeviceMeta();
+
+ // Verify mFd is seekable
+ off64_t off = lseek64(mFd, 0, SEEK_SET);
+ if (off < 0) {
+ ALOGE("cannot seek mFd: %s (%d)", strerror(errno), errno);
+ release();
+ }
}
MPEG4Writer::~MPEG4Writer() {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index ed4f682..7ee84a8 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -21,11 +21,11 @@
#include "include/avc_utils.h"
#include "include/SoftwareRenderer.h"
-#include <binder/IBatteryStats.h>
#include <binder/IMemory.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
+#include <gui/BufferQueue.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/IOMX.h>
@@ -46,6 +46,8 @@
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/SurfaceUtils.h>
+#include <mediautils/BatteryNotifier.h>
#include <private/android_filesystem_config.h>
#include <utils/Log.h>
#include <utils/Singleton.h>
@@ -61,7 +63,7 @@
}
static bool isResourceError(status_t err) {
- return (err == OMX_ErrorInsufficientResources);
+ return (err == NO_MEMORY);
}
static const int kMaxRetry = 2;
@@ -75,7 +77,7 @@
// codec is already gone.
return true;
}
- status_t err = codec->release();
+ status_t err = codec->reclaim();
if (err != OK) {
ALOGW("ResourceManagerClient failed to release codec with err %d", err);
}
@@ -106,134 +108,6 @@
DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
};
-struct MediaCodec::BatteryNotifier : public Singleton<BatteryNotifier> {
- BatteryNotifier();
- virtual ~BatteryNotifier();
-
- void noteStartVideo();
- void noteStopVideo();
- void noteStartAudio();
- void noteStopAudio();
- void onBatteryStatServiceDied();
-
-private:
- struct DeathNotifier : public IBinder::DeathRecipient {
- DeathNotifier() {}
- virtual void binderDied(const wp<IBinder>& /*who*/) {
- BatteryNotifier::getInstance().onBatteryStatServiceDied();
- }
- };
-
- Mutex mLock;
- int32_t mVideoRefCount;
- int32_t mAudioRefCount;
- sp<IBatteryStats> mBatteryStatService;
- sp<DeathNotifier> mDeathNotifier;
-
- sp<IBatteryStats> getBatteryService_l();
-
- DISALLOW_EVIL_CONSTRUCTORS(BatteryNotifier);
-};
-
-ANDROID_SINGLETON_STATIC_INSTANCE(MediaCodec::BatteryNotifier)
-
-MediaCodec::BatteryNotifier::BatteryNotifier() :
- mVideoRefCount(0),
- mAudioRefCount(0) {
-}
-
-sp<IBatteryStats> MediaCodec::BatteryNotifier::getBatteryService_l() {
- if (mBatteryStatService != NULL) {
- return mBatteryStatService;
- }
- // get battery service from service manager
- const sp<IServiceManager> sm(defaultServiceManager());
- if (sm != NULL) {
- const String16 name("batterystats");
- mBatteryStatService =
- interface_cast<IBatteryStats>(sm->getService(name));
- if (mBatteryStatService == NULL) {
- ALOGE("batterystats service unavailable!");
- return NULL;
- }
- mDeathNotifier = new DeathNotifier();
- IInterface::asBinder(mBatteryStatService)->linkToDeath(mDeathNotifier);
- // notify start now if media already started
- if (mVideoRefCount > 0) {
- mBatteryStatService->noteStartVideo(AID_MEDIA);
- }
- if (mAudioRefCount > 0) {
- mBatteryStatService->noteStartAudio(AID_MEDIA);
- }
- }
- return mBatteryStatService;
-}
-
-MediaCodec::BatteryNotifier::~BatteryNotifier() {
- if (mDeathNotifier != NULL) {
- IInterface::asBinder(mBatteryStatService)->
- unlinkToDeath(mDeathNotifier);
- }
-}
-
-void MediaCodec::BatteryNotifier::noteStartVideo() {
- Mutex::Autolock _l(mLock);
- sp<IBatteryStats> batteryService = getBatteryService_l();
- if (mVideoRefCount == 0 && batteryService != NULL) {
- batteryService->noteStartVideo(AID_MEDIA);
- }
- mVideoRefCount++;
-}
-
-void MediaCodec::BatteryNotifier::noteStopVideo() {
- Mutex::Autolock _l(mLock);
- if (mVideoRefCount == 0) {
- ALOGW("BatteryNotifier::noteStop(): video refcount is broken!");
- return;
- }
-
- sp<IBatteryStats> batteryService = getBatteryService_l();
-
- mVideoRefCount--;
- if (mVideoRefCount == 0 && batteryService != NULL) {
- batteryService->noteStopVideo(AID_MEDIA);
- }
-}
-
-void MediaCodec::BatteryNotifier::noteStartAudio() {
- Mutex::Autolock _l(mLock);
- sp<IBatteryStats> batteryService = getBatteryService_l();
- if (mAudioRefCount == 0 && batteryService != NULL) {
- batteryService->noteStartAudio(AID_MEDIA);
- }
- mAudioRefCount++;
-}
-
-void MediaCodec::BatteryNotifier::noteStopAudio() {
- Mutex::Autolock _l(mLock);
- if (mAudioRefCount == 0) {
- ALOGW("BatteryNotifier::noteStop(): audio refcount is broken!");
- return;
- }
-
- sp<IBatteryStats> batteryService = getBatteryService_l();
-
- mAudioRefCount--;
- if (mAudioRefCount == 0 && batteryService != NULL) {
- batteryService->noteStopAudio(AID_MEDIA);
- }
-}
-
-void MediaCodec::BatteryNotifier::onBatteryStatServiceDied() {
- Mutex::Autolock _l(mLock);
- mBatteryStatService.clear();
- mDeathNotifier.clear();
- // Do not reset mVideoRefCount and mAudioRefCount here. The ref
- // counting is independent of the battery service availability.
- // We need this if battery service becomes available after media
- // started.
-}
-
MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy() {
}
@@ -319,6 +193,27 @@
CHECK_EQ(client.connect(), (status_t)OK);
sp<IOMX> omx = client.interface();
+ const sp<IMediaCodecList> mediaCodecList = MediaCodecList::getInstance();
+ if (mediaCodecList == NULL) {
+ ALOGE("Failed to obtain MediaCodecList!");
+ return NULL; // if called from Java should raise IOException
+ }
+
+ AString tmp;
+ sp<AMessage> globalSettings = mediaCodecList->getGlobalSettings();
+ if (globalSettings == NULL || !globalSettings->findString(
+ kMaxEncoderInputBuffers, &tmp)) {
+ ALOGE("Failed to get encoder input buffer count!");
+ return NULL;
+ }
+
+ int32_t bufferCount = strtol(tmp.c_str(), NULL, 10);
+ if (bufferCount <= 0
+ || bufferCount > BufferQueue::MAX_MAX_ACQUIRED_BUFFERS) {
+ ALOGE("Encoder input buffer count is invalid!");
+ return NULL;
+ }
+
sp<IGraphicBufferProducer> bufferProducer;
sp<IGraphicBufferConsumer> bufferConsumer;
@@ -330,11 +225,20 @@
return NULL;
}
+ err = bufferConsumer->setMaxAcquiredBufferCount(bufferCount);
+
+ if (err != NO_ERROR) {
+ ALOGE("Unable to set BQ max acquired buffer count to %u: %d",
+ bufferCount, err);
+ return NULL;
+ }
+
return new PersistentSurface(bufferProducer, bufferConsumer);
}
MediaCodec::MediaCodec(const sp<ALooper> &looper)
: mState(UNINITIALIZED),
+ mReleasedByResourceManager(false),
mLooper(looper),
mCodec(NULL),
mReplyID(0),
@@ -376,10 +280,15 @@
return err;
}
-// static
void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
+ int32_t finalErr = err;
+ if (mReleasedByResourceManager) {
+ // override the err code if MediaCodec has been released by ResourceManager.
+ finalErr = DEAD_OBJECT;
+ }
+
sp<AMessage> response = new AMessage;
- response->setInt32("err", err);
+ response->setInt32("err", finalErr);
response->postReply(replyID);
}
@@ -488,6 +397,12 @@
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> ¬ify) {
+ sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
+ msg->setMessage("on-frame-rendered", notify);
+ return msg->post();
+}
+
status_t MediaCodec::configure(
const sp<AMessage> &format,
const sp<Surface> &surface,
@@ -547,9 +462,9 @@
return err;
}
-status_t MediaCodec::usePersistentInputSurface(
+status_t MediaCodec::setInputSurface(
const sp<PersistentSurface> &surface) {
- sp<AMessage> msg = new AMessage(kWhatUsePersistentInputSurface, this);
+ sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
msg->setObject("input-surface", surface.get());
sp<AMessage> response;
@@ -653,6 +568,14 @@
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::reclaim() {
+ sp<AMessage> msg = new AMessage(kWhatRelease, this);
+ msg->setInt32("reclaimed", 1);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
status_t MediaCodec::release() {
sp<AMessage> msg = new AMessage(kWhatRelease, this);
@@ -919,6 +842,10 @@
sp<ABuffer> *buffer, sp<AMessage> *format) {
// use mutex instead of a context switch
+ if (mReleasedByResourceManager) {
+ return DEAD_OBJECT;
+ }
+
buffer->clear();
format->clear();
if (!isExecuting()) {
@@ -1008,20 +935,19 @@
}
bool MediaCodec::handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
- sp<AMessage> response = new AMessage;
-
if (!isExecuting() || (mFlags & kFlagIsAsync)
|| (newRequest && (mFlags & kFlagDequeueOutputPending))) {
- response->setInt32("err", INVALID_OPERATION);
+ PostReplyWithError(replyID, INVALID_OPERATION);
} else if (mFlags & kFlagStickyError) {
- response->setInt32("err", getStickyError());
+ PostReplyWithError(replyID, getStickyError());
} else if (mFlags & kFlagOutputBuffersChanged) {
- response->setInt32("err", INFO_OUTPUT_BUFFERS_CHANGED);
+ PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);
mFlags &= ~kFlagOutputBuffersChanged;
} else if (mFlags & kFlagOutputFormatChanged) {
- response->setInt32("err", INFO_FORMAT_CHANGED);
+ PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
mFlags &= ~kFlagOutputFormatChanged;
} else {
+ sp<AMessage> response = new AMessage;
ssize_t index = dequeuePortBuffer(kPortIndexOutput);
if (index < 0) {
@@ -1056,10 +982,9 @@
}
response->setInt32("flags", flags);
+ response->postReply(replyID);
}
- response->postReply(replyID);
-
return true;
}
@@ -1273,7 +1198,7 @@
case CodecBase::kWhatInputSurfaceAccepted:
{
- // response to initiateUsePersistentInputSurface()
+ // response to initiateSetInputSurface()
status_t err = NO_ERROR;
sp<AMessage> response = new AMessage();
if (!msg->findInt32("err", &err)) {
@@ -1414,6 +1339,18 @@
break;
}
+ case CodecBase::kWhatOutputFramesRendered:
+ {
+ // ignore these in all states except running, and check that we have a
+ // notification set
+ if (mState == STARTED && mOnFrameRenderedNotification != NULL) {
+ sp<AMessage> notify = mOnFrameRenderedNotification->dup();
+ notify->setMessage("data", msg);
+ notify->post();
+ }
+ break;
+ }
+
case CodecBase::kWhatFillThisBuffer:
{
/* size_t index = */updateBuffers(kPortIndexInput, msg);
@@ -1611,6 +1548,15 @@
break;
}
+ case kWhatSetNotification:
+ {
+ sp<AMessage> notify;
+ if (msg->findMessage("on-frame-rendered", ¬ify)) {
+ mOnFrameRenderedNotification = notify;
+ }
+ break;
+ }
+
case kWhatSetCallback:
{
sp<AReplyToken> replyID;
@@ -1659,6 +1605,11 @@
sp<AMessage> format;
CHECK(msg->findMessage("format", &format));
+ int32_t push;
+ if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
+ mFlags |= kFlagPushBlankBuffersOnShutdown;
+ }
+
if (obj != NULL) {
format->setObject("native-window", obj);
status_t err = handleSetSurface(static_cast<Surface *>(obj.get()));
@@ -1725,6 +1676,10 @@
} else {
if (err == OK) {
if (mFlags & kFlagUsesSoftwareRenderer) {
+ if (mSoftRenderer != NULL
+ && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
+ pushBlankBuffersToNativeWindow(mSurface.get());
+ }
mSoftRenderer = new SoftwareRenderer(surface);
// TODO: check if this was successful
} else {
@@ -1750,7 +1705,7 @@
}
case kWhatCreateInputSurface:
- case kWhatUsePersistentInputSurface:
+ case kWhatSetInputSurface:
{
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
@@ -1768,7 +1723,7 @@
sp<RefBase> obj;
CHECK(msg->findObject("input-surface", &obj));
- mCodec->initiateUsePersistentInputSurface(
+ mCodec->initiateSetInputSurface(
static_cast<PersistentSurface *>(obj.get()));
}
break;
@@ -1808,6 +1763,20 @@
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ // already stopped/released
+ if (mState == UNINITIALIZED && mReleasedByResourceManager) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", OK);
+ response->postReply(replyID);
+ break;
+ }
+
+ int32_t reclaimed = 0;
+ msg->findInt32("reclaimed", &reclaimed);
+ if (reclaimed) {
+ mReleasedByResourceManager = true;
+ }
+
if (!((mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED) // See 1
&& mState != INITIALIZED
&& mState != CONFIGURED && !isExecuting()) {
@@ -1821,6 +1790,8 @@
// and it should be in this case, no harm to allow a release()
// if we're already uninitialized.
sp<AMessage> response = new AMessage;
+ // TODO: we shouldn't throw an exception for stop/release. Change this to wait until
+ // the previous stop/release completes and then reply with OK.
status_t err = mState == targetState ? OK : INVALID_OPERATION;
response->setInt32("err", err);
if (err == OK && targetState == UNINITIALIZED) {
@@ -1848,6 +1819,10 @@
msg->what() == kWhatStop /* keepComponentAllocated */);
returnBuffersToCodec();
+
+ if (mSoftRenderer != NULL && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
+ pushBlankBuffersToNativeWindow(mSurface.get());
+ }
break;
}
@@ -2424,6 +2399,23 @@
return OK;
}
+//static
+size_t MediaCodec::CreateFramesRenderedMessage(
+ std::list<FrameRenderTracker::Info> done, sp<AMessage> &msg) {
+ size_t index = 0;
+
+ for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
+ it != done.cend(); ++it) {
+ if (it->getRenderTimeNs() < 0) {
+ continue; // dropped frame from tracking
+ }
+ msg->setInt64(AStringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
+ msg->setInt64(AStringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
+ ++index;
+ }
+ return index;
+}
+
status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
size_t index;
CHECK(msg->findSize("index", &index));
@@ -2456,26 +2448,37 @@
if (render && info->mData != NULL && info->mData->size() != 0) {
info->mNotify->setInt32("render", true);
- int64_t timestampNs = 0;
- if (msg->findInt64("timestampNs", ×tampNs)) {
- info->mNotify->setInt64("timestampNs", timestampNs);
+ int64_t mediaTimeUs = -1;
+ info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
+
+ int64_t renderTimeNs = 0;
+ if (msg->findInt64("timestampNs", &renderTimeNs)) {
+ info->mNotify->setInt64("timestampNs", renderTimeNs);
} else {
// TODO: it seems like we should use the timestamp
// in the (media)buffer as it potentially came from
// an input surface, but we did not propagate it prior to
// API 20. Perhaps check for target SDK version.
#if 0
- if (info->mData->meta()->findInt64("timeUs", ×tampNs)) {
- ALOGV("using buffer PTS of %" PRId64, timestampNs);
- timestampNs *= 1000;
- }
+ ALOGV("using buffer PTS of %" PRId64, timestampNs);
+ renderTimeNs = mediaTimeUs * 1000;
#endif
}
if (mSoftRenderer != NULL) {
- mSoftRenderer->render(
+ std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
info->mData->data(), info->mData->size(),
- timestampNs, NULL, info->mFormat);
+ mediaTimeUs, renderTimeNs, NULL, info->mFormat);
+
+ // if we are running, notify rendered frames
+ if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
+ sp<AMessage> notify = mOnFrameRenderedNotification->dup();
+ sp<AMessage> data = new AMessage;
+ if (CreateFramesRenderedMessage(doneFrames, data)) {
+ notify->setMessage("data", data);
+ notify->post();
+ }
+ }
}
}
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index f12a913..d48ede9 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -24,7 +24,9 @@
#include <media/IMediaCodecList.h>
#include <media/IMediaPlayerService.h>
+#include <media/IResourceManagerService.h>
#include <media/MediaCodecInfo.h>
+#include <media/MediaResourcePolicy.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -40,12 +42,10 @@
namespace android {
+const char *kMaxEncoderInputBuffers = "max-video-encoder-input-buffers";
+
static Mutex sInitMutex;
-static MediaCodecList *gCodecList = NULL;
-
-static const char *kProfilingResults = "/data/misc/media/media_codecs_profiling_results.xml";
-
static bool parseBoolean(const char *s) {
if (!strcasecmp(s, "true") || !strcasecmp(s, "yes") || !strcasecmp(s, "y")) {
return true;
@@ -55,51 +55,76 @@
return *s != '\0' && *end == '\0' && res > 0;
}
+static bool isProfilingNeeded() {
+ bool profilingNeeded = true;
+ FILE *resultsFile = fopen(kProfilingResults, "r");
+ if (resultsFile) {
+ AString currentVersion = getProfilingVersionString();
+ size_t currentVersionSize = currentVersion.size();
+ char *versionString = new char[currentVersionSize + 1];
+ fgets(versionString, currentVersionSize + 1, resultsFile);
+ if (strcmp(versionString, currentVersion.c_str()) == 0) {
+ // profiling result up to date
+ profilingNeeded = false;
+ }
+ fclose(resultsFile);
+ delete[] versionString;
+ }
+ return profilingNeeded;
+}
+
// static
sp<IMediaCodecList> MediaCodecList::sCodecList;
// static
-sp<IMediaCodecList> MediaCodecList::getLocalInstance() {
- bool profilingNeeded = false;
- KeyedVector<AString, CodecSettings> updates;
+void *MediaCodecList::profilerThreadWrapper(void * /*arg*/) {
+ ALOGV("Enter profilerThreadWrapper.");
+ MediaCodecList *codecList = new MediaCodecList();
+ if (codecList->initCheck() != OK) {
+ ALOGW("Failed to create a new MediaCodecList, skipping codec profiling.");
+ delete codecList;
+ return NULL;
+ }
+
Vector<sp<MediaCodecInfo>> infos;
+ for (size_t i = 0; i < codecList->countCodecs(); ++i) {
+ infos.push_back(codecList->getCodecInfo(i));
+ }
+ ALOGV("Codec profiling started.");
+ profileCodecs(infos);
+ ALOGV("Codec profiling completed.");
+ codecList->parseTopLevelXMLFile(kProfilingResults, true /* ignore_errors */);
{
Mutex::Autolock autoLock(sInitMutex);
+ sCodecList = codecList;
+ }
+ return NULL;
+}
- if (gCodecList == NULL) {
- gCodecList = new MediaCodecList;
- if (gCodecList->initCheck() == OK) {
- sCodecList = gCodecList;
+// static
+sp<IMediaCodecList> MediaCodecList::getLocalInstance() {
+ Mutex::Autolock autoLock(sInitMutex);
- struct stat s;
- if (stat(kProfilingResults, &s) == -1) {
- // profiling results doesn't existed
- profilingNeeded = true;
- for (size_t i = 0; i < gCodecList->countCodecs(); ++i) {
- infos.push_back(gCodecList->getCodecInfo(i));
- }
+ if (sCodecList == NULL) {
+ MediaCodecList *codecList = new MediaCodecList;
+ if (codecList->initCheck() == OK) {
+ sCodecList = codecList;
+
+ if (isProfilingNeeded()) {
+ ALOGV("Codec profiling needed, will be run in separated thread.");
+ pthread_t profiler;
+ if (pthread_create(&profiler, NULL, profilerThreadWrapper, NULL) != 0) {
+ ALOGW("Failed to create thread for codec profiling.");
}
- } else {
- // failure to initialize may be temporary. retry on next call.
- delete gCodecList;
- gCodecList = NULL;
}
+ } else {
+ // failure to initialize may be temporary. retry on next call.
+ delete codecList;
}
}
- if (profilingNeeded) {
- profileCodecs(infos, &updates);
- }
-
- {
- Mutex::Autolock autoLock(sInitMutex);
- if (updates.size() > 0) {
- gCodecList->updateDetailsForMultipleCodecs(updates);
- }
-
- return sCodecList;
- }
+ return sCodecList;
}
static Mutex sRemoteInitMutex;
@@ -145,19 +170,6 @@
parseTopLevelXMLFile(kProfilingResults, true/* ignore_errors */);
}
-void MediaCodecList::updateDetailsForMultipleCodecs(
- const KeyedVector<AString, CodecSettings>& updates) {
- if (updates.size() == 0) {
- return;
- }
-
- exportResultsToXML(kProfilingResults, updates);
-
- for (size_t i = 0; i < updates.size(); ++i) {
- applyCodecSettings(updates.keyAt(i), updates.valueAt(i), &mCodecInfos);
- }
-}
-
void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml, bool ignore_errors) {
// get href_base
char *href_base_end = strrchr(codecs_xml, '/');
@@ -187,6 +199,25 @@
return;
}
+ Vector<MediaResourcePolicy> policies;
+ AString value;
+ if (mGlobalSettings->findString(kPolicySupportsMultipleSecureCodecs, &value)) {
+ policies.push_back(
+ MediaResourcePolicy(
+ String8(kPolicySupportsMultipleSecureCodecs),
+ String8(value.c_str())));
+ }
+ if (policies.size() > 0) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.resource_manager"));
+ sp<IResourceManagerService> service = interface_cast<IResourceManagerService>(binder);
+ if (service == NULL) {
+ ALOGE("MediaCodecList: failed to get ResourceManagerService");
+ } else {
+ service->config(policies);
+ }
+ }
+
for (size_t i = mCodecInfos.size(); i-- > 0;) {
const MediaCodecInfo &info = *mCodecInfos.itemAt(i).get();
if (info.mCaps.size() == 0) {
diff --git a/media/libstagefright/MediaCodecListOverrides.cpp b/media/libstagefright/MediaCodecListOverrides.cpp
index 265b1ea..a928163 100644
--- a/media/libstagefright/MediaCodecListOverrides.cpp
+++ b/media/libstagefright/MediaCodecListOverrides.cpp
@@ -20,16 +20,30 @@
#include "MediaCodecListOverrides.h"
+#include <cutils/properties.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/IMediaCodecList.h>
#include <media/MediaCodecInfo.h>
-
+#include <media/MediaResourcePolicy.h>
+#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecList.h>
namespace android {
+const char *kProfilingResults = "/data/misc/media/media_codecs_profiling_results.xml";
+
+AString getProfilingVersionString() {
+ char val[PROPERTY_VALUE_MAX];
+ if (property_get("ro.build.display.id", val, NULL) && (strlen(val) > 0)) {
+ return AStringPrintf("<!-- Profiled-with: %s -->", val);
+ }
+
+ return "<!-- Profiled-with: UNKNOWN_BUILD_ID -->";
+}
+
// a limit to avoid allocating unreasonable number of codec instances in the measurement.
// this should be in sync with the MAX_SUPPORTED_INSTANCES defined in MediaCodecInfo.java.
static const int kMaxInstances = 32;
@@ -84,6 +98,7 @@
int32_t bitrate = 0;
getMeasureBitrate(caps, &bitrate);
format->setInt32("bitrate", bitrate);
+ format->setInt32("encoder", 1);
}
if (mime.startsWith("video/")) {
@@ -112,15 +127,67 @@
return format;
}
+static size_t doProfileEncoderInputBuffers(
+ AString name, AString mime, sp<MediaCodecInfo::Capabilities> caps) {
+ ALOGV("doProfileEncoderInputBuffers: name %s, mime %s", name.c_str(), mime.c_str());
+
+ sp<AMessage> format = getMeasureFormat(true /* isEncoder */, mime, caps);
+ if (format == NULL) {
+ return 0;
+ }
+
+ format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
+ ALOGV("doProfileEncoderInputBuffers: format %s", format->debugString().c_str());
+
+ status_t err = OK;
+ sp<ALooper> looper = new ALooper;
+ looper->setName("MediaCodec_looper");
+ looper->start(
+ false /* runOnCallingThread */, false /* canCallJava */, ANDROID_PRIORITY_AUDIO);
+
+ sp<MediaCodec> codec = MediaCodec::CreateByComponentName(looper, name.c_str(), &err);
+ if (err != OK) {
+ ALOGE("Failed to create codec: %s", name.c_str());
+ return 0;
+ }
+
+ err = codec->configure(format, NULL, NULL, MediaCodec::CONFIGURE_FLAG_ENCODE);
+ if (err != OK) {
+ ALOGE("Failed to configure codec: %s with mime: %s", name.c_str(), mime.c_str());
+ codec->release();
+ return 0;
+ }
+
+ sp<IGraphicBufferProducer> bufferProducer;
+ err = codec->createInputSurface(&bufferProducer);
+ if (err != OK) {
+ ALOGE("Failed to create surface: %s with mime: %s", name.c_str(), mime.c_str());
+ codec->release();
+ return 0;
+ }
+
+ int minUndequeued = 0;
+ err = bufferProducer->query(
+ NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeued);
+ if (err != OK) {
+ ALOGE("Failed to query NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS");
+ minUndequeued = 0;
+ }
+
+ err = codec->release();
+ if (err != OK) {
+ ALOGW("Failed to release codec: %s with mime: %s", name.c_str(), mime.c_str());
+ }
+
+ return minUndequeued;
+}
+
static size_t doProfileCodecs(
bool isEncoder, AString name, AString mime, sp<MediaCodecInfo::Capabilities> caps) {
sp<AMessage> format = getMeasureFormat(isEncoder, mime, caps);
if (format == NULL) {
return 0;
}
- if (isEncoder) {
- format->setInt32("encoder", 1);
- }
ALOGV("doProfileCodecs %s %s %s %s",
name.c_str(), mime.c_str(), isEncoder ? "encoder" : "decoder",
format->debugString().c_str());
@@ -142,7 +209,7 @@
}
const sp<Surface> nativeWindow;
const sp<ICrypto> crypto;
- uint32_t flags = 0;
+ uint32_t flags = isEncoder ? MediaCodec::CONFIGURE_FLAG_ENCODE : 0;
ALOGV("doProfileCodecs configure");
err = codec->configure(format, nativeWindow, crypto, flags);
if (err != OK) {
@@ -171,20 +238,6 @@
return codecs.size();
}
-static void printLongString(const char *buf, size_t size) {
- AString print;
- const char *start = buf;
- size_t len;
- size_t totalLen = size;
- while (totalLen > 0) {
- len = (totalLen > 500) ? 500 : totalLen;
- print.setTo(start, len);
- ALOGV("%s", print.c_str());
- totalLen -= len;
- start += len;
- }
-}
-
bool splitString(const AString &s, const AString &delimiter, AString *s1, AString *s2) {
ssize_t pos = s.find(delimiter.c_str());
if (pos < 0) {
@@ -207,11 +260,23 @@
return true;
}
+void profileCodecs(const Vector<sp<MediaCodecInfo>> &infos) {
+ CodecSettings global_results;
+ KeyedVector<AString, CodecSettings> encoder_results;
+ KeyedVector<AString, CodecSettings> decoder_results;
+ profileCodecs(infos, &global_results, &encoder_results, &decoder_results);
+ exportResultsToXML(kProfilingResults, global_results, encoder_results, decoder_results);
+}
+
void profileCodecs(
const Vector<sp<MediaCodecInfo>> &infos,
- KeyedVector<AString, CodecSettings> *results,
+ CodecSettings *global_results,
+ KeyedVector<AString, CodecSettings> *encoder_results,
+ KeyedVector<AString, CodecSettings> *decoder_results,
bool forceToMeasure) {
KeyedVector<AString, sp<MediaCodecInfo::Capabilities>> codecsNeedMeasure;
+ AString supportMultipleSecureCodecs = "true";
+ size_t maxEncoderInputBuffers = 0;
for (size_t i = 0; i < infos.size(); ++i) {
const sp<MediaCodecInfo> info = infos[i];
AString name = info->getCodecName();
@@ -240,157 +305,107 @@
AString key = name;
key.append(" ");
key.append(mimes[i]);
- key.append(" ");
- key.append(info->isEncoder() ? "encoder" : "decoder");
- results->add(key, settings);
+
+ if (info->isEncoder()) {
+ encoder_results->add(key, settings);
+ } else {
+ decoder_results->add(key, settings);
+ }
+
+ if (name.endsWith(".secure")) {
+ if (max <= 1) {
+ supportMultipleSecureCodecs = "false";
+ }
+ }
+ if (info->isEncoder() && mimes[i].startsWith("video/")) {
+ size_t encoderInputBuffers =
+ doProfileEncoderInputBuffers(name, mimes[i], caps);
+ if (encoderInputBuffers > maxEncoderInputBuffers) {
+ maxEncoderInputBuffers = encoderInputBuffers;
+ }
+ }
}
}
}
+ if (maxEncoderInputBuffers > 0) {
+ char tmp[32];
+ sprintf(tmp, "%zu", maxEncoderInputBuffers);
+ global_results->add(kMaxEncoderInputBuffers, tmp);
+ }
+ global_results->add(kPolicySupportsMultipleSecureCodecs, supportMultipleSecureCodecs);
}
-void applyCodecSettings(
- const AString& codecInfo,
- const CodecSettings &settings,
- Vector<sp<MediaCodecInfo>> *infos) {
- AString name;
- AString mime;
- AString type;
- if (!splitString(codecInfo, " ", &name, &mime, &type)) {
- return;
- }
-
- for (size_t i = 0; i < infos->size(); ++i) {
- const sp<MediaCodecInfo> &info = infos->itemAt(i);
- if (name != info->getCodecName()) {
- continue;
- }
-
- Vector<AString> mimes;
- info->getSupportedMimes(&mimes);
- for (size_t j = 0; j < mimes.size(); ++j) {
- if (mimes[j] != mime) {
- continue;
- }
- const sp<MediaCodecInfo::Capabilities> &caps = info->getCapabilitiesFor(mime.c_str());
- for (size_t k = 0; k < settings.size(); ++k) {
- caps->getDetails()->setString(
- settings.keyAt(k).c_str(), settings.valueAt(k).c_str());
- }
- }
- }
-}
-
-void exportResultsToXML(const char *fileName, const KeyedVector<AString, CodecSettings>& results) {
-#if LOG_NDEBUG == 0
- ALOGE("measurement results");
+static AString globalResultsToXml(const CodecSettings& results) {
+ AString ret;
for (size_t i = 0; i < results.size(); ++i) {
- ALOGE("key %s", results.keyAt(i).c_str());
- const CodecSettings &settings = results.valueAt(i);
- for (size_t j = 0; j < settings.size(); ++j) {
- ALOGE("name %s value %s", settings.keyAt(j).c_str(), settings.valueAt(j).c_str());
- }
+ AString setting = AStringPrintf(
+ " <Setting name=\"%s\" value=\"%s\" />\n",
+ results.keyAt(i).c_str(),
+ results.valueAt(i).c_str());
+ ret.append(setting);
}
-#endif
+ return ret;
+}
- AString overrides;
- FILE *f = fopen(fileName, "rb");
- if (f != NULL) {
- fseek(f, 0, SEEK_END);
- long size = ftell(f);
- rewind(f);
-
- char *buf = (char *)malloc(size);
- if (fread(buf, size, 1, f) == 1) {
- overrides.setTo(buf, size);
- if (!LOG_NDEBUG) {
- ALOGV("Existing overrides:");
- printLongString(buf, size);
- }
- } else {
- ALOGE("Failed to read %s", fileName);
- }
- fclose(f);
- free(buf);
- }
-
+static AString codecResultsToXml(const KeyedVector<AString, CodecSettings>& results) {
+ AString ret;
for (size_t i = 0; i < results.size(); ++i) {
AString name;
AString mime;
- AString type;
- if (!splitString(results.keyAt(i), " ", &name, &mime, &type)) {
+ if (!splitString(results.keyAt(i), " ", &name, &mime)) {
continue;
}
- name = AStringPrintf("\"%s\"", name.c_str());
- mime = AStringPrintf("\"%s\"", mime.c_str());
- ALOGV("name(%s) mime(%s) type(%s)", name.c_str(), mime.c_str(), type.c_str());
- ssize_t posCodec = overrides.find(name.c_str());
- size_t posInsert = 0;
- if (posCodec < 0) {
- AString encodersDecoders = (type == "encoder") ? "<Encoders>" : "<Decoders>";
- AString encodersDecodersEnd = (type == "encoder") ? "</Encoders>" : "</Decoders>";
- ssize_t posEncodersDecoders = overrides.find(encodersDecoders.c_str());
- if (posEncodersDecoders < 0) {
- AString mediaCodecs = "<MediaCodecs>";
- ssize_t posMediaCodec = overrides.find(mediaCodecs.c_str());
- if (posMediaCodec < 0) {
- posMediaCodec = overrides.size();
- overrides.insert("\n<MediaCodecs>\n</MediaCodecs>\n", posMediaCodec);
- posMediaCodec = overrides.find(mediaCodecs.c_str(), posMediaCodec);
- }
- posEncodersDecoders = posMediaCodec + mediaCodecs.size();
- AString codecs = AStringPrintf(
- "\n %s\n %s", encodersDecoders.c_str(), encodersDecodersEnd.c_str());
- overrides.insert(codecs.c_str(), posEncodersDecoders);
- posEncodersDecoders = overrides.find(encodersDecoders.c_str(), posEncodersDecoders);
- }
- posCodec = posEncodersDecoders + encodersDecoders.size();
- AString codec = AStringPrintf(
- "\n <MediaCodec name=%s type=%s update=\"true\" >\n </MediaCodec>",
- name.c_str(),
- mime.c_str());
- overrides.insert(codec.c_str(), posCodec);
- posCodec = overrides.find(name.c_str());
- }
-
- // insert to existing entry
- ssize_t posMime = overrides.find(mime.c_str(), posCodec);
- ssize_t posEnd = overrides.find(">", posCodec);
- if (posEnd < 0) {
- ALOGE("Format error in overrides file.");
- return;
- }
- if (posMime < 0 || posMime > posEnd) {
- // new mime for an existing component
- AString codecEnd = "</MediaCodec>";
- posInsert = overrides.find(codecEnd.c_str(), posCodec) + codecEnd.size();
- AString codec = AStringPrintf(
- "\n <MediaCodec name=%s type=%s update=\"true\" >\n </MediaCodec>",
- name.c_str(),
- mime.c_str());
- overrides.insert(codec.c_str(), posInsert);
- posInsert = overrides.find(">", posInsert) + 1;
- } else {
- posInsert = posEnd + 1;
- }
-
+ AString codec =
+ AStringPrintf(" <MediaCodec name=\"%s\" type=\"%s\" update=\"true\" >\n",
+ name.c_str(),
+ mime.c_str());
+ ret.append(codec);
CodecSettings settings = results.valueAt(i);
for (size_t i = 0; i < settings.size(); ++i) {
// WARNING: we assume all the settings are "Limit". Currently we have only one type
// of setting in this case, which is "max-supported-instances".
- AString strInsert = AStringPrintf(
- "\n <Limit name=\"%s\" value=\"%s\" />",
+ AString setting = AStringPrintf(
+ " <Limit name=\"%s\" value=\"%s\" />\n",
settings.keyAt(i).c_str(),
settings.valueAt(i).c_str());
- overrides.insert(strInsert, posInsert);
+ ret.append(setting);
}
+ ret.append(" </MediaCodec>\n");
+ }
+ return ret;
+}
+
+void exportResultsToXML(
+ const char *fileName,
+ const CodecSettings& global_results,
+ const KeyedVector<AString, CodecSettings>& encoder_results,
+ const KeyedVector<AString, CodecSettings>& decoder_results) {
+ if (global_results.size() == 0 && encoder_results.size() == 0 && decoder_results.size() == 0) {
+ return;
}
- if (!LOG_NDEBUG) {
- ALOGV("New overrides:");
- printLongString(overrides.c_str(), overrides.size());
+ AString overrides;
+ overrides.append(getProfilingVersionString());
+ overrides.append("\n");
+ overrides.append("<MediaCodecs>\n");
+ if (global_results.size() > 0) {
+ overrides.append(" <Settings>\n");
+ overrides.append(globalResultsToXml(global_results));
+ overrides.append(" </Settings>\n");
}
+ if (encoder_results.size() > 0) {
+ overrides.append(" <Encoders>\n");
+ overrides.append(codecResultsToXml(encoder_results));
+ overrides.append(" </Encoders>\n");
+ }
+ if (decoder_results.size() > 0) {
+ overrides.append(" <Decoders>\n");
+ overrides.append(codecResultsToXml(decoder_results));
+ overrides.append(" </Decoders>\n");
+ }
+ overrides.append("</MediaCodecs>\n");
- f = fopen(fileName, "wb");
+ FILE *f = fopen(fileName, "wb");
if (f == NULL) {
ALOGE("Failed to open %s for writing.", fileName);
return;
diff --git a/media/libstagefright/MediaCodecListOverrides.h b/media/libstagefright/MediaCodecListOverrides.h
index c6cc2ea..d4bb225 100644
--- a/media/libstagefright/MediaCodecListOverrides.h
+++ b/media/libstagefright/MediaCodecListOverrides.h
@@ -26,24 +26,31 @@
namespace android {
+extern const char *kProfilingVersionString;
+extern const char *kProfilingResults;
+
struct MediaCodecInfo;
+AString getProfilingVersionString();
+
bool splitString(const AString &s, const AString &delimiter, AString *s1, AString *s2);
-bool splitString(
- const AString &s, const AString &delimiter, AString *s1, AString *s2, AString *s3);
+// profile codecs and save the result to xml file named kProfilingResults.
+void profileCodecs(const Vector<sp<MediaCodecInfo>> &infos);
+// profile codecs and save the result to global_results, encoder_results and decoder_results.
void profileCodecs(
const Vector<sp<MediaCodecInfo>> &infos,
- KeyedVector<AString, CodecSettings> *results,
- bool forceToMeasure = false); // forceToMeasure is mainly for testing
+ CodecSettings *global_results,
+ KeyedVector<AString, CodecSettings> *encoder_results,
+ KeyedVector<AString, CodecSettings> *decoder_results,
+ bool forceToMeasure = false);
-void applyCodecSettings(
- const AString& codecInfo,
- const CodecSettings &settings,
- Vector<sp<MediaCodecInfo>> *infos);
-
-void exportResultsToXML(const char *fileName, const KeyedVector<AString, CodecSettings>& results);
+void exportResultsToXML(
+ const char *fileName,
+ const CodecSettings& global_results,
+ const KeyedVector<AString, CodecSettings>& encoder_results,
+ const KeyedVector<AString, CodecSettings>& decoder_results);
} // namespace android
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 9b57733..e089c46 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -427,7 +427,7 @@
// When using persistent surface, we are only interested in the
// consumer, but have to use PersistentSurface as a wrapper to
// pass consumer over messages (similar to BufferProducerWrapper)
- err = mEncoder->usePersistentInputSurface(
+ err = mEncoder->setInputSurface(
new PersistentSurface(NULL, mGraphicBufferConsumer));
} else {
err = mEncoder->createInputSurface(&mGraphicBufferProducer);
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index b0a65d2..2a50692 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -62,6 +62,6 @@
const char *MEDIA_MIMETYPE_TEXT_SUBRIP = "application/x-subrip";
const char *MEDIA_MIMETYPE_TEXT_VTT = "text/vtt";
const char *MEDIA_MIMETYPE_TEXT_CEA_608 = "text/cea-608";
-const char *MEDIA_MIMETYPE_DATA_METADATA = "application/octet-stream";
+const char *MEDIA_MIMETYPE_DATA_TIMED_ID3 = "application/x-id3v4";
} // namespace android
diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp
index 97264fb..b402e48 100644
--- a/media/libstagefright/MediaSync.cpp
+++ b/media/libstagefright/MediaSync.cpp
@@ -49,6 +49,7 @@
mMutex(),
mReleaseCondition(),
mNumOutstandingBuffers(0),
+ mUsageFlagsFromOutput(0),
mNativeSampleRateInHz(0),
mNumFramesWritten(0),
mHasAudio(false),
@@ -82,10 +83,8 @@
status_t MediaSync::setSurface(const sp<IGraphicBufferProducer> &output) {
Mutex::Autolock lock(mMutex);
- // TODO: support suface change.
- if (mOutput != NULL) {
- ALOGE("setSurface: output surface has already been configured.");
- return INVALID_OPERATION;
+ if (output == mOutput) {
+ return NO_ERROR; // same output surface.
}
if (output == NULL && mSyncSettings.mSource == AVSYNC_SOURCE_VSYNC) {
@@ -94,8 +93,24 @@
}
if (output != NULL) {
+ int newUsage = 0;
+ output->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &newUsage);
+
+ // Check usage flags only when current output surface has been used to create input surface.
+ if (mOutput != NULL && mInput != NULL) {
+ int ignoredFlags = (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER
+ | GRALLOC_USAGE_EXTERNAL_DISP);
+ // New output surface is not allowed to add new usage flag except ignored ones.
+ if ((newUsage & ~(mUsageFlagsFromOutput | ignoredFlags)) != 0) {
+ ALOGE("setSurface: new output surface has new usage flag not used by current one.");
+ return BAD_VALUE;
+ }
+ }
+
+ // Try to connect to new output surface. If failed, current output surface will not
+ // be changed.
IGraphicBufferProducer::QueueBufferOutput queueBufferOutput;
- sp<OutputListener> listener(new OutputListener(this));
+ sp<OutputListener> listener(new OutputListener(this, output));
IInterface::asBinder(output)->linkToDeath(listener);
status_t status =
output->connect(listener,
@@ -106,10 +121,18 @@
ALOGE("setSurface: failed to connect (%d)", status);
return status;
}
-
- mOutput = output;
}
+ if (mOutput != NULL) {
+ mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
+ while (!mBuffersSentToOutput.isEmpty()) {
+ returnBufferToInput_l(mBuffersSentToOutput.valueAt(0), Fence::NO_FENCE);
+ mBuffersSentToOutput.removeItemsAt(0);
+ }
+ }
+
+ mOutput = output;
+
return NO_ERROR;
}
@@ -181,9 +204,9 @@
if (status == NO_ERROR) {
bufferConsumer->setConsumerName(String8("MediaSync"));
// propagate usage bits from output surface
- int usage = 0;
- mOutput->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usage);
- bufferConsumer->setConsumerUsageBits(usage);
+ mUsageFlagsFromOutput = 0;
+ mOutput->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &mUsageFlagsFromOutput);
+ bufferConsumer->setConsumerUsageBits(mUsageFlagsFromOutput);
*outBufferProducer = bufferProducer;
mInput = bufferConsumer;
}
@@ -602,12 +625,24 @@
return;
}
+ if (mBuffersSentToOutput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) {
+ // Something is wrong since this buffer should be held by output now, bail.
+ mInput->consumerDisconnect();
+ onAbandoned_l(true /* isInput */);
+ return;
+ }
+ mBuffersSentToOutput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer);
+
ALOGV("queued buffer %#llx to output", (long long)bufferItem.mGraphicBuffer->getId());
}
-void MediaSync::onBufferReleasedByOutput() {
+void MediaSync::onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output) {
Mutex::Autolock lock(mMutex);
+ if (output != mOutput) {
+ return; // This is not the current output, ignore.
+ }
+
sp<GraphicBuffer> buffer;
sp<Fence> fence;
status_t status = mOutput->detachNextBuffer(&buffer, &fence);
@@ -628,6 +663,13 @@
return;
}
+ ssize_t ix = mBuffersSentToOutput.indexOfKey(buffer->getId());
+ if (ix < 0) {
+ // The buffer is unknown, maybe leftover, ignore.
+ return;
+ }
+ mBuffersSentToOutput.removeItemsAt(ix);
+
returnBufferToInput_l(buffer, fence);
}
@@ -727,13 +769,15 @@
mSync->onAbandoned_l(true /* isInput */);
}
-MediaSync::OutputListener::OutputListener(const sp<MediaSync> &sync)
- : mSync(sync) {}
+MediaSync::OutputListener::OutputListener(const sp<MediaSync> &sync,
+ const sp<IGraphicBufferProducer> &output)
+ : mSync(sync),
+ mOutput(output) {}
MediaSync::OutputListener::~OutputListener() {}
void MediaSync::OutputListener::onBufferReleased() {
- mSync->onBufferReleasedByOutput();
+ mSync->onBufferReleasedByOutput(mOutput);
}
void MediaSync::OutputListener::binderDied(const wp<IBinder> &/* who */) {
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index 1c53b40..f82636b 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -583,6 +583,13 @@
Mutex::Autolock autoLock(mLock);
+ // If we're disconnecting, return EOS and don't access *data pointer.
+ // data could be on the stack of the caller to NuCachedSource2::readAt(),
+ // which may have exited already.
+ if (mDisconnecting) {
+ return ERROR_END_OF_STREAM;
+ }
+
if (!mFetching) {
mLastAccessPos = offset;
restartPrefetcherIfNecessary_l(
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 44695ce..e69890d 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -72,7 +72,7 @@
node_id node, OMX_STATETYPE* state);
virtual status_t storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable);
+ node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type);
virtual status_t prepareForAdaptivePlayback(
node_id node, OMX_U32 port_index, OMX_BOOL enable,
@@ -90,7 +90,7 @@
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer);
+ buffer_id *buffer, OMX_U32 allottedSize);
virtual status_t useGraphicBuffer(
node_id node, OMX_U32 port_index,
@@ -102,15 +102,15 @@
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
- sp<IGraphicBufferProducer> *bufferProducer);
+ sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type);
virtual status_t createPersistentInputSurface(
sp<IGraphicBufferProducer> *bufferProducer,
sp<IGraphicBufferConsumer> *bufferConsumer);
- virtual status_t usePersistentInputSurface(
+ virtual status_t setInputSurface(
node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer);
+ const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type);
virtual status_t signalEndOfInputStream(node_id node);
@@ -120,18 +120,18 @@
virtual status_t allocateBufferWithBackup(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer);
+ buffer_id *buffer, OMX_U32 allottedSize);
virtual status_t freeBuffer(
node_id node, OMX_U32 port_index, buffer_id buffer);
- virtual status_t fillBuffer(node_id node, buffer_id buffer);
+ virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd);
virtual status_t emptyBuffer(
node_id node,
buffer_id buffer,
OMX_U32 range_offset, OMX_U32 range_length,
- OMX_U32 flags, OMX_TICKS timestamp);
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);
virtual status_t getExtensionIndex(
node_id node,
@@ -292,8 +292,8 @@
}
status_t MuxOMX::storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable) {
- return getOMX(node)->storeMetaDataInBuffers(node, port_index, enable);
+ node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type) {
+ return getOMX(node)->storeMetaDataInBuffers(node, port_index, enable, type);
}
status_t MuxOMX::prepareForAdaptivePlayback(
@@ -322,8 +322,8 @@
status_t MuxOMX::useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer) {
- return getOMX(node)->useBuffer(node, port_index, params, buffer);
+ buffer_id *buffer, OMX_U32 allottedSize) {
+ return getOMX(node)->useBuffer(node, port_index, params, buffer, allottedSize);
}
status_t MuxOMX::useGraphicBuffer(
@@ -342,9 +342,9 @@
status_t MuxOMX::createInputSurface(
node_id node, OMX_U32 port_index,
- sp<IGraphicBufferProducer> *bufferProducer) {
+ sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
status_t err = getOMX(node)->createInputSurface(
- node, port_index, bufferProducer);
+ node, port_index, bufferProducer, type);
return err;
}
@@ -356,11 +356,10 @@
bufferProducer, bufferConsumer);
}
-status_t MuxOMX::usePersistentInputSurface(
+status_t MuxOMX::setInputSurface(
node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer) {
- return getOMX(node)->usePersistentInputSurface(
- node, port_index, bufferConsumer);
+ const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type) {
+ return getOMX(node)->setInputSurface(node, port_index, bufferConsumer, type);
}
status_t MuxOMX::signalEndOfInputStream(node_id node) {
@@ -376,9 +375,9 @@
status_t MuxOMX::allocateBufferWithBackup(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
- buffer_id *buffer) {
+ buffer_id *buffer, OMX_U32 allottedSize) {
return getOMX(node)->allocateBufferWithBackup(
- node, port_index, params, buffer);
+ node, port_index, params, buffer, allottedSize);
}
status_t MuxOMX::freeBuffer(
@@ -386,17 +385,17 @@
return getOMX(node)->freeBuffer(node, port_index, buffer);
}
-status_t MuxOMX::fillBuffer(node_id node, buffer_id buffer) {
- return getOMX(node)->fillBuffer(node, buffer);
+status_t MuxOMX::fillBuffer(node_id node, buffer_id buffer, int fenceFd) {
+ return getOMX(node)->fillBuffer(node, buffer, fenceFd);
}
status_t MuxOMX::emptyBuffer(
node_id node,
buffer_id buffer,
OMX_U32 range_offset, OMX_U32 range_length,
- OMX_U32 flags, OMX_TICKS timestamp) {
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
return getOMX(node)->emptyBuffer(
- node, buffer, range_offset, range_length, flags, timestamp);
+ node, buffer, range_offset, range_length, flags, timestamp, fenceFd);
}
status_t MuxOMX::getExtensionIndex(
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 8d4bab8..96aa808 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -43,6 +43,7 @@
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
+#include <media/stagefright/SurfaceUtils.h>
#include <media/stagefright/Utils.h>
#include <media/stagefright/SkipCutBuffer.h>
#include <utils/Vector.h>
@@ -115,12 +116,15 @@
}
// from IOMXObserver
- virtual void onMessage(const omx_message &msg) {
+ virtual void onMessages(const std::list<omx_message> &messages) {
sp<OMXCodec> codec = mTarget.promote();
if (codec.get() != NULL) {
Mutex::Autolock autoLock(codec->mLock);
- codec->on_message(msg);
+ for (std::list<omx_message>::const_iterator it = messages.cbegin();
+ it != messages.cend(); ++it) {
+ codec->on_message(*it);
+ }
codec.clear();
}
}
@@ -1677,7 +1681,7 @@
&info.mData);
} else {
err = mOMX->allocateBufferWithBackup(
- mNode, portIndex, mem, &buffer);
+ mNode, portIndex, mem, &buffer, mem->size());
}
} else if (portIndex == kPortIndexOutput
&& (mQuirks & kRequiresAllocateBufferOnOutputPorts)) {
@@ -1689,10 +1693,10 @@
&info.mData);
} else {
err = mOMX->allocateBufferWithBackup(
- mNode, portIndex, mem, &buffer);
+ mNode, portIndex, mem, &buffer, mem->size());
}
} else {
- err = mOMX->useBuffer(mNode, portIndex, mem, &buffer);
+ err = mOMX->useBuffer(mNode, portIndex, mem, &buffer, mem->size());
}
if (err != OK) {
@@ -1783,35 +1787,6 @@
return OK;
}
-status_t OMXCodec::applyRotation() {
- sp<MetaData> meta = mSource->getFormat();
-
- int32_t rotationDegrees;
- if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
- rotationDegrees = 0;
- }
-
- uint32_t transform;
- switch (rotationDegrees) {
- case 0: transform = 0; break;
- case 90: transform = HAL_TRANSFORM_ROT_90; break;
- case 180: transform = HAL_TRANSFORM_ROT_180; break;
- case 270: transform = HAL_TRANSFORM_ROT_270; break;
- default: transform = 0; break;
- }
-
- status_t err = OK;
-
- if (transform) {
- err = native_window_set_buffers_transform(
- mNativeWindow.get(), transform);
- ALOGE("native_window_set_buffers_transform failed: %s (%d)",
- strerror(-err), -err);
- }
-
- return err;
-}
-
status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
// Get the number of buffers needed.
OMX_PARAM_PORTDEFINITIONTYPE def;
@@ -1825,30 +1800,11 @@
return err;
}
- err = native_window_set_buffers_dimensions(
- mNativeWindow.get(),
- def.format.video.nFrameWidth,
- def.format.video.nFrameHeight);
+ sp<MetaData> meta = mSource->getFormat();
- if (err != 0) {
- ALOGE("native_window_set_buffers_dimensions failed: %s (%d)",
- strerror(-err), -err);
- return err;
- }
-
- err = native_window_set_buffers_format(
- mNativeWindow.get(),
- def.format.video.eColorFormat);
-
- if (err != 0) {
- ALOGE("native_window_set_buffers_format failed: %s (%d)",
- strerror(-err), -err);
- return err;
- }
-
- err = applyRotation();
- if (err != OK) {
- return err;
+ int32_t rotationDegrees;
+ if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
+ rotationDegrees = 0;
}
// Set up the native window.
@@ -1859,34 +1815,19 @@
// XXX: Currently this error is logged, but not fatal.
usage = 0;
}
+
if (mFlags & kEnableGrallocUsageProtected) {
usage |= GRALLOC_USAGE_PROTECTED;
}
- // Make sure to check whether either Stagefright or the video decoder
- // requested protected buffers.
- if (usage & GRALLOC_USAGE_PROTECTED) {
- // Verify that the ANativeWindow sends images directly to
- // SurfaceFlinger.
- int queuesToNativeWindow = 0;
- err = mNativeWindow->query(
- mNativeWindow.get(), NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER,
- &queuesToNativeWindow);
- if (err != 0) {
- ALOGE("error authenticating native window: %d", err);
- return err;
- }
- if (queuesToNativeWindow != 1) {
- ALOGE("native window could not be authenticated");
- return PERMISSION_DENIED;
- }
- }
-
- ALOGV("native_window_set_usage usage=0x%x", usage);
- err = native_window_set_usage(
- mNativeWindow.get(), usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
+ err = setNativeWindowSizeFormatAndUsage(
+ mNativeWindow.get(),
+ def.format.video.nFrameWidth,
+ def.format.video.nFrameHeight,
+ def.format.video.eColorFormat,
+ rotationDegrees,
+ usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
if (err != 0) {
- ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
return err;
}
@@ -2053,156 +1994,6 @@
return bufInfo;
}
-status_t OMXCodec::pushBlankBuffersToNativeWindow() {
- status_t err = NO_ERROR;
- ANativeWindowBuffer* anb = NULL;
- int numBufs = 0;
- int minUndequeuedBufs = 0;
-
- // We need to reconnect to the ANativeWindow as a CPU client to ensure that
- // no frames get dropped by SurfaceFlinger assuming that these are video
- // frames.
- err = native_window_api_disconnect(mNativeWindow.get(),
- NATIVE_WINDOW_API_MEDIA);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)",
- strerror(-err), -err);
- return err;
- }
-
- err = native_window_api_connect(mNativeWindow.get(),
- NATIVE_WINDOW_API_CPU);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: api_connect failed: %s (%d)",
- strerror(-err), -err);
- return err;
- }
-
- err = native_window_set_buffers_dimensions(mNativeWindow.get(), 1, 1);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: set_buffers_dimensions failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- err = native_window_set_buffers_format(mNativeWindow.get(), HAL_PIXEL_FORMAT_RGBX_8888);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: set_buffers_format failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- err = native_window_set_usage(mNativeWindow.get(),
- GRALLOC_USAGE_SW_WRITE_OFTEN);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: set_usage failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- err = native_window_set_scaling_mode(mNativeWindow.get(),
- NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
- if (err != OK) {
- ALOGE("error pushing blank frames: set_scaling_mode failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- err = mNativeWindow->query(mNativeWindow.get(),
- NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: MIN_UNDEQUEUED_BUFFERS query "
- "failed: %s (%d)", strerror(-err), -err);
- goto error;
- }
-
- numBufs = minUndequeuedBufs + 1;
- err = native_window_set_buffer_count(mNativeWindow.get(), numBufs);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: set_buffer_count failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- // We push numBufs + 1 buffers to ensure that we've drawn into the same
- // buffer twice. This should guarantee that the buffer has been displayed
- // on the screen and then been replaced, so an previous video frames are
- // guaranteed NOT to be currently displayed.
- for (int i = 0; i < numBufs + 1; i++) {
- err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &anb);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: dequeueBuffer failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- sp<GraphicBuffer> buf(new GraphicBuffer(anb, false));
-
- // Fill the buffer with the a 1x1 checkerboard pattern ;)
- uint32_t* img = NULL;
- err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: lock failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- *img = 0;
-
- err = buf->unlock();
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: unlock failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- err = mNativeWindow->queueBuffer(mNativeWindow.get(),
- buf->getNativeBuffer(), -1);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: queueBuffer failed: %s (%d)",
- strerror(-err), -err);
- goto error;
- }
-
- anb = NULL;
- }
-
-error:
-
- if (err != NO_ERROR) {
- // Clean up after an error.
- if (anb != NULL) {
- mNativeWindow->cancelBuffer(mNativeWindow.get(), anb, -1);
- }
-
- native_window_api_disconnect(mNativeWindow.get(),
- NATIVE_WINDOW_API_CPU);
- native_window_api_connect(mNativeWindow.get(),
- NATIVE_WINDOW_API_MEDIA);
-
- return err;
- } else {
- // Clean up after success.
- err = native_window_api_disconnect(mNativeWindow.get(),
- NATIVE_WINDOW_API_CPU);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)",
- strerror(-err), -err);
- return err;
- }
-
- err = native_window_api_connect(mNativeWindow.get(),
- NATIVE_WINDOW_API_MEDIA);
- if (err != NO_ERROR) {
- ALOGE("error pushing blank frames: api_connect failed: %s (%d)",
- strerror(-err), -err);
- return err;
- }
-
- return NO_ERROR;
- }
-}
-
int64_t OMXCodec::getDecodingTimeUs() {
CHECK(mIsEncoder && mIsVideo);
@@ -2784,7 +2575,7 @@
// them has made it to the display. This allows the OMX
// component teardown to zero out any protected buffers
// without the risk of scanning out one of those buffers.
- pushBlankBuffersToNativeWindow();
+ pushBlankBuffersToNativeWindow(mNativeWindow.get());
}
setState(IDLE_TO_LOADED);
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index d577034..1c663a3 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -21,6 +21,7 @@
#include "include/OggExtractor.h"
#include <cutils/properties.h>
+#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaBuffer.h>
@@ -65,24 +66,28 @@
OggSource &operator=(const OggSource &);
};
-struct MyVorbisExtractor {
- MyVorbisExtractor(const sp<DataSource> &source);
- virtual ~MyVorbisExtractor();
+struct MyOggExtractor {
+ MyOggExtractor(
+ const sp<DataSource> &source,
+ const char *mimeType,
+ size_t numHeaders,
+ int64_t seekPreRollUs);
+ virtual ~MyOggExtractor();
sp<MetaData> getFormat() const;
// Returns an approximate bitrate in bits per second.
- uint64_t approxBitrate();
+ virtual uint64_t approxBitrate() const = 0;
status_t seekToTime(int64_t timeUs);
status_t seekToOffset(off64_t offset);
- status_t readNextPacket(MediaBuffer **buffer, bool conf);
+ virtual status_t readNextPacket(MediaBuffer **buffer) = 0;
status_t init();
sp<MetaData> getFileMetaData() { return mFileMeta; }
-private:
+protected:
struct Page {
uint64_t mGranulePosition;
int32_t mPrevPacketSize;
@@ -102,12 +107,17 @@
sp<DataSource> mSource;
off64_t mOffset;
Page mCurrentPage;
+ uint64_t mCurGranulePosition;
uint64_t mPrevGranulePosition;
size_t mCurrentPageSize;
bool mFirstPacketInPage;
uint64_t mCurrentPageSamples;
size_t mNextLaceIndex;
+ const char *mMimeType;
+ size_t mNumHeaders;
+ int64_t mSeekPreRollUs;
+
off64_t mFirstDataOffset;
vorbis_info mVi;
@@ -121,10 +131,26 @@
ssize_t readPage(off64_t offset, Page *page);
status_t findNextPage(off64_t startOffset, off64_t *pageOffset);
- status_t verifyHeader(
- MediaBuffer *buffer, uint8_t type);
+ virtual int64_t getTimeUsOfGranule(uint64_t granulePos) const = 0;
- int32_t packetBlockSize(MediaBuffer *buffer);
+ // Extract codec format, metadata tags, and various codec specific data;
+ // the format and CSD's are required to setup the decoders for the enclosed media content.
+ //
+ // Valid values for `type` are:
+ // 1 - bitstream identification header
+ // 3 - comment header
+ // 5 - codec setup header (Vorbis only)
+ virtual status_t verifyHeader(MediaBuffer *buffer, uint8_t type) = 0;
+
+ // Read the next ogg packet from the underlying data source; optionally
+ // calculate the timestamp for the output packet whilst pretending
+ // that we are parsing an Ogg Vorbis stream.
+ //
+ // *buffer is NULL'ed out immediately upon entry, and if successful a new buffer is allocated;
+ // clients are responsible for releasing the original buffer.
+ status_t _readNextPacket(MediaBuffer **buffer, bool calcVorbisTimestamp);
+
+ int32_t getPacketBlockSize(MediaBuffer *buffer);
void parseFileMetaData();
@@ -132,8 +158,61 @@
void buildTableOfContents();
- MyVorbisExtractor(const MyVorbisExtractor &);
- MyVorbisExtractor &operator=(const MyVorbisExtractor &);
+ MyOggExtractor(const MyOggExtractor &);
+ MyOggExtractor &operator=(const MyOggExtractor &);
+};
+
+struct MyVorbisExtractor : public MyOggExtractor {
+ MyVorbisExtractor(const sp<DataSource> &source)
+ : MyOggExtractor(source,
+ MEDIA_MIMETYPE_AUDIO_VORBIS,
+ /* numHeaders */ 3,
+ /* seekPreRollUs */ 0) {
+ }
+
+ virtual uint64_t approxBitrate() const;
+
+ virtual status_t readNextPacket(MediaBuffer **buffer) {
+ return _readNextPacket(buffer, /* calcVorbisTimestamp = */ true);
+ }
+
+protected:
+ virtual int64_t getTimeUsOfGranule(uint64_t granulePos) const {
+ return granulePos * 1000000ll / mVi.rate;
+ }
+
+ virtual status_t verifyHeader(MediaBuffer *buffer, uint8_t type);
+};
+
+struct MyOpusExtractor : public MyOggExtractor {
+ static const int32_t kOpusSampleRate = 48000;
+ static const int64_t kOpusSeekPreRollUs = 80000; // 80 ms
+
+ MyOpusExtractor(const sp<DataSource> &source)
+ : MyOggExtractor(source, MEDIA_MIMETYPE_AUDIO_OPUS, /*numHeaders*/ 2, kOpusSeekPreRollUs),
+ mChannelCount(0),
+ mCodecDelay(0),
+ mStartGranulePosition(-1) {
+ }
+
+ virtual uint64_t approxBitrate() const {
+ return 0;
+ }
+
+ virtual status_t readNextPacket(MediaBuffer **buffer);
+
+protected:
+ virtual int64_t getTimeUsOfGranule(uint64_t granulePos) const;
+ virtual status_t verifyHeader(MediaBuffer *buffer, uint8_t type);
+
+private:
+ status_t verifyOpusHeader(MediaBuffer *buffer);
+ status_t verifyOpusComments(MediaBuffer *buffer);
+ uint32_t getNumSamplesInPacket(MediaBuffer *buffer) const;
+
+ uint8_t mChannelCount;
+ uint16_t mCodecDelay;
+ int64_t mStartGranulePosition;
};
static void extractAlbumArt(
@@ -179,13 +258,14 @@
int64_t seekTimeUs;
ReadOptions::SeekMode mode;
if (options && options->getSeekTo(&seekTimeUs, &mode)) {
- if (mExtractor->mImpl->seekToTime(seekTimeUs) != OK) {
- return ERROR_END_OF_STREAM;
+ status_t err = mExtractor->mImpl->seekToTime(seekTimeUs);
+ if (err != OK) {
+ return err;
}
}
MediaBuffer *packet;
- status_t err = mExtractor->mImpl->readNextPacket(&packet, /* conf = */ false);
+ status_t err = mExtractor->mImpl->readNextPacket(&packet);
if (err != OK) {
return err;
@@ -209,14 +289,22 @@
////////////////////////////////////////////////////////////////////////////////
-MyVorbisExtractor::MyVorbisExtractor(const sp<DataSource> &source)
+MyOggExtractor::MyOggExtractor(
+ const sp<DataSource> &source,
+ const char *mimeType,
+ size_t numHeaders,
+ int64_t seekPreRollUs)
: mSource(source),
mOffset(0),
+ mCurGranulePosition(0),
mPrevGranulePosition(0),
mCurrentPageSize(0),
mFirstPacketInPage(true),
mCurrentPageSamples(0),
mNextLaceIndex(0),
+ mMimeType(mimeType),
+ mNumHeaders(numHeaders),
+ mSeekPreRollUs(seekPreRollUs),
mFirstDataOffset(-1) {
mCurrentPage.mNumSegments = 0;
@@ -224,16 +312,16 @@
vorbis_comment_init(&mVc);
}
-MyVorbisExtractor::~MyVorbisExtractor() {
+MyOggExtractor::~MyOggExtractor() {
vorbis_comment_clear(&mVc);
vorbis_info_clear(&mVi);
}
-sp<MetaData> MyVorbisExtractor::getFormat() const {
+sp<MetaData> MyOggExtractor::getFormat() const {
return mMeta;
}
-status_t MyVorbisExtractor::findNextPage(
+status_t MyOggExtractor::findNextPage(
off64_t startOffset, off64_t *pageOffset) {
*pageOffset = startOffset;
@@ -264,7 +352,7 @@
// it (if any) and return its granule position.
// To do this we back up from the "current" page's offset until we find any
// page preceding it and then scan forward to just before the current page.
-status_t MyVorbisExtractor::findPrevGranulePosition(
+status_t MyOggExtractor::findPrevGranulePosition(
off64_t pageOffset, uint64_t *granulePos) {
*granulePos = 0;
@@ -280,7 +368,11 @@
ALOGV("backing up %lld bytes", (long long)(pageOffset - prevGuess));
status_t err = findNextPage(prevGuess, &prevPageOffset);
- if (err != OK) {
+ if (err == ERROR_END_OF_STREAM) {
+ // We are at the last page and didn't back off enough;
+ // back off 5000 bytes more and try again.
+ continue;
+ } else if (err != OK) {
return err;
}
@@ -314,11 +406,20 @@
}
}
-status_t MyVorbisExtractor::seekToTime(int64_t timeUs) {
+status_t MyOggExtractor::seekToTime(int64_t timeUs) {
+ timeUs -= mSeekPreRollUs;
+ if (timeUs < 0) {
+ timeUs = 0;
+ }
+
if (mTableOfContents.isEmpty()) {
// Perform approximate seeking based on avg. bitrate.
+ uint64_t bps = approxBitrate();
+ if (bps <= 0) {
+ return INVALID_OPERATION;
+ }
- off64_t pos = timeUs * approxBitrate() / 8000000ll;
+ off64_t pos = timeUs * bps / 8000000ll;
ALOGV("seeking to offset %lld", (long long)pos);
return seekToOffset(pos);
@@ -353,7 +454,7 @@
return seekToOffset(entry.mPageOffset);
}
-status_t MyVorbisExtractor::seekToOffset(off64_t offset) {
+status_t MyOggExtractor::seekToOffset(off64_t offset) {
if (mFirstDataOffset >= 0 && offset < mFirstDataOffset) {
// Once we know where the actual audio data starts (past the headers)
// don't ever seek to anywhere before that.
@@ -386,7 +487,7 @@
return OK;
}
-ssize_t MyVorbisExtractor::readPage(off64_t offset, Page *page) {
+ssize_t MyOggExtractor::readPage(off64_t offset, Page *page) {
uint8_t header[27];
ssize_t n;
if ((n = mSource->readAt(offset, header, sizeof(header)))
@@ -457,7 +558,110 @@
return sizeof(header) + page->mNumSegments + totalSize;
}
-status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out, bool conf) {
+status_t MyOpusExtractor::readNextPacket(MediaBuffer **out) {
+ if (mOffset <= mFirstDataOffset && mStartGranulePosition < 0) {
+ // The first sample might not start at time 0; find out where by subtracting
+ // the number of samples on the first page from the granule position
+ // (position of last complete sample) of the first page. This happens
+ // the first time before we attempt to read a packet from the first page.
+ MediaBuffer *mBuf;
+ uint32_t numSamples = 0;
+ uint64_t curGranulePosition = 0;
+ while (true) {
+ status_t err = _readNextPacket(&mBuf, /* calcVorbisTimestamp = */false);
+ if (err != OK && err != ERROR_END_OF_STREAM) {
+ return err;
+ }
+ // First two pages are header pages.
+ if (err == ERROR_END_OF_STREAM || mCurrentPage.mPageNo > 2) {
+ break;
+ }
+ curGranulePosition = mCurrentPage.mGranulePosition;
+ numSamples += getNumSamplesInPacket(mBuf);
+ mBuf->release();
+ mBuf = NULL;
+ }
+
+ if (curGranulePosition > numSamples) {
+ mStartGranulePosition = curGranulePosition - numSamples;
+ } else {
+ mStartGranulePosition = 0;
+ }
+ seekToOffset(0);
+ }
+
+ status_t err = _readNextPacket(out, /* calcVorbisTimestamp = */false);
+ if (err != OK) {
+ return err;
+ }
+
+ int32_t currentPageSamples;
+ // Calculate timestamps by accumulating durations starting from the first sample of a page;
+ // We assume that we only seek to page boundaries.
+ if ((*out)->meta_data()->findInt32(kKeyValidSamples, &currentPageSamples)) {
+ // first packet in page
+ if (mOffset == mFirstDataOffset) {
+ currentPageSamples -= mStartGranulePosition;
+ (*out)->meta_data()->setInt32(kKeyValidSamples, currentPageSamples);
+ }
+ mCurGranulePosition = mCurrentPage.mGranulePosition - currentPageSamples;
+ }
+
+ int64_t timeUs = getTimeUsOfGranule(mCurGranulePosition);
+ (*out)->meta_data()->setInt64(kKeyTime, timeUs);
+
+ uint32_t frames = getNumSamplesInPacket(*out);
+ mCurGranulePosition += frames;
+ return OK;
+}
+
+uint32_t MyOpusExtractor::getNumSamplesInPacket(MediaBuffer *buffer) const {
+ if (buffer == NULL || buffer->range_length() < 1) {
+ return 0;
+ }
+
+ uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset();
+ uint8_t toc = data[0];
+ uint8_t config = (toc >> 3) & 0x1f;
+ uint32_t frameSizesUs[] = {
+ 10000, 20000, 40000, 60000, // 0...3
+ 10000, 20000, 40000, 60000, // 4...7
+ 10000, 20000, 40000, 60000, // 8...11
+ 10000, 20000, // 12...13
+ 10000, 20000, // 14...15
+ 2500, 5000, 10000, 20000, // 16...19
+ 2500, 5000, 10000, 20000, // 20...23
+ 2500, 5000, 10000, 20000, // 24...27
+ 2500, 5000, 10000, 20000 // 28...31
+ };
+ uint32_t frameSizeUs = frameSizesUs[config];
+
+ uint32_t numFrames;
+ uint8_t c = toc & 3;
+ switch (c) {
+ case 0:
+ numFrames = 1;
+ break;
+ case 1:
+ case 2:
+ numFrames = 2;
+ break;
+ case 3:
+ if (buffer->range_length() < 3) {
+ numFrames = 0;
+ } else {
+ numFrames = data[2] & 0x3f;
+ }
+ break;
+ default:
+ TRESPASS();
+ }
+
+ uint32_t numSamples = frameSizeUs * numFrames * kOpusSampleRate / 1000000;
+ return numSamples;
+}
+
+status_t MyOggExtractor::_readNextPacket(MediaBuffer **out, bool calcVorbisTimestamp) {
*out = NULL;
MediaBuffer *buffer = NULL;
@@ -523,9 +727,8 @@
mFirstPacketInPage = false;
}
- // ignore timestamp for configuration packets
- if (!conf) {
- int32_t curBlockSize = packetBlockSize(buffer);
+ if (calcVorbisTimestamp) {
+ int32_t curBlockSize = getPacketBlockSize(buffer);
if (mCurrentPage.mPrevPacketSize < 0) {
mCurrentPage.mPrevPacketSize = curBlockSize;
mCurrentPage.mPrevPacketPos =
@@ -597,43 +800,24 @@
}
}
-status_t MyVorbisExtractor::init() {
+status_t MyOggExtractor::init() {
mMeta = new MetaData;
- mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS);
+ mMeta->setCString(kKeyMIMEType, mMimeType);
- MediaBuffer *packet;
status_t err;
- if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) {
- return err;
- }
- ALOGV("read packet of size %zu\n", packet->range_length());
- err = verifyHeader(packet, 1);
- packet->release();
- packet = NULL;
- if (err != OK) {
- return err;
- }
-
- if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) {
- return err;
- }
- ALOGV("read packet of size %zu\n", packet->range_length());
- err = verifyHeader(packet, 3);
- packet->release();
- packet = NULL;
- if (err != OK) {
- return err;
- }
-
- if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) {
- return err;
- }
- ALOGV("read packet of size %zu\n", packet->range_length());
- err = verifyHeader(packet, 5);
- packet->release();
- packet = NULL;
- if (err != OK) {
- return err;
+ MediaBuffer *packet;
+ for (size_t i = 0; i < mNumHeaders; ++i) {
+ // ignore timestamp for configuration packets
+ if ((err = _readNextPacket(&packet, /* calcVorbisTimestamp = */ false)) != OK) {
+ return err;
+ }
+ ALOGV("read packet of size %zu\n", packet->range_length());
+ err = verifyHeader(packet, /* type = */ i * 2 + 1);
+ packet->release();
+ packet = NULL;
+ if (err != OK) {
+ return err;
+ }
}
mFirstDataOffset = mOffset + mCurrentPageSize;
@@ -649,7 +833,7 @@
// we can only approximate using avg. bitrate if seeking to
// the end is too expensive or impossible (live streaming).
- int64_t durationUs = lastGranulePosition * 1000000ll / mVi.rate;
+ int64_t durationUs = getTimeUsOfGranule(lastGranulePosition);
mMeta->setInt64(kKeyDuration, durationUs);
@@ -659,7 +843,7 @@
return OK;
}
-void MyVorbisExtractor::buildTableOfContents() {
+void MyOggExtractor::buildTableOfContents() {
off64_t offset = mFirstDataOffset;
Page page;
ssize_t pageSize;
@@ -670,7 +854,7 @@
mTableOfContents.editItemAt(mTableOfContents.size() - 1);
entry.mPageOffset = offset;
- entry.mTimeUs = page.mGranulePosition * 1000000ll / mVi.rate;
+ entry.mTimeUs = getTimeUsOfGranule(page.mGranulePosition);
offset += (size_t)pageSize;
}
@@ -698,7 +882,7 @@
}
}
-int32_t MyVorbisExtractor::packetBlockSize(MediaBuffer *buffer) {
+int32_t MyOggExtractor::getPacketBlockSize(MediaBuffer *buffer) {
const uint8_t *data =
(const uint8_t *)buffer->data() + buffer->range_offset();
@@ -727,6 +911,144 @@
return vorbis_packet_blocksize(&mVi, &pack);
}
+int64_t MyOpusExtractor::getTimeUsOfGranule(uint64_t granulePos) const {
+ uint64_t pcmSamplePosition = 0;
+ if (granulePos > mCodecDelay) {
+ pcmSamplePosition = granulePos - mCodecDelay;
+ }
+ return pcmSamplePosition * 1000000ll / kOpusSampleRate;
+}
+
+status_t MyOpusExtractor::verifyHeader(MediaBuffer *buffer, uint8_t type) {
+ switch (type) {
+ // there are actually no header types defined in the Opus spec; we choose 1 and 3 to mean
+ // header and comments such that we can share code with MyVorbisExtractor.
+ case 1:
+ return verifyOpusHeader(buffer);
+ case 3:
+ return verifyOpusComments(buffer);
+ default:
+ return INVALID_OPERATION;
+ }
+}
+
+status_t MyOpusExtractor::verifyOpusHeader(MediaBuffer *buffer) {
+ const size_t kOpusHeaderSize = 19;
+ const uint8_t *data =
+ (const uint8_t *)buffer->data() + buffer->range_offset();
+
+ size_t size = buffer->range_length();
+
+ if (size < kOpusHeaderSize
+ || memcmp(data, "OpusHead", 8)
+ || /* version = */ data[8] != 1) {
+ return ERROR_MALFORMED;
+ }
+
+ mChannelCount = data[9];
+ mCodecDelay = U16LE_AT(&data[10]);
+
+ mMeta->setData(kKeyOpusHeader, 0, data, size);
+ mMeta->setInt32(kKeySampleRate, kOpusSampleRate);
+ mMeta->setInt32(kKeyChannelCount, mChannelCount);
+ mMeta->setInt64(kKeyOpusSeekPreRoll /* ns */, kOpusSeekPreRollUs * 1000 /* = 80 ms*/);
+ mMeta->setInt64(kKeyOpusCodecDelay /* ns */,
+ mCodecDelay /* sample/s */ * 1000000000 / kOpusSampleRate);
+
+ return OK;
+}
+
+status_t MyOpusExtractor::verifyOpusComments(MediaBuffer *buffer) {
+ // add artificial framing bit so we can reuse _vorbis_unpack_comment
+ int32_t commentSize = buffer->range_length() + 1;
+ sp<ABuffer> aBuf = new ABuffer(commentSize);
+ if (aBuf->capacity() <= buffer->range_length()) {
+ return ERROR_MALFORMED;
+ }
+
+ uint8_t* commentData = aBuf->data();
+ memcpy(commentData,
+ (uint8_t *)buffer->data() + buffer->range_offset(),
+ buffer->range_length());
+
+ ogg_buffer buf;
+ buf.data = commentData;
+ buf.size = commentSize;
+ buf.refcount = 1;
+ buf.ptr.owner = NULL;
+
+ ogg_reference ref;
+ ref.buffer = &buf;
+ ref.begin = 0;
+ ref.length = commentSize;
+ ref.next = NULL;
+
+ oggpack_buffer bits;
+ oggpack_readinit(&bits, &ref);
+
+ // skip 'OpusTags'
+ const char *OpusTags = "OpusTags";
+ const int32_t headerLen = strlen(OpusTags);
+ int32_t framingBitOffset = headerLen;
+ for (int i = 0; i < headerLen; ++i) {
+ char chr = oggpack_read(&bits, 8);
+ if (chr != OpusTags[i]) {
+ return ERROR_MALFORMED;
+ }
+ }
+
+ int32_t vendorLen = oggpack_read(&bits, 32);
+ framingBitOffset += 4;
+ if (vendorLen < 0 || vendorLen > commentSize - 8) {
+ return ERROR_MALFORMED;
+ }
+ // skip vendor string
+ framingBitOffset += vendorLen;
+ for (int i = 0; i < vendorLen; ++i) {
+ oggpack_read(&bits, 8);
+ }
+
+ int32_t n = oggpack_read(&bits, 32);
+ framingBitOffset += 4;
+ if (n < 0 || n > ((commentSize - oggpack_bytes(&bits)) >> 2)) {
+ return ERROR_MALFORMED;
+ }
+ for (int i = 0; i < n; ++i) {
+ int32_t len = oggpack_read(&bits, 32);
+ framingBitOffset += 4;
+ if (len < 0 || len > (commentSize - oggpack_bytes(&bits))) {
+ return ERROR_MALFORMED;
+ }
+ framingBitOffset += len;
+ for (int j = 0; j < len; ++j) {
+ oggpack_read(&bits, 8);
+ }
+ }
+ if (framingBitOffset < 0 || framingBitOffset >= commentSize) {
+ return ERROR_MALFORMED;
+ }
+ commentData[framingBitOffset] = 1;
+
+ buf.data = commentData + headerLen;
+ buf.size = commentSize - headerLen;
+ buf.refcount = 1;
+ buf.ptr.owner = NULL;
+
+ ref.buffer = &buf;
+ ref.begin = 0;
+ ref.length = commentSize - headerLen;
+ ref.next = NULL;
+
+ oggpack_readinit(&bits, &ref);
+ int err = _vorbis_unpack_comment(&mVc, &bits);
+ if (0 != err) {
+ return ERROR_MALFORMED;
+ }
+
+ parseFileMetaData();
+ return OK;
+}
+
status_t MyVorbisExtractor::verifyHeader(
MediaBuffer *buffer, uint8_t type) {
const uint8_t *data =
@@ -753,7 +1075,9 @@
oggpack_buffer bits;
oggpack_readinit(&bits, &ref);
- CHECK_EQ(oggpack_read(&bits, 8), type);
+ if (oggpack_read(&bits, 8) != type) {
+ return ERROR_MALFORMED;
+ }
for (size_t i = 0; i < 6; ++i) {
oggpack_read(&bits, 8); // skip 'vorbis'
}
@@ -761,7 +1085,9 @@
switch (type) {
case 1:
{
- CHECK_EQ(0, _vorbis_unpack_info(&mVi, &bits));
+ if (0 != _vorbis_unpack_info(&mVi, &bits)) {
+ return ERROR_MALFORMED;
+ }
mMeta->setData(kKeyVorbisInfo, 0, data, size);
mMeta->setInt32(kKeySampleRate, mVi.rate);
@@ -810,7 +1136,7 @@
return OK;
}
-uint64_t MyVorbisExtractor::approxBitrate() {
+uint64_t MyVorbisExtractor::approxBitrate() const {
if (mVi.bitrate_nominal != 0) {
return mVi.bitrate_nominal;
}
@@ -818,7 +1144,7 @@
return (mVi.bitrate_lower + mVi.bitrate_upper) / 2;
}
-void MyVorbisExtractor::parseFileMetaData() {
+void MyOggExtractor::parseFileMetaData() {
mFileMeta = new MetaData;
mFileMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_OGG);
@@ -1022,11 +1348,23 @@
: mDataSource(source),
mInitCheck(NO_INIT),
mImpl(NULL) {
- mImpl = new MyVorbisExtractor(mDataSource);
- mInitCheck = mImpl->seekToOffset(0);
+ for (int i = 0; i < 2; ++i) {
+ if (mImpl != NULL) {
+ delete mImpl;
+ }
+ if (i == 0) {
+ mImpl = new MyVorbisExtractor(mDataSource);
+ } else {
+ mImpl = new MyOpusExtractor(mDataSource);
+ }
+ mInitCheck = mImpl->seekToOffset(0);
- if (mInitCheck == OK) {
- mInitCheck = mImpl->init();
+ if (mInitCheck == OK) {
+ mInitCheck = mImpl->init();
+ if (mInitCheck == OK) {
+ break;
+ }
+ }
}
}
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index 7f98485..40df34d 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -335,7 +335,7 @@
}
mTimeToSampleCount = U32_AT(&header[4]);
- uint64_t allocSize = mTimeToSampleCount * 2 * (uint64_t)sizeof(uint32_t);
+ uint64_t allocSize = (uint64_t)mTimeToSampleCount * 2 * sizeof(uint32_t);
if (allocSize > SIZE_MAX) {
return ERROR_OUT_OF_RANGE;
}
@@ -383,7 +383,7 @@
}
mNumCompositionTimeDeltaEntries = numEntries;
- uint64_t allocSize = numEntries * 2 * (uint64_t)sizeof(uint32_t);
+ uint64_t allocSize = (uint64_t)numEntries * 2 * sizeof(uint32_t);
if (allocSize > SIZE_MAX) {
return ERROR_OUT_OF_RANGE;
}
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
new file mode 100644
index 0000000..6b62e43
--- /dev/null
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SurfaceUtils"
+#include <utils/Log.h>
+
+#include <media/stagefright/SurfaceUtils.h>
+
+#include <gui/Surface.h>
+
+namespace android {
+
+status_t setNativeWindowSizeFormatAndUsage(
+ ANativeWindow *nativeWindow /* nonnull */,
+ int width, int height, int format, int rotation, int usage) {
+ status_t err = native_window_set_buffers_dimensions(nativeWindow, width, height);
+ if (err != NO_ERROR) {
+ ALOGE("native_window_set_buffers_dimensions failed: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
+ err = native_window_set_buffers_format(nativeWindow, format);
+ if (err != NO_ERROR) {
+ ALOGE("native_window_set_buffers_format failed: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
+ int transform = 0;
+ if ((rotation % 90) == 0) {
+ switch ((rotation / 90) & 3) {
+ case 1: transform = HAL_TRANSFORM_ROT_90; break;
+ case 2: transform = HAL_TRANSFORM_ROT_180; break;
+ case 3: transform = HAL_TRANSFORM_ROT_270; break;
+ default: transform = 0; break;
+ }
+ }
+
+ err = native_window_set_buffers_transform(nativeWindow, transform);
+ if (err != NO_ERROR) {
+ ALOGE("native_window_set_buffers_transform failed: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
+ // Make sure to check whether either Stagefright or the video decoder
+ // requested protected buffers.
+ if (usage & GRALLOC_USAGE_PROTECTED) {
+ // Verify that the ANativeWindow sends images directly to
+ // SurfaceFlinger.
+ int queuesToNativeWindow = 0;
+ err = nativeWindow->query(
+ nativeWindow, NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &queuesToNativeWindow);
+ if (err != NO_ERROR) {
+ ALOGE("error authenticating native window: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+ if (queuesToNativeWindow != 1) {
+ ALOGE("native window could not be authenticated");
+ return PERMISSION_DENIED;
+ }
+ }
+
+ int consumerUsage = 0;
+ err = nativeWindow->query(nativeWindow, NATIVE_WINDOW_CONSUMER_USAGE_BITS, &consumerUsage);
+ if (err != NO_ERROR) {
+ ALOGW("failed to get consumer usage bits. ignoring");
+ err = NO_ERROR;
+ }
+
+ int finalUsage = usage | consumerUsage;
+ ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = %#x", usage, consumerUsage, finalUsage);
+ err = native_window_set_usage(nativeWindow, finalUsage);
+ if (err != NO_ERROR) {
+ ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
+ err = native_window_set_scaling_mode(
+ nativeWindow, NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+ if (err != NO_ERROR) {
+ ALOGE("native_window_set_scaling_mode failed: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
+ ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage %#x",
+ nativeWindow, width, height, format, rotation, finalUsage);
+ return NO_ERROR;
+}
+
+status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */) {
+ status_t err = NO_ERROR;
+ ANativeWindowBuffer* anb = NULL;
+ int numBufs = 0;
+ int minUndequeuedBufs = 0;
+
+ // We need to reconnect to the ANativeWindow as a CPU client to ensure that
+ // no frames get dropped by SurfaceFlinger assuming that these are video
+ // frames.
+ err = native_window_api_disconnect(nativeWindow, NATIVE_WINDOW_API_MEDIA);
+ if (err != NO_ERROR) {
+ ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
+ err = native_window_api_connect(nativeWindow, NATIVE_WINDOW_API_CPU);
+ if (err != NO_ERROR) {
+ ALOGE("error pushing blank frames: api_connect failed: %s (%d)", strerror(-err), -err);
+ (void)native_window_api_connect(nativeWindow, NATIVE_WINDOW_API_MEDIA);
+ return err;
+ }
+
+ err = setNativeWindowSizeFormatAndUsage(
+ nativeWindow, 1, 1, HAL_PIXEL_FORMAT_RGBX_8888, 0, GRALLOC_USAGE_SW_WRITE_OFTEN);
+ if (err != NO_ERROR) {
+ goto error;
+ }
+
+ static_cast<Surface*>(nativeWindow)->getIGraphicBufferProducer()->allowAllocation(true);
+
+ err = nativeWindow->query(nativeWindow,
+ NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs);
+ if (err != NO_ERROR) {
+ ALOGE("error pushing blank frames: MIN_UNDEQUEUED_BUFFERS query "
+ "failed: %s (%d)", strerror(-err), -err);
+ goto error;
+ }
+
+ numBufs = minUndequeuedBufs + 1;
+ err = native_window_set_buffer_count(nativeWindow, numBufs);
+ if (err != NO_ERROR) {
+ ALOGE("error pushing blank frames: set_buffer_count failed: %s (%d)", strerror(-err), -err);
+ goto error;
+ }
+
+ // We push numBufs + 1 buffers to ensure that we've drawn into the same
+ // buffer twice. This should guarantee that the buffer has been displayed
+    // on the screen and then been replaced, so any previous video frames are
+ // guaranteed NOT to be currently displayed.
+ for (int i = 0; i < numBufs + 1; i++) {
+ err = native_window_dequeue_buffer_and_wait(nativeWindow, &anb);
+ if (err != NO_ERROR) {
+ ALOGE("error pushing blank frames: dequeueBuffer failed: %s (%d)",
+ strerror(-err), -err);
+ break;
+ }
+
+ sp<GraphicBuffer> buf(new GraphicBuffer(anb, false));
+
+        // Fill the buffer with a 1x1 checkerboard pattern ;)
+ uint32_t *img = NULL;
+ err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
+ if (err != NO_ERROR) {
+ ALOGE("error pushing blank frames: lock failed: %s (%d)", strerror(-err), -err);
+ break;
+ }
+
+ *img = 0;
+
+ err = buf->unlock();
+ if (err != NO_ERROR) {
+ ALOGE("error pushing blank frames: unlock failed: %s (%d)", strerror(-err), -err);
+ break;
+ }
+
+ err = nativeWindow->queueBuffer(nativeWindow, buf->getNativeBuffer(), -1);
+ if (err != NO_ERROR) {
+ ALOGE("error pushing blank frames: queueBuffer failed: %s (%d)", strerror(-err), -err);
+ break;
+ }
+
+ anb = NULL;
+ }
+
+error:
+
+ if (anb != NULL) {
+ nativeWindow->cancelBuffer(nativeWindow, anb, -1);
+ anb = NULL;
+ }
+
+ // Clean up after success or error.
+ status_t err2 = native_window_api_disconnect(nativeWindow, NATIVE_WINDOW_API_CPU);
+ if (err2 != NO_ERROR) {
+ ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", strerror(-err2), -err2);
+ if (err == NO_ERROR) {
+ err = err2;
+ }
+ }
+
+ err2 = native_window_api_connect(nativeWindow, NATIVE_WINDOW_API_MEDIA);
+ if (err2 != NO_ERROR) {
+ ALOGE("error pushing blank frames: api_connect failed: %s (%d)", strerror(-err), -err);
+ if (err == NO_ERROR) {
+ err = err2;
+ }
+ }
+
+ return err;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 413628d..6828b54 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -601,16 +601,18 @@
// reassemble the csd data into its original form
sp<ABuffer> csd0;
if (msg->findBuffer("csd-0", &csd0)) {
- if (mime.startsWith("video/")) { // do we need to be stricter than this?
+ if (mime == MEDIA_MIMETYPE_VIDEO_AVC) {
sp<ABuffer> csd1;
if (msg->findBuffer("csd-1", &csd1)) {
char avcc[1024]; // that oughta be enough, right?
size_t outsize = reassembleAVCC(csd0, csd1, avcc);
meta->setData(kKeyAVCC, kKeyAVCC, avcc, outsize);
}
- } else if (mime.startsWith("audio/")) {
+ } else if (mime == MEDIA_MIMETYPE_AUDIO_AAC || mime == MEDIA_MIMETYPE_VIDEO_MPEG4) {
int csd0size = csd0->size();
char esds[csd0size + 31];
+ // The written ESDS is actually for an audio stream, but it's enough
+ // for transporting the CSD to muxers.
reassembleESDS(csd0, esds);
meta->setData(kKeyESDS, kKeyESDS, esds, sizeof(esds));
}
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index 10937ec..965c55e 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -75,7 +75,7 @@
SoftAAC2::~SoftAAC2() {
aacDecoder_Close(mAACDecoder);
- delete mOutputDelayRingBuffer;
+ delete[] mOutputDelayRingBuffer;
}
void SoftAAC2::initPorts() {
diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
index d1b0f76..a9723ea 100644
--- a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
@@ -428,7 +428,15 @@
}
}
-void SoftAMR::onPortFlushCompleted(OMX_U32 /* portIndex */) {
+void SoftAMR::onPortFlushCompleted(OMX_U32 portIndex) {
+ ALOGV("onPortFlushCompleted portindex %d, resetting frame ", portIndex);
+ if (portIndex == 0) {
+ if (mMode == MODE_NARROW) {
+ Speech_Decode_Frame_reset(mState);
+ } else {
+ pvDecoder_AmrWb_Reset(mState, 0 /* reset_all */);
+ }
+ }
}
void SoftAMR::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
old mode 100644
new mode 100755
index 08e956a..8ac337a
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -121,6 +121,7 @@
mIvColorFormat(IV_YUV_420P),
mNewWidth(mWidth),
mNewHeight(mHeight),
+ mNewLevel(0),
mChangingResolution(false) {
initPorts(
kNumBuffers, INPUT_BUF_SIZE, kNumBuffers, CODEC_MIME_TYPE);
@@ -303,18 +304,22 @@
uint32_t displayHeight = outputBufferHeight();
uint32_t displaySizeY = displayStride * displayHeight;
- if (displaySizeY > (1920 * 1088)) {
- i4_level = 50;
- } else if (displaySizeY > (1280 * 720)) {
- i4_level = 40;
- } else if (displaySizeY > (720 * 576)) {
- i4_level = 31;
- } else if (displaySizeY > (624 * 320)) {
- i4_level = 30;
- } else if (displaySizeY > (352 * 288)) {
- i4_level = 21;
+ if(mNewLevel == 0){
+ if (displaySizeY > (1920 * 1088)) {
+ i4_level = 50;
+ } else if (displaySizeY > (1280 * 720)) {
+ i4_level = 40;
+ } else if (displaySizeY > (720 * 576)) {
+ i4_level = 31;
+ } else if (displaySizeY > (624 * 320)) {
+ i4_level = 30;
+ } else if (displaySizeY > (352 * 288)) {
+ i4_level = 21;
+ } else {
+ i4_level = 20;
+ }
} else {
- i4_level = 20;
+ i4_level = mNewLevel;
}
{
@@ -691,6 +696,7 @@
bool unsupportedDimensions =
(IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));
bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
+ bool unsupportedLevel = (IH264D_UNSUPPORTED_LEVEL == (s_dec_op.u4_error_code & 0xFF));
GETTIME(&mTimeEnd, NULL);
/* Compute time taken for decode() */
@@ -722,6 +728,18 @@
return;
}
+ if (unsupportedLevel && !mFlushNeeded) {
+
+ mNewLevel = 51;
+
+ CHECK_EQ(reInitDecoder(), (status_t)OK);
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);
+
+ ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+ return;
+ }
+
// If the decoder is in the changing resolution mode and there is no output present,
// that means the switching is done and it's ready to reset the decoder and the plugin.
if (mChangingResolution && !s_dec_op.u4_output_present) {
@@ -745,6 +763,17 @@
continue;
}
+ if (unsupportedLevel) {
+
+ if (mFlushNeeded) {
+ setFlushMode();
+ }
+
+ mNewLevel = 51;
+ mInitNeeded = true;
+ continue;
+ }
+
if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
uint32_t width = s_dec_op.u4_pic_wd;
uint32_t height = s_dec_op.u4_pic_ht;
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.h b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
old mode 100644
new mode 100755
index 191a71d..2067810
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.h
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
@@ -100,6 +100,7 @@
bool mInitNeeded;
uint32_t mNewWidth;
uint32_t mNewHeight;
+ uint32_t mNewLevel;
// The input stream has changed to a different resolution, which is still supported by the
// codec. So the codec is switching to decode the new resolution.
bool mChangingResolution;
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
index 6afac74..a00f324 100755
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
@@ -634,6 +634,10 @@
}
if (mConversionBuffer == NULL) {
+ if (((uint64_t)mStride * mHeight) > (((uint64_t)INT32_MAX / 3) * 2)) {
+ ALOGE("Buffer size is too big.");
+ return OMX_ErrorUndefined;
+ }
mConversionBuffer = (uint8_t *)malloc(mStride * mHeight * 3 / 2);
if (mConversionBuffer == NULL) {
ALOGE("Allocating conversion buffer failed.");
@@ -679,6 +683,10 @@
}
/* Allocate array to hold memory records */
+ if (mNumMemRecords > SIZE_MAX / sizeof(iv_mem_rec_t)) {
+ ALOGE("requested memory size is too big.");
+ return OMX_ErrorUndefined;
+ }
mMemRecords = (iv_mem_rec_t *)malloc(mNumMemRecords * sizeof(iv_mem_rec_t));
if (NULL == mMemRecords) {
ALOGE("Unable to allocate memory for hold memory records: Size %zu",
@@ -1188,7 +1196,10 @@
BufferInfo *outputBufferInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader;
- if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS &&
+ inputBufferHeader->nFilledLen == 0) {
+ mSawInputEOS = true;
+
inQueue.erase(inQueue.begin());
inputBufferInfo->mOwnedByUs = false;
notifyEmptyBufferDone(inputBufferHeader);
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
index 90d7c6b..af19bfe 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
@@ -95,6 +95,11 @@
#ifdef DEC_INTERNAL_MEMORY_OPT
video->vol = (Vol **) IMEM_VOL;
#else
+ if ((size_t)nLayers > SIZE_MAX / sizeof(Vol *)) {
+ status = PV_FALSE;
+ goto fail;
+ }
+
video->vol = (Vol **) oscl_malloc(nLayers * sizeof(Vol *));
#endif
if (video->vol == NULL) status = PV_FALSE;
@@ -128,6 +133,11 @@
else oscl_memset(video->prevVop, 0, sizeof(Vop));
video->memoryUsage += (sizeof(Vop) * 2);
+ if ((size_t)nLayers > SIZE_MAX / sizeof(Vop *)) {
+ status = PV_FALSE;
+ goto fail;
+ }
+
video->vopHeader = (Vop **) oscl_malloc(sizeof(Vop *) * nLayers);
#endif
if (video->vopHeader == NULL) status = PV_FALSE;
@@ -277,6 +287,7 @@
status = PV_FALSE;
}
+fail:
if (status == PV_FALSE) PVCleanUpVideoDecoder(decCtrl);
return status;
@@ -305,6 +316,10 @@
video->nMBPerRow * video->nMBPerCol;
}
+ if (((uint64_t)video->width * video->height) > (uint64_t)INT32_MAX / sizeof(PIXEL)) {
+ return PV_FALSE;
+ }
+
size = (int32)sizeof(PIXEL) * video->width * video->height;
#ifdef PV_MEMORY_POOL
decCtrl->size = size;
@@ -320,6 +335,9 @@
video->prevVop->uChan = video->prevVop->yChan + size;
video->prevVop->vChan = video->prevVop->uChan + (size >> 2);
#else
+ if (size > INT32_MAX / 3 * 2) {
+ return PV_FALSE;
+ }
video->currVop->yChan = (PIXEL *) oscl_malloc(size * 3 / 2); /* Allocate memory for all VOP OKA 3/2/1*/
if (video->currVop->yChan == NULL) status = PV_FALSE;
@@ -347,6 +365,10 @@
{
oscl_memset(video->prevEnhcVop, 0, sizeof(Vop));
#ifndef PV_MEMORY_POOL
+ if (size > INT32_MAX / 3 * 2) {
+ return PV_FALSE;
+ }
+
video->prevEnhcVop->yChan = (PIXEL *) oscl_malloc(size * 3 / 2); /* Allocate memory for all VOP OKA 3/2/1*/
if (video->prevEnhcVop->yChan == NULL) status = PV_FALSE;
video->prevEnhcVop->uChan = video->prevEnhcVop->yChan + size;
@@ -403,10 +425,17 @@
if (video->acPredFlag == NULL) status = PV_FALSE;
video->memoryUsage += (nTotalMB);
+ if ((size_t)nTotalMB > SIZE_MAX / sizeof(typeDCStore)) {
+ return PV_FALSE;
+ }
video->predDC = (typeDCStore *) oscl_malloc(nTotalMB * sizeof(typeDCStore));
if (video->predDC == NULL) status = PV_FALSE;
video->memoryUsage += (nTotalMB * sizeof(typeDCStore));
+ if (nMBPerRow > INT32_MAX - 1
+ || (size_t)(nMBPerRow + 1) > SIZE_MAX / sizeof(typeDCACStore)) {
+ return PV_FALSE;
+ }
video->predDCAC_col = (typeDCACStore *) oscl_malloc((nMBPerRow + 1) * sizeof(typeDCACStore));
if (video->predDCAC_col == NULL) status = PV_FALSE;
video->memoryUsage += ((nMBPerRow + 1) * sizeof(typeDCACStore));
@@ -422,6 +451,10 @@
video->headerInfo.CBP = (uint8 *) oscl_malloc(nTotalMB);
if (video->headerInfo.CBP == NULL) status = PV_FALSE;
video->memoryUsage += nTotalMB;
+
+ if ((size_t)nTotalMB > SIZE_MAX / sizeof(int16)) {
+ return PV_FALSE;
+ }
video->QPMB = (int16 *) oscl_malloc(nTotalMB * sizeof(int16));
if (video->QPMB == NULL) status = PV_FALSE;
video->memoryUsage += (nTotalMB * sizeof(int));
@@ -439,6 +472,9 @@
video->memoryUsage += sizeof(MacroBlock);
}
/* Allocating motion vector space */
+ if ((size_t)nTotalMB > SIZE_MAX / (sizeof(MOT) * 4)) {
+ return PV_FALSE;
+ }
video->motX = (MOT *) oscl_malloc(sizeof(MOT) * 4 * nTotalMB);
if (video->motX == NULL) status = PV_FALSE;
video->motY = (MOT *) oscl_malloc(sizeof(MOT) * 4 * nTotalMB);
@@ -472,6 +508,9 @@
}
#else
+ if (nTotalMB > INT32_MAX / 6) {
+ return PV_FALSE;
+ }
video->pstprcTypCur = (uint8 *) oscl_malloc(nTotalMB * 6);
video->memoryUsage += (nTotalMB * 6);
if (video->pstprcTypCur == NULL)
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_api.cpp
index 946e3d0..da27377 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_api.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/src/mp4enc_api.cpp
@@ -610,6 +610,10 @@
max = temp_w * temp_h;
max_width = ((temp_w + 15) >> 4) << 4;
max_height = ((temp_h + 15) >> 4) << 4;
+ if (((uint64_t)max_width * max_height) > (uint64_t)INT32_MAX
+ || temp_w > INT32_MAX - 15 || temp_h > INT32_MAX - 15) {
+ goto CLEAN_UP;
+ }
nTotalMB = ((max_width * max_height) >> 8);
}
@@ -654,6 +658,9 @@
/* Allocating motion vector space and interpolation memory*/
+ if ((size_t)nTotalMB > SIZE_MAX / sizeof(MOT *)) {
+ goto CLEAN_UP;
+ }
video->mot = (MOT **)M4VENC_MALLOC(sizeof(MOT *) * nTotalMB);
if (video->mot == NULL) goto CLEAN_UP;
@@ -676,11 +683,17 @@
/* so that compilers can generate faster code to indexing the */
/* data inside (by using << instead of *). 04/14/2000. */
/* 5/29/01, use decoder lib ACDC prediction memory scheme. */
+ if ((size_t)nTotalMB > SIZE_MAX / sizeof(typeDCStore)) {
+ goto CLEAN_UP;
+ }
video->predDC = (typeDCStore *) M4VENC_MALLOC(nTotalMB * sizeof(typeDCStore));
if (video->predDC == NULL) goto CLEAN_UP;
if (!video->encParams->H263_Enabled)
{
+ if ((size_t)((max_width >> 4) + 1) > SIZE_MAX / sizeof(typeDCACStore)) {
+ goto CLEAN_UP;
+ }
video->predDCAC_col = (typeDCACStore *) M4VENC_MALLOC(((max_width >> 4) + 1) * sizeof(typeDCACStore));
if (video->predDCAC_col == NULL) goto CLEAN_UP;
@@ -688,6 +701,9 @@
/* the rest will be used for storing horizontal (row) AC coefficients */
video->predDCAC_row = video->predDCAC_col + 1; /* ACDC */
+ if ((size_t)nTotalMB > SIZE_MAX / sizeof(Int)) {
+ goto CLEAN_UP;
+ }
video->acPredFlag = (Int *) M4VENC_MALLOC(nTotalMB * sizeof(Int)); /* Memory for acPredFlag */
if (video->acPredFlag == NULL) goto CLEAN_UP;
}
@@ -741,8 +757,15 @@
offset = (pitch << 4) + 16;
max_height += 32;
}
+ if (((uint64_t)pitch * max_height) > (uint64_t)INT32_MAX) {
+ goto CLEAN_UP;
+ }
size = pitch * max_height;
+ if (size > INT32_MAX - (size >> 1)
+ || (size_t)(size + (size >> 1)) > SIZE_MAX / sizeof(PIXEL)) {
+ goto CLEAN_UP;
+ }
video->currVop->yChan = (PIXEL *)M4VENC_MALLOC(sizeof(PIXEL) * (size + (size >> 1))); /* Memory for currVop Y */
if (video->currVop->yChan == NULL) goto CLEAN_UP;
video->currVop->uChan = video->currVop->yChan + size;/* Memory for currVop U */
@@ -841,6 +864,9 @@
/* /// End /////////////////////////////////////// */
+ if ((size_t)nLayers > SIZE_MAX / sizeof(Vol *)) {
+ goto CLEAN_UP;
+ }
video->vol = (Vol **)M4VENC_MALLOC(nLayers * sizeof(Vol *)); /* Memory for VOL pointers */
/* Memory allocation and Initialization of Vols and writing of headers */
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
index 6322dc2..7ff9ee7 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -403,6 +403,14 @@
BufferInfo *inInfo = *inQueue.begin();
OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ // Ignore CSD re-submissions.
+ if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+ return;
+ }
+
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
index 8f356b6..c559682 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
@@ -364,7 +364,7 @@
} else {
numFrames = vorbis_dsp_pcmout(
mState, (int16_t *)outHeader->pBuffer,
- kMaxNumSamplesPerBuffer);
+ (kMaxNumSamplesPerBuffer / mVi->channels));
if (numFrames < 0) {
ALOGE("vorbis_dsp_pcmout returned %d", numFrames);
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 21da707..d22451b 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -196,17 +196,29 @@
mNativeWindow.get(), transform));
}
-void SoftwareRenderer::render(
- const void *data, size_t /*size*/, int64_t timestampNs,
+void SoftwareRenderer::clearTracker() {
+ mRenderTracker.clear(-1 /* lastRenderTimeNs */);
+}
+
+std::list<FrameRenderTracker::Info> SoftwareRenderer::render(
+ const void *data, size_t size, int64_t mediaTimeUs, nsecs_t renderTimeNs,
void* /*platformPrivate*/, const sp<AMessage>& format) {
resetFormatIfChanged(format);
+ FrameRenderTracker::Info *info = NULL;
ANativeWindowBuffer *buf;
- int err;
- if ((err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(),
- &buf)) != 0) {
+ int fenceFd = -1;
+ int err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
+ if (err == 0 && fenceFd >= 0) {
+ info = mRenderTracker.updateInfoForDequeuedBuffer(buf, fenceFd, 0);
+ sp<Fence> fence = new Fence(fenceFd);
+ err = fence->waitForever("SoftwareRenderer::render");
+ }
+ if (err != 0) {
ALOGW("Surface::dequeueBuffer returned error %d", err);
- return;
+ // complete (drop) dequeued frame if fence wait failed; otherwise,
+ // this returns an empty list as no frames should have rendered and not yet returned.
+ return mRenderTracker.checkFencesAndGetRenderedFrames(info, false /* dropIncomplete */);
}
GraphicBufferMapper &mapper = GraphicBufferMapper::get();
@@ -228,6 +240,9 @@
buf->stride, buf->height,
0, 0, mCropWidth - 1, mCropHeight - 1);
} else if (mColorFormat == OMX_COLOR_FormatYUV420Planar) {
+ if ((size_t)mWidth * mHeight * 3 / 2 > size) {
+ goto skip_copying;
+ }
const uint8_t *src_y = (const uint8_t *)data;
const uint8_t *src_u =
(const uint8_t *)data + mWidth * mHeight;
@@ -258,6 +273,9 @@
}
} else if (mColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
|| mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ if ((size_t)mWidth * mHeight * 3 / 2 > size) {
+ goto skip_copying;
+ }
const uint8_t *src_y = (const uint8_t *)data;
const uint8_t *src_uv = (const uint8_t *)data
+ mWidth * (mHeight - mCropTop / 2);
@@ -289,6 +307,9 @@
dst_v += dst_c_stride;
}
} else if (mColorFormat == OMX_COLOR_Format24bitRGB888) {
+ if ((size_t)mWidth * mHeight * 3 > size) {
+ goto skip_copying;
+ }
uint8_t* srcPtr = (uint8_t*)data;
uint8_t* dstPtr = (uint8_t*)dst;
@@ -298,6 +319,9 @@
dstPtr += buf->stride * 3;
}
} else if (mColorFormat == OMX_COLOR_Format32bitARGB8888) {
+ if ((size_t)mWidth * mHeight * 4 > size) {
+ goto skip_copying;
+ }
uint8_t *srcPtr, *dstPtr;
for (size_t y = 0; y < (size_t)mCropHeight; ++y) {
@@ -312,6 +336,9 @@
}
}
} else if (mColorFormat == OMX_COLOR_Format32BitRGBA8888) {
+ if ((size_t)mWidth * mHeight * 4 > size) {
+ goto skip_copying;
+ }
uint8_t* srcPtr = (uint8_t*)data;
uint8_t* dstPtr = (uint8_t*)dst;
@@ -324,18 +351,24 @@
LOG_ALWAYS_FATAL("bad color format %#x", mColorFormat);
}
+skip_copying:
CHECK_EQ(0, mapper.unlock(buf->handle));
- if ((err = native_window_set_buffers_timestamp(mNativeWindow.get(),
- timestampNs)) != 0) {
- ALOGW("Surface::set_buffers_timestamp returned error %d", err);
+ if (renderTimeNs >= 0) {
+ if ((err = native_window_set_buffers_timestamp(mNativeWindow.get(),
+ renderTimeNs)) != 0) {
+ ALOGW("Surface::set_buffers_timestamp returned error %d", err);
+ }
}
- if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf,
- -1)) != 0) {
+ if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf, -1)) != 0) {
ALOGW("Surface::queueBuffer returned error %d", err);
+ } else {
+ mRenderTracker.onFrameQueued(mediaTimeUs, (GraphicBuffer *)buf, Fence::NO_FENCE);
}
+
buf = NULL;
+ return mRenderTracker.checkFencesAndGetRenderedFrames(info, info != NULL /* dropIncomplete */);
}
} // namespace android
diff --git a/media/libstagefright/filters/MediaFilter.cpp b/media/libstagefright/filters/MediaFilter.cpp
index fa9d630..0cf6b06 100644
--- a/media/libstagefright/filters/MediaFilter.cpp
+++ b/media/libstagefright/filters/MediaFilter.cpp
@@ -76,9 +76,9 @@
(new AMessage(kWhatCreateInputSurface, this))->post();
}
-void MediaFilter::initiateUsePersistentInputSurface(
+void MediaFilter::initiateSetInputSurface(
const sp<PersistentSurface> & /* surface */) {
- ALOGW("initiateUsePersistentInputSurface() unsupported");
+ ALOGW("initiateSetInputSurface() unsupported");
}
void MediaFilter::initiateStart() {
diff --git a/media/libstagefright/foundation/ADebug.cpp b/media/libstagefright/foundation/ADebug.cpp
index ec4a960..0d1cea4 100644
--- a/media/libstagefright/foundation/ADebug.cpp
+++ b/media/libstagefright/foundation/ADebug.cpp
@@ -19,6 +19,7 @@
#include <ctype.h>
#define LOG_TAG "ADebug"
+#include <cutils/atomic.h>
#include <utils/Log.h>
#include <utils/misc.h>
@@ -113,5 +114,43 @@
return debugName;
}
+//static
+bool ADebug::getExperimentFlag(
+ bool allow, const char *name, uint64_t modulo,
+ uint64_t limit, uint64_t plus, uint64_t timeDivisor) {
+ static volatile int32_t haveSerial = 0;
+ static uint64_t serialNum;
+ if (!android_atomic_acquire_load(&haveSerial)) {
+ // calculate initial counter value based on serial number
+ static char serial[PROPERTY_VALUE_MAX];
+ property_get("ro.serialno", serial, "0");
+ uint64_t num = 0; // it is okay for this number to overflow
+ for (size_t i = 0; i < NELEM(serial) && serial[i] != '\0'; ++i) {
+ const char &c = serial[i];
+ // try to use most letters of serialno
+ if (isdigit(c)) {
+ num = num * 10 + (c - '0');
+ } else if (islower(c)) {
+ num = num * 26 + (c - 'a');
+ } else if (isupper(c)) {
+ num = num * 26 + (c - 'A');
+ } else {
+ num = num * 256 + c;
+ }
+ }
+ ALOGI("got serial");
+ serialNum = num;
+ android_atomic_release_store(1, &haveSerial);
+ }
+ ALOGI("serial: %llu, time: %llu", (long long)serialNum, (long long)time(NULL));
+ // MINOR: use modulo for counter and time, so that their sum does not
+ // roll over, and mess up the correlation between related experiments.
+ // e.g. keep (a mod 2N) = 0 implies (a mod N) = 0
+ time_t counter = (time(NULL) / timeDivisor) % modulo + plus + serialNum % modulo;
+ bool enable = allow && (counter % modulo < limit);
+ ALOGI("experiment '%s': %s", name, enable ? "ENABLED" : "disabled");
+ return enable;
+}
+
} // namespace android
diff --git a/media/libstagefright/http/MediaHTTP.cpp b/media/libstagefright/http/MediaHTTP.cpp
index bb89567..801ff26 100644
--- a/media/libstagefright/http/MediaHTTP.cpp
+++ b/media/libstagefright/http/MediaHTTP.cpp
@@ -30,12 +30,11 @@
namespace android {
MediaHTTP::MediaHTTP(const sp<IMediaHTTPConnection> &conn)
- : mInitCheck(NO_INIT),
+ : mInitCheck((conn != NULL) ? OK : NO_INIT),
mHTTPConnection(conn),
mCachedSizeValid(false),
mCachedSize(0ll),
mDrmManagerClient(NULL) {
- mInitCheck = OK;
}
MediaHTTP::~MediaHTTP() {
@@ -54,7 +53,10 @@
if (headers != NULL) {
extHeaders = *headers;
}
- extHeaders.add(String8("User-Agent"), String8(MakeUserAgent().c_str()));
+
+ if (extHeaders.indexOfKey(String8("User-Agent")) < 0) {
+ extHeaders.add(String8("User-Agent"), String8(MakeUserAgent().c_str()));
+ }
bool success = mHTTPConnection->connect(uri, &extHeaders);
@@ -171,6 +173,10 @@
}
String8 MediaHTTP::getUri() {
+ if (mInitCheck != OK) {
+ return String8::empty();
+ }
+
String8 uri;
if (OK == mHTTPConnection->getUri(&uri)) {
return uri;
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index d8c38e7..27509cb 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -1406,7 +1406,7 @@
sp<AMessage> format = new AMessage();
format->setInt32("type", MEDIA_TRACK_TYPE_METADATA);
format->setString("language", "und");
- format->setString("mime", MEDIA_MIMETYPE_DATA_METADATA);
+ format->setString("mime", MEDIA_MIMETYPE_DATA_TIMED_ID3);
return format;
}
return mPlaylist->getTrackInfo(trackIndex);
@@ -1503,11 +1503,10 @@
ALOGV("discarding fetcher-%d", fetcher->getFetcherID());
fetcher->stopAsync();
} else {
- float threshold = -1.0f; // always finish fetching by default
+ float threshold = 0.0f; // default to pause after current block (47Kbytes)
bool disconnect = false;
if (timeUs >= 0ll) {
// seeking, no need to finish fetching
- threshold = 0.0f;
disconnect = true;
} else if (delayRemoval) {
// adapting, abort if remaining of current segment is over threshold
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 53087b6..4851528 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -1082,6 +1082,16 @@
mSeqNumber, firstSeqNumberInPlaylist,
firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1);
+ if (mTSParser != NULL) {
+ mTSParser->signalEOS(ERROR_END_OF_STREAM);
+ // Use an empty buffer; we don't have any new data, just want to extract
+ // potential new access units after flush. Reset mSeqNumber to
+ // lastSeqNumberInPlaylist such that we set the correct access unit
+ // properties in extractAndQueueAccessUnitsFromTs.
+ sp<ABuffer> buffer = new ABuffer(0);
+ mSeqNumber = lastSeqNumberInPlaylist;
+ extractAndQueueAccessUnitsFromTs(buffer);
+ }
notifyError(ERROR_END_OF_STREAM);
} else {
// It's possible that we were never able to download the playlist.
@@ -1424,11 +1434,17 @@
int64_t minDiffUs, maxDiffUs;
if (mSeekMode == LiveSession::kSeekModeNextSample) {
+ // if the previous fetcher paused in the middle of a segment, we
+ // want to start at a segment that overlaps the last sample
minDiffUs = -mPlaylist->getTargetDuration();
maxDiffUs = 0ll;
} else {
+ // if the previous fetcher paused at the end of a segment, ideally
+ // we want to start at the segment that's roughly aligned with its
+ // next segment, but if the two variants are not well aligned we
+ // adjust the diff to within (-T/2, T/2)
minDiffUs = -mPlaylist->getTargetDuration() / 2;
- maxDiffUs = mPlaylist->getTargetDuration();
+ maxDiffUs = mPlaylist->getTargetDuration() / 2;
}
int32_t oldSeqNumber = mSeqNumber;
@@ -1611,6 +1627,9 @@
ALOGE("MPEG2 Transport streams do not contain subtitles.");
return ERROR_MALFORMED;
}
+ if (stream == LiveSession::STREAMTYPE_METADATA) {
+ continue;
+ }
ATSParser::SourceType type =LiveSession::getSourceTypeForStream(stream);
sp<AnotherPacketSource> source =
static_cast<AnotherPacketSource *>(
diff --git a/media/libstagefright/include/MPEG2TSExtractor.h b/media/libstagefright/include/MPEG2TSExtractor.h
index 4dd340c..8eb8f6c 100644
--- a/media/libstagefright/include/MPEG2TSExtractor.h
+++ b/media/libstagefright/include/MPEG2TSExtractor.h
@@ -20,7 +20,9 @@
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaSource.h>
#include <utils/threads.h>
+#include <utils/KeyedVector.h>
#include <utils/Vector.h>
namespace android {
@@ -54,10 +56,21 @@
Vector<sp<AnotherPacketSource> > mSourceImpls;
+ Vector<KeyedVector<int64_t, off64_t> > mSyncPoints;
+ // Sync points used for seeking --- normally one for video track is used.
+ // If no video track is present, audio track will be used instead.
+ KeyedVector<int64_t, off64_t> *mSeekSyncPoints;
+
off64_t mOffset;
void init();
status_t feedMore();
+ status_t seek(int64_t seekTimeUs,
+ const MediaSource::ReadOptions::SeekMode& seekMode);
+ status_t queueDiscontinuityForSeek(int64_t actualSeekTimeUs);
+ status_t seekBeyond(int64_t seekTimeUs);
+
+ status_t feedUntilBufferAvailable(const sp<AnotherPacketSource> &impl);
DISALLOW_EVIL_CONSTRUCTORS(MPEG2TSExtractor);
};
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index b1ee628..d468dfc 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -69,7 +69,7 @@
node_id node, OMX_U32 port_index, OMX_U32* usage);
virtual status_t storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable);
+ node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type);
virtual status_t prepareForAdaptivePlayback(
node_id node, OMX_U32 portIndex, OMX_BOOL enable,
@@ -81,7 +81,7 @@
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer);
+ buffer_id *buffer, OMX_U32 allottedSize);
virtual status_t useGraphicBuffer(
node_id node, OMX_U32 port_index,
@@ -93,15 +93,17 @@
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
- sp<IGraphicBufferProducer> *bufferProducer);
+ sp<IGraphicBufferProducer> *bufferProducer,
+ MetadataBufferType *type);
virtual status_t createPersistentInputSurface(
sp<IGraphicBufferProducer> *bufferProducer,
sp<IGraphicBufferConsumer> *bufferConsumer);
- virtual status_t usePersistentInputSurface(
+ virtual status_t setInputSurface(
node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer);
+ const sp<IGraphicBufferConsumer> &bufferConsumer,
+ MetadataBufferType *type);
virtual status_t signalEndOfInputStream(node_id node);
@@ -111,18 +113,18 @@
virtual status_t allocateBufferWithBackup(
node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer);
+ buffer_id *buffer, OMX_U32 allottedSize);
virtual status_t freeBuffer(
node_id node, OMX_U32 port_index, buffer_id buffer);
- virtual status_t fillBuffer(node_id node, buffer_id buffer);
+ virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd);
virtual status_t emptyBuffer(
node_id node,
buffer_id buffer,
OMX_U32 range_offset, OMX_U32 range_length,
- OMX_U32 flags, OMX_TICKS timestamp);
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);
virtual status_t getExtensionIndex(
node_id node,
@@ -146,10 +148,10 @@
OMX_IN OMX_PTR pEventData);
OMX_ERRORTYPE OnEmptyBufferDone(
- node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
+ node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd);
OMX_ERRORTYPE OnFillBufferDone(
- node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
+ node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd);
void invalidateNodeID(node_id node);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index 03c9a8a..f68e0a9 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -58,7 +58,8 @@
status_t getGraphicBufferUsage(OMX_U32 portIndex, OMX_U32* usage);
- status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+ status_t storeMetaDataInBuffers(
+ OMX_U32 portIndex, OMX_BOOL enable, MetadataBufferType *type);
status_t prepareForAdaptivePlayback(
OMX_U32 portIndex, OMX_BOOL enable,
@@ -70,7 +71,7 @@
status_t useBuffer(
OMX_U32 portIndex, const sp<IMemory> ¶ms,
- OMX::buffer_id *buffer);
+ OMX::buffer_id *buffer, OMX_U32 allottedSize);
status_t useGraphicBuffer(
OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
@@ -81,14 +82,16 @@
OMX::buffer_id buffer);
status_t createInputSurface(
- OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer);
+ OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer,
+ MetadataBufferType *type);
static status_t createPersistentInputSurface(
sp<IGraphicBufferProducer> *bufferProducer,
sp<IGraphicBufferConsumer> *bufferConsumer);
- status_t usePersistentInputSurface(
- OMX_U32 portIndex, const sp<IGraphicBufferConsumer> &bufferConsumer);
+ status_t setInputSurface(
+ OMX_U32 portIndex, const sp<IGraphicBufferConsumer> &bufferConsumer,
+ MetadataBufferType *type);
status_t signalEndOfInputStream();
@@ -98,21 +101,20 @@
status_t allocateBufferWithBackup(
OMX_U32 portIndex, const sp<IMemory> ¶ms,
- OMX::buffer_id *buffer);
+ OMX::buffer_id *buffer, OMX_U32 allottedSize);
status_t freeBuffer(OMX_U32 portIndex, OMX::buffer_id buffer);
- status_t fillBuffer(OMX::buffer_id buffer);
+ status_t fillBuffer(OMX::buffer_id buffer, int fenceFd);
status_t emptyBuffer(
OMX::buffer_id buffer,
OMX_U32 rangeOffset, OMX_U32 rangeLength,
- OMX_U32 flags, OMX_TICKS timestamp);
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);
- status_t emptyDirectBuffer(
- OMX_BUFFERHEADERTYPE *header,
- OMX_U32 rangeOffset, OMX_U32 rangeLength,
- OMX_U32 flags, OMX_TICKS timestamp);
+ status_t emptyGraphicBuffer(
+ OMX_BUFFERHEADERTYPE *header, const sp<GraphicBuffer> &buffer,
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);
status_t getExtensionIndex(
const char *parameterName, OMX_INDEXTYPE *index);
@@ -123,6 +125,8 @@
const void *data,
size_t size);
+ // handles messages and removes them from the list
+ void onMessages(std::list<omx_message> &messages);
void onMessage(const omx_message &msg);
void onObserverDied(OMXMaster *master);
void onGetHandleFailed();
@@ -151,12 +155,12 @@
OMX::buffer_id mID;
};
Vector<ActiveBuffer> mActiveBuffers;
-#ifdef __LP64__
+ // for buffer ptr to buffer id translation
Mutex mBufferIDLock;
uint32_t mBufferIDCount;
KeyedVector<OMX::buffer_id, OMX_BUFFERHEADERTYPE *> mBufferIDToBufferHeader;
KeyedVector<OMX_BUFFERHEADERTYPE *, OMX::buffer_id> mBufferHeaderToBufferID;
-#endif
+ MetadataBufferType mMetadataType[2];
// For debug support
char *mName;
@@ -204,18 +208,35 @@
OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
status_t storeMetaDataInBuffers_l(
- OMX_U32 portIndex, OMX_BOOL enable,
- OMX_BOOL useGraphicBuffer, OMX_BOOL *usingGraphicBufferInMeta);
+ OMX_U32 portIndex, OMX_BOOL enable, MetadataBufferType *type);
+
+ // Stores fence into buffer if it is ANWBuffer type and has enough space.
+ // otherwise, waits for the fence to signal. Takes ownership of |fenceFd|.
+ status_t storeFenceInMeta_l(
+ OMX_BUFFERHEADERTYPE *header, int fenceFd, OMX_U32 portIndex);
+
+ // Retrieves the fence from buffer if ANWBuffer type and has enough space. Otherwise, returns -1
+ int retrieveFenceFromMeta_l(
+ OMX_BUFFERHEADERTYPE *header, OMX_U32 portIndex);
status_t emptyBuffer_l(
OMX_BUFFERHEADERTYPE *header,
- OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr);
+ OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr, int fenceFd);
+
+ status_t updateGraphicBufferInMeta_l(
+ OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+ OMX::buffer_id buffer, OMX_BUFFERHEADERTYPE *header);
status_t createGraphicBufferSource(
- OMX_U32 portIndex, sp<IGraphicBufferConsumer> consumer = NULL);
+ OMX_U32 portIndex, sp<IGraphicBufferConsumer> consumer /* nullable */,
+ MetadataBufferType *type);
sp<GraphicBufferSource> getGraphicBufferSource();
void setGraphicBufferSource(const sp<GraphicBufferSource>& bufferSource);
+ // Handles |msg|, and may modify it. Returns true iff completely handled it and
+ // |msg| does not need to be sent to the event listener.
+ bool handleMessage(omx_message &msg);
+
OMXNodeInstance(const OMXNodeInstance &);
OMXNodeInstance &operator=(const OMXNodeInstance &);
};
diff --git a/media/libstagefright/include/OggExtractor.h b/media/libstagefright/include/OggExtractor.h
index e97c8cd..c647cbb 100644
--- a/media/libstagefright/include/OggExtractor.h
+++ b/media/libstagefright/include/OggExtractor.h
@@ -27,7 +27,7 @@
class DataSource;
class String8;
-struct MyVorbisExtractor;
+struct MyOggExtractor;
struct OggSource;
struct OggExtractor : public MediaExtractor {
@@ -48,7 +48,7 @@
sp<DataSource> mDataSource;
status_t mInitCheck;
- MyVorbisExtractor *mImpl;
+ MyOggExtractor *mImpl;
OggExtractor(const OggExtractor &);
OggExtractor &operator=(const OggExtractor &);
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index fa3ea89..9e652d5 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -19,9 +19,12 @@
#define SOFTWARE_RENDERER_H_
#include <media/stagefright/ColorConverter.h>
+#include <media/stagefright/FrameRenderTracker.h>
#include <utils/RefBase.h>
#include <system/window.h>
+#include <list>
+
namespace android {
struct AMessage;
@@ -32,9 +35,10 @@
~SoftwareRenderer();
- void render(
- const void *data, size_t size, int64_t timestampNs,
+ std::list<FrameRenderTracker::Info> render(
+ const void *data, size_t size, int64_t mediaTimeUs, nsecs_t renderTimeNs,
void *platformPrivate, const sp<AMessage> &format);
+ void clearTracker();
private:
enum YUVMode {
@@ -48,6 +52,7 @@
int32_t mWidth, mHeight;
int32_t mCropLeft, mCropTop, mCropRight, mCropBottom;
int32_t mCropWidth, mCropHeight;
+ FrameRenderTracker mRenderTracker;
SoftwareRenderer(const SoftwareRenderer &);
SoftwareRenderer &operator=(const SoftwareRenderer &);
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index 0d071b2..db429f6 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -54,10 +54,13 @@
bool parsePSISection(
unsigned pid, ABitReader *br, status_t *err);
+ // Pass to appropriate stream according to pid, and set event if it's a PES
+ // with a sync frame.
+ // Note that the method itself does not touch event.
bool parsePID(
unsigned pid, unsigned continuity_counter,
unsigned payload_unit_start_indicator,
- ABitReader *br, status_t *err);
+ ABitReader *br, status_t *err, SyncEvent *event);
void signalDiscontinuity(
DiscontinuityType type, const sp<AMessage> &extra);
@@ -118,10 +121,14 @@
unsigned pid() const { return mElementaryPID; }
void setPID(unsigned pid) { mElementaryPID = pid; }
+ // Parse the payload and set event when PES with a sync frame is detected.
+ // This method knows when a PES starts; so record mPesStartOffset in that
+ // case.
status_t parse(
unsigned continuity_counter,
unsigned payload_unit_start_indicator,
- ABitReader *br);
+ ABitReader *br,
+ SyncEvent *event);
void signalDiscontinuity(
DiscontinuityType type, const sp<AMessage> &extra);
@@ -150,17 +157,24 @@
bool mEOSReached;
uint64_t mPrevPTS;
+ off64_t mPesStartOffset;
ElementaryStreamQueue *mQueue;
- status_t flush();
- status_t parsePES(ABitReader *br);
+ // Flush accumulated payload if necessary --- i.e. at EOS or at the start of
+ // another payload. event is set if the flushed payload is PES with a sync
+ // frame.
+ status_t flush(SyncEvent *event);
+ // Strip and parse PES headers and pass remaining payload into onPayload
+ // with parsed metadata. event is set if the PES contains a sync frame.
+ status_t parsePES(ABitReader *br, SyncEvent *event);
+ // Feed the payload into mQueue and if a packet is identified, queue it
+ // into mSource. If the packet is a sync frame. set event with start offset
+ // and timestamp of the packet.
void onPayloadData(
unsigned PTS_DTS_flags, uint64_t PTS, uint64_t DTS,
- const uint8_t *data, size_t size);
-
- void extractAACFrames(const sp<ABuffer> &buffer);
+ const uint8_t *data, size_t size, SyncEvent *event);
DISALLOW_EVIL_CONSTRUCTORS(Stream);
};
@@ -190,6 +204,17 @@
DISALLOW_EVIL_CONSTRUCTORS(PSISection);
};
+ATSParser::SyncEvent::SyncEvent(off64_t offset)
+ : mInit(false), mOffset(offset), mTimeUs(0) {}
+
+void ATSParser::SyncEvent::init(off64_t offset, const sp<MediaSource> &source,
+ int64_t timeUs) {
+ mInit = true;
+ mOffset = offset;
+ mMediaSource = source;
+ mTimeUs = timeUs;
+}
+
////////////////////////////////////////////////////////////////////////////////
ATSParser::Program::Program(
@@ -220,7 +245,7 @@
bool ATSParser::Program::parsePID(
unsigned pid, unsigned continuity_counter,
unsigned payload_unit_start_indicator,
- ABitReader *br, status_t *err) {
+ ABitReader *br, status_t *err, SyncEvent *event) {
*err = OK;
ssize_t index = mStreams.indexOfKey(pid);
@@ -229,7 +254,7 @@
}
*err = mStreams.editValueAt(index)->parse(
- continuity_counter, payload_unit_start_indicator, br);
+ continuity_counter, payload_unit_start_indicator, br, event);
return true;
}
@@ -628,7 +653,8 @@
status_t ATSParser::Stream::parse(
unsigned continuity_counter,
- unsigned payload_unit_start_indicator, ABitReader *br) {
+ unsigned payload_unit_start_indicator, ABitReader *br,
+ SyncEvent *event) {
if (mQueue == NULL) {
return OK;
}
@@ -659,12 +685,13 @@
mExpectedContinuityCounter = (continuity_counter + 1) & 0x0f;
if (payload_unit_start_indicator) {
+ off64_t offset = (event != NULL) ? event->getOffset() : 0;
if (mPayloadStarted) {
// Otherwise we run the danger of receiving the trailing bytes
// of a PES packet that we never saw the start of and assuming
// we have a a complete PES packet.
- status_t err = flush();
+ status_t err = flush(event);
if (err != OK) {
return err;
@@ -672,6 +699,7 @@
}
mPayloadStarted = true;
+ mPesStartOffset = offset;
}
if (!mPayloadStarted) {
@@ -746,6 +774,7 @@
}
mPayloadStarted = false;
+ mEOSReached = false;
mBuffer->setRange(0, 0);
bool clearFormat = false;
@@ -784,10 +813,10 @@
mSource->signalEOS(finalResult);
}
mEOSReached = true;
- flush();
+ flush(NULL);
}
-status_t ATSParser::Stream::parsePES(ABitReader *br) {
+status_t ATSParser::Stream::parsePES(ABitReader *br, SyncEvent *event) {
unsigned packet_startcode_prefix = br->getBits(24);
ALOGV("packet_startcode_prefix = 0x%08x", packet_startcode_prefix);
@@ -972,13 +1001,13 @@
}
onPayloadData(
- PTS_DTS_flags, PTS, DTS, br->data(), dataLength);
+ PTS_DTS_flags, PTS, DTS, br->data(), dataLength, event);
br->skipBits(dataLength * 8);
} else {
onPayloadData(
PTS_DTS_flags, PTS, DTS,
- br->data(), br->numBitsLeft() / 8);
+ br->data(), br->numBitsLeft() / 8, event);
size_t payloadSizeBits = br->numBitsLeft();
if (payloadSizeBits % 8 != 0u) {
@@ -1002,8 +1031,8 @@
return OK;
}
-status_t ATSParser::Stream::flush() {
- if (mBuffer->size() == 0) {
+status_t ATSParser::Stream::flush(SyncEvent *event) {
+ if (mBuffer == NULL || mBuffer->size() == 0) {
return OK;
}
@@ -1011,7 +1040,7 @@
ABitReader br(mBuffer->data(), mBuffer->size());
- status_t err = parsePES(&br);
+ status_t err = parsePES(&br, event);
mBuffer->setRange(0, 0);
@@ -1020,7 +1049,7 @@
void ATSParser::Stream::onPayloadData(
unsigned PTS_DTS_flags, uint64_t PTS, uint64_t /* DTS */,
- const uint8_t *data, size_t size) {
+ const uint8_t *data, size_t size, SyncEvent *event) {
#if 0
ALOGI("payload streamType 0x%02x, PTS = 0x%016llx, dPTS = %lld",
mStreamType,
@@ -1047,6 +1076,7 @@
}
sp<ABuffer> accessUnit;
+ bool found = false;
while ((accessUnit = mQueue->dequeueAccessUnit()) != NULL) {
if (mSource == NULL) {
sp<MetaData> meta = mQueue->getFormat();
@@ -1074,6 +1104,17 @@
}
mSource->queueAccessUnit(accessUnit);
}
+
+ if ((event != NULL) && !found && mQueue->getFormat() != NULL) {
+ int32_t sync = 0;
+ if (accessUnit->meta()->findInt32("isSync", &sync) && sync) {
+ int64_t timeUs;
+ if (accessUnit->meta()->findInt64("timeUs", &timeUs)) {
+ found = true;
+ event->init(mPesStartOffset, mSource, timeUs);
+ }
+ }
+ }
}
}
@@ -1126,14 +1167,15 @@
ATSParser::~ATSParser() {
}
-status_t ATSParser::feedTSPacket(const void *data, size_t size) {
+status_t ATSParser::feedTSPacket(const void *data, size_t size,
+ SyncEvent *event) {
if (size != kTSPacketSize) {
ALOGE("Wrong TS packet size");
return BAD_VALUE;
}
ABitReader br((const uint8_t *)data, kTSPacketSize);
- return parseTS(&br);
+ return parseTS(&br, event);
}
void ATSParser::signalDiscontinuity(
@@ -1261,7 +1303,8 @@
status_t ATSParser::parsePID(
ABitReader *br, unsigned PID,
unsigned continuity_counter,
- unsigned payload_unit_start_indicator) {
+ unsigned payload_unit_start_indicator,
+ SyncEvent *event) {
ssize_t sectionIndex = mPSISections.indexOfKey(PID);
if (sectionIndex >= 0) {
@@ -1333,7 +1376,7 @@
status_t err;
if (mPrograms.editItemAt(i)->parsePID(
PID, continuity_counter, payload_unit_start_indicator,
- br, &err)) {
+ br, &err, event)) {
if (err != OK) {
return err;
}
@@ -1354,6 +1397,11 @@
unsigned adaptation_field_length = br->getBits(8);
if (adaptation_field_length > 0) {
+ if (adaptation_field_length * 8 > br->numBitsLeft()) {
+ ALOGV("Adaptation field should be included in a single TS packet.");
+ return ERROR_MALFORMED;
+ }
+
unsigned discontinuity_indicator = br->getBits(1);
if (discontinuity_indicator) {
@@ -1404,7 +1452,7 @@
return OK;
}
-status_t ATSParser::parseTS(ABitReader *br) {
+status_t ATSParser::parseTS(ABitReader *br, SyncEvent *event) {
ALOGV("---");
unsigned sync_byte = br->getBits(8);
@@ -1443,8 +1491,8 @@
}
if (err == OK) {
if (adaptation_field_control == 1 || adaptation_field_control == 3) {
- err = parsePID(
- br, PID, continuity_counter, payload_unit_start_indicator);
+ err = parsePID(br, PID, continuity_counter,
+ payload_unit_start_indicator, event);
}
}
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index 4def333..430a8d5 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -22,6 +22,7 @@
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaSource.h>
#include <utils/KeyedVector.h>
#include <utils/Vector.h>
#include <utils/RefBase.h>
@@ -30,7 +31,6 @@
class ABitReader;
struct ABuffer;
-struct MediaSource;
struct ATSParser : public RefBase {
enum DiscontinuityType {
@@ -62,9 +62,43 @@
ALIGNED_VIDEO_DATA = 2,
};
+ // Event is used to signal sync point event at feedTSPacket().
+ struct SyncEvent {
+ SyncEvent(off64_t offset);
+
+ void init(off64_t offset, const sp<MediaSource> &source,
+ int64_t timeUs);
+
+ bool isInit() { return mInit; }
+ off64_t getOffset() { return mOffset; }
+ const sp<MediaSource> &getMediaSource() { return mMediaSource; }
+ int64_t getTimeUs() { return mTimeUs; }
+
+ private:
+ bool mInit;
+ /*
+ * mInit == false: the current offset
+ * mInit == true: the start offset of sync payload
+ */
+ off64_t mOffset;
+ /* The media source object for this event. */
+ sp<MediaSource> mMediaSource;
+ /* The timestamp of the sync frame. */
+ int64_t mTimeUs;
+ };
+
ATSParser(uint32_t flags = 0);
- status_t feedTSPacket(const void *data, size_t size);
+ // Feed a TS packet into the parser. uninitialized event with the start
+ // offset of this TS packet goes in, and if the parser detects PES with
+ // a sync frame, the event will be initialized with the start offset of the
+ // PES. Note that the offset of the event can be different from what we fed,
+ // as a PES may consist of multiple TS packets.
+ //
+ // Even in the case feedTSPacket() returns non-OK value, event still may be
+ // initialized if the parsing failed after the detection.
+ status_t feedTSPacket(
+ const void *data, size_t size, SyncEvent *event = NULL);
void signalDiscontinuity(
DiscontinuityType type, const sp<AMessage> &extra);
@@ -126,15 +160,25 @@
void parseProgramAssociationTable(ABitReader *br);
void parseProgramMap(ABitReader *br);
- void parsePES(ABitReader *br);
+ // Parse PES packet where br is pointing to. If the PES contains a sync
+ // frame, set event with the time and the start offset of this PES.
+ // Note that the method itself does not touch event.
+ void parsePES(ABitReader *br, SyncEvent *event);
+ // Strip remaining packet headers and pass to appropriate program/stream
+ // to parse the payload. If the payload turns out to be PES and contains
+ // a sync frame, event shall be set with the time and start offset of the
+ // PES.
+ // Note that the method itself does not touch event.
status_t parsePID(
ABitReader *br, unsigned PID,
unsigned continuity_counter,
- unsigned payload_unit_start_indicator);
+ unsigned payload_unit_start_indicator,
+ SyncEvent *event);
status_t parseAdaptationField(ABitReader *br, unsigned PID);
- status_t parseTS(ABitReader *br);
+ // see feedTSPacket().
+ status_t parseTS(ABitReader *br, SyncEvent *event);
void updatePCR(unsigned PID, uint64_t PCR, size_t byteOffsetFromStart);
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index 7b5b46a..36ec367 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -56,6 +56,8 @@
if (clearFormat) {
mFormat.clear();
}
+
+ mEOSReached = false;
}
// Parse AC3 header assuming the current ptr is start position of syncframe,
@@ -1391,7 +1393,7 @@
if (mFormat == NULL) {
mFormat = new MetaData;
- mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_DATA_METADATA);
+ mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_DATA_TIMED_ID3);
}
return accessUnit;
diff --git a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
index f5c33cf..aae3e9f 100644
--- a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
+++ b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
@@ -16,17 +16,22 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MPEG2TSExtractor"
+
+#include <inttypes.h>
#include <utils/Log.h>
#include "include/MPEG2TSExtractor.h"
#include "include/NuCachedSource2.h"
+#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
+#include <media/IStreamSource.h>
#include <utils/String8.h>
#include "AnotherPacketSource.h"
@@ -40,7 +45,7 @@
MPEG2TSSource(
const sp<MPEG2TSExtractor> &extractor,
const sp<AnotherPacketSource> &impl,
- bool seekable);
+ bool doesSeek);
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
@@ -54,8 +59,8 @@
sp<AnotherPacketSource> mImpl;
// If there are both audio and video streams, only the video stream
- // will be seekable, otherwise the single stream will be seekable.
- bool mSeekable;
+ // will signal seek on the extractor; otherwise the single stream will seek.
+ bool mDoesSeek;
DISALLOW_EVIL_CONSTRUCTORS(MPEG2TSSource);
};
@@ -63,10 +68,10 @@
MPEG2TSSource::MPEG2TSSource(
const sp<MPEG2TSExtractor> &extractor,
const sp<AnotherPacketSource> &impl,
- bool seekable)
+ bool doesSeek)
: mExtractor(extractor),
mImpl(impl),
- mSeekable(seekable) {
+ mDoesSeek(doesSeek) {
}
status_t MPEG2TSSource::start(MetaData *params) {
@@ -85,27 +90,18 @@
MediaBuffer **out, const ReadOptions *options) {
*out = NULL;
- status_t finalResult;
- while (!mImpl->hasBufferAvailable(&finalResult)) {
- if (finalResult != OK) {
- return ERROR_END_OF_STREAM;
- }
-
- status_t err = mExtractor->feedMore();
+ int64_t seekTimeUs;
+ ReadOptions::SeekMode seekMode;
+ if (mDoesSeek && options && options->getSeekTo(&seekTimeUs, &seekMode)) {
+ // seek is needed
+ status_t err = mExtractor->seek(seekTimeUs, seekMode);
if (err != OK) {
- mImpl->signalEOS(err);
+ return err;
}
}
- int64_t seekTimeUs;
- ReadOptions::SeekMode seekMode;
- if (mSeekable && options && options->getSeekTo(&seekTimeUs, &seekMode)) {
- // A seek was requested, but we don't actually support seeking and so can only "seek" to
- // the current position
- int64_t nextBufTimeUs;
- if (mImpl->nextBufferTime(&nextBufTimeUs) != OK || seekTimeUs != nextBufTimeUs) {
- return ERROR_UNSUPPORTED;
- }
+ if (mExtractor->feedUntilBufferAvailable(mImpl) != OK) {
+ return ERROR_END_OF_STREAM;
}
return mImpl->read(out, options);
@@ -129,23 +125,10 @@
return NULL;
}
- bool seekable = true;
- if (mSourceImpls.size() > 1) {
- if (mSourceImpls.size() != 2u) {
- ALOGE("Wrong size");
- return NULL;
- }
-
- sp<MetaData> meta = mSourceImpls.editItemAt(index)->getFormat();
- const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
-
- if (!strncasecmp("audio/", mime, 6)) {
- seekable = false;
- }
- }
-
- return new MPEG2TSSource(this, mSourceImpls.editItemAt(index), seekable);
+ // The seek reference track (video if present; audio otherwise) performs
+ // seek requests, while other tracks ignore requests.
+ return new MPEG2TSSource(this, mSourceImpls.editItemAt(index),
+ (mSeekSyncPoints == &mSyncPoints.editItemAt(index)));
}
sp<MetaData> MPEG2TSExtractor::getTrackMetaData(
@@ -164,7 +147,7 @@
void MPEG2TSExtractor::init() {
bool haveAudio = false;
bool haveVideo = false;
- int numPacketsParsed = 0;
+ int64_t startTime = ALooper::GetNowUs();
while (feedMore() == OK) {
if (haveAudio && haveVideo) {
@@ -178,6 +161,8 @@
if (impl != NULL) {
haveVideo = true;
mSourceImpls.push(impl);
+ mSyncPoints.push();
+ mSeekSyncPoints = &mSyncPoints.editTop();
}
}
@@ -189,15 +174,75 @@
if (impl != NULL) {
haveAudio = true;
mSourceImpls.push(impl);
+ mSyncPoints.push();
+ if (!haveVideo) {
+ mSeekSyncPoints = &mSyncPoints.editTop();
+ }
}
}
- if (++numPacketsParsed > 10000) {
+ // Wait only for 2 seconds to detect audio/video streams.
+ if (ALooper::GetNowUs() - startTime > 2000000ll) {
break;
}
}
- ALOGI("haveAudio=%d, haveVideo=%d", haveAudio, haveVideo);
+ off64_t size;
+ if (mDataSource->getSize(&size) == OK && (haveAudio || haveVideo)) {
+ sp<AnotherPacketSource> impl = haveVideo
+ ? (AnotherPacketSource *)mParser->getSource(
+ ATSParser::VIDEO).get()
+ : (AnotherPacketSource *)mParser->getSource(
+ ATSParser::AUDIO).get();
+ size_t prevSyncSize = 1;
+ int64_t durationUs = -1;
+ List<int64_t> durations;
+ // Estimate duration --- stabilize until you get <500ms deviation.
+ while (feedMore() == OK
+ && ALooper::GetNowUs() - startTime <= 2000000ll) {
+ if (mSeekSyncPoints->size() > prevSyncSize) {
+ prevSyncSize = mSeekSyncPoints->size();
+ int64_t diffUs = mSeekSyncPoints->keyAt(prevSyncSize - 1)
+ - mSeekSyncPoints->keyAt(0);
+ off64_t diffOffset = mSeekSyncPoints->valueAt(prevSyncSize - 1)
+ - mSeekSyncPoints->valueAt(0);
+ durationUs = size * diffUs / diffOffset;
+ durations.push_back(durationUs);
+ if (durations.size() > 5) {
+ durations.erase(durations.begin());
+ int64_t min = *durations.begin();
+ int64_t max = *durations.begin();
+ for (List<int64_t>::iterator i = durations.begin();
+ i != durations.end();
+ ++i) {
+ if (min > *i) {
+ min = *i;
+ }
+ if (max < *i) {
+ max = *i;
+ }
+ }
+ if (max - min < 500 * 1000) {
+ break;
+ }
+ }
+ }
+ }
+ status_t err;
+ int64_t bufferedDurationUs;
+ bufferedDurationUs = impl->getBufferedDurationUs(&err);
+ if (err == ERROR_END_OF_STREAM) {
+ durationUs = bufferedDurationUs;
+ }
+ if (durationUs > 0) {
+ const sp<MetaData> meta = impl->getFormat();
+ meta->setInt64(kKeyDuration, durationUs);
+ impl->setFormat(meta);
+ }
+ }
+
+ ALOGI("haveAudio=%d, haveVideo=%d, elaspedTime=%" PRId64,
+ haveAudio, haveVideo, ALooper::GetNowUs() - startTime);
}
status_t MPEG2TSExtractor::feedMore() {
@@ -213,12 +258,195 @@
return (n < 0) ? (status_t)n : ERROR_END_OF_STREAM;
}
+ ATSParser::SyncEvent event(mOffset);
mOffset += n;
- return mParser->feedTSPacket(packet, kTSPacketSize);
+ status_t err = mParser->feedTSPacket(packet, kTSPacketSize, &event);
+ if (event.isInit()) {
+ for (size_t i = 0; i < mSourceImpls.size(); ++i) {
+ if (mSourceImpls[i].get() == event.getMediaSource().get()) {
+ mSyncPoints.editItemAt(i).add(
+ event.getTimeUs(), event.getOffset());
+ break;
+ }
+ }
+ }
+ return err;
}
uint32_t MPEG2TSExtractor::flags() const {
- return CAN_PAUSE;
+ return CAN_PAUSE | CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD;
+}
+
+status_t MPEG2TSExtractor::seek(int64_t seekTimeUs,
+ const MediaSource::ReadOptions::SeekMode &seekMode) {
+ if (mSeekSyncPoints == NULL || mSeekSyncPoints->isEmpty()) {
+ ALOGW("No sync point to seek to.");
+ // ... and therefore we have nothing useful to do here.
+ return OK;
+ }
+
+ // Determine whether we're seeking beyond the known area.
+ bool shouldSeekBeyond =
+ (seekTimeUs > mSeekSyncPoints->keyAt(mSeekSyncPoints->size() - 1));
+
+ // Determine the sync point to seek.
+ size_t index = 0;
+ for (; index < mSeekSyncPoints->size(); ++index) {
+ int64_t timeUs = mSeekSyncPoints->keyAt(index);
+ if (timeUs > seekTimeUs) {
+ break;
+ }
+ }
+
+ switch (seekMode) {
+ case MediaSource::ReadOptions::SEEK_NEXT_SYNC:
+ if (index == mSeekSyncPoints->size()) {
+ ALOGW("Next sync not found; starting from the latest sync.");
+ --index;
+ }
+ break;
+ case MediaSource::ReadOptions::SEEK_CLOSEST_SYNC:
+ case MediaSource::ReadOptions::SEEK_CLOSEST:
+ ALOGW("seekMode not supported: %d; falling back to PREVIOUS_SYNC",
+ seekMode);
+ // fall-through
+ case MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC:
+ if (index == 0) {
+ ALOGW("Previous sync not found; starting from the earliest "
+ "sync.");
+ } else {
+ --index;
+ }
+ break;
+ }
+ if (!shouldSeekBeyond || mOffset <= mSeekSyncPoints->valueAt(index)) {
+ int64_t actualSeekTimeUs = mSeekSyncPoints->keyAt(index);
+ mOffset = mSeekSyncPoints->valueAt(index);
+ status_t err = queueDiscontinuityForSeek(actualSeekTimeUs);
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ if (shouldSeekBeyond) {
+ status_t err = seekBeyond(seekTimeUs);
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ // Fast-forward to sync frame.
+ for (size_t i = 0; i < mSourceImpls.size(); ++i) {
+ const sp<AnotherPacketSource> &impl = mSourceImpls[i];
+ status_t err;
+ feedUntilBufferAvailable(impl);
+ while (impl->hasBufferAvailable(&err)) {
+ sp<AMessage> meta = impl->getMetaAfterLastDequeued(0);
+ sp<ABuffer> buffer;
+ if (meta == NULL) {
+ return UNKNOWN_ERROR;
+ }
+ int32_t sync;
+ if (meta->findInt32("isSync", &sync) && sync) {
+ break;
+ }
+ err = impl->dequeueAccessUnit(&buffer);
+ if (err != OK) {
+ return err;
+ }
+ feedUntilBufferAvailable(impl);
+ }
+ }
+
+ return OK;
+}
+
+status_t MPEG2TSExtractor::queueDiscontinuityForSeek(int64_t actualSeekTimeUs) {
+ // Signal discontinuity
+ sp<AMessage> extra(new AMessage);
+ extra->setInt64(IStreamListener::kKeyMediaTimeUs, actualSeekTimeUs);
+ mParser->signalDiscontinuity(ATSParser::DISCONTINUITY_TIME, extra);
+
+ // After discontinuity, impl should only have discontinuities
+ // with the last being what we queued. Dequeue them all here.
+ for (size_t i = 0; i < mSourceImpls.size(); ++i) {
+ const sp<AnotherPacketSource> &impl = mSourceImpls.itemAt(i);
+ sp<ABuffer> buffer;
+ status_t err;
+ while (impl->hasBufferAvailable(&err)) {
+ if (err != OK) {
+ return err;
+ }
+ err = impl->dequeueAccessUnit(&buffer);
+ // If the source contains anything but discontinuity, that's
+ // a programming mistake.
+ CHECK(err == INFO_DISCONTINUITY);
+ }
+ }
+
+ // Feed until we have a buffer for each source.
+ for (size_t i = 0; i < mSourceImpls.size(); ++i) {
+ const sp<AnotherPacketSource> &impl = mSourceImpls.itemAt(i);
+ sp<ABuffer> buffer;
+ status_t err = feedUntilBufferAvailable(impl);
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ return OK;
+}
+
+status_t MPEG2TSExtractor::seekBeyond(int64_t seekTimeUs) {
+ // If we're seeking beyond where we know --- read until we reach there.
+ size_t syncPointsSize = mSeekSyncPoints->size();
+
+ while (seekTimeUs > mSeekSyncPoints->keyAt(
+ mSeekSyncPoints->size() - 1)) {
+ status_t err;
+ if (syncPointsSize < mSeekSyncPoints->size()) {
+ syncPointsSize = mSeekSyncPoints->size();
+ int64_t syncTimeUs = mSeekSyncPoints->keyAt(syncPointsSize - 1);
+ // Dequeue buffers before sync point in order to avoid too much
+ // cache building up.
+ sp<ABuffer> buffer;
+ for (size_t i = 0; i < mSourceImpls.size(); ++i) {
+ const sp<AnotherPacketSource> &impl = mSourceImpls[i];
+ int64_t timeUs;
+ while ((err = impl->nextBufferTime(&timeUs)) == OK) {
+ if (timeUs < syncTimeUs) {
+ impl->dequeueAccessUnit(&buffer);
+ } else {
+ break;
+ }
+ }
+ if (err != OK && err != -EWOULDBLOCK) {
+ return err;
+ }
+ }
+ }
+ if (feedMore() != OK) {
+ return ERROR_END_OF_STREAM;
+ }
+ }
+
+ return OK;
+}
+
+status_t MPEG2TSExtractor::feedUntilBufferAvailable(
+ const sp<AnotherPacketSource> &impl) {
+ status_t finalResult;
+ while (!impl->hasBufferAvailable(&finalResult)) {
+ if (finalResult != OK) {
+ return finalResult;
+ }
+
+ status_t err = feedMore();
+ if (err != OK) {
+ impl->signalEOS(err);
+ }
+ }
+ return OK;
}
////////////////////////////////////////////////////////////////////////////////
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index 01cd8f0..ac6bf0d 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -29,6 +29,7 @@
#include <media/hardware/MetadataBufferType.h>
#include <ui/GraphicBuffer.h>
#include <gui/BufferItem.h>
+#include <HardwareAPI.h>
#include <inttypes.h>
#include "FrameDropper.h"
@@ -37,13 +38,78 @@
static const bool EXTRA_CHECK = true;
+GraphicBufferSource::PersistentProxyListener::PersistentProxyListener(
+ const wp<IGraphicBufferConsumer> &consumer,
+ const wp<ConsumerListener>& consumerListener) :
+ mConsumerListener(consumerListener),
+ mConsumer(consumer) {}
+
+GraphicBufferSource::PersistentProxyListener::~PersistentProxyListener() {}
+
+void GraphicBufferSource::PersistentProxyListener::onFrameAvailable(
+ const BufferItem& item) {
+ sp<ConsumerListener> listener(mConsumerListener.promote());
+ if (listener != NULL) {
+ listener->onFrameAvailable(item);
+ } else {
+ sp<IGraphicBufferConsumer> consumer(mConsumer.promote());
+ if (consumer == NULL) {
+ return;
+ }
+ BufferItem bi;
+ status_t err = consumer->acquireBuffer(&bi, 0);
+ if (err != OK) {
+ ALOGE("PersistentProxyListener: acquireBuffer failed (%d)", err);
+ return;
+ }
+
+ err = consumer->detachBuffer(bi.mBuf);
+ if (err != OK) {
+ ALOGE("PersistentProxyListener: detachBuffer failed (%d)", err);
+ return;
+ }
+
+ err = consumer->attachBuffer(&bi.mBuf, bi.mGraphicBuffer);
+ if (err != OK) {
+ ALOGE("PersistentProxyListener: attachBuffer failed (%d)", err);
+ return;
+ }
+
+ err = consumer->releaseBuffer(bi.mBuf, 0,
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, bi.mFence);
+ if (err != OK) {
+ ALOGE("PersistentProxyListener: releaseBuffer failed (%d)", err);
+ }
+ }
+}
+
+void GraphicBufferSource::PersistentProxyListener::onFrameReplaced(
+ const BufferItem& item) {
+ sp<ConsumerListener> listener(mConsumerListener.promote());
+ if (listener != NULL) {
+ listener->onFrameReplaced(item);
+ }
+}
+
+void GraphicBufferSource::PersistentProxyListener::onBuffersReleased() {
+ sp<ConsumerListener> listener(mConsumerListener.promote());
+ if (listener != NULL) {
+ listener->onBuffersReleased();
+ }
+}
+
+void GraphicBufferSource::PersistentProxyListener::onSidebandStreamChanged() {
+ sp<ConsumerListener> listener(mConsumerListener.promote());
+ if (listener != NULL) {
+ listener->onSidebandStreamChanged();
+ }
+}
GraphicBufferSource::GraphicBufferSource(
OMXNodeInstance* nodeInstance,
uint32_t bufferWidth,
uint32_t bufferHeight,
uint32_t bufferCount,
- bool useGraphicBufferInMeta,
const sp<IGraphicBufferConsumer> &consumer) :
mInitCheck(UNKNOWN_ERROR),
mNodeInstance(nodeInstance),
@@ -64,12 +130,12 @@
mLatestBufferId(-1),
mLatestBufferFrameNum(0),
mLatestBufferUseCount(0),
+ mLatestBufferFence(Fence::NO_FENCE),
mRepeatBufferDeferred(false),
mTimePerCaptureUs(-1ll),
mTimePerFrameUs(-1ll),
mPrevCaptureUs(-1ll),
- mPrevFrameUs(-1ll),
- mUseGraphicBufferInMeta(useGraphicBufferInMeta) {
+ mPrevFrameUs(-1ll) {
ALOGV("GraphicBufferSource w=%u h=%u c=%u",
bufferWidth, bufferHeight, bufferCount);
@@ -101,7 +167,12 @@
// dropping to 0 at the end of the ctor. Since all we need is a wp<...>
// that's what we create.
wp<BufferQueue::ConsumerListener> listener = static_cast<BufferQueue::ConsumerListener*>(this);
- sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
+ sp<IConsumerListener> proxy;
+ if (!mIsPersistent) {
+ proxy = new BufferQueue::ProxyConsumerListener(listener);
+ } else {
+ proxy = new PersistentProxyListener(mConsumer, listener);
+ }
mInitCheck = mConsumer->consumerConnect(proxy, false);
if (mInitCheck != NO_ERROR) {
@@ -227,9 +298,8 @@
mCodecBuffers.add(codecBuffer);
}
-void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
+void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header, int fenceFd) {
Mutex::Autolock autoLock(mMutex);
-
if (!mExecuting) {
return;
}
@@ -238,6 +308,9 @@
if (cbi < 0) {
// This should never happen.
ALOGE("codecBufferEmptied: buffer not recognized (h=%p)", header);
+ if (fenceFd >= 0) {
+ ::close(fenceFd);
+ }
return;
}
@@ -259,30 +332,33 @@
}
// No GraphicBuffer to deal with, no additional input or output is
// expected, so just return.
+ if (fenceFd >= 0) {
+ ::close(fenceFd);
+ }
return;
}
- if (EXTRA_CHECK) {
+ if (EXTRA_CHECK && header->nAllocLen >= sizeof(MetadataBufferType)) {
// Pull the graphic buffer handle back out of the buffer, and confirm
// that it matches expectations.
OMX_U8* data = header->pBuffer;
MetadataBufferType type = *(MetadataBufferType *)data;
- if (type == kMetadataBufferTypeGrallocSource) {
- buffer_handle_t bufferHandle;
- memcpy(&bufferHandle, data + 4, sizeof(buffer_handle_t));
- if (bufferHandle != codecBuffer.mGraphicBuffer->handle) {
+ if (type == kMetadataBufferTypeGrallocSource
+ && header->nAllocLen >= sizeof(VideoGrallocMetadata)) {
+ VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)data;
+ if (grallocMeta.pHandle != codecBuffer.mGraphicBuffer->handle) {
// should never happen
ALOGE("codecBufferEmptied: buffer's handle is %p, expected %p",
- bufferHandle, codecBuffer.mGraphicBuffer->handle);
+ grallocMeta.pHandle, codecBuffer.mGraphicBuffer->handle);
CHECK(!"codecBufferEmptied: mismatched buffer");
}
- } else if (type == kMetadataBufferTypeGraphicBuffer) {
- GraphicBuffer *buffer;
- memcpy(&buffer, data + 4, sizeof(buffer));
- if (buffer != codecBuffer.mGraphicBuffer.get()) {
+ } else if (type == kMetadataBufferTypeANWBuffer
+ && header->nAllocLen >= sizeof(VideoNativeMetadata)) {
+ VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)data;
+ if (nativeMeta.pBuffer != codecBuffer.mGraphicBuffer->getNativeBuffer()) {
// should never happen
ALOGE("codecBufferEmptied: buffer is %p, expected %p",
- buffer, codecBuffer.mGraphicBuffer.get());
+ nativeMeta.pBuffer, codecBuffer.mGraphicBuffer->getNativeBuffer());
CHECK(!"codecBufferEmptied: mismatched buffer");
}
}
@@ -292,6 +368,7 @@
// If we find a match, release that slot. If we don't, the BufferQueue
// has dropped that GraphicBuffer, and there's nothing for us to release.
int id = codecBuffer.mBuf;
+ sp<Fence> fence = new Fence(fenceFd);
if (mBufferSlot[id] != NULL &&
mBufferSlot[id]->handle == codecBuffer.mGraphicBuffer->handle) {
ALOGV("cbi %d matches bq slot %d, handle=%p",
@@ -305,15 +382,17 @@
int outSlot;
mConsumer->attachBuffer(&outSlot, mBufferSlot[id]);
mConsumer->releaseBuffer(outSlot, 0,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
+ mBufferSlot[id] = NULL;
} else {
mConsumer->releaseBuffer(id, codecBuffer.mFrameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
}
}
} else {
ALOGV("codecBufferEmptied: no match for emptied buffer in cbi %d",
cbi);
+ // we will not reuse codec buffer, so there is no need to wait for fence
}
// Mark the codec buffer as available by clearing the GraphicBuffer ref.
@@ -393,9 +472,10 @@
if (mIsPersistent) {
mConsumer->detachBuffer(item.mBuf);
+ mBufferSlot[item.mBuf] = NULL;
mConsumer->attachBuffer(&item.mBuf, item.mGraphicBuffer);
mConsumer->releaseBuffer(item.mBuf, 0,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
} else {
mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
@@ -448,13 +528,6 @@
mNumFramesAvailable--;
- // Wait for it to become available.
- err = item.mFence->waitForever("GraphicBufferSource::fillCodecBuffer_l");
- if (err != OK) {
- ALOGW("failed to wait for buffer fence: %d", err);
- // keep going
- }
-
// If this is the first time we're seeing this buffer, add it to our
// slot table.
if (item.mGraphicBuffer != NULL) {
@@ -488,13 +561,15 @@
ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf);
if (mIsPersistent) {
mConsumer->detachBuffer(item.mBuf);
+ mBufferSlot[item.mBuf] = NULL;
mConsumer->attachBuffer(&item.mBuf, item.mGraphicBuffer);
mConsumer->releaseBuffer(item.mBuf, 0,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
} else {
mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
}
+ // item.mFence is released at the end of this method
} else {
ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi);
setLatestBuffer_l(item, dropped);
@@ -521,9 +596,10 @@
mLatestBufferFrameNum,
EGL_NO_DISPLAY,
EGL_NO_SYNC_KHR,
- Fence::NO_FENCE);
+ mLatestBufferFence);
mLatestBufferId = -1;
mLatestBufferFrameNum = 0;
+ mLatestBufferFence = Fence::NO_FENCE;
return false;
}
@@ -538,6 +614,7 @@
item.mBuf = mLatestBufferId;
item.mFrameNumber = mLatestBufferFrameNum;
item.mTimestamp = mRepeatLastFrameTimestamp;
+ item.mFence = mLatestBufferFence;
status_t err = submitBuffer_l(item, cbi);
@@ -575,14 +652,15 @@
int outSlot;
mConsumer->attachBuffer(&outSlot, mBufferSlot[mLatestBufferId]);
-
mConsumer->releaseBuffer(outSlot, 0,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, mLatestBufferFence);
+ mBufferSlot[mLatestBufferId] = NULL;
} else {
mConsumer->releaseBuffer(
mLatestBufferId, mLatestBufferFrameNum,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, mLatestBufferFence);
}
+ // mLatestBufferFence will be set to new fence just below
}
}
@@ -593,6 +671,7 @@
mLatestBufferUseCount = dropped ? 0 : 1;
mRepeatBufferDeferred = false;
mRepeatLastFrameCount = kRepeatLastFrameCount;
+ mLatestBufferFence = item.mFence;
if (mReflector != NULL) {
sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector);
@@ -688,8 +767,7 @@
return timeUs;
}
-status_t GraphicBufferSource::submitBuffer_l(
- const BufferItem &item, int cbi) {
+status_t GraphicBufferSource::submitBuffer_l(const BufferItem &item, int cbi) {
ALOGV("submitBuffer_l cbi=%d", cbi);
int64_t timeUs = getTimestamp(item);
@@ -703,36 +781,18 @@
codecBuffer.mFrameNumber = item.mFrameNumber;
OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader;
- CHECK(header->nAllocLen >= 4 + sizeof(buffer_handle_t));
- OMX_U8* data = header->pBuffer;
- buffer_handle_t handle;
- if (!mUseGraphicBufferInMeta) {
- const OMX_U32 type = kMetadataBufferTypeGrallocSource;
- handle = codecBuffer.mGraphicBuffer->handle;
- memcpy(data, &type, 4);
- memcpy(data + 4, &handle, sizeof(buffer_handle_t));
- } else {
- // codecBuffer holds a reference to the GraphicBuffer, so
- // it is valid while it is with the OMX component
- const OMX_U32 type = kMetadataBufferTypeGraphicBuffer;
- memcpy(data, &type, 4);
- // passing a non-reference-counted graphicBuffer
- GraphicBuffer *buffer = codecBuffer.mGraphicBuffer.get();
- handle = buffer->handle;
- memcpy(data + 4, &buffer, sizeof(buffer));
- }
-
- status_t err = mNodeInstance->emptyDirectBuffer(header, 0,
- 4 + sizeof(buffer_handle_t), OMX_BUFFERFLAG_ENDOFFRAME,
- timeUs);
+ sp<GraphicBuffer> buffer = codecBuffer.mGraphicBuffer;
+ status_t err = mNodeInstance->emptyGraphicBuffer(
+ header, buffer, OMX_BUFFERFLAG_ENDOFFRAME, timeUs,
+ item.mFence->isValid() ? item.mFence->dup() : -1);
if (err != OK) {
- ALOGW("WARNING: emptyDirectBuffer failed: 0x%x", err);
+ ALOGW("WARNING: emptyNativeWindowBuffer failed: 0x%x", err);
codecBuffer.mGraphicBuffer = NULL;
return err;
}
- ALOGV("emptyDirectBuffer succeeded, h=%p p=%p bufhandle=%p",
- header, header->pBuffer, handle);
+ ALOGV("emptyNativeWindowBuffer succeeded, h=%p p=%p buf=%p bufhandle=%p",
+ header, header->pBuffer, buffer->getNativeBuffer(), buffer->handle);
return OK;
}
@@ -755,19 +815,9 @@
CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi));
OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader;
- if (EXTRA_CHECK) {
- // Guard against implementations that don't check nFilledLen.
- size_t fillLen = 4 + sizeof(buffer_handle_t);
- CHECK(header->nAllocLen >= fillLen);
- OMX_U8* data = header->pBuffer;
- memset(data, 0xcd, fillLen);
- }
-
- uint64_t timestamp = 0; // does this matter?
-
- status_t err = mNodeInstance->emptyDirectBuffer(header, /*offset*/ 0,
- /*length*/ 0, OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_EOS,
- timestamp);
+ status_t err = mNodeInstance->emptyGraphicBuffer(
+ header, NULL /* buffer */, OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_EOS,
+ 0 /* timestamp */, -1 /* fenceFd */);
if (err != OK) {
ALOGW("emptyDirectBuffer EOS failed: 0x%x", err);
} else {
@@ -827,9 +877,10 @@
if (mIsPersistent) {
mConsumer->detachBuffer(item.mBuf);
+ mBufferSlot[item.mBuf] = NULL;
mConsumer->attachBuffer(&item.mBuf, item.mGraphicBuffer);
mConsumer->releaseBuffer(item.mBuf, 0,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
} else {
mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 1047fb3..2a8c218 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -55,7 +55,6 @@
uint32_t bufferWidth,
uint32_t bufferHeight,
uint32_t bufferCount,
- bool useGraphicBufferInMeta = false,
const sp<IGraphicBufferConsumer> &consumer = NULL
);
@@ -94,7 +93,7 @@
// Called from OnEmptyBufferDone. If we have a BQ buffer available,
// fill it with a new frame of data; otherwise, just mark it as available.
- void codecBufferEmptied(OMX_BUFFERHEADERTYPE* header);
+ void codecBufferEmptied(OMX_BUFFERHEADERTYPE* header, int fenceFd);
// Called when omx_message::FILL_BUFFER_DONE is received. (Currently the
// buffer source will fix timestamp in the header if needed.)
@@ -161,6 +160,31 @@
virtual void onSidebandStreamChanged();
private:
+ // PersistentProxyListener is similar to BufferQueue::ProxyConsumerListener
+ // except that it returns (acquire/detach/re-attache/release) buffers
+ // in onFrameAvailable() if the actual consumer object is no longer valid.
+ //
+ // This class is used in persistent input surface case to prevent buffer
+ // loss when onFrameAvailable() is received while we don't have a valid
+ // consumer around.
+ class PersistentProxyListener : public BnConsumerListener {
+ public:
+ PersistentProxyListener(
+ const wp<IGraphicBufferConsumer> &consumer,
+ const wp<ConsumerListener>& consumerListener);
+ virtual ~PersistentProxyListener();
+ virtual void onFrameAvailable(const BufferItem& item) override;
+ virtual void onFrameReplaced(const BufferItem& item) override;
+ virtual void onBuffersReleased() override;
+ virtual void onSidebandStreamChanged() override;
+ private:
+ // mConsumerListener is a weak reference to the IConsumerListener.
+ wp<ConsumerListener> mConsumerListener;
+ // mConsumer is a weak reference to the IGraphicBufferConsumer, use
+ // a weak ref to avoid circular ref between mConsumer and this class
+ wp<IGraphicBufferConsumer> mConsumer;
+ };
+
// Keep track of codec input buffers. They may either be available
// (mGraphicBuffer == NULL) or in use by the codec.
struct CodecBuffer {
@@ -275,6 +299,7 @@
int mLatestBufferId;
uint64_t mLatestBufferFrameNum;
int32_t mLatestBufferUseCount;
+ sp<Fence> mLatestBufferFence;
// The previous buffer should've been repeated but
// no codec buffer was available at the time.
@@ -286,7 +311,7 @@
int64_t mPrevCaptureUs;
int64_t mPrevFrameUs;
- bool mUseGraphicBufferInMeta;
+ MetadataBufferType mMetadataBufferType;
void onMessageReceived(const sp<AMessage> &msg);
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 876abb8..cb7ab5e 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -34,6 +34,7 @@
#include <OMX_AsString.h>
#include <OMX_Component.h>
+#include <OMX_VideoExt.h>
namespace android {
@@ -61,7 +62,11 @@
struct OMX::CallbackDispatcher : public RefBase {
CallbackDispatcher(OMXNodeInstance *owner);
- void post(const omx_message &msg);
+ // Posts |msg| to the listener's queue. If |realTime| is true, the listener thread is notified
+ // that a new message is available on the queue. Otherwise, the message stays on the queue, but
+ // the listener is not notified of it. It will process this message when a subsequent message
+ // is posted with |realTime| set to true.
+ void post(const omx_message &msg, bool realTime = true);
bool loop();
@@ -74,11 +79,11 @@
OMXNodeInstance *mOwner;
bool mDone;
Condition mQueueChanged;
- List<omx_message> mQueue;
+ std::list<omx_message> mQueue;
sp<CallbackDispatcherThread> mThread;
- void dispatch(const omx_message &msg);
+ void dispatch(std::list<omx_message> &messages);
CallbackDispatcher(const CallbackDispatcher &);
CallbackDispatcher &operator=(const CallbackDispatcher &);
@@ -109,24 +114,26 @@
}
}
-void OMX::CallbackDispatcher::post(const omx_message &msg) {
+void OMX::CallbackDispatcher::post(const omx_message &msg, bool realTime) {
Mutex::Autolock autoLock(mLock);
mQueue.push_back(msg);
- mQueueChanged.signal();
+ if (realTime) {
+ mQueueChanged.signal();
+ }
}
-void OMX::CallbackDispatcher::dispatch(const omx_message &msg) {
+void OMX::CallbackDispatcher::dispatch(std::list<omx_message> &messages) {
if (mOwner == NULL) {
ALOGV("Would have dispatched a message to a node that's already gone.");
return;
}
- mOwner->onMessage(msg);
+ mOwner->onMessages(messages);
}
bool OMX::CallbackDispatcher::loop() {
for (;;) {
- omx_message msg;
+ std::list<omx_message> messages;
{
Mutex::Autolock autoLock(mLock);
@@ -138,11 +145,10 @@
break;
}
- msg = *mQueue.begin();
- mQueue.erase(mQueue.begin());
+ messages.swap(mQueue);
}
- dispatch(msg);
+ dispatch(messages);
}
return false;
@@ -332,8 +338,8 @@
}
status_t OMX::storeMetaDataInBuffers(
- node_id node, OMX_U32 port_index, OMX_BOOL enable) {
- return findInstance(node)->storeMetaDataInBuffers(port_index, enable);
+ node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type) {
+ return findInstance(node)->storeMetaDataInBuffers(port_index, enable, type);
}
status_t OMX::prepareForAdaptivePlayback(
@@ -352,9 +358,9 @@
status_t OMX::useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer) {
+ buffer_id *buffer, OMX_U32 allottedSize) {
return findInstance(node)->useBuffer(
- port_index, params, buffer);
+ port_index, params, buffer, allottedSize);
}
status_t OMX::useGraphicBuffer(
@@ -373,9 +379,9 @@
status_t OMX::createInputSurface(
node_id node, OMX_U32 port_index,
- sp<IGraphicBufferProducer> *bufferProducer) {
+ sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
return findInstance(node)->createInputSurface(
- port_index, bufferProducer);
+ port_index, bufferProducer, type);
}
status_t OMX::createPersistentInputSurface(
@@ -385,11 +391,10 @@
bufferProducer, bufferConsumer);
}
-status_t OMX::usePersistentInputSurface(
+status_t OMX::setInputSurface(
node_id node, OMX_U32 port_index,
- const sp<IGraphicBufferConsumer> &bufferConsumer) {
- return findInstance(node)->usePersistentInputSurface(
- port_index, bufferConsumer);
+ const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type) {
+ return findInstance(node)->setInputSurface(port_index, bufferConsumer, type);
}
@@ -406,9 +411,9 @@
status_t OMX::allocateBufferWithBackup(
node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms,
- buffer_id *buffer) {
+ buffer_id *buffer, OMX_U32 allottedSize) {
return findInstance(node)->allocateBufferWithBackup(
- port_index, params, buffer);
+ port_index, params, buffer, allottedSize);
}
status_t OMX::freeBuffer(node_id node, OMX_U32 port_index, buffer_id buffer) {
@@ -416,17 +421,17 @@
port_index, buffer);
}
-status_t OMX::fillBuffer(node_id node, buffer_id buffer) {
- return findInstance(node)->fillBuffer(buffer);
+status_t OMX::fillBuffer(node_id node, buffer_id buffer, int fenceFd) {
+ return findInstance(node)->fillBuffer(buffer, fenceFd);
}
status_t OMX::emptyBuffer(
node_id node,
buffer_id buffer,
OMX_U32 range_offset, OMX_U32 range_length,
- OMX_U32 flags, OMX_TICKS timestamp) {
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
return findInstance(node)->emptyBuffer(
- buffer, range_offset, range_length, flags, timestamp);
+ buffer, range_offset, range_length, flags, timestamp, fenceFd);
}
status_t OMX::getExtensionIndex(
@@ -451,31 +456,56 @@
OMX_IN OMX_EVENTTYPE eEvent,
OMX_IN OMX_U32 nData1,
OMX_IN OMX_U32 nData2,
- OMX_IN OMX_PTR /* pEventData */) {
+ OMX_IN OMX_PTR pEventData) {
ALOGV("OnEvent(%d, %" PRIu32", %" PRIu32 ")", eEvent, nData1, nData2);
// Forward to OMXNodeInstance.
findInstance(node)->onEvent(eEvent, nData1, nData2);
+ sp<OMX::CallbackDispatcher> dispatcher = findDispatcher(node);
+
+ // output rendered events are not processed as regular events until they hit the observer
+ if (eEvent == OMX_EventOutputRendered) {
+ if (pEventData == NULL) {
+ return OMX_ErrorBadParameter;
+ }
+
+ // process data from array
+ OMX_VIDEO_RENDEREVENTTYPE *renderData = (OMX_VIDEO_RENDEREVENTTYPE *)pEventData;
+ for (size_t i = 0; i < nData1; ++i) {
+ omx_message msg;
+ msg.type = omx_message::FRAME_RENDERED;
+ msg.node = node;
+ msg.fenceFd = -1;
+ msg.u.render_data.timestamp = renderData[i].nMediaTimeUs;
+ msg.u.render_data.nanoTime = renderData[i].nSystemTimeNs;
+
+ dispatcher->post(msg, false /* realTime */);
+ }
+ return OMX_ErrorNone;
+ }
+
omx_message msg;
msg.type = omx_message::EVENT;
msg.node = node;
+ msg.fenceFd = -1;
msg.u.event_data.event = eEvent;
msg.u.event_data.data1 = nData1;
msg.u.event_data.data2 = nData2;
- findDispatcher(node)->post(msg);
+ dispatcher->post(msg, true /* realTime */);
return OMX_ErrorNone;
}
OMX_ERRORTYPE OMX::OnEmptyBufferDone(
- node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) {
+ node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd) {
ALOGV("OnEmptyBufferDone buffer=%p", pBuffer);
omx_message msg;
msg.type = omx_message::EMPTY_BUFFER_DONE;
msg.node = node;
+ msg.fenceFd = fenceFd;
msg.u.buffer_data.buffer = buffer;
findDispatcher(node)->post(msg);
@@ -484,12 +514,13 @@
}
OMX_ERRORTYPE OMX::OnFillBufferDone(
- node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) {
+ node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd) {
ALOGV("OnFillBufferDone buffer=%p", pBuffer);
omx_message msg;
msg.type = omx_message::FILL_BUFFER_DONE;
msg.node = node;
+ msg.fenceFd = fenceFd;
msg.u.extended_buffer_data.buffer = buffer;
msg.u.extended_buffer_data.range_offset = pBuffer->nOffset;
msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen;
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 04293d6..6ee1a77 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -32,6 +32,7 @@
#include <gui/BufferQueue.h>
#include <HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/MediaErrors.h>
#include <utils/misc.h>
@@ -75,11 +76,11 @@
#define SIMPLE_NEW_BUFFER(buffer_id, port, size, data) \
NEW_BUFFER_FMT(buffer_id, port, "%zu@%p", (size), (data))
-#define EMPTY_BUFFER(addr, header) "%#x [%u@%p]", \
- (addr), (header)->nAllocLen, (header)->pBuffer
-#define FULL_BUFFER(addr, header) "%#" PRIxPTR " [%u@%p (%u..+%u) f=%x ts=%lld]", \
+#define EMPTY_BUFFER(addr, header, fenceFd) "%#x [%u@%p fc=%d]", \
+ (addr), (header)->nAllocLen, (header)->pBuffer, (fenceFd)
+#define FULL_BUFFER(addr, header, fenceFd) "%#" PRIxPTR " [%u@%p (%u..+%u) f=%x ts=%lld fc=%d]", \
(intptr_t)(addr), (header)->nAllocLen, (header)->pBuffer, \
- (header)->nOffset, (header)->nFilledLen, (header)->nFlags, (header)->nTimeStamp
+ (header)->nOffset, (header)->nFilledLen, (header)->nFlags, (header)->nTimeStamp, (fenceFd)
#define WITH_STATS_WRAPPER(fmt, ...) fmt " { IN=%zu/%zu OUT=%zu/%zu }", ##__VA_ARGS__, \
mInputBuffersWithCodec.size(), mNumPortBuffers[kPortIndexInput], \
@@ -135,6 +136,18 @@
header->nFilledLen);
}
+ // return either the codec or the backup buffer
+ sp<ABuffer> getBuffer(const OMX_BUFFERHEADERTYPE *header, bool backup) {
+ sp<ABuffer> buf;
+ if (backup && mMem != NULL) {
+ buf = new ABuffer(mMem->pointer(), mMem->size());
+ } else {
+ buf = new ABuffer(header->pBuffer, header->nAllocLen);
+ }
+ buf->setRange(header->nOffset, header->nFilledLen);
+ return buf;
+ }
+
void setGraphicBuffer(const sp<GraphicBuffer> &graphicBuffer) {
mGraphicBuffer = graphicBuffer;
}
@@ -169,10 +182,8 @@
mNodeID(0),
mHandle(NULL),
mObserver(observer),
- mDying(false)
-#ifdef __LP64__
- , mBufferIDCount(0)
-#endif
+ mDying(false),
+ mBufferIDCount(0)
{
mName = ADebug::GetDebugName(name);
DEBUG = ADebug::GetDebugLevelFromProperty(name, "debug.stagefright.omx-debug");
@@ -182,6 +193,8 @@
mNumPortBuffers[1] = 0;
mDebugLevelBumpPendingBuffers[0] = 0;
mDebugLevelBumpPendingBuffers[1] = 0;
+ mMetadataType[0] = kMetadataBufferTypeInvalid;
+ mMetadataType[1] = kMetadataBufferTypeInvalid;
}
OMXNodeInstance::~OMXNodeInstance() {
@@ -488,63 +501,73 @@
}
status_t OMXNodeInstance::storeMetaDataInBuffers(
- OMX_U32 portIndex,
- OMX_BOOL enable) {
+ OMX_U32 portIndex, OMX_BOOL enable, MetadataBufferType *type) {
Mutex::Autolock autolock(mLock);
CLOG_CONFIG(storeMetaDataInBuffers, "%s:%u en:%d", portString(portIndex), portIndex, enable);
- return storeMetaDataInBuffers_l(
- portIndex, enable,
- OMX_FALSE /* useGraphicBuffer */, NULL /* usingGraphicBufferInMetadata */);
+ return storeMetaDataInBuffers_l(portIndex, enable, type);
}
status_t OMXNodeInstance::storeMetaDataInBuffers_l(
- OMX_U32 portIndex,
- OMX_BOOL enable,
- OMX_BOOL useGraphicBuffer,
- OMX_BOOL *usingGraphicBufferInMetadata) {
+ OMX_U32 portIndex, OMX_BOOL enable, MetadataBufferType *type) {
+ if (portIndex != kPortIndexInput && portIndex != kPortIndexOutput) {
+ return BAD_VALUE;
+ }
+
OMX_INDEXTYPE index;
OMX_STRING name = const_cast<OMX_STRING>(
"OMX.google.android.index.storeMetaDataInBuffers");
- OMX_STRING graphicBufferName = const_cast<OMX_STRING>(
- "OMX.google.android.index.storeGraphicBufferInMetaData");
- if (usingGraphicBufferInMetadata == NULL) {
- usingGraphicBufferInMetadata = &useGraphicBuffer;
- }
+ OMX_STRING nativeBufferName = const_cast<OMX_STRING>(
+ "OMX.google.android.index.storeANWBufferInMetadata");
+ MetadataBufferType negotiatedType;
- OMX_ERRORTYPE err =
- (useGraphicBuffer && portIndex == kPortIndexInput)
- ? OMX_GetExtensionIndex(mHandle, graphicBufferName, &index)
- : OMX_ErrorBadParameter;
- if (err == OMX_ErrorNone) {
- *usingGraphicBufferInMetadata = OMX_TRUE;
- name = graphicBufferName;
- } else {
- err = OMX_GetExtensionIndex(mHandle, name, &index);
- }
+ StoreMetaDataInBuffersParams params;
+ InitOMXParams(¶ms);
+ params.nPortIndex = portIndex;
+ params.bStoreMetaData = enable;
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, nativeBufferName, &index);
OMX_ERRORTYPE xerr = err;
if (err == OMX_ErrorNone) {
- StoreMetaDataInBuffersParams params;
- InitOMXParams(¶ms);
- params.nPortIndex = portIndex;
- params.bStoreMetaData = enable;
-
err = OMX_SetParameter(mHandle, index, ¶ms);
+ if (err == OMX_ErrorNone) {
+ name = nativeBufferName; // set name for debugging
+ negotiatedType = kMetadataBufferTypeANWBuffer;
+ }
+ }
+ if (err != OMX_ErrorNone) {
+ err = OMX_GetExtensionIndex(mHandle, name, &index);
+ xerr = err;
+ if (err == OMX_ErrorNone) {
+ negotiatedType = kMetadataBufferTypeGrallocSource;
+ err = OMX_SetParameter(mHandle, index, ¶ms);
+ }
}
// don't log loud error if component does not support metadata mode on the output
if (err != OMX_ErrorNone) {
- *usingGraphicBufferInMetadata = OMX_FALSE;
if (err == OMX_ErrorUnsupportedIndex && portIndex == kPortIndexOutput) {
CLOGW("component does not support metadata mode; using fallback");
} else if (xerr != OMX_ErrorNone) {
CLOG_ERROR(getExtensionIndex, xerr, "%s", name);
} else {
- CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d GB=%d", name, index,
- portString(portIndex), portIndex, enable, useGraphicBuffer);
+ CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d type=%d", name, index,
+ portString(portIndex), portIndex, enable, negotiatedType);
}
+ negotiatedType = mMetadataType[portIndex];
+ } else {
+ if (!enable) {
+ negotiatedType = kMetadataBufferTypeInvalid;
+ }
+ mMetadataType[portIndex] = negotiatedType;
}
+ CLOG_CONFIG(storeMetaDataInBuffers, "%s:%u negotiated %s:%d",
+ portString(portIndex), portIndex, asString(negotiatedType), negotiatedType);
+
+ if (type != NULL) {
+ *type = negotiatedType;
+ }
+
return StatusFromOMXError(err);
}
@@ -622,8 +645,11 @@
status_t OMXNodeInstance::useBuffer(
OMX_U32 portIndex, const sp<IMemory> ¶ms,
- OMX::buffer_id *buffer) {
+ OMX::buffer_id *buffer, OMX_U32 allottedSize) {
Mutex::Autolock autoLock(mLock);
+ if (allottedSize > params->size()) {
+ return BAD_VALUE;
+ }
BufferMeta *buffer_meta = new BufferMeta(params);
@@ -631,10 +657,11 @@
OMX_ERRORTYPE err = OMX_UseBuffer(
mHandle, &header, portIndex, buffer_meta,
- params->size(), static_cast<OMX_U8 *>(params->pointer()));
+ allottedSize, static_cast<OMX_U8 *>(params->pointer()));
if (err != OMX_ErrorNone) {
- CLOG_ERROR(useBuffer, err, SIMPLE_BUFFER(portIndex, params->size(), params->pointer()));
+ CLOG_ERROR(useBuffer, err, SIMPLE_BUFFER(
+ portIndex, (size_t)allottedSize, params->pointer()));
delete buffer_meta;
buffer_meta = NULL;
@@ -656,7 +683,7 @@
}
CLOG_BUFFER(useBuffer, NEW_BUFFER_FMT(
- *buffer, portIndex, "%zu@%p", params->size(), params->pointer()));
+ *buffer, portIndex, "%u(%zu)@%p", allottedSize, params->size(), params->pointer()));
return OK;
}
@@ -772,37 +799,59 @@
return OK;
}
-status_t OMXNodeInstance::updateGraphicBufferInMeta(
+status_t OMXNodeInstance::updateGraphicBufferInMeta_l(
OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
- OMX::buffer_id buffer) {
- Mutex::Autolock autoLock(mLock);
+ OMX::buffer_id buffer, OMX_BUFFERHEADERTYPE *header) {
+ if (portIndex != kPortIndexInput && portIndex != kPortIndexOutput) {
+ return BAD_VALUE;
+ }
- OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
- VideoDecoderOutputMetaData *metadata =
- (VideoDecoderOutputMetaData *)(header->pBuffer);
BufferMeta *bufferMeta = (BufferMeta *)(header->pAppPrivate);
bufferMeta->setGraphicBuffer(graphicBuffer);
- metadata->eType = kMetadataBufferTypeGrallocSource;
- metadata->pHandle = graphicBuffer->handle;
+ if (mMetadataType[portIndex] == kMetadataBufferTypeGrallocSource
+ && header->nAllocLen >= sizeof(VideoGrallocMetadata)) {
+ VideoGrallocMetadata &metadata = *(VideoGrallocMetadata *)(header->pBuffer);
+ metadata.eType = kMetadataBufferTypeGrallocSource;
+ metadata.pHandle = graphicBuffer == NULL ? NULL : graphicBuffer->handle;
+ } else if (mMetadataType[portIndex] == kMetadataBufferTypeANWBuffer
+ && header->nAllocLen >= sizeof(VideoNativeMetadata)) {
+ VideoNativeMetadata &metadata = *(VideoNativeMetadata *)(header->pBuffer);
+ metadata.eType = kMetadataBufferTypeANWBuffer;
+ metadata.pBuffer = graphicBuffer == NULL ? NULL : graphicBuffer->getNativeBuffer();
+ metadata.nFenceFd = -1;
+ } else {
+ CLOG_BUFFER(updateGraphicBufferInMeta, "%s:%u, %#x bad type (%d) or size (%u)",
+ portString(portIndex), portIndex, buffer, mMetadataType[portIndex], header->nAllocLen);
+ return BAD_VALUE;
+ }
+
CLOG_BUFFER(updateGraphicBufferInMeta, "%s:%u, %#x := %p",
portString(portIndex), portIndex, buffer, graphicBuffer->handle);
return OK;
}
+status_t OMXNodeInstance::updateGraphicBufferInMeta(
+ OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
+ OMX::buffer_id buffer) {
+ Mutex::Autolock autoLock(mLock);
+ OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
+ return updateGraphicBufferInMeta_l(portIndex, graphicBuffer, buffer, header);
+}
+
status_t OMXNodeInstance::createGraphicBufferSource(
- OMX_U32 portIndex, sp<IGraphicBufferConsumer> bufferConsumer) {
+ OMX_U32 portIndex, sp<IGraphicBufferConsumer> bufferConsumer, MetadataBufferType *type) {
status_t err;
const sp<GraphicBufferSource>& surfaceCheck = getGraphicBufferSource();
if (surfaceCheck != NULL) {
+ if (portIndex < NELEM(mMetadataType) && type != NULL) {
+ *type = mMetadataType[portIndex];
+ }
return ALREADY_EXISTS;
}
- // Input buffers will hold meta-data (gralloc references).
- OMX_BOOL usingGraphicBuffer = OMX_FALSE;
- err = storeMetaDataInBuffers_l(
- portIndex, OMX_TRUE,
- OMX_TRUE /* useGraphicBuffer */, &usingGraphicBuffer);
+ // Input buffers will hold meta-data (ANativeWindowBuffer references).
+ err = storeMetaDataInBuffers_l(portIndex, OMX_TRUE, type);
if (err != OK) {
return err;
}
@@ -832,7 +881,6 @@
def.format.video.nFrameWidth,
def.format.video.nFrameHeight,
def.nBufferCountActual,
- usingGraphicBuffer,
bufferConsumer);
if ((err = bufferSource->initCheck()) != OK) {
@@ -844,9 +892,9 @@
}
status_t OMXNodeInstance::createInputSurface(
- OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer) {
+ OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
Mutex::Autolock autolock(mLock);
- status_t err = createGraphicBufferSource(portIndex);
+ status_t err = createGraphicBufferSource(portIndex, NULL /* bufferConsumer */, type);
if (err != OK) {
return err;
@@ -868,17 +916,9 @@
consumer->setConsumerName(name);
consumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER);
- status_t err = consumer->setMaxAcquiredBufferCount(
- BufferQueue::MAX_MAX_ACQUIRED_BUFFERS);
- if (err != NO_ERROR) {
- ALOGE("Unable to set BQ max acquired buffer count to %u: %d",
- BufferQueue::MAX_MAX_ACQUIRED_BUFFERS, err);
- return err;
- }
-
sp<BufferQueue::ProxyConsumerListener> proxy =
new BufferQueue::ProxyConsumerListener(NULL);
- err = consumer->consumerConnect(proxy, false);
+ status_t err = consumer->consumerConnect(proxy, false);
if (err != NO_ERROR) {
ALOGE("Error connecting to BufferQueue: %s (%d)",
strerror(-err), err);
@@ -891,10 +931,11 @@
return OK;
}
-status_t OMXNodeInstance::usePersistentInputSurface(
- OMX_U32 portIndex, const sp<IGraphicBufferConsumer> &bufferConsumer) {
+status_t OMXNodeInstance::setInputSurface(
+ OMX_U32 portIndex, const sp<IGraphicBufferConsumer> &bufferConsumer,
+ MetadataBufferType *type) {
Mutex::Autolock autolock(mLock);
- return createGraphicBufferSource(portIndex, bufferConsumer);
+ return createGraphicBufferSource(portIndex, bufferConsumer, type);
}
status_t OMXNodeInstance::signalEndOfInputStream() {
@@ -949,19 +990,21 @@
status_t OMXNodeInstance::allocateBufferWithBackup(
OMX_U32 portIndex, const sp<IMemory> ¶ms,
- OMX::buffer_id *buffer) {
+ OMX::buffer_id *buffer, OMX_U32 allottedSize) {
Mutex::Autolock autoLock(mLock);
+ if (allottedSize > params->size()) {
+ return BAD_VALUE;
+ }
BufferMeta *buffer_meta = new BufferMeta(params, true);
OMX_BUFFERHEADERTYPE *header;
OMX_ERRORTYPE err = OMX_AllocateBuffer(
- mHandle, &header, portIndex, buffer_meta, params->size());
-
+ mHandle, &header, portIndex, buffer_meta, allottedSize);
if (err != OMX_ErrorNone) {
CLOG_ERROR(allocateBufferWithBackup, err,
- SIMPLE_BUFFER(portIndex, params->size(), params->pointer()));
+ SIMPLE_BUFFER(portIndex, (size_t)allottedSize, params->pointer()));
delete buffer_meta;
buffer_meta = NULL;
@@ -981,8 +1024,8 @@
bufferSource->addCodecBuffer(header);
}
- CLOG_BUFFER(allocateBufferWithBackup, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p :> %p",
- params->size(), params->pointer(), header->pBuffer));
+ CLOG_BUFFER(allocateBufferWithBackup, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p :> %u@%p",
+ params->size(), params->pointer(), allottedSize, header->pBuffer));
return OK;
}
@@ -1007,7 +1050,7 @@
return StatusFromOMXError(err);
}
-status_t OMXNodeInstance::fillBuffer(OMX::buffer_id buffer) {
+status_t OMXNodeInstance::fillBuffer(OMX::buffer_id buffer, int fenceFd) {
Mutex::Autolock autoLock(mLock);
OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
@@ -1015,15 +1058,22 @@
header->nOffset = 0;
header->nFlags = 0;
+ // meta now owns fenceFd
+ status_t res = storeFenceInMeta_l(header, fenceFd, kPortIndexOutput);
+ if (res != OK) {
+ CLOG_ERROR(fillBuffer::storeFenceInMeta, res, EMPTY_BUFFER(buffer, header, fenceFd));
+ return res;
+ }
+
{
Mutex::Autolock _l(mDebugLock);
mOutputBuffersWithCodec.add(header);
- CLOG_BUMPED_BUFFER(fillBuffer, WITH_STATS(EMPTY_BUFFER(buffer, header)));
+ CLOG_BUMPED_BUFFER(fillBuffer, WITH_STATS(EMPTY_BUFFER(buffer, header, fenceFd)));
}
OMX_ERRORTYPE err = OMX_FillThisBuffer(mHandle, header);
if (err != OMX_ErrorNone) {
- CLOG_ERROR(fillBuffer, err, EMPTY_BUFFER(buffer, header));
+ CLOG_ERROR(fillBuffer, err, EMPTY_BUFFER(buffer, header, fenceFd));
Mutex::Autolock _l(mDebugLock);
mOutputBuffersWithCodec.remove(header);
}
@@ -1033,18 +1083,46 @@
status_t OMXNodeInstance::emptyBuffer(
OMX::buffer_id buffer,
OMX_U32 rangeOffset, OMX_U32 rangeLength,
- OMX_U32 flags, OMX_TICKS timestamp) {
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
Mutex::Autolock autoLock(mLock);
OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
- header->nFilledLen = rangeLength;
- header->nOffset = rangeOffset;
-
BufferMeta *buffer_meta =
static_cast<BufferMeta *>(header->pAppPrivate);
- buffer_meta->CopyToOMX(header);
+ sp<ABuffer> backup = buffer_meta->getBuffer(header, true /* backup */);
+ sp<ABuffer> codec = buffer_meta->getBuffer(header, false /* backup */);
- return emptyBuffer_l(header, flags, timestamp, (intptr_t)buffer);
+ // convert incoming ANW meta buffers if component is configured for gralloc metadata mode
+ if (mMetadataType[kPortIndexInput] == kMetadataBufferTypeGrallocSource
+ && backup->capacity() >= sizeof(VideoNativeMetadata)
+ && codec->capacity() >= sizeof(VideoGrallocMetadata)
+ && ((VideoNativeMetadata *)backup->base())->eType
+ == kMetadataBufferTypeANWBuffer) {
+ VideoNativeMetadata &backupMeta = *(VideoNativeMetadata *)backup->base();
+ VideoGrallocMetadata &codecMeta = *(VideoGrallocMetadata *)codec->base();
+ CLOG_BUFFER(emptyBuffer, "converting ANWB %p to handle %p",
+ backupMeta.pBuffer, backupMeta.pBuffer->handle);
+ codecMeta.pHandle = backupMeta.pBuffer->handle;
+ codecMeta.eType = kMetadataBufferTypeGrallocSource;
+ header->nFilledLen = rangeLength ? sizeof(codecMeta) : 0;
+ header->nOffset = 0;
+ } else {
+ // rangeLength and rangeOffset must be a subset of the allocated data in the buffer.
+ // corner case: we permit rangeOffset == end-of-buffer with rangeLength == 0.
+ if (rangeOffset > header->nAllocLen
+ || rangeLength > header->nAllocLen - rangeOffset) {
+ if (fenceFd >= 0) {
+ ::close(fenceFd);
+ }
+ return BAD_VALUE;
+ }
+ header->nFilledLen = rangeLength;
+ header->nOffset = rangeOffset;
+
+ buffer_meta->CopyToOMX(header);
+ }
+
+ return emptyBuffer_l(header, flags, timestamp, (intptr_t)buffer, fenceFd);
}
// log queued buffer activity for the next few input and/or output frames
@@ -1071,11 +1149,62 @@
}
}
+status_t OMXNodeInstance::storeFenceInMeta_l(
+ OMX_BUFFERHEADERTYPE *header, int fenceFd, OMX_U32 portIndex) {
+ // propagate fence if component supports it; wait for it otherwise
+ OMX_U32 metaSize = portIndex == kPortIndexInput ? header->nFilledLen : header->nAllocLen;
+ if (mMetadataType[portIndex] == kMetadataBufferTypeANWBuffer
+ && metaSize >= sizeof(VideoNativeMetadata)) {
+ VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)(header->pBuffer);
+ if (nativeMeta.nFenceFd >= 0) {
+ ALOGE("fence (%d) already exists in meta", nativeMeta.nFenceFd);
+ if (fenceFd >= 0) {
+ ::close(fenceFd);
+ }
+ return ALREADY_EXISTS;
+ }
+ nativeMeta.nFenceFd = fenceFd;
+ } else if (fenceFd >= 0) {
+ CLOG_BUFFER(storeFenceInMeta, "waiting for fence %d", fenceFd);
+ sp<Fence> fence = new Fence(fenceFd);
+ return fence->wait(IOMX::kFenceTimeoutMs);
+ }
+ return OK;
+}
+
+int OMXNodeInstance::retrieveFenceFromMeta_l(
+ OMX_BUFFERHEADERTYPE *header, OMX_U32 portIndex) {
+ OMX_U32 metaSize = portIndex == kPortIndexInput ? header->nAllocLen : header->nFilledLen;
+ int fenceFd = -1;
+ if (mMetadataType[portIndex] == kMetadataBufferTypeANWBuffer
+ && header->nAllocLen >= sizeof(VideoNativeMetadata)) {
+ VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)(header->pBuffer);
+ if (nativeMeta.eType == kMetadataBufferTypeANWBuffer) {
+ fenceFd = nativeMeta.nFenceFd;
+ nativeMeta.nFenceFd = -1;
+ }
+ if (metaSize < sizeof(nativeMeta) && fenceFd >= 0) {
+ CLOG_ERROR(foundFenceInEmptyMeta, BAD_VALUE, FULL_BUFFER(
+ NULL, header, nativeMeta.nFenceFd));
+ fenceFd = -1;
+ }
+ }
+ return fenceFd;
+}
+
status_t OMXNodeInstance::emptyBuffer_l(
- OMX_BUFFERHEADERTYPE *header, OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr) {
+ OMX_BUFFERHEADERTYPE *header, OMX_U32 flags, OMX_TICKS timestamp,
+ intptr_t debugAddr, int fenceFd) {
header->nFlags = flags;
header->nTimeStamp = timestamp;
+ status_t res = storeFenceInMeta_l(header, fenceFd, kPortIndexInput);
+ if (res != OK) {
+ CLOG_ERROR(emptyBuffer::storeFenceInMeta, res, WITH_STATS(
+ FULL_BUFFER(debugAddr, header, fenceFd)));
+ return res;
+ }
+
{
Mutex::Autolock _l(mDebugLock);
mInputBuffersWithCodec.add(header);
@@ -1085,11 +1214,11 @@
bumpDebugLevel_l(2 /* numInputBuffers */, 0 /* numOutputBuffers */);
}
- CLOG_BUMPED_BUFFER(emptyBuffer, WITH_STATS(FULL_BUFFER(debugAddr, header)));
+ CLOG_BUMPED_BUFFER(emptyBuffer, WITH_STATS(FULL_BUFFER(debugAddr, header, fenceFd)));
}
OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header);
- CLOG_IF_ERROR(emptyBuffer, err, FULL_BUFFER(debugAddr, header));
+ CLOG_IF_ERROR(emptyBuffer, err, FULL_BUFFER(debugAddr, header, fenceFd));
{
Mutex::Autolock _l(mDebugLock);
@@ -1104,16 +1233,21 @@
}
// like emptyBuffer, but the data is already in header->pBuffer
-status_t OMXNodeInstance::emptyDirectBuffer(
- OMX_BUFFERHEADERTYPE *header,
- OMX_U32 rangeOffset, OMX_U32 rangeLength,
- OMX_U32 flags, OMX_TICKS timestamp) {
+status_t OMXNodeInstance::emptyGraphicBuffer(
+ OMX_BUFFERHEADERTYPE *header, const sp<GraphicBuffer> &graphicBuffer,
+ OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
Mutex::Autolock autoLock(mLock);
+ OMX::buffer_id buffer = findBufferID(header);
+ status_t err = updateGraphicBufferInMeta_l(kPortIndexInput, graphicBuffer, buffer, header);
+ if (err != OK) {
+ CLOG_ERROR(emptyGraphicBuffer, err, FULL_BUFFER(
+ (intptr_t)header->pBuffer, header, fenceFd));
+ return err;
+ }
- header->nFilledLen = rangeLength;
- header->nOffset = rangeOffset;
-
- return emptyBuffer_l(header, flags, timestamp, (intptr_t)header->pBuffer);
+ header->nOffset = 0;
+ header->nFilledLen = graphicBuffer == NULL ? 0 : header->nAllocLen;
+ return emptyBuffer_l(header, flags, timestamp, (intptr_t)header->pBuffer, fenceFd);
}
status_t OMXNodeInstance::getExtensionIndex(
@@ -1225,7 +1359,7 @@
}
}
-void OMXNodeInstance::onMessage(const omx_message &msg) {
+bool OMXNodeInstance::handleMessage(omx_message &msg) {
const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
if (msg.type == omx_message::FILL_BUFFER_DONE) {
@@ -1237,7 +1371,8 @@
mOutputBuffersWithCodec.remove(buffer);
CLOG_BUMPED_BUFFER(
- FBD, WITH_STATS(FULL_BUFFER(msg.u.extended_buffer_data.buffer, buffer)));
+ FBD, WITH_STATS(FULL_BUFFER(
+ msg.u.extended_buffer_data.buffer, buffer, msg.fenceFd)));
unbumpDebugLevel_l(kPortIndexOutput);
}
@@ -1251,10 +1386,7 @@
// fix up the buffer info (especially timestamp) if needed
bufferSource->codecBufferFilled(buffer);
- omx_message newMsg = msg;
- newMsg.u.extended_buffer_data.timestamp = buffer->nTimeStamp;
- mObserver->onMessage(newMsg);
- return;
+ msg.u.extended_buffer_data.timestamp = buffer->nTimeStamp;
}
} else if (msg.type == omx_message::EMPTY_BUFFER_DONE) {
OMX_BUFFERHEADERTYPE *buffer =
@@ -1265,7 +1397,7 @@
mInputBuffersWithCodec.remove(buffer);
CLOG_BUMPED_BUFFER(
- EBD, WITH_STATS(EMPTY_BUFFER(msg.u.buffer_data.buffer, buffer)));
+ EBD, WITH_STATS(EMPTY_BUFFER(msg.u.buffer_data.buffer, buffer, msg.fenceFd)));
}
if (bufferSource != NULL) {
@@ -1274,12 +1406,26 @@
// Don't dispatch a message back to ACodec, since it doesn't
// know that anyone asked to have the buffer emptied and will
// be very confused.
- bufferSource->codecBufferEmptied(buffer);
- return;
+ bufferSource->codecBufferEmptied(buffer, msg.fenceFd);
+ return true;
}
}
- mObserver->onMessage(msg);
+ return false;
+}
+
+void OMXNodeInstance::onMessages(std::list<omx_message> &messages) {
+ for (std::list<omx_message>::iterator it = messages.begin(); it != messages.end(); ) {
+ if (handleMessage(*it)) {
+ messages.erase(it++);
+ } else {
+ ++it;
+ }
+ }
+
+ if (!messages.empty()) {
+ mObserver->onMessages(messages);
+ }
}
void OMXNodeInstance::onObserverDied(OMXMaster *master) {
@@ -1369,8 +1515,9 @@
if (instance->mDying) {
return OMX_ErrorNone;
}
+ int fenceFd = instance->retrieveFenceFromMeta_l(pBuffer, kPortIndexOutput);
return instance->owner()->OnEmptyBufferDone(instance->nodeID(),
- instance->findBufferID(pBuffer), pBuffer);
+ instance->findBufferID(pBuffer), pBuffer, fenceFd);
}
// static
@@ -1382,8 +1529,9 @@
if (instance->mDying) {
return OMX_ErrorNone;
}
+ int fenceFd = instance->retrieveFenceFromMeta_l(pBuffer, kPortIndexOutput);
return instance->owner()->OnFillBufferDone(instance->nodeID(),
- instance->findBufferID(pBuffer), pBuffer);
+ instance->findBufferID(pBuffer), pBuffer, fenceFd);
}
void OMXNodeInstance::addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id) {
@@ -1422,8 +1570,6 @@
}
}
-#ifdef __LP64__
-
OMX::buffer_id OMXNodeInstance::makeBufferID(OMX_BUFFERHEADERTYPE *bufferHeader) {
if (bufferHeader == NULL) {
return 0;
@@ -1466,23 +1612,4 @@
mBufferIDToBufferHeader.removeItem(buffer);
}
-#else
-
-OMX::buffer_id OMXNodeInstance::makeBufferID(OMX_BUFFERHEADERTYPE *bufferHeader) {
- return (OMX::buffer_id)bufferHeader;
-}
-
-OMX_BUFFERHEADERTYPE *OMXNodeInstance::findBufferHeader(OMX::buffer_id buffer) {
- return (OMX_BUFFERHEADERTYPE *)buffer;
-}
-
-OMX::buffer_id OMXNodeInstance::findBufferID(OMX_BUFFERHEADERTYPE *bufferHeader) {
- return (OMX::buffer_id)bufferHeader;
-}
-
-void OMXNodeInstance::invalidateBufferID(OMX::buffer_id buffer __unused) {
-}
-
-#endif
-
} // namespace android
diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
index d4d6217..9dd26fb 100644
--- a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
@@ -155,7 +155,7 @@
uint32_t rawBufferSize =
inDef->format.video.nStride * inDef->format.video.nSliceHeight * 3 / 2;
if (inDef->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
- inDef->nBufferSize = 4 + max(sizeof(buffer_handle_t), sizeof(GraphicBuffer *));
+ inDef->nBufferSize = max(sizeof(VideoNativeMetadata), sizeof(VideoGrallocMetadata));
} else {
inDef->nBufferSize = rawBufferSize;
}
@@ -482,8 +482,8 @@
size_t dstVStride = height;
MetadataBufferType bufferType = *(MetadataBufferType *)src;
- bool usingGraphicBuffer = bufferType == kMetadataBufferTypeGraphicBuffer;
- if (!usingGraphicBuffer && bufferType != kMetadataBufferTypeGrallocSource) {
+ bool usingANWBuffer = bufferType == kMetadataBufferTypeANWBuffer;
+ if (!usingANWBuffer && bufferType != kMetadataBufferTypeGrallocSource) {
ALOGE("Unsupported metadata type (%d)", bufferType);
return NULL;
}
@@ -499,13 +499,14 @@
int format;
size_t srcStride;
size_t srcVStride;
- if (usingGraphicBuffer) {
- if (srcSize < sizeof(OMX_U32) + sizeof(GraphicBuffer *)) {
- ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(OMX_U32) + sizeof(GraphicBuffer *));
+ if (usingANWBuffer) {
+ if (srcSize < sizeof(VideoNativeMetadata)) {
+ ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(VideoNativeMetadata));
return NULL;
}
- GraphicBuffer *buffer = *(GraphicBuffer **)(src + sizeof(OMX_U32));
+ VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)src;
+ ANativeWindowBuffer *buffer = nativeMeta.pBuffer;
handle = buffer->handle;
format = buffer->format;
srcStride = buffer->stride;
@@ -516,15 +517,26 @@
// TODO do we need to support other formats?
srcStride *= 4;
}
+
+ if (nativeMeta.nFenceFd >= 0) {
+ sp<Fence> fence = new Fence(nativeMeta.nFenceFd);
+ nativeMeta.nFenceFd = -1;
+ status_t err = fence->wait(IOMX::kFenceTimeoutMs);
+ if (err != OK) {
+ ALOGE("Timed out waiting on input fence");
+ return NULL;
+ }
+ }
} else {
// TODO: remove this part. Check if anyone uses this.
- if (srcSize < sizeof(OMX_U32) + sizeof(buffer_handle_t)) {
- ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(OMX_U32) + sizeof(buffer_handle_t));
+ if (srcSize < sizeof(VideoGrallocMetadata)) {
+ ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(VideoGrallocMetadata));
return NULL;
}
- handle = *(buffer_handle_t *)(src + sizeof(OMX_U32));
+ VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)(src);
+ handle = grallocMeta.pHandle;
// assume HAL_PIXEL_FORMAT_RGBA_8888
// there is no way to get the src stride without the graphic buffer
format = HAL_PIXEL_FORMAT_RGBA_8888;
@@ -606,7 +618,7 @@
OMX_ERRORTYPE SoftVideoEncoderOMXComponent::getExtensionIndex(
const char *name, OMX_INDEXTYPE *index) {
if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers") ||
- !strcmp(name, "OMX.google.android.index.storeGraphicBufferInMetaData")) {
+ !strcmp(name, "OMX.google.android.index.storeANWBufferInMetadata")) {
*(int32_t*)index = kStoreMetaDataExtensionIndex;
return OMX_ErrorNone;
}
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 67ff145..644b6ed 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -64,9 +64,11 @@
return mOMX != 0 ? OK : NO_INIT;
}
-void Harness::onMessage(const omx_message &msg) {
+void Harness::onMessages(const std::list<omx_message> &messages) {
Mutex::Autolock autoLock(mLock);
- mMessageQueue.push_back(msg);
+ for (std::list<omx_message>::const_iterator it = messages.cbegin(); it != messages.cend(); ) {
+ mMessageQueue.push_back(*it++);
+ }
mMessageAddedCondition.signal();
}
@@ -193,7 +195,7 @@
CHECK(buffer.mMemory != NULL);
err = mOMX->allocateBufferWithBackup(
- node, portIndex, buffer.mMemory, &buffer.mID);
+ node, portIndex, buffer.mMemory, &buffer.mID, buffer.mMemory->size());
EXPECT_SUCCESS(err, "allocateBuffer");
buffers->push(buffer);
diff --git a/media/libstagefright/omx/tests/OMXHarness.h b/media/libstagefright/omx/tests/OMXHarness.h
index bb8fd0c..1ebf3aa 100644
--- a/media/libstagefright/omx/tests/OMXHarness.h
+++ b/media/libstagefright/omx/tests/OMXHarness.h
@@ -74,7 +74,7 @@
status_t testAll();
- virtual void onMessage(const omx_message &msg);
+ virtual void onMessages(const std::list<omx_message> &messages);
protected:
virtual ~Harness();
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 00f071b..e64a7a1 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -1673,21 +1673,11 @@
}
size_t n = strlen(baseURL);
- if (baseURL[n - 1] == '/') {
- out->setTo(baseURL);
- out->append(url);
- } else {
- const char *slashPos = strrchr(baseURL, '/');
-
- if (slashPos > &baseURL[6]) {
- out->setTo(baseURL, slashPos - baseURL);
- } else {
- out->setTo(baseURL);
- }
-
+ out->setTo(baseURL);
+ if (baseURL[n - 1] != '/') {
out->append("/");
- out->append(url);
}
+ out->append(url);
return true;
}
@@ -1747,7 +1737,7 @@
}
if (!mAllTracksHaveTime) {
- bool allTracksHaveTime = true;
+ bool allTracksHaveTime = (mTracks.size() > 0);
for (size_t i = 0; i < mTracks.size(); ++i) {
TrackInfo *track = &mTracks.editItemAt(i);
if (track->mNTPAnchorUs < 0) {
diff --git a/media/libstagefright/tests/MediaCodecListOverrides_test.cpp b/media/libstagefright/tests/MediaCodecListOverrides_test.cpp
index 170cde3..ab547be 100644
--- a/media/libstagefright/tests/MediaCodecListOverrides_test.cpp
+++ b/media/libstagefright/tests/MediaCodecListOverrides_test.cpp
@@ -31,29 +31,8 @@
static const char kTestOverridesStr[] =
"<MediaCodecs>\n"
" <Settings>\n"
-" <Setting name=\"max-max-supported-instances\" value=\"8\" update=\"true\" />\n"
-" </Settings>\n"
-" <Encoders>\n"
-" <MediaCodec name=\"OMX.qcom.video.encoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n"
-" <Quirk name=\"requires-allocate-on-input-ports\" />\n"
-" <Limit name=\"bitrate\" range=\"1-20000000\" />\n"
-" <Feature name=\"can-swap-width-height\" />\n"
-" </MediaCodec>\n"
-" </Encoders>\n"
-" <Decoders>\n"
-" <MediaCodec name=\"OMX.qcom.video.decoder.avc\" type=\"video/avc\" update=\"true\" >\n"
-" <Quirk name=\"requires-allocate-on-input-ports\" />\n"
-" <Limit name=\"size\" min=\"64x64\" max=\"1920x1088\" />\n"
-" </MediaCodec>\n"
-" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg2\" type=\"different_mime\" update=\"true\" >\n"
-" </MediaCodec>\n"
-" </Decoders>\n"
-"</MediaCodecs>\n";
-
-static const char kTestOverridesStrNew1[] =
-"<MediaCodecs>\n"
-" <Settings>\n"
-" <Setting name=\"max-max-supported-instances\" value=\"8\" update=\"true\" />\n"
+" <Setting name=\"supports-multiple-secure-codecs\" value=\"false\" />\n"
+" <Setting name=\"supports-secure-with-non-secure-codec\" value=\"true\" />\n"
" </Settings>\n"
" <Encoders>\n"
" <MediaCodec name=\"OMX.qcom.video.encoder.avc\" type=\"video/avc\" update=\"true\" >\n"
@@ -61,57 +40,21 @@
" </MediaCodec>\n"
" <MediaCodec name=\"OMX.qcom.video.encoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n"
" <Limit name=\"max-supported-instances\" value=\"4\" />\n"
-" <Quirk name=\"requires-allocate-on-input-ports\" />\n"
-" <Limit name=\"bitrate\" range=\"1-20000000\" />\n"
-" <Feature name=\"can-swap-width-height\" />\n"
" </MediaCodec>\n"
" </Encoders>\n"
" <Decoders>\n"
-" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n"
-" <Limit name=\"max-supported-instances\" value=\"3\" />\n"
+" <MediaCodec name=\"OMX.qcom.video.decoder.avc.secure\" type=\"video/avc\" update=\"true\" >\n"
+" <Limit name=\"max-supported-instances\" value=\"1\" />\n"
" </MediaCodec>\n"
" <MediaCodec name=\"OMX.qcom.video.decoder.h263\" type=\"video/3gpp\" update=\"true\" >\n"
" <Limit name=\"max-supported-instances\" value=\"4\" />\n"
" </MediaCodec>\n"
-" <MediaCodec name=\"OMX.qcom.video.decoder.avc.secure\" type=\"video/avc\" update=\"true\" >\n"
-" <Limit name=\"max-supported-instances\" value=\"1\" />\n"
-" </MediaCodec>\n"
-" <MediaCodec name=\"OMX.qcom.video.decoder.avc\" type=\"video/avc\" update=\"true\" >\n"
-" <Quirk name=\"requires-allocate-on-input-ports\" />\n"
-" <Limit name=\"size\" min=\"64x64\" max=\"1920x1088\" />\n"
-" </MediaCodec>\n"
-" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg2\" type=\"different_mime\" update=\"true\" >\n"
-" </MediaCodec>\n"
" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg2\" type=\"video/mpeg2\" update=\"true\" >\n"
" <Limit name=\"max-supported-instances\" value=\"3\" />\n"
" </MediaCodec>\n"
-" </Decoders>\n"
-"</MediaCodecs>\n";
-
-static const char kTestOverridesStrNew2[] =
-"\n"
-"<MediaCodecs>\n"
-" <Encoders>\n"
-" <MediaCodec name=\"OMX.qcom.video.encoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n"
-" <Limit name=\"max-supported-instances\" value=\"4\" />\n"
-" </MediaCodec>\n"
-" <MediaCodec name=\"OMX.qcom.video.encoder.avc\" type=\"video/avc\" update=\"true\" >\n"
-" <Limit name=\"max-supported-instances\" value=\"4\" />\n"
-" </MediaCodec>\n"
-" </Encoders>\n"
-" <Decoders>\n"
" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n"
" <Limit name=\"max-supported-instances\" value=\"3\" />\n"
" </MediaCodec>\n"
-" <MediaCodec name=\"OMX.qcom.video.decoder.mpeg2\" type=\"video/mpeg2\" update=\"true\" >\n"
-" <Limit name=\"max-supported-instances\" value=\"3\" />\n"
-" </MediaCodec>\n"
-" <MediaCodec name=\"OMX.qcom.video.decoder.h263\" type=\"video/3gpp\" update=\"true\" >\n"
-" <Limit name=\"max-supported-instances\" value=\"4\" />\n"
-" </MediaCodec>\n"
-" <MediaCodec name=\"OMX.qcom.video.decoder.avc.secure\" type=\"video/avc\" update=\"true\" >\n"
-" <Limit name=\"max-supported-instances\" value=\"1\" />\n"
-" </MediaCodec>\n"
" </Decoders>\n"
"</MediaCodecs>\n";
@@ -119,53 +62,6 @@
public:
MediaCodecListOverridesTest() {}
- void verifyOverrides(const KeyedVector<AString, CodecSettings> &overrides) {
- EXPECT_EQ(3u, overrides.size());
-
- EXPECT_TRUE(overrides.keyAt(0) == "OMX.qcom.video.decoder.avc video/avc decoder");
- const CodecSettings &settings0 = overrides.valueAt(0);
- EXPECT_EQ(1u, settings0.size());
- EXPECT_TRUE(settings0.keyAt(0) == "max-supported-instances");
- EXPECT_TRUE(settings0.valueAt(0) == "4");
-
- EXPECT_TRUE(overrides.keyAt(1) == "OMX.qcom.video.encoder.avc video/avc encoder");
- const CodecSettings &settings1 = overrides.valueAt(1);
- EXPECT_EQ(1u, settings1.size());
- EXPECT_TRUE(settings1.keyAt(0) == "max-supported-instances");
- EXPECT_TRUE(settings1.valueAt(0) == "3");
-
- EXPECT_TRUE(overrides.keyAt(2) == "global");
- const CodecSettings &settings2 = overrides.valueAt(2);
- EXPECT_EQ(3u, settings2.size());
- EXPECT_TRUE(settings2.keyAt(0) == "max-max-supported-instances");
- EXPECT_TRUE(settings2.valueAt(0) == "8");
- EXPECT_TRUE(settings2.keyAt(1) == "supports-multiple-secure-codecs");
- EXPECT_TRUE(settings2.valueAt(1) == "false");
- EXPECT_TRUE(settings2.keyAt(2) == "supports-secure-with-non-secure-codec");
- EXPECT_TRUE(settings2.valueAt(2) == "true");
- }
-
- void verifySetting(const sp<AMessage> &details, const char *name, const char *value) {
- AString value1;
- EXPECT_TRUE(details->findString(name, &value1));
- EXPECT_TRUE(value1 == value);
- }
-
- void createTestInfos(Vector<sp<MediaCodecInfo>> *infos) {
- const char *name = "OMX.qcom.video.decoder.avc";
- const bool encoder = false;
- const char *mime = "video/avc";
- sp<MediaCodecInfo> info = new MediaCodecInfo(name, encoder, mime);
- infos->push_back(info);
- const sp<MediaCodecInfo::Capabilities> caps = info->getCapabilitiesFor(mime);
- const sp<AMessage> details = caps->getDetails();
- details->setString("cap1", "value1");
- details->setString("max-max-supported-instances", "16");
-
- info = new MediaCodecInfo("anothercodec", true, "anothermime");
- infos->push_back(info);
- }
-
void addMaxInstancesSetting(
const AString &key,
const AString &value,
@@ -175,16 +71,34 @@
results->add(key, settings);
}
- void exportTestResultsToXML(const char *fileName) {
- KeyedVector<AString, CodecSettings> r;
- addMaxInstancesSetting("OMX.qcom.video.decoder.avc.secure video/avc decoder", "1", &r);
- addMaxInstancesSetting("OMX.qcom.video.decoder.h263 video/3gpp decoder", "4", &r);
- addMaxInstancesSetting("OMX.qcom.video.decoder.mpeg2 video/mpeg2 decoder", "3", &r);
- addMaxInstancesSetting("OMX.qcom.video.decoder.mpeg4 video/mp4v-es decoder", "3", &r);
- addMaxInstancesSetting("OMX.qcom.video.encoder.avc video/avc encoder", "4", &r);
- addMaxInstancesSetting("OMX.qcom.video.encoder.mpeg4 video/mp4v-es encoder", "4", &r);
+ void verifyProfileResults(const KeyedVector<AString, CodecSettings> &results) {
+ EXPECT_LT(0u, results.size());
+ for (size_t i = 0; i < results.size(); ++i) {
+ AString key = results.keyAt(i);
+ CodecSettings settings = results.valueAt(i);
+ EXPECT_EQ(1u, settings.size());
+ EXPECT_TRUE(settings.keyAt(0) == "max-supported-instances");
+ AString valueS = settings.valueAt(0);
+ int32_t value = strtol(valueS.c_str(), NULL, 10);
+ EXPECT_LT(0, value);
+ ALOGV("profileCodecs results %s %s", key.c_str(), valueS.c_str());
+ }
+ }
- exportResultsToXML(fileName, r);
+ void exportTestResultsToXML(const char *fileName) {
+ CodecSettings gR;
+ gR.add("supports-multiple-secure-codecs", "false");
+ gR.add("supports-secure-with-non-secure-codec", "true");
+ KeyedVector<AString, CodecSettings> eR;
+ addMaxInstancesSetting("OMX.qcom.video.encoder.avc video/avc", "4", &eR);
+ addMaxInstancesSetting("OMX.qcom.video.encoder.mpeg4 video/mp4v-es", "4", &eR);
+ KeyedVector<AString, CodecSettings> dR;
+ addMaxInstancesSetting("OMX.qcom.video.decoder.avc.secure video/avc", "1", &dR);
+ addMaxInstancesSetting("OMX.qcom.video.decoder.h263 video/3gpp", "4", &dR);
+ addMaxInstancesSetting("OMX.qcom.video.decoder.mpeg2 video/mpeg2", "3", &dR);
+ addMaxInstancesSetting("OMX.qcom.video.decoder.mpeg4 video/mp4v-es", "3", &dR);
+
+ exportResultsToXML(fileName, gR, eR, dR);
}
};
@@ -198,18 +112,6 @@
EXPECT_TRUE(splitString(s, delimiter, &s1, &s2));
EXPECT_TRUE(s1 == "abc");
EXPECT_TRUE(s2 == "123");
-
- s = "abc123xyz";
- delimiter = ",";
- AString s3;
- EXPECT_FALSE(splitString(s, delimiter, &s1, &s2, &s3));
- s = "abc,123xyz";
- EXPECT_FALSE(splitString(s, delimiter, &s1, &s2, &s3));
- s = "abc,123,xyz";
- EXPECT_TRUE(splitString(s, delimiter, &s1, &s2, &s3));
- EXPECT_TRUE(s1 == "abc");
- EXPECT_TRUE(s2 == "123" );
- EXPECT_TRUE(s3 == "xyz");
}
// TODO: the codec component never returns OMX_EventCmdComplete in unit test.
@@ -219,76 +121,16 @@
for (size_t i = 0; i < list->countCodecs(); ++i) {
infos.push_back(list->getCodecInfo(i));
}
- KeyedVector<AString, CodecSettings> results;
- profileCodecs(infos, &results, true /* forceToMeasure */);
- EXPECT_LT(0u, results.size());
- for (size_t i = 0; i < results.size(); ++i) {
- AString key = results.keyAt(i);
- CodecSettings settings = results.valueAt(i);
- EXPECT_EQ(1u, settings.size());
- EXPECT_TRUE(settings.keyAt(0) == "max-supported-instances");
- AString valueS = settings.valueAt(0);
- int32_t value = strtol(valueS.c_str(), NULL, 10);
- EXPECT_LT(0, value);
- ALOGV("profileCodecs results %s %s", key.c_str(), valueS.c_str());
- }
+ CodecSettings global_results;
+ KeyedVector<AString, CodecSettings> encoder_results;
+ KeyedVector<AString, CodecSettings> decoder_results;
+ profileCodecs(
+ infos, &global_results, &encoder_results, &decoder_results, true /* forceToMeasure */);
+ verifyProfileResults(encoder_results);
+ verifyProfileResults(decoder_results);
}
-TEST_F(MediaCodecListOverridesTest, applyCodecSettings) {
- AString codecInfo = "OMX.qcom.video.decoder.avc video/avc decoder";
- Vector<sp<MediaCodecInfo>> infos;
- createTestInfos(&infos);
- CodecSettings settings;
- settings.add("max-supported-instances", "3");
- settings.add("max-max-supported-instances", "8");
- applyCodecSettings(codecInfo, settings, &infos);
-
- EXPECT_EQ(2u, infos.size());
- EXPECT_TRUE(AString(infos[0]->getCodecName()) == "OMX.qcom.video.decoder.avc");
- const sp<AMessage> details = infos[0]->getCapabilitiesFor("video/avc")->getDetails();
- verifySetting(details, "max-supported-instances", "3");
- verifySetting(details, "max-max-supported-instances", "8");
-
- EXPECT_TRUE(AString(infos[1]->getCodecName()) == "anothercodec");
- EXPECT_EQ(0u, infos[1]->getCapabilitiesFor("anothermime")->getDetails()->countEntries());
-}
-
-TEST_F(MediaCodecListOverridesTest, exportResultsToExistingFile) {
- const char *fileName = "/sdcard/mediacodec_list_overrides_test.xml";
- remove(fileName);
-
- FILE *f = fopen(fileName, "wb");
- if (f == NULL) {
- ALOGW("Failed to open %s for writing.", fileName);
- return;
- }
- EXPECT_EQ(
- strlen(kTestOverridesStr),
- fwrite(kTestOverridesStr, 1, strlen(kTestOverridesStr), f));
- fclose(f);
-
- exportTestResultsToXML(fileName);
-
- // verify
- AString overrides;
- f = fopen(fileName, "rb");
- ASSERT_TRUE(f != NULL);
- fseek(f, 0, SEEK_END);
- long size = ftell(f);
- rewind(f);
-
- char *buf = (char *)malloc(size);
- EXPECT_EQ((size_t)1, fread(buf, size, 1, f));
- overrides.setTo(buf, size);
- fclose(f);
- free(buf);
-
- EXPECT_TRUE(overrides == kTestOverridesStrNew1);
-
- remove(fileName);
-}
-
-TEST_F(MediaCodecListOverridesTest, exportResultsToEmptyFile) {
+TEST_F(MediaCodecListOverridesTest, exportTestResultsToXML) {
const char *fileName = "/sdcard/mediacodec_list_overrides_test.xml";
remove(fileName);
@@ -308,7 +150,11 @@
fclose(f);
free(buf);
- EXPECT_TRUE(overrides == kTestOverridesStrNew2);
+ AString expected;
+ expected.append(getProfilingVersionString());
+ expected.append("\n");
+ expected.append(kTestOverridesStr);
+ EXPECT_TRUE(overrides == expected);
remove(fileName);
}
diff --git a/media/libstagefright/tests/Utils_test.cpp b/media/libstagefright/tests/Utils_test.cpp
index 5c323c1..c1e663c 100644
--- a/media/libstagefright/tests/Utils_test.cpp
+++ b/media/libstagefright/tests/Utils_test.cpp
@@ -192,6 +192,87 @@
ASSERT_EQ(max(-4.3, 8.6), 8.6);
ASSERT_EQ(max(8.6, -4.3), 8.6);
+ ASSERT_FALSE(isInRange(-43, 86u, -44));
+ ASSERT_TRUE(isInRange(-43, 87u, -43));
+ ASSERT_TRUE(isInRange(-43, 88u, -1));
+ ASSERT_TRUE(isInRange(-43, 89u, 0));
+ ASSERT_TRUE(isInRange(-43, 90u, 46));
+ ASSERT_FALSE(isInRange(-43, 91u, 48));
+ ASSERT_FALSE(isInRange(-43, 92u, 50));
+
+ ASSERT_FALSE(isInRange(43, 86u, 42));
+ ASSERT_TRUE(isInRange(43, 87u, 43));
+ ASSERT_TRUE(isInRange(43, 88u, 44));
+ ASSERT_TRUE(isInRange(43, 89u, 131));
+ ASSERT_FALSE(isInRange(43, 90u, 133));
+ ASSERT_FALSE(isInRange(43, 91u, 135));
+
+ ASSERT_FALSE(isInRange(43u, 86u, 42u));
+ ASSERT_TRUE(isInRange(43u, 85u, 43u));
+ ASSERT_TRUE(isInRange(43u, 84u, 44u));
+ ASSERT_TRUE(isInRange(43u, 83u, 125u));
+ ASSERT_FALSE(isInRange(43u, 82u, 125u));
+ ASSERT_FALSE(isInRange(43u, 81u, 125u));
+
+ ASSERT_FALSE(isInRange(-43, ~0u, 43));
+ ASSERT_FALSE(isInRange(-43, ~0u, 44));
+ ASSERT_FALSE(isInRange(-43, ~0u, ~0));
+ ASSERT_FALSE(isInRange(-43, ~0u, 41));
+ ASSERT_FALSE(isInRange(-43, ~0u, 40));
+
+ ASSERT_FALSE(isInRange(43u, ~0u, 43u));
+ ASSERT_FALSE(isInRange(43u, ~0u, 41u));
+ ASSERT_FALSE(isInRange(43u, ~0u, 40u));
+ ASSERT_FALSE(isInRange(43u, ~0u, ~0u));
+
+ ASSERT_FALSE(isInRange(-43, 86u, -44, 0u));
+ ASSERT_FALSE(isInRange(-43, 86u, -44, 1u));
+ ASSERT_FALSE(isInRange(-43, 86u, -44, 2u));
+ ASSERT_FALSE(isInRange(-43, 86u, -44, ~0u));
+ ASSERT_TRUE(isInRange(-43, 87u, -43, 0u));
+ ASSERT_TRUE(isInRange(-43, 87u, -43, 1u));
+ ASSERT_TRUE(isInRange(-43, 87u, -43, 86u));
+ ASSERT_TRUE(isInRange(-43, 87u, -43, 87u));
+ ASSERT_FALSE(isInRange(-43, 87u, -43, 88u));
+ ASSERT_FALSE(isInRange(-43, 87u, -43, ~0u));
+ ASSERT_TRUE(isInRange(-43, 88u, -1, 0u));
+ ASSERT_TRUE(isInRange(-43, 88u, -1, 45u));
+ ASSERT_TRUE(isInRange(-43, 88u, -1, 46u));
+ ASSERT_FALSE(isInRange(-43, 88u, -1, 47u));
+ ASSERT_FALSE(isInRange(-43, 88u, -1, ~3u));
+ ASSERT_TRUE(isInRange(-43, 90u, 46, 0u));
+ ASSERT_TRUE(isInRange(-43, 90u, 46, 1u));
+ ASSERT_FALSE(isInRange(-43, 90u, 46, 2u));
+ ASSERT_FALSE(isInRange(-43, 91u, 48, 0u));
+ ASSERT_FALSE(isInRange(-43, 91u, 48, 2u));
+ ASSERT_FALSE(isInRange(-43, 91u, 48, ~6u));
+ ASSERT_FALSE(isInRange(-43, 92u, 50, 0u));
+ ASSERT_FALSE(isInRange(-43, 92u, 50, 1u));
+
+ ASSERT_FALSE(isInRange(43u, 86u, 42u, 0u));
+ ASSERT_FALSE(isInRange(43u, 86u, 42u, 1u));
+ ASSERT_FALSE(isInRange(43u, 86u, 42u, 2u));
+ ASSERT_FALSE(isInRange(43u, 86u, 42u, ~0u));
+ ASSERT_TRUE(isInRange(43u, 87u, 43u, 0u));
+ ASSERT_TRUE(isInRange(43u, 87u, 43u, 1u));
+ ASSERT_TRUE(isInRange(43u, 87u, 43u, 86u));
+ ASSERT_TRUE(isInRange(43u, 87u, 43u, 87u));
+ ASSERT_FALSE(isInRange(43u, 87u, 43u, 88u));
+ ASSERT_FALSE(isInRange(43u, 87u, 43u, ~0u));
+ ASSERT_TRUE(isInRange(43u, 88u, 60u, 0u));
+ ASSERT_TRUE(isInRange(43u, 88u, 60u, 70u));
+ ASSERT_TRUE(isInRange(43u, 88u, 60u, 71u));
+ ASSERT_FALSE(isInRange(43u, 88u, 60u, 72u));
+ ASSERT_FALSE(isInRange(43u, 88u, 60u, ~3u));
+ ASSERT_TRUE(isInRange(43u, 90u, 132u, 0u));
+ ASSERT_TRUE(isInRange(43u, 90u, 132u, 1u));
+ ASSERT_FALSE(isInRange(43u, 90u, 132u, 2u));
+ ASSERT_FALSE(isInRange(43u, 91u, 134u, 0u));
+ ASSERT_FALSE(isInRange(43u, 91u, 134u, 2u));
+ ASSERT_FALSE(isInRange(43u, 91u, 134u, ~6u));
+ ASSERT_FALSE(isInRange(43u, 92u, 136u, 0u));
+ ASSERT_FALSE(isInRange(43u, 92u, 136u, 1u));
+
ASSERT_EQ(periodicError(124, 100), 24);
ASSERT_EQ(periodicError(288, 100), 12);
ASSERT_EQ(periodicError(-345, 100), 45);
diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk
index ba47172..78dfbb1 100644
--- a/media/mediaserver/Android.mk
+++ b/media/mediaserver/Android.mk
@@ -11,13 +11,15 @@
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
- main_mediaserver.cpp
+ main_mediaserver.cpp \
+ icuutils.cpp
LOCAL_SHARED_LIBRARIES := \
libaudioflinger \
libaudiopolicyservice \
libcamera_metadata\
libcameraservice \
+ libicuuc \
libmedialogservice \
libresourcemanagerservice \
libcutils \
diff --git a/media/mediaserver/IcuUtils.h b/media/mediaserver/IcuUtils.h
new file mode 100644
index 0000000..52fab6d
--- /dev/null
+++ b/media/mediaserver/IcuUtils.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ICU_UTILS_H
+#define ICU_UTILS_H
+
+// Initializes ICU or dies trying. This must be called when the process
+// is single threaded.
+void initializeIcuOrDie();
+
+#endif // ICU_UTILS_H
+
diff --git a/media/mediaserver/icuutils.cpp b/media/mediaserver/icuutils.cpp
new file mode 100644
index 0000000..4015849
--- /dev/null
+++ b/media/mediaserver/icuutils.cpp
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "IcuUtils.h"
+
+#include "unicode/putil.h"
+#include "unicode/uclean.h"
+#include "unicode/utypes.h"
+#include "utils/Log.h"
+
+#include <stdlib.h>
+
+void initializeIcuOrDie() {
+ const char* systemPathPrefix = getenv("ANDROID_ROOT");
+ LOG_ALWAYS_FATAL_IF(systemPathPrefix == NULL, "ANDROID_ROOT environment variable not set");
+
+ char buf[256];
+ const int num_written = snprintf(buf, sizeof(buf), "%s/usr/icu/", systemPathPrefix);
+ LOG_ALWAYS_FATAL_IF((num_written < 0 || static_cast<size_t>(num_written) >= sizeof(buf)),
+ "Unable to construct ICU path.");
+
+ u_setDataDirectory(buf);
+ UErrorCode status = U_ZERO_ERROR;
+
+ // u_setDataDirectory doesn't try doing anything with the directory we gave
+ // it, so we'll have to call u_init to make sure it was successful.
+ u_init(&status);
+ LOG_ALWAYS_FATAL_IF(!U_SUCCESS(status), "Failed to initialize ICU %s", u_errorName(status));
+}
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index 06b3c6e..27a40b2 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -31,6 +31,7 @@
// from LOCAL_C_INCLUDES
#include "AudioFlinger.h"
#include "CameraService.h"
+#include "IcuUtils.h"
#include "MediaLogService.h"
#include "MediaPlayerService.h"
#include "ResourceManagerService.h"
@@ -124,6 +125,7 @@
prctl(PR_SET_PDEATHSIG, SIGKILL); // if parent media.log dies before me, kill me also
setpgid(0, 0); // but if I die first, don't kill my parent
}
+ initializeIcuOrDie();
sp<ProcessState> proc(ProcessState::self());
sp<IServiceManager> sm = defaultServiceManager();
ALOGI("ServiceManager: %p", sm.get());
diff --git a/media/utils/Android.mk b/media/utils/Android.mk
new file mode 100644
index 0000000..dfadbc8
--- /dev/null
+++ b/media/utils/Android.mk
@@ -0,0 +1,39 @@
+# Copyright 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ BatteryNotifier.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libbinder \
+ libcutils \
+ liblog \
+ libutils \
+
+LOCAL_C_INCLUDES := $(LOCAL_PATH)/include
+
+LOCAL_CFLAGS += \
+ -Wall \
+ -Wextra \
+ -Werror \
+
+LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/include
+
+LOCAL_MODULE := libmediautils
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/utils/BatteryNotifier.cpp b/media/utils/BatteryNotifier.cpp
new file mode 100644
index 0000000..7f9cd7a
--- /dev/null
+++ b/media/utils/BatteryNotifier.cpp
@@ -0,0 +1,213 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "include/mediautils/BatteryNotifier.h"
+
+#include <binder/IServiceManager.h>
+#include <utils/Log.h>
+#include <private/android_filesystem_config.h>
+
+namespace android {
+
+void BatteryNotifier::DeathNotifier::binderDied(const wp<IBinder>& /*who*/) {
+ BatteryNotifier::getInstance().onBatteryStatServiceDied();
+}
+
+BatteryNotifier::BatteryNotifier() : mVideoRefCount(0), mAudioRefCount(0) {}
+
+BatteryNotifier::~BatteryNotifier() {
+ Mutex::Autolock _l(mLock);
+ if (mDeathNotifier != nullptr) {
+ IInterface::asBinder(mBatteryStatService)->unlinkToDeath(mDeathNotifier);
+ }
+}
+
+void BatteryNotifier::noteStartVideo() {
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ if (mVideoRefCount == 0 && batteryService != nullptr) {
+ batteryService->noteStartVideo(AID_MEDIA);
+ }
+ mVideoRefCount++;
+}
+
+void BatteryNotifier::noteStopVideo() {
+ Mutex::Autolock _l(mLock);
+ if (mVideoRefCount == 0) {
+ ALOGW("%s: video refcount is broken.", __FUNCTION__);
+ return;
+ }
+
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+
+ mVideoRefCount--;
+ if (mVideoRefCount == 0 && batteryService != nullptr) {
+ batteryService->noteStopVideo(AID_MEDIA);
+ }
+}
+
+void BatteryNotifier::noteResetVideo() {
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ mVideoRefCount = 0;
+ if (batteryService != nullptr) {
+ batteryService->noteResetAudio();
+ }
+}
+
+void BatteryNotifier::noteStartAudio() {
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ if (mAudioRefCount == 0 && batteryService != nullptr) {
+ batteryService->noteStartAudio(AID_MEDIA);
+ }
+ mAudioRefCount++;
+}
+
+void BatteryNotifier::noteStopAudio() {
+ Mutex::Autolock _l(mLock);
+ if (mAudioRefCount == 0) {
+ ALOGW("%s: audio refcount is broken.", __FUNCTION__);
+ return;
+ }
+
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+
+ mAudioRefCount--;
+ if (mAudioRefCount == 0 && batteryService != nullptr) {
+ batteryService->noteStopAudio(AID_MEDIA);
+ }
+}
+
+void BatteryNotifier::noteResetAudio() {
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ mAudioRefCount = 0;
+ if (batteryService != nullptr) {
+ batteryService->noteResetAudio();
+ }
+}
+
+void BatteryNotifier::noteFlashlightOn(const String8& id, int uid) {
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+
+ std::pair<String8, int> k = std::make_pair(id, uid);
+ if (!mFlashlightState[k]) {
+ mFlashlightState[k] = true;
+ if (batteryService != nullptr) {
+ batteryService->noteFlashlightOn(uid);
+ }
+ }
+}
+
+void BatteryNotifier::noteFlashlightOff(const String8& id, int uid) {
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+
+ std::pair<String8, int> k = std::make_pair(id, uid);
+ if (mFlashlightState[k]) {
+ mFlashlightState[k] = false;
+ if (batteryService != nullptr) {
+ batteryService->noteFlashlightOff(uid);
+ }
+ }
+}
+
+void BatteryNotifier::noteResetFlashlight() {
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ mFlashlightState.clear();
+ if (batteryService != nullptr) {
+ batteryService->noteResetFlashlight();
+ }
+}
+
+void BatteryNotifier::noteStartCamera(const String8& id, int uid) {
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ std::pair<String8, int> k = std::make_pair(id, uid);
+ if (!mCameraState[k]) {
+ mCameraState[k] = true;
+ if (batteryService != nullptr) {
+ batteryService->noteStartCamera(uid);
+ }
+ }
+}
+
+void BatteryNotifier::noteStopCamera(const String8& id, int uid) {
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ std::pair<String8, int> k = std::make_pair(id, uid);
+ if (mCameraState[k]) {
+ mCameraState[k] = false;
+ if (batteryService != nullptr) {
+ batteryService->noteStopCamera(uid);
+ }
+ }
+}
+
+void BatteryNotifier::noteResetCamera() {
+ Mutex::Autolock _l(mLock);
+ sp<IBatteryStats> batteryService = getBatteryService_l();
+ mCameraState.clear();
+ if (batteryService != nullptr) {
+ batteryService->noteResetCamera();
+ }
+}
+
+void BatteryNotifier::onBatteryStatServiceDied() {
+ Mutex::Autolock _l(mLock);
+ mBatteryStatService.clear();
+ mDeathNotifier.clear();
+ // Do not reset mVideoRefCount and mAudioRefCount here. The ref
+ // counting is independent of the battery service availability.
+ // We need this if battery service becomes available after media
+ // started.
+
+}
+
+sp<IBatteryStats> BatteryNotifier::getBatteryService_l() {
+ if (mBatteryStatService != nullptr) {
+ return mBatteryStatService;
+ }
+ // Get battery service from service manager
+ const sp<IServiceManager> sm(defaultServiceManager());
+ if (sm != nullptr) {
+ const String16 name("batterystats");
+ mBatteryStatService = interface_cast<IBatteryStats>(sm->checkService(name));
+ if (mBatteryStatService == nullptr) {
+ ALOGE("batterystats service unavailable!");
+ return nullptr;
+ }
+
+ mDeathNotifier = new DeathNotifier();
+ IInterface::asBinder(mBatteryStatService)->linkToDeath(mDeathNotifier);
+
+ // Notify start now if media already started
+ if (mVideoRefCount > 0) {
+ mBatteryStatService->noteStartVideo(AID_MEDIA);
+ }
+ if (mAudioRefCount > 0) {
+ mBatteryStatService->noteStartAudio(AID_MEDIA);
+ }
+ }
+ return mBatteryStatService;
+}
+
+ANDROID_SINGLETON_STATIC_INSTANCE(BatteryNotifier);
+
+} // namespace android
diff --git a/media/utils/README b/media/utils/README
new file mode 100644
index 0000000..65ab0b8
--- /dev/null
+++ b/media/utils/README
@@ -0,0 +1,4 @@
+This is a common shared library for media utility classes.
+
+Consider adding your utility class/function here if it will
+be used across several of the media libraries.
diff --git a/media/utils/include/mediautils/BatteryNotifier.h b/media/utils/include/mediautils/BatteryNotifier.h
new file mode 100644
index 0000000..49048042
--- /dev/null
+++ b/media/utils/include/mediautils/BatteryNotifier.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_BATTERY_NOTIFIER_H
+#define MEDIA_BATTERY_NOTIFIER_H
+
+#include <binder/IBatteryStats.h>
+#include <utils/Singleton.h>
+#include <utils/String8.h>
+
+#include <map>
+#include <utility>
+
+namespace android {
+
+/**
+ * Class used for logging battery life events in mediaserver.
+ */
+class BatteryNotifier : public Singleton<BatteryNotifier> {
+
+ friend class Singleton<BatteryNotifier>;
+ BatteryNotifier();
+
+public:
+ ~BatteryNotifier();
+
+ void noteStartVideo();
+ void noteStopVideo();
+ void noteResetVideo();
+ void noteStartAudio();
+ void noteStopAudio();
+ void noteResetAudio();
+ void noteFlashlightOn(const String8& id, int uid);
+ void noteFlashlightOff(const String8& id, int uid);
+ void noteResetFlashlight();
+ void noteStartCamera(const String8& id, int uid);
+ void noteStopCamera(const String8& id, int uid);
+ void noteResetCamera();
+
+private:
+ void onBatteryStatServiceDied();
+
+ class DeathNotifier : public IBinder::DeathRecipient {
+ virtual void binderDied(const wp<IBinder>& /*who*/);
+ };
+
+ Mutex mLock;
+ int mVideoRefCount;
+ int mAudioRefCount;
+ std::map<std::pair<String8, int>, bool> mFlashlightState;
+ std::map<std::pair<String8, int>, bool> mCameraState;
+ sp<IBatteryStats> mBatteryStatService;
+ sp<DeathNotifier> mDeathNotifier;
+
+ sp<IBatteryStats> getBatteryService_l();
+};
+
+} // namespace android
+
+#endif // MEDIA_BATTERY_NOTIFIER_H
diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk
index c359be5..debcdf9 100644
--- a/services/audioflinger/Android.mk
+++ b/services/audioflinger/Android.mk
@@ -29,12 +29,6 @@
include $(CLEAR_VARS)
-# Clang++ aborts on AudioMixer.cpp,
-# b/18373866, "do not know how to split this operator."
-ifeq ($(filter $(TARGET_ARCH),arm arm64),$(TARGET_ARCH))
- LOCAL_CLANG := false
-endif
-
LOCAL_SRC_FILES:= \
AudioFlinger.cpp \
Threads.cpp \
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 485e320..52fce34 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -181,7 +181,8 @@
mIsLowRamDevice(true),
mIsDeviceTypeKnown(false),
mGlobalEffectEnableTime(0),
- mPrimaryOutputSampleRate(0)
+ mPrimaryOutputSampleRate(0),
+ mSystemReady(false)
{
getpid_cached = getpid();
char value[PROPERTY_VALUE_MAX];
@@ -757,8 +758,12 @@
// assigned to HALs which do not have master volume support will apply
// master volume during the mix operation. Threads with HALs which do
// support master volume will simply ignore the setting.
- for (size_t i = 0; i < mPlaybackThreads.size(); i++)
+ for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+ if (mPlaybackThreads.valueAt(i)->isDuplicating()) {
+ continue;
+ }
mPlaybackThreads.valueAt(i)->setMasterVolume(value);
+ }
return NO_ERROR;
}
@@ -875,8 +880,12 @@
// assigned to HALs which do not have master mute support will apply master
// mute during the mix operation. Threads with HALs which do support master
// mute will simply ignore the setting.
- for (size_t i = 0; i < mPlaybackThreads.size(); i++)
+ for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+ if (mPlaybackThreads.valueAt(i)->isDuplicating()) {
+ continue;
+ }
mPlaybackThreads.valueAt(i)->setMasterMute(muted);
+ }
return NO_ERROR;
}
@@ -1714,6 +1723,26 @@
return (audio_hw_sync_t)value;
}
+status_t AudioFlinger::systemReady()
+{
+ Mutex::Autolock _l(mLock);
+ ALOGI("%s", __FUNCTION__);
+ if (mSystemReady) {
+ ALOGW("%s called twice", __FUNCTION__);
+ return NO_ERROR;
+ }
+ mSystemReady = true;
+ for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+ ThreadBase *thread = (ThreadBase *)mPlaybackThreads.valueAt(i).get();
+ thread->systemReady();
+ }
+ for (size_t i = 0; i < mRecordThreads.size(); i++) {
+ ThreadBase *thread = (ThreadBase *)mRecordThreads.valueAt(i).get();
+ thread->systemReady();
+ }
+ return NO_ERROR;
+}
+
// setAudioHwSyncForSession_l() must be called with AudioFlinger::mLock held
void AudioFlinger::setAudioHwSyncForSession_l(PlaybackThread *thread, audio_session_t sessionId)
{
@@ -1786,15 +1815,15 @@
PlaybackThread *thread;
if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
- thread = new OffloadThread(this, outputStream, *output, devices);
+ thread = new OffloadThread(this, outputStream, *output, devices, mSystemReady);
ALOGV("openOutput_l() created offload output: ID %d thread %p", *output, thread);
} else if ((flags & AUDIO_OUTPUT_FLAG_DIRECT)
|| !isValidPcmSinkFormat(config->format)
|| !isValidPcmSinkChannelMask(config->channel_mask)) {
- thread = new DirectOutputThread(this, outputStream, *output, devices);
+ thread = new DirectOutputThread(this, outputStream, *output, devices, mSystemReady);
ALOGV("openOutput_l() created direct output: ID %d thread %p", *output, thread);
} else {
- thread = new MixerThread(this, outputStream, *output, devices);
+ thread = new MixerThread(this, outputStream, *output, devices, mSystemReady);
ALOGV("openOutput_l() created mixer output: ID %d thread %p", *output, thread);
}
mPlaybackThreads.add(*output, thread);
@@ -1865,7 +1894,7 @@
}
audio_io_handle_t id = nextUniqueId();
- DuplicatingThread *thread = new DuplicatingThread(this, thread1, id);
+ DuplicatingThread *thread = new DuplicatingThread(this, thread1, id, mSystemReady);
thread->addOutputTrack(thread2);
mPlaybackThreads.add(id, thread);
// notify client processes of the new output creation
@@ -1894,11 +1923,10 @@
if (thread->type() == ThreadBase::MIXER) {
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
- if (mPlaybackThreads.valueAt(i)->type() == ThreadBase::DUPLICATING) {
+ if (mPlaybackThreads.valueAt(i)->isDuplicating()) {
DuplicatingThread *dupThread =
(DuplicatingThread *)mPlaybackThreads.valueAt(i).get();
dupThread->removeOutputTrack((MixerThread *)thread.get());
-
}
}
}
@@ -1927,7 +1955,7 @@
// The thread entity (active unit of execution) is no longer running here,
// but the ThreadBase container still exists.
- if (thread->type() != ThreadBase::DUPLICATING) {
+ if (!thread->isDuplicating()) {
closeOutputFinish(thread);
}
@@ -2113,7 +2141,8 @@
inputStream,
*input,
primaryOutputDevice_l(),
- devices
+ devices,
+ mSystemReady
#ifdef TEE_SINK
, teeSink
#endif
@@ -2375,6 +2404,9 @@
{
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
PlaybackThread *thread = mPlaybackThreads.valueAt(i).get();
+ if(thread->isDuplicating()) {
+ continue;
+ }
AudioStreamOut *output = thread->getOutput();
if (output != NULL && output->audioHwDev == mPrimaryHardwareDev) {
return thread;
@@ -2689,7 +2721,7 @@
// Check whether the destination thread has a channel count of FCC_2, which is
// currently required for (most) effects. Prevent moving the effect chain here rather
// than disabling the addEffect_l() call in dstThread below.
- if ((dstThread->type() == ThreadBase::MIXER || dstThread->type() == ThreadBase::DUPLICATING) &&
+ if ((dstThread->type() == ThreadBase::MIXER || dstThread->isDuplicating()) &&
dstThread->mChannelCount != FCC_2) {
ALOGW("moveEffectChain_l() effect chain failed because"
" destination thread %p channel count(%u) != %u",
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 51b2610..d087ced 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -257,6 +257,9 @@
/* Get the HW synchronization source used for an audio session */
virtual audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId);
+ /* Indicate JAVA services are ready (scheduling, power management ...) */
+ virtual status_t systemReady();
+
virtual status_t onTransact(
uint32_t code,
const Parcel& data,
@@ -356,6 +359,15 @@
// check that channelMask is the "canonical" one we expect for the channelCount.
return channelMask == audio_channel_out_mask_from_count(channelCount);
}
+ case AUDIO_CHANNEL_REPRESENTATION_INDEX:
+ if (kEnableExtendedChannels) {
+ const uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
+ if (channelCount >= FCC_2 // mono is not supported at this time
+ && channelCount <= AudioMixer::MAX_NUM_CHANNELS) {
+ return true;
+ }
+ }
+ return false;
default:
return false;
}
@@ -752,6 +764,7 @@
uint32_t mPrimaryOutputSampleRate; // sample rate of the primary output, or zero if none
// protected by mHardwareLock
+ bool mSystemReady;
};
#undef INCLUDING_FROM_AUDIOFLINGER_H
diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp
index 7040af4..01efc53 100644
--- a/services/audioflinger/AudioMixer.cpp
+++ b/services/audioflinger/AudioMixer.cpp
@@ -66,6 +66,13 @@
#define ARRAY_SIZE(x) (sizeof(x)/sizeof((x)[0]))
#endif
+// TODO: Move these macro/inlines to a header file.
+template <typename T>
+static inline
+T max(const T& x, const T& y) {
+ return x > y ? x : y;
+}
+
// Set kUseNewMixer to true to use the new mixer engine always. Otherwise the
// original code will be used for stereo sinks, the new mixer for multichannel.
static const bool kUseNewMixer = true;
@@ -499,41 +506,99 @@
static inline bool setVolumeRampVariables(float newVolume, int32_t ramp,
int16_t *pIntSetVolume, int32_t *pIntPrevVolume, int32_t *pIntVolumeInc,
float *pSetVolume, float *pPrevVolume, float *pVolumeInc) {
+ // check floating point volume to see if it is identical to the previously
+ // set volume.
+ // We do not use a tolerance here (and reject changes too small)
+ // as it may be confusing to use a different value than the one set.
+ // If the resulting volume is too small to ramp, it is a direct set of the volume.
if (newVolume == *pSetVolume) {
return false;
}
- /* set the floating point volume variables */
- if (ramp != 0) {
- *pVolumeInc = (newVolume - *pSetVolume) / ramp;
- *pPrevVolume = *pSetVolume;
+ if (newVolume < 0) {
+ newVolume = 0; // should not have negative volumes
} else {
- *pVolumeInc = 0;
- *pPrevVolume = newVolume;
+ switch (fpclassify(newVolume)) {
+ case FP_SUBNORMAL:
+ case FP_NAN:
+ newVolume = 0;
+ break;
+ case FP_ZERO:
+ break; // zero volume is fine
+ case FP_INFINITE:
+ // Infinite volume could be handled consistently since
+ // floating point math saturates at infinities,
+ // but we limit volume to unity gain float.
+ // ramp = 0; break;
+ //
+ newVolume = AudioMixer::UNITY_GAIN_FLOAT;
+ break;
+ case FP_NORMAL:
+ default:
+ // Floating point does not have problems with overflow wrap
+ // that integer has. However, we limit the volume to
+ // unity gain here.
+ // TODO: Revisit the volume limitation and perhaps parameterize.
+ if (newVolume > AudioMixer::UNITY_GAIN_FLOAT) {
+ newVolume = AudioMixer::UNITY_GAIN_FLOAT;
+ }
+ break;
+ }
}
- *pSetVolume = newVolume;
- /* set the legacy integer volume variables */
- int32_t intVolume = newVolume * AudioMixer::UNITY_GAIN_INT;
- if (intVolume > AudioMixer::UNITY_GAIN_INT) {
- intVolume = AudioMixer::UNITY_GAIN_INT;
- } else if (intVolume < 0) {
- ALOGE("negative volume %.7g", newVolume);
- intVolume = 0; // should never happen, but for safety check.
+ // set floating point volume ramp
+ if (ramp != 0) {
+ // when the ramp completes, *pPrevVolume is set to *pSetVolume, so there
+ // is no computational mismatch; hence equality is checked here.
+ ALOGD_IF(*pPrevVolume != *pSetVolume, "previous float ramp hasn't finished,"
+ " prev:%f set_to:%f", *pPrevVolume, *pSetVolume);
+ const float inc = (newVolume - *pPrevVolume) / ramp; // could be inf, nan, subnormal
+ const float maxv = max(newVolume, *pPrevVolume); // could be inf, cannot be nan, subnormal
+
+ if (isnormal(inc) // inc must be a normal number (no subnormals, infinite, nan)
+ && maxv + inc != maxv) { // inc must make forward progress
+ *pVolumeInc = inc;
+ // ramp is set now.
+ // Note: if newVolume is 0, then near the end of the ramp,
+ // it may be possible that the ramped volume may be subnormal or
+ // temporarily negative by a small amount or subnormal due to floating
+ // point inaccuracies.
+ } else {
+ ramp = 0; // ramp not allowed
+ }
}
- if (intVolume == *pIntSetVolume) {
- *pIntVolumeInc = 0;
- /* TODO: integer/float workaround: ignore floating volume ramp */
+
+ // compute and check integer volume, no need to check negative values
+ // The integer volume is limited to "unity_gain" to avoid wrapping and other
+ // audio artifacts, so it never reaches the range limit of U4.28.
+ // We safely use signed 16 and 32 bit integers here.
+ const float scaledVolume = newVolume * AudioMixer::UNITY_GAIN_INT; // not neg, subnormal, nan
+ const int32_t intVolume = (scaledVolume >= (float)AudioMixer::UNITY_GAIN_INT) ?
+ AudioMixer::UNITY_GAIN_INT : (int32_t)scaledVolume;
+
+ // set integer volume ramp
+ if (ramp != 0) {
+ // integer volume is U4.12 (to use 16 bit multiplies), but ramping uses U4.28.
+ // when the ramp completes, *pIntPrevVolume is set to *pIntSetVolume << 16, so there
+ // is no computational mismatch; hence equality is checked here.
+ ALOGD_IF(*pIntPrevVolume != *pIntSetVolume << 16, "previous int ramp hasn't finished,"
+ " prev:%d set_to:%d", *pIntPrevVolume, *pIntSetVolume << 16);
+ const int32_t inc = ((intVolume << 16) - *pIntPrevVolume) / ramp;
+
+ if (inc != 0) { // inc must make forward progress
+ *pIntVolumeInc = inc;
+ } else {
+ ramp = 0; // ramp not allowed
+ }
+ }
+
+ // if no ramp, or ramp not allowed, then clear float and integer increments
+ if (ramp == 0) {
*pVolumeInc = 0;
*pPrevVolume = newVolume;
- return true;
- }
- if (ramp != 0) {
- *pIntVolumeInc = ((intVolume - *pIntSetVolume) << 16) / ramp;
- *pIntPrevVolume = (*pIntVolumeInc == 0 ? intVolume : *pIntSetVolume) << 16;
- } else {
*pIntVolumeInc = 0;
*pIntPrevVolume = intVolume << 16;
}
+ *pSetVolume = newVolume;
*pIntSetVolume = intVolume;
return true;
}
@@ -708,11 +773,10 @@
// FIXME this is flawed for dynamic sample rates, as we choose the resampler
// quality level based on the initial ratio, but that could change later.
// Should have a way to distinguish tracks with static ratios vs. dynamic ratios.
- if (!((trackSampleRate == 44100 && devSampleRate == 48000) ||
- (trackSampleRate == 48000 && devSampleRate == 44100))) {
- quality = AudioResampler::DYN_LOW_QUALITY;
- } else {
+ if (isMusicRate(trackSampleRate)) {
quality = AudioResampler::DEFAULT_QUALITY;
+ } else {
+ quality = AudioResampler::DYN_LOW_QUALITY;
}
// TODO: Remove MONO_HACK. Resampler sees #channels after the downmixer
@@ -775,7 +839,8 @@
{
if (useFloat) {
for (uint32_t i = 0; i < MAX_NUM_VOLUMES; i++) {
- if (mVolumeInc[i] != 0 && fabs(mVolume[i] - mPrevVolume[i]) <= fabs(mVolumeInc[i])) {
+ if ((mVolumeInc[i] > 0 && mPrevVolume[i] + mVolumeInc[i] >= mVolume[i]) ||
+ (mVolumeInc[i] < 0 && mPrevVolume[i] + mVolumeInc[i] <= mVolume[i])) {
volumeInc[i] = 0;
prevVolume[i] = volume[i] << 16;
mVolumeInc[i] = 0.;
diff --git a/services/audioflinger/BufferProviders.cpp b/services/audioflinger/BufferProviders.cpp
index 77bf4ac..8a580e8 100644
--- a/services/audioflinger/BufferProviders.cpp
+++ b/services/audioflinger/BufferProviders.cpp
@@ -292,46 +292,8 @@
ALOGV("RemixBufferProvider(%p)(%#x, %#x, %#x) %zu %zu",
this, format, inputChannelMask, outputChannelMask,
mInputChannels, mOutputChannels);
-
- const audio_channel_representation_t inputRepresentation =
- audio_channel_mask_get_representation(inputChannelMask);
- const audio_channel_representation_t outputRepresentation =
- audio_channel_mask_get_representation(outputChannelMask);
- const uint32_t inputBits = audio_channel_mask_get_bits(inputChannelMask);
- const uint32_t outputBits = audio_channel_mask_get_bits(outputChannelMask);
-
- switch (inputRepresentation) {
- case AUDIO_CHANNEL_REPRESENTATION_POSITION:
- switch (outputRepresentation) {
- case AUDIO_CHANNEL_REPRESENTATION_POSITION:
- memcpy_by_index_array_initialization(mIdxAry, ARRAY_SIZE(mIdxAry),
- outputBits, inputBits);
- return;
- case AUDIO_CHANNEL_REPRESENTATION_INDEX:
- // TODO: output channel index mask not currently allowed
- // fall through
- default:
- break;
- }
- break;
- case AUDIO_CHANNEL_REPRESENTATION_INDEX:
- switch (outputRepresentation) {
- case AUDIO_CHANNEL_REPRESENTATION_POSITION:
- memcpy_by_index_array_initialization_src_index(mIdxAry, ARRAY_SIZE(mIdxAry),
- outputBits, inputBits);
- return;
- case AUDIO_CHANNEL_REPRESENTATION_INDEX:
- // TODO: output channel index mask not currently allowed
- // fall through
- default:
- break;
- }
- break;
- default:
- break;
- }
- LOG_ALWAYS_FATAL("invalid channel mask conversion from %#x to %#x",
- inputChannelMask, outputChannelMask);
+ (void) memcpy_by_index_array_initialization_from_channel_mask(
+ mIdxAry, ARRAY_SIZE(mIdxAry), outputChannelMask, inputChannelMask);
}
void RemixBufferProvider::copyFrames(void *dst, const void *src, size_t frames)
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 8bccb47..949c91d 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -335,13 +335,21 @@
// TODO: handle configuration of effects replacing track process
channelMask = thread->channelMask();
+ mConfig.outputCfg.channels = channelMask;
if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
mConfig.inputCfg.channels = AUDIO_CHANNEL_OUT_MONO;
} else {
mConfig.inputCfg.channels = channelMask;
+ // TODO: Update this logic when multichannel effects are implemented.
+ // For offloaded tracks consider mono output as stereo for proper effect initialization
+ if (channelMask == AUDIO_CHANNEL_OUT_MONO) {
+ mConfig.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ mConfig.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ ALOGV("Overriding effect input and output as STEREO");
+ }
}
- mConfig.outputCfg.channels = channelMask;
+
mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
mConfig.inputCfg.samplingRate = thread->sampleRate();
diff --git a/services/audioflinger/ServiceUtilities.cpp b/services/audioflinger/ServiceUtilities.cpp
index 0a718fb..2e68dad 100644
--- a/services/audioflinger/ServiceUtilities.cpp
+++ b/services/audioflinger/ServiceUtilities.cpp
@@ -18,6 +18,7 @@
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <binder/PermissionCache.h>
+#include <private/android_filesystem_config.h>
#include "ServiceUtilities.h"
/* When performing permission checks we do not use permission cache for
@@ -53,6 +54,10 @@
}
const uid_t uid = IPCThreadState::self()->getCallingUid();
+
+ // To permit command-line native tests
+ if (uid == AID_ROOT) return true;
+
String16 checkedOpPackageName = opPackageName;
// In some cases the calling code has no access to the package it runs under.
@@ -83,7 +88,7 @@
}
AppOpsManager appOps;
- if (appOps.noteOp(AppOpsManager::OP_RECORD_AUDIO, uid, opPackageName)
+ if (appOps.noteOp(AppOpsManager::OP_RECORD_AUDIO, uid, checkedOpPackageName)
!= AppOpsManager::MODE_ALLOWED) {
ALOGE("Request denied by app op OP_RECORD_AUDIO");
return false;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 8b8dd78..ad445a5 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -356,13 +356,47 @@
AUDIO_DEVICE_OUT_SPEAKER, "SPEAKER",
AUDIO_DEVICE_OUT_WIRED_HEADSET, "WIRED_HEADSET",
AUDIO_DEVICE_OUT_WIRED_HEADPHONE, "WIRED_HEADPHONE",
+ AUDIO_DEVICE_OUT_BLUETOOTH_SCO, "BLUETOOTH_SCO",
+ AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET, "BLUETOOTH_SCO_HEADSET",
+ AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT, "BLUETOOTH_SCO_CARKIT",
+ AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "BLUETOOTH_A2DP",
+ AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES, "BLUETOOTH_A2DP_HEADPHONES",
+ AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER, "BLUETOOTH_A2DP_SPEAKER",
+ AUDIO_DEVICE_OUT_AUX_DIGITAL, "AUX_DIGITAL",
+ AUDIO_DEVICE_OUT_HDMI, "HDMI",
+ AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET, "ANLG_DOCK_HEADSET",
+ AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, "DGTL_DOCK_HEADSET",
+ AUDIO_DEVICE_OUT_USB_ACCESSORY, "USB_ACCESSORY",
+ AUDIO_DEVICE_OUT_USB_DEVICE, "USB_DEVICE",
AUDIO_DEVICE_OUT_TELEPHONY_TX, "TELEPHONY_TX",
+ AUDIO_DEVICE_OUT_LINE, "LINE",
+ AUDIO_DEVICE_OUT_HDMI_ARC, "HDMI_ARC",
+ AUDIO_DEVICE_OUT_SPDIF, "SPDIF",
+ AUDIO_DEVICE_OUT_FM, "FM",
+ AUDIO_DEVICE_OUT_AUX_LINE, "AUX_LINE",
+ AUDIO_DEVICE_OUT_SPEAKER_SAFE, "SPEAKER_SAFE",
AUDIO_DEVICE_NONE, "NONE", // must be last
}, mappingsIn[] = {
+ AUDIO_DEVICE_IN_COMMUNICATION, "COMMUNICATION",
+ AUDIO_DEVICE_IN_AMBIENT, "AMBIENT",
AUDIO_DEVICE_IN_BUILTIN_MIC, "BUILTIN_MIC",
+ AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, "BLUETOOTH_SCO_HEADSET",
AUDIO_DEVICE_IN_WIRED_HEADSET, "WIRED_HEADSET",
+ AUDIO_DEVICE_IN_AUX_DIGITAL, "AUX_DIGITAL",
AUDIO_DEVICE_IN_VOICE_CALL, "VOICE_CALL",
+ AUDIO_DEVICE_IN_TELEPHONY_RX, "TELEPHONY_RX",
+ AUDIO_DEVICE_IN_BACK_MIC, "BACK_MIC",
AUDIO_DEVICE_IN_REMOTE_SUBMIX, "REMOTE_SUBMIX",
+ AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET, "ANLG_DOCK_HEADSET",
+ AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET, "DGTL_DOCK_HEADSET",
+ AUDIO_DEVICE_IN_USB_ACCESSORY, "USB_ACCESSORY",
+ AUDIO_DEVICE_IN_USB_DEVICE, "USB_DEVICE",
+ AUDIO_DEVICE_IN_FM_TUNER, "FM_TUNER",
+ AUDIO_DEVICE_IN_TV_TUNER, "TV_TUNER",
+ AUDIO_DEVICE_IN_LINE, "LINE",
+ AUDIO_DEVICE_IN_SPDIF, "SPDIF",
+ AUDIO_DEVICE_IN_BLUETOOTH_A2DP, "BLUETOOTH_A2DP",
+ AUDIO_DEVICE_IN_LOOPBACK, "LOOPBACK",
AUDIO_DEVICE_NONE, "NONE", // must be last
};
String8 result;
@@ -487,7 +521,7 @@
}
AudioFlinger::ThreadBase::ThreadBase(const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id,
- audio_devices_t outDevice, audio_devices_t inDevice, type_t type)
+ audio_devices_t outDevice, audio_devices_t inDevice, type_t type, bool systemReady)
: Thread(false /*canCallJava*/),
mType(type),
mAudioFlinger(audioFlinger),
@@ -498,8 +532,10 @@
mStandby(false), mOutDevice(outDevice), mInDevice(inDevice),
mAudioSource(AUDIO_SOURCE_DEFAULT), mId(id),
// mName will be set by concrete (non-virtual) subclass
- mDeathRecipient(new PMDeathRecipient(this))
+ mDeathRecipient(new PMDeathRecipient(this)),
+ mSystemReady(systemReady)
{
+ memset(&mPatch, 0, sizeof(struct audio_patch));
}
AudioFlinger::ThreadBase::~ThreadBase()
@@ -566,6 +602,11 @@
{
status_t status = NO_ERROR;
+ if (event->mRequiresSystemReady && !mSystemReady) {
+ event->mWaitStatus = false;
+ mPendingConfigEvents.add(event);
+ return status;
+ }
mConfigEvents.add(event);
ALOGV("sendConfigEvent_l() num events %d event %d", mConfigEvents.size(), event->mType);
mWaitWorkCV.signal();
@@ -597,6 +638,12 @@
sendConfigEvent_l(configEvent);
}
+void AudioFlinger::ThreadBase::sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio)
+{
+ Mutex::Autolock _l(mLock);
+ sendPrioConfigEvent_l(pid, tid, prio);
+}
+
// sendPrioConfigEvent_l() must be called with ThreadBase::mLock held
void AudioFlinger::ThreadBase::sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio)
{
@@ -696,49 +743,63 @@
String8 channelMaskToString(audio_channel_mask_t mask, bool output) {
String8 s;
- if (output) {
- if (mask & AUDIO_CHANNEL_OUT_FRONT_LEFT) s.append("front-left, ");
- if (mask & AUDIO_CHANNEL_OUT_FRONT_RIGHT) s.append("front-right, ");
- if (mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) s.append("front-center, ");
- if (mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) s.append("low freq, ");
- if (mask & AUDIO_CHANNEL_OUT_BACK_LEFT) s.append("back-left, ");
- if (mask & AUDIO_CHANNEL_OUT_BACK_RIGHT) s.append("back-right, ");
- if (mask & AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER) s.append("front-left-of-center, ");
- if (mask & AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER) s.append("front-right-of-center, ");
- if (mask & AUDIO_CHANNEL_OUT_BACK_CENTER) s.append("back-center, ");
- if (mask & AUDIO_CHANNEL_OUT_SIDE_LEFT) s.append("side-left, ");
- if (mask & AUDIO_CHANNEL_OUT_SIDE_RIGHT) s.append("side-right, ");
- if (mask & AUDIO_CHANNEL_OUT_TOP_CENTER) s.append("top-center ,");
- if (mask & AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT) s.append("top-front-left, ");
- if (mask & AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER) s.append("top-front-center, ");
- if (mask & AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT) s.append("top-front-right, ");
- if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_LEFT) s.append("top-back-left, ");
- if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_CENTER) s.append("top-back-center, " );
- if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT) s.append("top-back-right, " );
- if (mask & ~AUDIO_CHANNEL_OUT_ALL) s.append("unknown, ");
- } else {
- if (mask & AUDIO_CHANNEL_IN_LEFT) s.append("left, ");
- if (mask & AUDIO_CHANNEL_IN_RIGHT) s.append("right, ");
- if (mask & AUDIO_CHANNEL_IN_FRONT) s.append("front, ");
- if (mask & AUDIO_CHANNEL_IN_BACK) s.append("back, ");
- if (mask & AUDIO_CHANNEL_IN_LEFT_PROCESSED) s.append("left-processed, ");
- if (mask & AUDIO_CHANNEL_IN_RIGHT_PROCESSED) s.append("right-processed, ");
- if (mask & AUDIO_CHANNEL_IN_FRONT_PROCESSED) s.append("front-processed, ");
- if (mask & AUDIO_CHANNEL_IN_BACK_PROCESSED) s.append("back-processed, ");
- if (mask & AUDIO_CHANNEL_IN_PRESSURE) s.append("pressure, ");
- if (mask & AUDIO_CHANNEL_IN_X_AXIS) s.append("X, ");
- if (mask & AUDIO_CHANNEL_IN_Y_AXIS) s.append("Y, ");
- if (mask & AUDIO_CHANNEL_IN_Z_AXIS) s.append("Z, ");
- if (mask & AUDIO_CHANNEL_IN_VOICE_UPLINK) s.append("voice-uplink, ");
- if (mask & AUDIO_CHANNEL_IN_VOICE_DNLINK) s.append("voice-dnlink, ");
- if (mask & ~AUDIO_CHANNEL_IN_ALL) s.append("unknown, ");
+ const audio_channel_representation_t representation =
+ audio_channel_mask_get_representation(mask);
+
+ switch (representation) {
+ case AUDIO_CHANNEL_REPRESENTATION_POSITION: {
+ if (output) {
+ if (mask & AUDIO_CHANNEL_OUT_FRONT_LEFT) s.append("front-left, ");
+ if (mask & AUDIO_CHANNEL_OUT_FRONT_RIGHT) s.append("front-right, ");
+ if (mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) s.append("front-center, ");
+ if (mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) s.append("low freq, ");
+ if (mask & AUDIO_CHANNEL_OUT_BACK_LEFT) s.append("back-left, ");
+ if (mask & AUDIO_CHANNEL_OUT_BACK_RIGHT) s.append("back-right, ");
+ if (mask & AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER) s.append("front-left-of-center, ");
+ if (mask & AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER) s.append("front-right-of-center, ");
+ if (mask & AUDIO_CHANNEL_OUT_BACK_CENTER) s.append("back-center, ");
+ if (mask & AUDIO_CHANNEL_OUT_SIDE_LEFT) s.append("side-left, ");
+ if (mask & AUDIO_CHANNEL_OUT_SIDE_RIGHT) s.append("side-right, ");
+ if (mask & AUDIO_CHANNEL_OUT_TOP_CENTER) s.append("top-center ,");
+ if (mask & AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT) s.append("top-front-left, ");
+ if (mask & AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER) s.append("top-front-center, ");
+ if (mask & AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT) s.append("top-front-right, ");
+ if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_LEFT) s.append("top-back-left, ");
+ if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_CENTER) s.append("top-back-center, " );
+ if (mask & AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT) s.append("top-back-right, " );
+ if (mask & ~AUDIO_CHANNEL_OUT_ALL) s.append("unknown, ");
+ } else {
+ if (mask & AUDIO_CHANNEL_IN_LEFT) s.append("left, ");
+ if (mask & AUDIO_CHANNEL_IN_RIGHT) s.append("right, ");
+ if (mask & AUDIO_CHANNEL_IN_FRONT) s.append("front, ");
+ if (mask & AUDIO_CHANNEL_IN_BACK) s.append("back, ");
+ if (mask & AUDIO_CHANNEL_IN_LEFT_PROCESSED) s.append("left-processed, ");
+ if (mask & AUDIO_CHANNEL_IN_RIGHT_PROCESSED) s.append("right-processed, ");
+ if (mask & AUDIO_CHANNEL_IN_FRONT_PROCESSED) s.append("front-processed, ");
+ if (mask & AUDIO_CHANNEL_IN_BACK_PROCESSED) s.append("back-processed, ");
+ if (mask & AUDIO_CHANNEL_IN_PRESSURE) s.append("pressure, ");
+ if (mask & AUDIO_CHANNEL_IN_X_AXIS) s.append("X, ");
+ if (mask & AUDIO_CHANNEL_IN_Y_AXIS) s.append("Y, ");
+ if (mask & AUDIO_CHANNEL_IN_Z_AXIS) s.append("Z, ");
+ if (mask & AUDIO_CHANNEL_IN_VOICE_UPLINK) s.append("voice-uplink, ");
+ if (mask & AUDIO_CHANNEL_IN_VOICE_DNLINK) s.append("voice-dnlink, ");
+ if (mask & ~AUDIO_CHANNEL_IN_ALL) s.append("unknown, ");
+ }
+ const int len = s.length();
+ if (len > 2) {
+ char *str = s.lockBuffer(len); // needed?
+ s.unlockBuffer(len - 2); // remove trailing ", "
+ }
+ return s;
}
- int len = s.length();
- if (s.length() > 2) {
- char *str = s.lockBuffer(len);
- s.unlockBuffer(len - 2);
+ case AUDIO_CHANNEL_REPRESENTATION_INDEX:
+ s.appendFormat("index mask, bits:%#x", audio_channel_mask_get_bits(mask));
+ return s;
+ default:
+ s.appendFormat("unknown mask, representation:%d bits:%#x",
+ representation, audio_channel_mask_get_bits(mask));
+ return s;
}
- return s;
}
void AudioFlinger::ThreadBase::dumpBase(int fd, const Vector<String16>& args __unused)
@@ -879,8 +940,7 @@
}
void AudioFlinger::ThreadBase::getPowerManager_l() {
-
- if (mPowerManager == 0) {
+ if (mSystemReady && mPowerManager == 0) {
// use checkService() to avoid blocking if power service is not up yet
sp<IBinder> binder =
defaultServiceManager()->checkService(String16("power"));
@@ -894,7 +954,6 @@
}
void AudioFlinger::ThreadBase::updateWakeLockUids_l(const SortedVector<int> &uids) {
-
getPowerManager_l();
if (mWakeLockToken == NULL) {
ALOGE("no wake lock to update!");
@@ -1336,6 +1395,20 @@
AUDIO_PORT_CONFIG_FORMAT;
}
+void AudioFlinger::ThreadBase::systemReady()
+{
+ Mutex::Autolock _l(mLock);
+ if (mSystemReady) {
+ return;
+ }
+ mSystemReady = true;
+
+ for (size_t i = 0; i < mPendingConfigEvents.size(); i++) {
+ sendConfigEvent_l(mPendingConfigEvents.editItemAt(i));
+ }
+ mPendingConfigEvents.clear();
+}
+
// ----------------------------------------------------------------------------
// Playback
@@ -1345,8 +1418,9 @@
AudioStreamOut* output,
audio_io_handle_t id,
audio_devices_t device,
- type_t type)
- : ThreadBase(audioFlinger, id, device, AUDIO_DEVICE_NONE, type),
+ type_t type,
+ bool systemReady)
+ : ThreadBase(audioFlinger, id, device, AUDIO_DEVICE_NONE, type, systemReady),
mNormalFrameCount(0), mSinkBuffer(NULL),
mMixerBufferEnabled(AudioFlinger::kEnableExtendedPrecision),
mMixerBuffer(NULL),
@@ -1365,7 +1439,7 @@
mLastWriteTime(0), mNumWrites(0), mNumDelayedWrites(0), mInWrite(false),
mMixerStatus(MIXER_IDLE),
mMixerStatusIgnoringFastTracks(MIXER_IDLE),
- standbyDelay(AudioFlinger::mStandbyTimeInNsecs),
+ mStandbyDelayNs(AudioFlinger::mStandbyTimeInNsecs),
mBytesRemaining(0),
mCurrentWriteLength(0),
mUseAsyncWrite(false),
@@ -1571,10 +1645,12 @@
) &&
// PCM data
audio_is_linear_pcm(format) &&
- // identical channel mask to sink, or mono in and stereo sink
+ // TODO: extract as a data library function that checks that a computationally
+ // expensive downmixer is not required: isFastOutputChannelConversion()
(channelMask == mChannelMask ||
- (channelMask == AUDIO_CHANNEL_OUT_MONO &&
- mChannelMask == AUDIO_CHANNEL_OUT_STEREO)) &&
+ mChannelMask != AUDIO_CHANNEL_OUT_STEREO ||
+ (channelMask == AUDIO_CHANNEL_OUT_MONO
+ /* && mChannelMask == AUDIO_CHANNEL_OUT_STEREO */)) &&
// hardware sample rate
(sampleRate == mSampleRate) &&
// normal mixer has an associated fast mixer
@@ -1930,6 +2006,7 @@
switch (event) {
case AUDIO_OUTPUT_OPENED:
case AUDIO_OUTPUT_CONFIG_CHANGED:
+ desc->mPatch = mPatch;
desc->mChannelMask = mChannelMask;
desc->mSamplingRate = mSampleRate;
desc->mFormat = mFormat;
@@ -2115,6 +2192,10 @@
ALOGI("HAL output buffer size %u frames, normal sink buffer size %u frames", mFrameCount,
mNormalFrameCount);
+ // Check if we want to throttle the processing to no more than 2x normal rate
+ mThreadThrottle = property_get_bool("af.thread.throttle", true /* default_value */);
+ mHalfBufferMs = mNormalFrameCount * 1000 / (2 * mSampleRate);
+
// mSinkBuffer is the sink buffer. Size is always multiple-of-16 frames.
// Originally this was int16_t[] array, need to remove legacy implications.
free(mSinkBuffer);
@@ -2416,9 +2497,9 @@
/*
The derived values that are cached:
- mSinkBufferSize from frame count * frame size
- - activeSleepTime from activeSleepTimeUs()
- - idleSleepTime from idleSleepTimeUs()
- - standbyDelay from mActiveSleepTimeUs (DIRECT only)
+ - mActiveSleepTimeUs from activeSleepTimeUs()
+ - mIdleSleepTimeUs from idleSleepTimeUs()
+ - mStandbyDelayNs from mActiveSleepTimeUs (DIRECT only)
- maxPeriod from frame count and sample rate (MIXER only)
The parameters that affect these derived values are:
@@ -2435,8 +2516,8 @@
void AudioFlinger::PlaybackThread::cacheParameters_l()
{
mSinkBufferSize = mNormalFrameCount * mFrameSize;
- activeSleepTime = activeSleepTimeUs();
- idleSleepTime = idleSleepTimeUs();
+ mActiveSleepTimeUs = activeSleepTimeUs();
+ mIdleSleepTimeUs = idleSleepTimeUs();
}
void AudioFlinger::PlaybackThread::invalidateTracks(audio_stream_type_t streamType)
@@ -2603,7 +2684,7 @@
{
Vector< sp<Track> > tracksToRemove;
- standbyTime = systemTime();
+ mStandbyTimeNs = systemTime();
// MIXER
nsecs_t lastWarning = 0;
@@ -2615,7 +2696,7 @@
int lastGeneration = 0;
cacheParameters_l();
- sleepTime = idleSleepTime;
+ mSleepTimeUs = mIdleSleepTimeUs;
if (mType == MIXER) {
sleepTimeShift = 0;
@@ -2677,19 +2758,29 @@
if (exitPending()) {
break;
}
- releaseWakeLock_l();
+ bool released = false;
+ // The following works around a bug in the offload driver. Ideally we would release
+ // the wake lock every time, but that causes the last offload buffer(s) to be
+ // dropped while the device is on battery, so we need to hold a wake lock during
+ // the drain phase.
+ if (mBytesRemaining && !(mDrainSequence & 1)) {
+ releaseWakeLock_l();
+ released = true;
+ }
mWakeLockUids.clear();
mActiveTracksGeneration++;
ALOGV("wait async completion");
mWaitWorkCV.wait(mLock);
ALOGV("async completion/wake");
- acquireWakeLock_l();
- standbyTime = systemTime() + standbyDelay;
- sleepTime = 0;
+ if (released) {
+ acquireWakeLock_l();
+ }
+ mStandbyTimeNs = systemTime() + mStandbyDelayNs;
+ mSleepTimeUs = 0;
continue;
}
- if ((!mActiveTracks.size() && systemTime() > standbyTime) ||
+ if ((!mActiveTracks.size() && systemTime() > mStandbyTimeNs) ||
isSuspended()) {
// put audio hardware into standby after short delay
if (shouldStandby_l()) {
@@ -2724,8 +2815,8 @@
mBytesRemaining = 0;
checkSilentMode_l();
- standbyTime = systemTime() + standbyDelay;
- sleepTime = idleSleepTime;
+ mStandbyTimeNs = systemTime() + mStandbyDelayNs;
+ mSleepTimeUs = mIdleSleepTimeUs;
if (mType == MIXER) {
sleepTimeShift = 0;
}
@@ -2756,15 +2847,15 @@
threadLoop_mix();
} else if ((mMixerStatus != MIXER_DRAIN_TRACK)
&& (mMixerStatus != MIXER_DRAIN_ALL)) {
- // threadLoop_sleepTime sets sleepTime to 0 if data
+ // threadLoop_sleepTime sets mSleepTimeUs to 0 if data
// must be written to HAL
threadLoop_sleepTime();
- if (sleepTime == 0) {
+ if (mSleepTimeUs == 0) {
mCurrentWriteLength = mSinkBufferSize;
}
}
// Either threadLoop_mix() or threadLoop_sleepTime() should have set
- // mMixerBuffer with data if mMixerBufferValid is true and sleepTime == 0.
+ // mMixerBuffer with data if mMixerBufferValid is true and mSleepTimeUs == 0.
// Merge mMixerBuffer data into mEffectBuffer (if any effects are valid)
// or mSinkBuffer (if there are no effects).
//
@@ -2772,7 +2863,7 @@
// support higher precision, this needs to move.
//
// mMixerBufferValid is only set true by MixerThread::prepareTracks_l().
- // TODO use sleepTime == 0 as an additional condition.
+ // TODO use mSleepTimeUs == 0 as an additional condition.
if (mMixerBufferValid) {
void *buffer = mEffectBufferValid ? mEffectBuffer : mSinkBuffer;
audio_format_t format = mEffectBufferValid ? mEffectBufferFormat : mFormat;
@@ -2783,14 +2874,14 @@
mBytesRemaining = mCurrentWriteLength;
if (isSuspended()) {
- sleepTime = suspendSleepTimeUs();
+ mSleepTimeUs = suspendSleepTimeUs();
// simulate write to HAL when suspended
mBytesWritten += mSinkBufferSize;
mBytesRemaining = 0;
}
// only process effects if we're going to write
- if (sleepTime == 0 && mType != OFFLOAD) {
+ if (mSleepTimeUs == 0 && mType != OFFLOAD) {
for (size_t i = 0; i < effectChains.size(); i ++) {
effectChains[i]->process_l();
}
@@ -2809,7 +2900,7 @@
// Only if the Effects buffer is enabled and there is data in the
// Effects buffer (buffer valid), we need to
// copy into the sink buffer.
- // TODO use sleepTime == 0 as an additional condition.
+ // TODO use mSleepTimeUs == 0 as an additional condition.
if (mEffectBufferValid) {
//ALOGV("writing effect buffer to sink buffer format %#x", mFormat);
memcpy_by_audio_format(mSinkBuffer, mFormat, mEffectBuffer, mEffectBufferFormat,
@@ -2820,10 +2911,11 @@
unlockEffectChains(effectChains);
if (!waitingAsyncCallback()) {
- // sleepTime == 0 means we must write to audio hardware
- if (sleepTime == 0) {
+ // mSleepTimeUs == 0 means we must write to audio hardware
+ if (mSleepTimeUs == 0) {
+ ssize_t ret = 0;
if (mBytesRemaining) {
- ssize_t ret = threadLoop_write();
+ ret = threadLoop_write();
if (ret < 0) {
mBytesRemaining = 0;
} else {
@@ -2834,11 +2926,11 @@
(mMixerStatus == MIXER_DRAIN_ALL)) {
threadLoop_drain();
}
- if (mType == MIXER) {
+ if (mType == MIXER && !mStandby) {
// write blocked detection
nsecs_t now = systemTime();
nsecs_t delta = now - mLastWriteTime;
- if (!mStandby && delta > maxPeriod) {
+ if (delta > maxPeriod) {
mNumDelayedWrites++;
if ((now - lastWarning) > kWarningThrottleNs) {
ATRACE_NAME("underrun");
@@ -2847,11 +2939,36 @@
lastWarning = now;
}
}
+
+ if (mThreadThrottle
+ && mMixerStatus == MIXER_TRACKS_READY // we are mixing (active tracks)
+ && ret > 0) { // we wrote something
+ // Limit MixerThread data processing to no more than twice the
+ // expected processing rate.
+ //
+ // This helps prevent underruns with NuPlayer and other applications
+ // which may set up buffers that are close to the minimum size, or use
+ // deep buffers, and rely on a double-buffering sleep strategy to fill.
+ //
+ // The throttle smooths out sudden large data drains from the device,
+ // e.g. when it comes out of standby, which often causes problems with
+ // (1) mixer threads without a fast mixer (which has its own warm-up)
+ // (2) minimum buffer sized tracks (even if the track is full,
+ // the app won't fill fast enough to handle the sudden draw).
+
+ const int32_t deltaMs = delta / 1000000;
+ const int32_t throttleMs = mHalfBufferMs - deltaMs;
+ if ((signed)mHalfBufferMs >= throttleMs && throttleMs > 0) {
+ usleep(throttleMs * 1000);
+ ALOGD("mixer(%p) throttle: ret(%zd) deltaMs(%d) requires sleep %d ms",
+ this, ret, deltaMs, throttleMs);
+ }
+ }
}
} else {
ATRACE_BEGIN("sleep");
- usleep(sleepTime);
+ usleep(mSleepTimeUs);
ATRACE_END();
}
}
@@ -3002,6 +3119,7 @@
mEffectChains[i]->setDevice_l(type);
}
mOutDevice = type;
+ mPatch = *patch;
if (mOutput->audioHwDev->version() >= AUDIO_DEVICE_API_VERSION_3_0) {
audio_hw_device_t *hwDevice = mOutput->audioHwDev->hwDevice();
@@ -3028,6 +3146,7 @@
param.toString().string());
*handle = AUDIO_PATCH_HANDLE_NONE;
}
+ sendIoConfigEvent_l(AUDIO_OUTPUT_CONFIG_CHANGED);
return status;
}
@@ -3104,8 +3223,8 @@
// ----------------------------------------------------------------------------
AudioFlinger::MixerThread::MixerThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
- audio_io_handle_t id, audio_devices_t device, type_t type)
- : PlaybackThread(audioFlinger, output, id, device, type),
+ audio_io_handle_t id, audio_devices_t device, bool systemReady, type_t type)
+ : PlaybackThread(audioFlinger, output, id, device, type, systemReady),
// mAudioMixer below
// mFastMixer below
mFastMixerFutex(0)
@@ -3238,11 +3357,7 @@
// start the fast mixer
mFastMixer->run("FastMixer", PRIORITY_URGENT_AUDIO);
pid_t tid = mFastMixer->getTid();
- int err = requestPriority(getpid_cached, tid, kPriorityFastMixer);
- if (err != 0) {
- ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d",
- kPriorityFastMixer, getpid_cached, tid, err);
- }
+ sendPrioConfigEvent(getpid_cached, tid, kPriorityFastMixer);
#ifdef AUDIO_WATCHDOG
// create and start the watchdog
@@ -3250,11 +3365,7 @@
mAudioWatchdog->setDump(&mAudioWatchdogDump);
mAudioWatchdog->run("AudioWatchdog", PRIORITY_URGENT_AUDIO);
tid = mAudioWatchdog->getTid();
- err = requestPriority(getpid_cached, tid, kPriorityFastMixer);
- if (err != 0) {
- ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d",
- kPriorityFastMixer, getpid_cached, tid, err);
- }
+ sendPrioConfigEvent(getpid_cached, tid, kPriorityFastMixer);
#endif
}
@@ -3453,11 +3564,11 @@
// Only increase sleep time if the mixer is ready for two consecutive times to avoid
// that a steady state of alternating ready/not ready conditions keeps the sleep time
// such that we would underrun the audio HAL.
- if ((sleepTime == 0) && (sleepTimeShift > 0)) {
+ if ((mSleepTimeUs == 0) && (sleepTimeShift > 0)) {
sleepTimeShift--;
}
- sleepTime = 0;
- standbyTime = systemTime() + standbyDelay;
+ mSleepTimeUs = 0;
+ mStandbyTimeNs = systemTime() + mStandbyDelayNs;
//TODO: delay standby when effects have a tail
}
@@ -3466,11 +3577,11 @@
{
// If no tracks are ready, sleep once for the duration of an output
// buffer size, then write 0s to the output
- if (sleepTime == 0) {
+ if (mSleepTimeUs == 0) {
if (mMixerStatus == MIXER_TRACKS_ENABLED) {
- sleepTime = activeSleepTime >> sleepTimeShift;
- if (sleepTime < kMinThreadSleepTimeUs) {
- sleepTime = kMinThreadSleepTimeUs;
+ mSleepTimeUs = mActiveSleepTimeUs >> sleepTimeShift;
+ if (mSleepTimeUs < kMinThreadSleepTimeUs) {
+ mSleepTimeUs = kMinThreadSleepTimeUs;
}
// reduce sleep time in case of consecutive application underruns to avoid
// starving the audio HAL. As activeSleepTimeUs() is larger than a buffer
@@ -3480,7 +3591,7 @@
sleepTimeShift++;
}
} else {
- sleepTime = idleSleepTime;
+ mSleepTimeUs = mIdleSleepTimeUs;
}
} else if (mBytesWritten != 0 || (mMixerStatus == MIXER_TRACKS_ENABLED)) {
// clear out mMixerBuffer or mSinkBuffer, to ensure buffers are cleared
@@ -3490,7 +3601,7 @@
} else {
memset(mSinkBuffer, 0, mSinkBufferSize);
}
- sleepTime = 0;
+ mSleepTimeUs = 0;
ALOGV_IF(mBytesWritten == 0 && (mMixerStatus == MIXER_TRACKS_ENABLED),
"anticipated start");
}
@@ -3943,6 +4054,8 @@
}
} else {
if (framesReady < desiredFrames && !track->isStopped() && !track->isPaused()) {
+ ALOGV("track(%p) underrun, framesReady(%zu) < framesDesired(%zd)",
+ track, framesReady, desiredFrames);
track->mAudioTrackServerProxy->tallyUnderrunFrames(desiredFrames);
}
// clear effect chain input buffer if an active track underruns to avoid sending
@@ -4280,16 +4393,16 @@
// ----------------------------------------------------------------------------
AudioFlinger::DirectOutputThread::DirectOutputThread(const sp<AudioFlinger>& audioFlinger,
- AudioStreamOut* output, audio_io_handle_t id, audio_devices_t device)
- : PlaybackThread(audioFlinger, output, id, device, DIRECT)
+ AudioStreamOut* output, audio_io_handle_t id, audio_devices_t device, bool systemReady)
+ : PlaybackThread(audioFlinger, output, id, device, DIRECT, systemReady)
// mLeftVolFloat, mRightVolFloat
{
}
AudioFlinger::DirectOutputThread::DirectOutputThread(const sp<AudioFlinger>& audioFlinger,
AudioStreamOut* output, audio_io_handle_t id, uint32_t device,
- ThreadBase::type_t type)
- : PlaybackThread(audioFlinger, output, id, device, type)
+ ThreadBase::type_t type, bool systemReady)
+ : PlaybackThread(audioFlinger, output, id, device, type, systemReady)
// mLeftVolFloat, mRightVolFloat
{
}
@@ -4429,7 +4542,7 @@
track->mRetryCount = kMaxTrackRetriesDirect;
mActiveTrack = t;
mixerStatus = MIXER_TRACKS_READY;
- if (usesHwAvSync() && mHwPaused) {
+ if (mHwPaused) {
doHwResume = true;
mHwPaused = false;
}
@@ -4481,7 +4594,7 @@
android_atomic_or(CBLK_DISABLED, &cblk->mFlags);
} else if (last) {
mixerStatus = MIXER_TRACKS_ENABLED;
- if (usesHwAvSync() && !mHwPaused && !mStandby) {
+ if (mHwSupportsPause && !mHwPaused && !mStandby) {
doHwPause = true;
mHwPaused = true;
}
@@ -4539,8 +4652,8 @@
mActiveTrack->releaseBuffer(&buffer);
}
mCurrentWriteLength = curBuf - (int8_t *)mSinkBuffer;
- sleepTime = 0;
- standbyTime = systemTime() + standbyDelay;
+ mSleepTimeUs = 0;
+ mStandbyTimeNs = systemTime() + mStandbyDelayNs;
mActiveTrack.clear();
}
@@ -4548,18 +4661,18 @@
{
// do not write to HAL when paused
if (mHwPaused || (usesHwAvSync() && mStandby)) {
- sleepTime = idleSleepTime;
+ mSleepTimeUs = mIdleSleepTimeUs;
return;
}
- if (sleepTime == 0) {
+ if (mSleepTimeUs == 0) {
if (mMixerStatus == MIXER_TRACKS_ENABLED) {
- sleepTime = activeSleepTime;
+ mSleepTimeUs = mActiveSleepTimeUs;
} else {
- sleepTime = idleSleepTime;
+ mSleepTimeUs = mIdleSleepTimeUs;
}
} else if (mBytesWritten != 0 && audio_is_linear_pcm(mFormat)) {
memset(mSinkBuffer, 0, mFrameCount * mFrameSize);
- sleepTime = 0;
+ mSleepTimeUs = 0;
}
}
@@ -4595,7 +4708,7 @@
mTracks[mTracks.size() - 1]->mState == TrackBase::IDLE;
}
- return !mStandby && !(trackPaused || (usesHwAvSync() && mHwPaused && !trackStopped));
+ return !mStandby && !(trackPaused || (mHwPaused && !trackStopped));
}
// getTrackName_l() must be called with ThreadBase::mLock held
@@ -4700,11 +4813,11 @@
// hardware resources as soon as possible
// no delay on outputs with HW A/V sync
if (usesHwAvSync()) {
- standbyDelay = 0;
- } else if (audio_is_linear_pcm(mFormat)) {
- standbyDelay = microseconds(activeSleepTime*2);
+ mStandbyDelayNs = 0;
+ } else if ((mType == OFFLOAD) && !audio_is_linear_pcm(mFormat)) {
+ mStandbyDelayNs = kOffloadStandbyDelayNs;
} else {
- standbyDelay = kOffloadStandbyDelayNs;
+ mStandbyDelayNs = microseconds(mActiveSleepTimeUs*2);
}
}
@@ -4818,8 +4931,8 @@
// ----------------------------------------------------------------------------
AudioFlinger::OffloadThread::OffloadThread(const sp<AudioFlinger>& audioFlinger,
- AudioStreamOut* output, audio_io_handle_t id, uint32_t device)
- : DirectOutputThread(audioFlinger, output, id, device, OFFLOAD),
+ AudioStreamOut* output, audio_io_handle_t id, uint32_t device, bool systemReady)
+ : DirectOutputThread(audioFlinger, output, id, device, OFFLOAD, systemReady),
mPausedBytesRemaining(0)
{
//FIXME: mStandby should be set to true by ThreadBase constructor
@@ -4884,7 +4997,7 @@
if (track->isPausing()) {
track->setPaused();
if (last) {
- if (!mHwPaused) {
+ if (mHwSupportsPause && !mHwPaused) {
doHwPause = true;
mHwPaused = true;
}
@@ -4920,7 +5033,7 @@
// resume an interrupted write
}
// enable write to audio HAL
- sleepTime = 0;
+ mSleepTimeUs = 0;
// Do not handle new data in this iteration even if track->framesReady()
mixerStatus = MIXER_TRACKS_ENABLED;
@@ -4980,8 +5093,8 @@
// do not modify drain sequence if we are already draining. This happens
// when resuming from pause after drain.
if ((mDrainSequence & 1) == 0) {
- sleepTime = 0;
- standbyTime = systemTime() + standbyDelay;
+ mSleepTimeUs = 0;
+ mStandbyTimeNs = systemTime() + mStandbyDelayNs;
mixerStatus = MIXER_DRAIN_TRACK;
mDrainSequence += 2;
}
@@ -5096,9 +5209,9 @@
// ----------------------------------------------------------------------------
AudioFlinger::DuplicatingThread::DuplicatingThread(const sp<AudioFlinger>& audioFlinger,
- AudioFlinger::MixerThread* mainThread, audio_io_handle_t id)
+ AudioFlinger::MixerThread* mainThread, audio_io_handle_t id, bool systemReady)
: MixerThread(audioFlinger, mainThread->getOutput(), id, mainThread->outDevice(),
- DUPLICATING),
+ systemReady, DUPLICATING),
mWaitTimeMs(UINT_MAX)
{
addOutputTrack(mainThread);
@@ -5123,19 +5236,19 @@
memset(mSinkBuffer, 0, mSinkBufferSize);
}
}
- sleepTime = 0;
+ mSleepTimeUs = 0;
writeFrames = mNormalFrameCount;
mCurrentWriteLength = mSinkBufferSize;
- standbyTime = systemTime() + standbyDelay;
+ mStandbyTimeNs = systemTime() + mStandbyDelayNs;
}
void AudioFlinger::DuplicatingThread::threadLoop_sleepTime()
{
- if (sleepTime == 0) {
+ if (mSleepTimeUs == 0) {
if (mMixerStatus == MIXER_TRACKS_ENABLED) {
- sleepTime = activeSleepTime;
+ mSleepTimeUs = mActiveSleepTimeUs;
} else {
- sleepTime = idleSleepTime;
+ mSleepTimeUs = mIdleSleepTimeUs;
}
} else if (mBytesWritten != 0) {
if (mMixerStatus == MIXER_TRACKS_ENABLED) {
@@ -5145,7 +5258,7 @@
// flush remaining overflow buffers in output tracks
writeFrames = 0;
}
- sleepTime = 0;
+ mSleepTimeUs = 0;
}
}
@@ -5211,10 +5324,13 @@
mOutputTracks[i]->destroy();
mOutputTracks.removeAt(i);
updateWaitTime_l();
+ if (thread->getOutput() == mOutput) {
+ mOutput = NULL;
+ }
return;
}
}
- ALOGV("removeOutputTrack(): unkonwn thread: %p", thread);
+ ALOGV("removeOutputTrack(): unknown thread: %p", thread);
}
// caller must hold mLock
@@ -5275,12 +5391,13 @@
AudioStreamIn *input,
audio_io_handle_t id,
audio_devices_t outDevice,
- audio_devices_t inDevice
+ audio_devices_t inDevice,
+ bool systemReady
#ifdef TEE_SINK
, const sp<NBAIO_Sink>& teeSink
#endif
) :
- ThreadBase(audioFlinger, id, outDevice, inDevice, RECORD),
+ ThreadBase(audioFlinger, id, outDevice, inDevice, RECORD, systemReady),
mInput(input), mActiveTracksGen(0), mRsmpInBuffer(NULL),
// mRsmpInFrames and mRsmpInFramesP2 are set by readInputParameters_l()
mRsmpInRear(0)
@@ -5328,11 +5445,11 @@
}
initFastCapture =
// either capture sample rate is same as (a reasonable) primary output sample rate
- (((primaryOutputSampleRate == 44100 || primaryOutputSampleRate == 48000) &&
+ ((isMusicRate(primaryOutputSampleRate) &&
(mSampleRate == primaryOutputSampleRate)) ||
// or primary output sample rate is unknown, and capture sample rate is reasonable
((primaryOutputSampleRate == 0) &&
- ((mSampleRate == 44100 || mSampleRate == 48000)))) &&
+ isMusicRate(mSampleRate))) &&
// and the buffer size is < 12 ms
(mFrameCount * 1000) / mSampleRate < 12;
break;
@@ -5399,12 +5516,7 @@
// start the fast capture
mFastCapture->run("FastCapture", ANDROID_PRIORITY_URGENT_AUDIO);
pid_t tid = mFastCapture->getTid();
- int err = requestPriority(getpid_cached, tid, kPriorityFastMixer);
- if (err != 0) {
- ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d",
- kPriorityFastCapture, getpid_cached, tid, err);
- }
-
+ sendPrioConfigEvent(getpid_cached, tid, kPriorityFastMixer);
#ifdef AUDIO_WATCHDOG
// FIXME
#endif
@@ -6435,6 +6547,9 @@
return NO_ERROR;
}
+ ALOGV("RecordBufferConverter updateParameters srcMask:%#x dstMask:%#x"
+ " srcFormat:%#x dstFormat:%#x srcRate:%u dstRate:%u",
+ srcChannelMask, dstChannelMask, srcFormat, dstFormat, srcSampleRate, dstSampleRate);
const bool valid =
audio_is_input_channel(srcChannelMask)
&& audio_is_input_channel(dstChannelMask)
@@ -6595,11 +6710,8 @@
audio_format_t reqFormat = mFormat;
uint32_t samplingRate = mSampleRate;
+ // TODO this may change if we want to support capture from HDMI PCM multi channel (e.g on TVs).
audio_channel_mask_t channelMask = audio_channel_in_mask_from_count(mChannelCount);
- // possible that we are > 2 channels, use channel index mask
- if (channelMask == AUDIO_CHANNEL_INVALID && mChannelCount <= FCC_8) {
- audio_channel_mask_for_index_assignment_from_count(mChannelCount);
- }
AudioParameter param = AudioParameter(keyValuePair);
int value;
@@ -6691,9 +6803,7 @@
(mInput->stream->common.get_sample_rate(&mInput->stream->common)
<= (AUDIO_RESAMPLER_DOWN_RATIO_MAX * samplingRate)) &&
audio_channel_count_from_in_mask(
- mInput->stream->common.get_channels(&mInput->stream->common)) <= FCC_2 &&
- (channelMask == AUDIO_CHANNEL_IN_MONO ||
- channelMask == AUDIO_CHANNEL_IN_STEREO)) {
+ mInput->stream->common.get_channels(&mInput->stream->common)) <= FCC_8) {
status = NO_ERROR;
}
if (status == NO_ERROR) {
@@ -6727,6 +6837,7 @@
switch (event) {
case AUDIO_INPUT_OPENED:
case AUDIO_INPUT_CONFIG_CHANGED:
+ desc->mPatch = mPatch;
desc->mChannelMask = mChannelMask;
desc->mSamplingRate = mSampleRate;
desc->mFormat = mFormat;
@@ -6884,6 +6995,7 @@
// store new device and send to effects
mInDevice = patch->sources[0].ext.device.type;
+ mPatch = *patch;
for (size_t i = 0; i < mEffectChains.size(); i++) {
mEffectChains[i]->setDevice_l(mInDevice);
}
@@ -6936,6 +7048,8 @@
*handle = AUDIO_PATCH_HANDLE_NONE;
}
+ sendIoConfigEvent_l(AUDIO_INPUT_CONFIG_CHANGED);
+
return status;
}
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 8167bd1..7b4fb14 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -35,7 +35,8 @@
static const char *threadTypeToString(type_t type);
ThreadBase(const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id,
- audio_devices_t outDevice, audio_devices_t inDevice, type_t type);
+ audio_devices_t outDevice, audio_devices_t inDevice, type_t type,
+ bool systemReady);
virtual ~ThreadBase();
virtual status_t readyToRun();
@@ -92,10 +93,13 @@
Condition mCond; // condition for status return
status_t mStatus; // status communicated to sender
bool mWaitStatus; // true if sender is waiting for status
+ bool mRequiresSystemReady; // true if must wait for system ready to enter event queue
sp<ConfigEventData> mData; // event specific parameter data
protected:
- ConfigEvent(int type) : mType(type), mStatus(NO_ERROR), mWaitStatus(false), mData(NULL) {}
+ ConfigEvent(int type, bool requiresSystemReady = false) :
+ mType(type), mStatus(NO_ERROR), mWaitStatus(false),
+ mRequiresSystemReady(requiresSystemReady), mData(NULL) {}
};
class IoConfigEventData : public ConfigEventData {
@@ -136,7 +140,7 @@
class PrioConfigEvent : public ConfigEvent {
public:
PrioConfigEvent(pid_t pid, pid_t tid, int32_t prio) :
- ConfigEvent(CFG_EVENT_PRIO) {
+ ConfigEvent(CFG_EVENT_PRIO, true) {
mData = new PrioConfigEventData(pid, tid, prio);
}
virtual ~PrioConfigEvent() {}
@@ -230,6 +234,8 @@
// static externally-visible
type_t type() const { return mType; }
+ bool isDuplicating() const { return (mType == DUPLICATING); }
+
audio_io_handle_t id() const { return mId;}
// dynamic externally-visible
@@ -256,6 +262,7 @@
status_t sendConfigEvent_l(sp<ConfigEvent>& event);
void sendIoConfigEvent(audio_io_config_event event);
void sendIoConfigEvent_l(audio_io_config_event event);
+ void sendPrioConfigEvent(pid_t pid, pid_t tid, int32_t prio);
void sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio);
status_t sendSetParameterConfigEvent_l(const String8& keyValuePair);
status_t sendCreateAudioPatchConfigEvent(const struct audio_patch *patch,
@@ -357,6 +364,8 @@
virtual sp<IMemory> pipeMemory() const { return 0; }
+ void systemReady();
+
mutable Mutex mLock;
protected:
@@ -416,6 +425,7 @@
size_t mBufferSize; // HAL buffer size for read() or write()
Vector< sp<ConfigEvent> > mConfigEvents;
+ Vector< sp<ConfigEvent> > mPendingConfigEvents; // events awaiting system ready
// These fields are written and read by thread itself without lock or barrier,
// and read by other threads without lock or barrier via standby(), outDevice()
@@ -426,6 +436,7 @@
bool mStandby; // Whether thread is currently in standby.
audio_devices_t mOutDevice; // output device
audio_devices_t mInDevice; // input device
+ struct audio_patch mPatch;
audio_source_t mAudioSource;
const audio_io_handle_t mId;
@@ -442,6 +453,7 @@
mSuspendedSessions;
static const size_t kLogSize = 4 * 1024;
sp<NBLog::Writer> mNBLogWriter;
+ bool mSystemReady;
};
// --- PlaybackThread ---
@@ -467,7 +479,7 @@
static const int8_t kMaxTrackRetriesOffload = 20;
PlaybackThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
- audio_io_handle_t id, audio_devices_t device, type_t type);
+ audio_io_handle_t id, audio_devices_t device, type_t type, bool systemReady);
virtual ~PlaybackThread();
void dump(int fd, const Vector<String16>& args);
@@ -601,6 +613,9 @@
// updated by readOutputParameters_l()
size_t mNormalFrameCount; // normal mixer and effects
+ bool mThreadThrottle; // throttle the thread processing
+ uint32_t mHalfBufferMs; // half the buffer size in milliseconds
+
void* mSinkBuffer; // frame size aligned sink buffer
// TODO:
@@ -743,14 +758,14 @@
bool mInWrite;
// FIXME rename these former local variables of threadLoop to standard "m" names
- nsecs_t standbyTime;
+ nsecs_t mStandbyTimeNs;
size_t mSinkBufferSize;
// cached copies of activeSleepTimeUs() and idleSleepTimeUs() made by cacheParameters_l()
- uint32_t activeSleepTime;
- uint32_t idleSleepTime;
+ uint32_t mActiveSleepTimeUs;
+ uint32_t mIdleSleepTimeUs;
- uint32_t sleepTime;
+ uint32_t mSleepTimeUs;
// mixer status returned by prepareTracks_l()
mixer_state mMixerStatus; // current cycle
@@ -763,7 +778,7 @@
uint32_t sleepTimeShift;
// same as AudioFlinger::mStandbyTimeInNsecs except for DIRECT which uses a shorter value
- nsecs_t standbyDelay;
+ nsecs_t mStandbyDelayNs;
// MIXER only
nsecs_t maxPeriod;
@@ -839,6 +854,7 @@
AudioStreamOut* output,
audio_io_handle_t id,
audio_devices_t device,
+ bool systemReady,
type_t type = MIXER);
virtual ~MixerThread();
@@ -900,7 +916,7 @@
public:
DirectOutputThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
- audio_io_handle_t id, audio_devices_t device);
+ audio_io_handle_t id, audio_devices_t device, bool systemReady);
virtual ~DirectOutputThread();
// Thread virtuals
@@ -930,7 +946,8 @@
float mRightVolFloat;
DirectOutputThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
- audio_io_handle_t id, uint32_t device, ThreadBase::type_t type);
+ audio_io_handle_t id, uint32_t device, ThreadBase::type_t type,
+ bool systemReady);
void processVolume_l(Track *track, bool lastTrack);
// prepareTracks_l() tells threadLoop_mix() the name of the single active track
@@ -943,7 +960,7 @@
public:
OffloadThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
- audio_io_handle_t id, uint32_t device);
+ audio_io_handle_t id, uint32_t device, bool systemReady);
virtual ~OffloadThread() {};
virtual void flushHw_l();
@@ -998,7 +1015,7 @@
class DuplicatingThread : public MixerThread {
public:
DuplicatingThread(const sp<AudioFlinger>& audioFlinger, MixerThread* mainThread,
- audio_io_handle_t id);
+ audio_io_handle_t id, bool systemReady);
virtual ~DuplicatingThread();
// Thread virtuals
@@ -1174,7 +1191,8 @@
AudioStreamIn *input,
audio_io_handle_t id,
audio_devices_t outDevice,
- audio_devices_t inDevice
+ audio_devices_t inDevice,
+ bool systemReady
#ifdef TEE_SINK
, const sp<NBAIO_Sink>& teeSink
#endif
@@ -1291,6 +1309,7 @@
// one-time initialization, no locks required
sp<FastCapture> mFastCapture; // non-0 if there is also
// a fast capture
+
// FIXME audio watchdog thread
// contents are not guaranteed to be consistent, no locks required
diff --git a/services/audioflinger/tests/Android.mk b/services/audioflinger/tests/Android.mk
index 536eb93..e152468 100644
--- a/services/audioflinger/tests/Android.mk
+++ b/services/audioflinger/tests/Android.mk
@@ -30,12 +30,6 @@
#
include $(CLEAR_VARS)
-# Clang++ aborts on AudioMixer.cpp,
-# b/18373866, "do not know how to split this operator."
-ifeq ($(filter $(TARGET_ARCH),arm arm64),$(TARGET_ARCH))
- LOCAL_CLANG := false
-endif
-
LOCAL_SRC_FILES:= \
test-mixer.cpp \
../AudioMixer.cpp.arm \
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index a2327ee..4eef02f2 100755
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -20,7 +20,7 @@
// For mixed output and inputs, the policy will use max mixer sampling rates.
// Do not limit sampling rate otherwise
-#define MAX_MIXER_SAMPLING_RATE 48000
+#define MAX_MIXER_SAMPLING_RATE 192000
// For mixed output and inputs, the policy will use max mixer channel count.
// Do not limit channel count otherwise
@@ -60,7 +60,7 @@
*
* @return true if the device is a virtual one, false otherwise.
*/
-static bool is_virtual_input_device(audio_devices_t device)
+static inline bool is_virtual_input_device(audio_devices_t device)
{
if ((device & AUDIO_DEVICE_BIT_IN) != 0) {
device &= ~AUDIO_DEVICE_BIT_IN;
@@ -78,7 +78,7 @@
*
* @return true if the device needs distinguish on address, false otherwise..
*/
-static bool device_distinguishes_on_address(audio_devices_t device)
+static inline bool device_distinguishes_on_address(audio_devices_t device)
{
return ((device & APM_AUDIO_DEVICE_MATCH_ADDRESS_ALL & ~AUDIO_DEVICE_BIT_IN) != 0);
}
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index aa37eec..d1a2f4f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -29,7 +29,7 @@
class DeviceDescriptor : public AudioPort, public AudioPortConfig
{
public:
- DeviceDescriptor(const String8& name, audio_devices_t type);
+ DeviceDescriptor(audio_devices_t type);
virtual ~DeviceDescriptor() {}
@@ -50,10 +50,9 @@
status_t dump(int fd, int spaces, int index) const;
void log() const;
+ String8 mTag;
String8 mAddress;
- static String8 emptyNameStr;
-
private:
audio_devices_t mDeviceType;
audio_port_handle_t mId;
@@ -73,12 +72,12 @@
audio_devices_t types() const { return mDeviceTypes; }
void loadDevicesFromType(audio_devices_t types);
- void loadDevicesFromName(char *name, const DeviceVector& declaredDevices);
+ void loadDevicesFromTag(char *tag, const DeviceVector& declaredDevices);
sp<DeviceDescriptor> getDevice(audio_devices_t type, String8 address) const;
DeviceVector getDevicesFromType(audio_devices_t types) const;
sp<DeviceDescriptor> getDeviceFromId(audio_port_handle_t id) const;
- sp<DeviceDescriptor> getDeviceFromName(const String8& name) const;
+ sp<DeviceDescriptor> getDeviceFromTag(const String8& tag) const;
DeviceVector getDevicesFromTypeAddr(audio_devices_t type, String8 address) const;
audio_devices_t getDevicesFromHwModule(audio_module_handle_t moduleHandle) const;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
index 64f883a..afcd073 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp
@@ -272,6 +272,12 @@
(audio_channel_mask_t)ConfigParsingUtils::stringToEnum(sOutChannelsNameToEnumTable,
ARRAY_SIZE(sOutChannelsNameToEnumTable),
str);
+ if (channelMask == 0) { // if not found, check the channel index table
+ channelMask = (audio_channel_mask_t)
+ ConfigParsingUtils::stringToEnum(sIndexChannelsNameToEnumTable,
+ ARRAY_SIZE(sIndexChannelsNameToEnumTable),
+ str);
+ }
if (channelMask != 0) {
mChannelMasks.add(channelMask);
}
@@ -605,9 +611,13 @@
// For mixed output and inputs, use max mixer sampling rates. Do not
// limit sampling rate otherwise
+ // For inputs, also see checkCompatibleSamplingRate().
if (mType != AUDIO_PORT_TYPE_MIX) {
maxRate = UINT_MAX;
}
+ // TODO: should mSamplingRates[] be ordered in terms of our preference
+ // and we return the first (and hence most preferred) match? This is of concern if
+ // we want to choose 96kHz over 192kHz for USB driver stability or resource constraints.
for (size_t i = 0; i < mSamplingRates.size(); i ++) {
if ((mSamplingRates[i] > samplingRate) && (mSamplingRates[i] <= maxRate)) {
samplingRate = mSamplingRates[i];
diff --git a/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp b/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp
index 9ab1d61..89ef045 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp
@@ -218,7 +218,7 @@
node = node->first_child;
while (node) {
if (strcmp(ATTACHED_OUTPUT_DEVICES_TAG, node->name) == 0) {
- availableOutputDevices.loadDevicesFromName((char *)node->value,
+ availableOutputDevices.loadDevicesFromTag((char *)node->value,
declaredDevices);
ALOGV("loadGlobalConfig() Attached Output Devices %08x",
availableOutputDevices.types());
@@ -228,13 +228,13 @@
ARRAY_SIZE(sDeviceTypeToEnumTable),
(char *)node->value);
if (device != AUDIO_DEVICE_NONE) {
- defaultOutputDevice = new DeviceDescriptor(String8("default-output"), device);
+ defaultOutputDevice = new DeviceDescriptor(device);
} else {
ALOGW("loadGlobalConfig() default device not specified");
}
ALOGV("loadGlobalConfig() mDefaultOutputDevice %08x", defaultOutputDevice->type());
} else if (strcmp(ATTACHED_INPUT_DEVICES_TAG, node->name) == 0) {
- availableInputDevices.loadDevicesFromName((char *)node->value,
+ availableInputDevices.loadDevicesFromTag((char *)node->value,
declaredDevices);
ALOGV("loadGlobalConfig() Available InputDevices %08x", availableInputDevices.types());
} else if (strcmp(SPEAKER_DRC_ENABLED_TAG, node->name) == 0) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 0715eea..797077a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -24,13 +24,11 @@
namespace android {
-String8 DeviceDescriptor::emptyNameStr = String8("");
-
-DeviceDescriptor::DeviceDescriptor(const String8& name, audio_devices_t type) :
- AudioPort(name, AUDIO_PORT_TYPE_DEVICE,
+DeviceDescriptor::DeviceDescriptor(audio_devices_t type) :
+ AudioPort(String8(""), AUDIO_PORT_TYPE_DEVICE,
audio_is_output_device(type) ? AUDIO_PORT_ROLE_SINK :
AUDIO_PORT_ROLE_SOURCE),
- mAddress(""), mDeviceType(type), mId(0)
+ mTag(""), mAddress(""), mDeviceType(type), mId(0)
{
}
@@ -142,24 +140,21 @@
uint32_t i = 31 - __builtin_clz(types);
uint32_t type = 1 << i;
types &= ~type;
- add(new DeviceDescriptor(String8("device_type"), type | role_bit));
+ add(new DeviceDescriptor(type | role_bit));
}
}
-void DeviceVector::loadDevicesFromName(char *name,
+void DeviceVector::loadDevicesFromTag(char *tag,
const DeviceVector& declaredDevices)
{
- char *devName = strtok(name, "|");
- while (devName != NULL) {
- if (strlen(devName) != 0) {
+ char *devTag = strtok(tag, "|");
+ while (devTag != NULL) {
+ if (strlen(devTag) != 0) {
audio_devices_t type = ConfigParsingUtils::stringToEnum(sDeviceTypeToEnumTable,
ARRAY_SIZE(sDeviceTypeToEnumTable),
- devName);
+ devTag);
if (type != AUDIO_DEVICE_NONE) {
- devName = (char *)ConfigParsingUtils::enumToString(sDeviceNameToEnumTable,
- ARRAY_SIZE(sDeviceNameToEnumTable),
- type);
- sp<DeviceDescriptor> dev = new DeviceDescriptor(String8(devName), type);
+ sp<DeviceDescriptor> dev = new DeviceDescriptor(type);
if (type == AUDIO_DEVICE_IN_REMOTE_SUBMIX ||
type == AUDIO_DEVICE_OUT_REMOTE_SUBMIX ) {
dev->mAddress = String8("0");
@@ -167,13 +162,13 @@
add(dev);
} else {
sp<DeviceDescriptor> deviceDesc =
- declaredDevices.getDeviceFromName(String8(devName));
+ declaredDevices.getDeviceFromTag(String8(devTag));
if (deviceDesc != 0) {
add(deviceDesc);
}
}
}
- devName = strtok(NULL, "|");
+ devTag = strtok(NULL, "|");
}
}
@@ -239,11 +234,11 @@
return devices;
}
-sp<DeviceDescriptor> DeviceVector::getDeviceFromName(const String8& name) const
+sp<DeviceDescriptor> DeviceVector::getDeviceFromTag(const String8& tag) const
{
sp<DeviceDescriptor> device;
for (size_t i = 0; i < size(); i++) {
- if (itemAt(i)->mName == name) {
+ if (itemAt(i)->mTag == tag) {
device = itemAt(i);
break;
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index e955447..7e2050b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -58,7 +58,7 @@
} else if (strcmp(node->name, CHANNELS_TAG) == 0) {
profile->loadInChannels((char *)node->value);
} else if (strcmp(node->name, DEVICES_TAG) == 0) {
- profile->mSupportedDevices.loadDevicesFromName((char *)node->value,
+ profile->mSupportedDevices.loadDevicesFromTag((char *)node->value,
mDeclaredDevices);
} else if (strcmp(node->name, FLAGS_TAG) == 0) {
profile->mFlags = ConfigParsingUtils::parseInputFlagNames((char *)node->value);
@@ -105,7 +105,7 @@
} else if (strcmp(node->name, CHANNELS_TAG) == 0) {
profile->loadOutChannels((char *)node->value);
} else if (strcmp(node->name, DEVICES_TAG) == 0) {
- profile->mSupportedDevices.loadDevicesFromName((char *)node->value,
+ profile->mSupportedDevices.loadDevicesFromTag((char *)node->value,
mDeclaredDevices);
} else if (strcmp(node->name, FLAGS_TAG) == 0) {
profile->mFlags = ConfigParsingUtils::parseOutputFlagNames((char *)node->value);
@@ -154,7 +154,8 @@
ALOGW("loadDevice() bad type %08x", type);
return BAD_VALUE;
}
- sp<DeviceDescriptor> deviceDesc = new DeviceDescriptor(String8(root->name), type);
+ sp<DeviceDescriptor> deviceDesc = new DeviceDescriptor(type);
+ deviceDesc->mTag = String8(root->name);
node = root->first_child;
while (node) {
@@ -172,8 +173,8 @@
node = node->next;
}
- ALOGV("loadDevice() adding device name %s type %08x address %s",
- deviceDesc->mName.string(), type, deviceDesc->mAddress.string());
+ ALOGV("loadDevice() adding device tag %s type %08x address %s",
+ deviceDesc->mTag.string(), type, deviceDesc->mAddress.string());
mDeclaredDevices.add(deviceDesc);
@@ -189,7 +190,7 @@
profile->mChannelMasks.add(config->channel_mask);
profile->mFormats.add(config->format);
- sp<DeviceDescriptor> devDesc = new DeviceDescriptor(name, device);
+ sp<DeviceDescriptor> devDesc = new DeviceDescriptor(device);
devDesc->mAddress = address;
profile->mSupportedDevices.add(devDesc);
@@ -220,7 +221,7 @@
profile->mChannelMasks.add(config->channel_mask);
profile->mFormats.add(config->format);
- sp<DeviceDescriptor> devDesc = new DeviceDescriptor(name, device);
+ sp<DeviceDescriptor> devDesc = new DeviceDescriptor(device);
devDesc->mAddress = address;
profile->mSupportedDevices.add(devDesc);
@@ -350,7 +351,8 @@
}
sp<DeviceDescriptor> devDesc =
- new DeviceDescriptor(String8(device_name != NULL ? device_name : ""), device);
+ new DeviceDescriptor(device);
+ devDesc->mName = device_name;
devDesc->mAddress = address;
return devDesc;
}
diff --git a/services/audiopolicy/enginedefault/Android.mk b/services/audiopolicy/enginedefault/Android.mk
index b0ae835..8d43b89 100755
--- a/services/audiopolicy/enginedefault/Android.mk
+++ b/services/audiopolicy/enginedefault/Android.mk
@@ -43,6 +43,4 @@
libutils \
libaudioutils \
-include external/stlport/libstlport.mk
-
include $(BUILD_SHARED_LIBRARY)
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 50f1609..7a785eb 100755
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -498,6 +498,10 @@
device2 = availableOutputDevices.types() & AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
}
}
+ if (isInCall() && (strategy == STRATEGY_MEDIA)) {
+ device = getDeviceForStrategy(STRATEGY_PHONE);
+ break;
+ }
if ((device2 == AUDIO_DEVICE_NONE) &&
(mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] != AUDIO_POLICY_FORCE_NO_BT_A2DP) &&
(outputs.getA2dpOutput() != 0)) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index b7eed62..0adaac9 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -171,7 +171,7 @@
}
updateDevicesAndOutputs();
- if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) {
+ if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
audio_devices_t newDevice = getNewOutputDevice(mPrimaryOutput, false /*fromCache*/);
updateCallRouting(newDevice);
}
@@ -261,7 +261,7 @@
closeAllInputs();
- if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) {
+ if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
audio_devices_t newDevice = getNewOutputDevice(mPrimaryOutput, false /*fromCache*/);
updateCallRouting(newDevice);
}
@@ -302,6 +302,9 @@
audio_patch_handle_t afPatchHandle;
DeviceVector deviceList;
+ if (!hasPrimaryOutput()) {
+ return;
+ }
audio_devices_t txDevice = getDeviceAndMixForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
ALOGV("updateCallRouting device rxDevice %08x txDevice %08x", rxDevice, txDevice);
@@ -449,8 +452,6 @@
checkOutputForAllStrategies();
updateDevicesAndOutputs();
- sp<SwAudioOutputDescriptor> hwOutputDesc = mPrimaryOutput;
-
int delayMs = 0;
if (isStateInCall(state)) {
nsecs_t sysTime = systemTime();
@@ -477,29 +478,31 @@
}
}
- // Note that despite the fact that getNewOutputDevice() is called on the primary output,
- // the device returned is not necessarily reachable via this output
- audio_devices_t rxDevice = getNewOutputDevice(mPrimaryOutput, false /*fromCache*/);
- // force routing command to audio hardware when ending call
- // even if no device change is needed
- if (isStateInCall(oldState) && rxDevice == AUDIO_DEVICE_NONE) {
- rxDevice = hwOutputDesc->device();
- }
+ if (hasPrimaryOutput()) {
+ // Note that despite the fact that getNewOutputDevice() is called on the primary output,
+ // the device returned is not necessarily reachable via this output
+ audio_devices_t rxDevice = getNewOutputDevice(mPrimaryOutput, false /*fromCache*/);
+ // force routing command to audio hardware when ending call
+ // even if no device change is needed
+ if (isStateInCall(oldState) && rxDevice == AUDIO_DEVICE_NONE) {
+ rxDevice = mPrimaryOutput->device();
+ }
- if (state == AUDIO_MODE_IN_CALL) {
- updateCallRouting(rxDevice, delayMs);
- } else if (oldState == AUDIO_MODE_IN_CALL) {
- if (mCallRxPatch != 0) {
- mpClientInterface->releaseAudioPatch(mCallRxPatch->mAfPatchHandle, 0);
- mCallRxPatch.clear();
+ if (state == AUDIO_MODE_IN_CALL) {
+ updateCallRouting(rxDevice, delayMs);
+ } else if (oldState == AUDIO_MODE_IN_CALL) {
+ if (mCallRxPatch != 0) {
+ mpClientInterface->releaseAudioPatch(mCallRxPatch->mAfPatchHandle, 0);
+ mCallRxPatch.clear();
+ }
+ if (mCallTxPatch != 0) {
+ mpClientInterface->releaseAudioPatch(mCallTxPatch->mAfPatchHandle, 0);
+ mCallTxPatch.clear();
+ }
+ setOutputDevice(mPrimaryOutput, rxDevice, force, 0);
+ } else {
+ setOutputDevice(mPrimaryOutput, rxDevice, force, 0);
}
- if (mCallTxPatch != 0) {
- mpClientInterface->releaseAudioPatch(mCallTxPatch->mAfPatchHandle, 0);
- mCallTxPatch.clear();
- }
- setOutputDevice(mPrimaryOutput, rxDevice, force, 0);
- } else {
- setOutputDevice(mPrimaryOutput, rxDevice, force, 0);
}
// if entering in call state, handle special case of active streams
// pertaining to sonification strategy see handleIncallSonification()
@@ -543,7 +546,7 @@
checkA2dpSuspend();
checkOutputForAllStrategies();
updateDevicesAndOutputs();
- if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) {
+ if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
audio_devices_t newDevice = getNewOutputDevice(mPrimaryOutput, true /*fromCache*/);
updateCallRouting(newDevice);
}
@@ -579,24 +582,43 @@
audio_channel_mask_t channelMask,
audio_output_flags_t flags)
{
+ // only retain flags that will drive the direct output profile selection
+ // if explicitly requested
+ static const uint32_t kRelevantFlags =
+ (AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+ flags =
+ (audio_output_flags_t)((flags & kRelevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
+
+ sp<IOProfile> profile;
+
for (size_t i = 0; i < mHwModules.size(); i++) {
if (mHwModules[i]->mHandle == 0) {
continue;
}
for (size_t j = 0; j < mHwModules[i]->mOutputProfiles.size(); j++) {
- sp<IOProfile> profile = mHwModules[i]->mOutputProfiles[j];
- bool found = profile->isCompatibleProfile(device, String8(""),
+ sp<IOProfile> curProfile = mHwModules[i]->mOutputProfiles[j];
+ if (!curProfile->isCompatibleProfile(device, String8(""),
samplingRate, NULL /*updatedSamplingRate*/,
format, NULL /*updatedFormat*/,
channelMask, NULL /*updatedChannelMask*/,
- flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD ?
- AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD : AUDIO_OUTPUT_FLAG_DIRECT);
- if (found && (mAvailableOutputDevices.types() & profile->mSupportedDevices.types())) {
- return profile;
+ flags)) {
+ continue;
+ }
+ // reject profiles not corresponding to a device currently available
+ if ((mAvailableOutputDevices.types() & curProfile->mSupportedDevices.types()) == 0) {
+ continue;
+ }
+ // if several profiles are compatible, give priority to one with offload capability
+ if (profile != 0 && ((curProfile->mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0)) {
+ continue;
+ }
+ profile = curProfile;
+ if ((profile->mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
+ break;
}
}
}
- return 0;
+ return profile;
}
audio_io_handle_t AudioPolicyManager::getOutput(audio_stream_type_t stream,
@@ -768,6 +790,9 @@
if (stream != AUDIO_STREAM_MUSIC) {
flags = (audio_output_flags_t)(flags &~AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
}
+ if (stream == AUDIO_STREAM_TTS) {
+ flags = AUDIO_OUTPUT_FLAG_TTS;
+ }
sp<IOProfile> profile;
@@ -816,10 +841,27 @@
if (outputDesc != NULL) {
closeOutput(outputDesc->mIoHandle);
}
+
+ // if the selected profile is offloaded and no offload info was specified,
+ // create a default one
+ audio_offload_info_t defaultOffloadInfo = AUDIO_INFO_INITIALIZER;
+ if ((profile->mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) && !offloadInfo) {
+ flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+ defaultOffloadInfo.sample_rate = samplingRate;
+ defaultOffloadInfo.channel_mask = channelMask;
+ defaultOffloadInfo.format = format;
+ defaultOffloadInfo.stream_type = stream;
+ defaultOffloadInfo.bit_rate = 0;
+ defaultOffloadInfo.duration_us = -1;
+ defaultOffloadInfo.has_video = true; // conservative
+ defaultOffloadInfo.is_streaming = true; // likely
+ offloadInfo = &defaultOffloadInfo;
+ }
+
outputDesc = new SwAudioOutputDescriptor(profile, mpClientInterface);
outputDesc->mDevice = device;
outputDesc->mLatency = 0;
- outputDesc->mFlags =(audio_output_flags_t) (outputDesc->mFlags | flags);
+ outputDesc->mFlags = (audio_output_flags_t)(outputDesc->mFlags | flags);
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
config.sample_rate = samplingRate;
config.channel_mask = channelMask;
@@ -851,10 +893,6 @@
if (audio_is_linear_pcm(format) && samplingRate <= MAX_MIXER_SAMPLING_RATE) {
goto non_direct_output;
}
- // fall back to mixer output if possible when the direct output could not be open
- if (audio_is_linear_pcm(format) && samplingRate <= MAX_MIXER_SAMPLING_RATE) {
- goto non_direct_output;
- }
return AUDIO_IO_HANDLE_NONE;
}
outputDesc->mSamplingRate = config.sample_rate;
@@ -1223,7 +1261,7 @@
// If effects where present on the output, audioflinger moved them to the primary
// output by default: move them back to the appropriate output.
audio_io_handle_t dstOutput = getOutputForEffect();
- if (dstOutput != mPrimaryOutput->mIoHandle) {
+ if (hasPrimaryOutput() && dstOutput != mPrimaryOutput->mIoHandle) {
mpClientInterface->moveEffects(AUDIO_SESSION_OUTPUT_MIX,
mPrimaryOutput->mIoHandle, dstOutput);
}
@@ -1931,7 +1969,8 @@
snprintf(buffer, SIZE, "\nAudioPolicyManager Dump: %p\n", this);
result.append(buffer);
- snprintf(buffer, SIZE, " Primary Output: %d\n", mPrimaryOutput->mIoHandle);
+ snprintf(buffer, SIZE, " Primary Output: %d\n",
+ hasPrimaryOutput() ? mPrimaryOutput->mIoHandle : AUDIO_IO_HANDLE_NONE);
result.append(buffer);
snprintf(buffer, SIZE, " Phone state: %d\n", mEngine->getPhoneState());
result.append(buffer);
@@ -1992,8 +2031,9 @@
}
//TODO: enable audio offloading with video when ready
- if (offloadInfo.has_video)
- {
+ const bool allowOffloadWithVideo =
+ property_get_bool("audio.offload.video", false /* default_value */);
+ if (offloadInfo.has_video && !allowOffloadWithVideo) {
ALOGV("isOffloadSupported: has_video == true, returning false");
return false;
}
@@ -2648,7 +2688,7 @@
mUidCached = getuid();
mpClientInterface = clientInterface;
- mDefaultOutputDevice = new DeviceDescriptor(String8("Speaker"), AUDIO_DEVICE_OUT_SPEAKER);
+ mDefaultOutputDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPEAKER);
if (ConfigParsingUtils::loadAudioPolicyConfig(AUDIO_POLICY_VENDOR_CONFIG_FILE,
mHwModules, mAvailableInputDevices, mAvailableOutputDevices,
mDefaultOutputDevice, mSpeakerDrcEnabled) != NO_ERROR) {
@@ -2893,7 +2933,7 @@
status_t AudioPolicyManager::initCheck()
{
- return (mPrimaryOutput == 0) ? NO_INIT : NO_ERROR;
+ return hasPrimaryOutput() ? NO_ERROR : NO_INIT;
}
#ifdef AUDIO_POLICY_TEST
@@ -3268,7 +3308,8 @@
policyMix->setOutput(desc);
desc->mPolicyMix = policyMix->getMix();
- } else if ((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) {
+ } else if (((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) &&
+ hasPrimaryOutput()) {
// no duplicated output for direct outputs and
// outputs used by dynamic policy mixes
audio_io_handle_t duplicatedOutput = AUDIO_IO_HANDLE_NONE;
@@ -4544,7 +4585,8 @@
int delayMs,
audio_devices_t device)
{
- ALOGVV("setStrategyMute() strategy %d, mute %d, output %d", strategy, on, output);
+ ALOGVV("setStrategyMute() strategy %d, mute %d, output ID %d",
+ strategy, on, outputDesc->getId());
for (int stream = 0; stream < AUDIO_STREAM_CNT; stream++) {
if (stream == AUDIO_STREAM_PATCH) {
continue;
@@ -4597,6 +4639,10 @@
void AudioPolicyManager::handleIncallSonification(audio_stream_type_t stream,
bool starting, bool stateChange)
{
+ if (!hasPrimaryOutput()) {
+ return;
+ }
+
// if the stream pertains to sonification strategy and we are in call we must
// mute the stream if it is low visibility. If it is high visibility, we must play a tone
// in the device used for phone strategy and play the tone if the selected device does not
@@ -4738,7 +4784,7 @@
sp<HwModule> module;
sp<IOProfile> profile;
sp<DeviceDescriptor> defaultInputDevice =
- new DeviceDescriptor(String8("builtin-mic"), AUDIO_DEVICE_IN_BUILTIN_MIC);
+ new DeviceDescriptor(AUDIO_DEVICE_IN_BUILTIN_MIC);
mAvailableOutputDevices.add(mDefaultOutputDevice);
mAvailableInputDevices.add(defaultInputDevice);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index ea16864..f9d1198 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -553,10 +553,16 @@
audio_devices_t availablePrimaryOutputDevices() const
{
+ if (!hasPrimaryOutput()) {
+ return AUDIO_DEVICE_NONE;
+ }
return mPrimaryOutput->supportedDevices() & mAvailableOutputDevices.types();
}
audio_devices_t availablePrimaryInputDevices() const
{
+ if (!hasPrimaryOutput()) {
+ return AUDIO_DEVICE_NONE;
+ }
return mAvailableInputDevices.getDevicesFromHwModule(mPrimaryOutput->getModuleHandle());
}
@@ -576,6 +582,8 @@
void clearSessionRoutes(uid_t uid);
void checkStrategyRoute(routing_strategy strategy, audio_io_handle_t ouptutToSkip);
+ bool hasPrimaryOutput() const { return mPrimaryOutput != 0; }
+
uid_t mUidCached;
AudioPolicyClientInterface *mpClientInterface; // audio policy client interface
sp<SwAudioOutputDescriptor> mPrimaryOutput; // primary output descriptor
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 9c60911..e8ef24e 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -62,6 +62,7 @@
libbinder \
libcutils \
libmedia \
+ libmediautils \
libcamera_client \
libgui \
libhardware \
@@ -72,6 +73,7 @@
LOCAL_C_INCLUDES += \
system/media/camera/include \
system/media/private/camera/include \
+ frameworks/native/include/media/openmax \
external/jpeg
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index 8613ac6..280bb9d 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -359,7 +359,7 @@
delete mMetadata;
}
- mAnw.clear();
+ mSurface.clear();
mSurfaceTexture.clear();
mProducer.clear();
mConsumer.clear();
@@ -395,11 +395,11 @@
return res;
}
- mAnw = new Surface(mProducer, /*useAsync*/ true);
- if (mAnw == NULL) {
+ mSurface = new Surface(mProducer, /*useAsync*/ true);
+ if (mSurface == NULL) {
return NO_MEMORY;
}
- res = device->createStream(mAnw, width, height, format,
+ res = device->createStream(mSurface, width, height, format,
HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0, &mStreamId);
if (res) {
return res;
@@ -653,7 +653,7 @@
CameraHardwareInterfaceFlashControl::~CameraHardwareInterfaceFlashControl() {
disconnectCameraDevice();
- mAnw.clear();
+ mSurface.clear();
mSurfaceTexture.clear();
mProducer.clear();
mConsumer.clear();
@@ -810,18 +810,18 @@
return res;
}
- mAnw = new Surface(mProducer, /*useAsync*/ true);
- if (mAnw == NULL) {
+ mSurface = new Surface(mProducer, /*useAsync*/ true);
+ if (mSurface == NULL) {
return NO_MEMORY;
}
- res = native_window_api_connect(mAnw.get(), NATIVE_WINDOW_API_CAMERA);
+ res = native_window_api_connect(mSurface.get(), NATIVE_WINDOW_API_CAMERA);
if (res) {
ALOGE("%s: Unable to connect to native window", __FUNCTION__);
return res;
}
- return device->setPreviewWindow(mAnw);
+ return device->setPreviewWindow(mSurface);
}
status_t CameraHardwareInterfaceFlashControl::connectCameraDevice(
@@ -870,7 +870,7 @@
CameraParameters::FLASH_MODE_OFF);
mDevice->setParameters(mParameters);
mDevice->stopPreview();
- status_t res = native_window_api_disconnect(mAnw.get(),
+ status_t res = native_window_api_disconnect(mSurface.get(),
NATIVE_WINDOW_API_CAMERA);
if (res) {
ALOGW("%s: native_window_api_disconnect failed: %s (%d)",
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index 30f01f0..4d5fe8d 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -166,7 +166,7 @@
sp<IGraphicBufferProducer> mProducer;
sp<IGraphicBufferConsumer> mConsumer;
sp<GLConsumer> mSurfaceTexture;
- sp<ANativeWindow> mAnw;
+ sp<Surface> mSurface;
int32_t mStreamId;
Mutex mLock;
@@ -215,7 +215,7 @@
sp<IGraphicBufferProducer> mProducer;
sp<IGraphicBufferConsumer> mConsumer;
sp<GLConsumer> mSurfaceTexture;
- sp<ANativeWindow> mAnw;
+ sp<Surface> mSurface;
Mutex mLock;
};
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 3f80faf..f42fada 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -33,6 +33,7 @@
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <binder/ProcessInfoService.h>
+#include <camera/ICameraServiceProxy.h>
#include <cutils/atomic.h>
#include <cutils/properties.h>
#include <gui/Surface.h>
@@ -40,6 +41,7 @@
#include <media/AudioSystem.h>
#include <media/IMediaHTTPService.h>
#include <media/mediaplayer.h>
+#include <mediautils/BatteryNotifier.h>
#include <utils/Errors.h>
#include <utils/Log.h>
#include <utils/String16.h>
@@ -122,8 +124,8 @@
// should be ok for now.
static CameraService *gCameraService;
-CameraService::CameraService() : mEventLog(DEFAULT_EVENT_LOG_LENGTH),
- mLastUserId(DEFAULT_LAST_USER_ID), mSoundRef(0), mModule(0), mFlashlight(0) {
+CameraService::CameraService() : mEventLog(DEFAULT_EVENT_LOG_LENGTH), mAllowedUsers(),
+ mSoundRef(0), mModule(0), mFlashlight(0) {
ALOGI("CameraService started (pid=%d)", getpid());
gCameraService = this;
@@ -139,6 +141,11 @@
BnCameraService::onFirstRef();
+ // Update battery life tracking if service is restarting
+ BatteryNotifier& notifier(BatteryNotifier::getInstance());
+ notifier.noteResetCamera();
+ notifier.noteResetFlashlight();
+
camera_module_t *rawModule;
int err = hw_get_module(CAMERA_HARDWARE_MODULE_ID,
(const hw_module_t **)&rawModule);
@@ -224,6 +231,18 @@
}
CameraDeviceFactory::registerService(this);
+
+ CameraService::pingCameraServiceProxy();
+}
+
+void CameraService::pingCameraServiceProxy() {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.camera.proxy"));
+ if (binder == nullptr) {
+ return;
+ }
+ sp<ICameraServiceProxy> proxyBinder = interface_cast<ICameraServiceProxy>(binder);
+ proxyBinder->pingForUserUpdate();
}
CameraService::~CameraService() {
@@ -323,12 +342,39 @@
res = setTorchStatusLocked(cameraId, newStatus);
if (res) {
- ALOGE("%s: Failed to set the torch status", __FUNCTION__,
- (uint32_t)newStatus);
+ ALOGE("%s: Failed to set the torch status to %d", __FUNCTION__, (uint32_t)newStatus);
return;
}
{
+ // Update battery life logging for flashlight
+ Mutex::Autolock al(mTorchClientMapMutex);
+ auto iter = mTorchUidMap.find(cameraId);
+ if (iter != mTorchUidMap.end()) {
+ int oldUid = iter->second.second;
+ int newUid = iter->second.first;
+ BatteryNotifier& notifier(BatteryNotifier::getInstance());
+ if (oldUid != newUid) {
+ // If the UID has changed, log the status and update current UID in mTorchUidMap
+ if (status == ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON) {
+ notifier.noteFlashlightOff(cameraId, oldUid);
+ }
+ if (newStatus == ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON) {
+ notifier.noteFlashlightOn(cameraId, newUid);
+ }
+ iter->second.second = newUid;
+ } else {
+ // If the UID has not changed, log the status
+ if (newStatus == ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON) {
+ notifier.noteFlashlightOn(cameraId, oldUid);
+ } else {
+ notifier.noteFlashlightOff(cameraId, oldUid);
+ }
+ }
+ }
+ }
+
+ {
Mutex::Autolock lock(mStatusListenerLock);
for (auto& i : mListenerList) {
i->onTorchStatusChanged(newStatus, String16{cameraId});
@@ -519,34 +565,12 @@
int CameraService::getCameraPriorityFromProcState(int procState) {
// Find the priority for the camera usage based on the process state. Higher priority clients
// win for evictions.
- // Note: Unlike the ordering for ActivityManager, persistent system processes will always lose
- // the camera to the top/foreground applications.
- switch(procState) {
- case PROCESS_STATE_TOP: // User visible
- return 100;
- case PROCESS_STATE_IMPORTANT_FOREGROUND: // Foreground
- return 90;
- case PROCESS_STATE_PERSISTENT: // Persistent system services
- case PROCESS_STATE_PERSISTENT_UI:
- return 80;
- case PROCESS_STATE_IMPORTANT_BACKGROUND: // "Important" background processes
- return 70;
- case PROCESS_STATE_BACKUP: // Everything else
- case PROCESS_STATE_HEAVY_WEIGHT:
- case PROCESS_STATE_SERVICE:
- case PROCESS_STATE_RECEIVER:
- case PROCESS_STATE_HOME:
- case PROCESS_STATE_LAST_ACTIVITY:
- case PROCESS_STATE_CACHED_ACTIVITY:
- case PROCESS_STATE_CACHED_ACTIVITY_CLIENT:
- case PROCESS_STATE_CACHED_EMPTY:
- return 1;
- case PROCESS_STATE_NONEXISTENT:
- return -1;
- default:
- ALOGE("%s: Received unknown process state from ActivityManagerService!", __FUNCTION__);
- return -1;
+ if (procState < 0) {
+ ALOGE("%s: Received invalid process state %d from ActivityManagerService!", __FUNCTION__,
+ procState);
+ return -1;
}
+ return INT_MAX - procState;
}
status_t CameraService::getCameraVendorTagDescriptor(/*out*/sp<VendorTagDescriptor>& desc) {
@@ -698,6 +722,20 @@
return NO_ERROR;
}
+String8 CameraService::toString(std::set<userid_t> intSet) {
+ String8 s("");
+ bool first = true;
+ for (userid_t i : intSet) {
+ if (first) {
+ s.appendFormat("%d", i);
+ first = false;
+ } else {
+ s.appendFormat(", %d", i);
+ }
+ }
+ return s;
+}
+
status_t CameraService::initializeShimMetadata(int cameraId) {
int uid = getCallingUid();
@@ -805,7 +843,7 @@
// Check device policy for this camera
char value[PROPERTY_VALUE_MAX];
char key[PROPERTY_KEY_MAX];
- int clientUserId = multiuser_get_user_id(clientUid);
+ userid_t clientUserId = multiuser_get_user_id(clientUid);
snprintf(key, PROPERTY_KEY_MAX, "sys.secpolicy.camera.off_%d", clientUserId);
property_get(key, value, "0");
if (strcmp(value, "1") == 0) {
@@ -817,10 +855,10 @@
// Only allow clients who are being used by the current foreground device user, unless calling
// from our own process.
- if (callingPid != getpid() &&
- (mLastUserId != clientUserId && mLastUserId != DEFAULT_LAST_USER_ID)) {
- ALOGE("CameraService::connect X (PID %d) rejected (cannot connect from previous "
- "device user %d, current device user %d)", callingPid, clientUserId, mLastUserId);
+ if (callingPid != getpid() && (mAllowedUsers.find(clientUserId) == mAllowedUsers.end())) {
+ ALOGE("CameraService::connect X (PID %d) rejected (cannot connect from "
+ "device user %d, currently allowed device users: %s)", callingPid, clientUserId,
+ toString(mAllowedUsers).string());
return PERMISSION_DENIED;
}
@@ -967,6 +1005,10 @@
"(PID %" PRId32 ", priority %" PRId32 ")", i->getKey().string(),
String8{i->getValue()->getPackageName()}.string(), i->getOwnerId(),
i->getPriority());
+ ALOGE(" Conflicts with: Device %s, client package %s (PID %"
+ PRId32 ", priority %" PRId32 ")", i->getKey().string(),
+ String8{i->getValue()->getPackageName()}.string(), i->getOwnerId(),
+ i->getPriority());
}
// Log the client's attempt
@@ -1057,24 +1099,19 @@
status_t CameraService::connect(
const sp<ICameraClient>& cameraClient,
int cameraId,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid,
/*out*/
sp<ICamera>& device) {
- const status_t result = checkCameraAccess(opPackageName);
- if (result != NO_ERROR) {
- return result;
- }
-
status_t ret = NO_ERROR;
String8 id = String8::format("%d", cameraId);
sp<Client> client = nullptr;
ret = connectHelper<ICameraClient,Client>(cameraClient, id, CAMERA_HAL_API_VERSION_UNSPECIFIED,
- opPackageName, clientUid, API_1, false, false, /*out*/client);
+ clientPackageName, clientUid, API_1, false, false, /*out*/client);
if(ret != NO_ERROR) {
- logRejected(id, getCallingPid(), String8(opPackageName),
+ logRejected(id, getCallingPid(), String8(clientPackageName),
String8::format("%s (%d)", strerror(-ret), ret));
return ret;
}
@@ -1086,16 +1123,11 @@
status_t CameraService::connectLegacy(
const sp<ICameraClient>& cameraClient,
int cameraId, int halVersion,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid,
/*out*/
sp<ICamera>& device) {
- const status_t result = checkCameraAccess(opPackageName);
- if (result != NO_ERROR) {
- return result;
- }
-
String8 id = String8::format("%d", cameraId);
int apiVersion = mModule->getModuleApiVersion();
if (halVersion != CAMERA_HAL_API_VERSION_UNSPECIFIED &&
@@ -1108,18 +1140,18 @@
*/
ALOGE("%s: camera HAL module version %x doesn't support connecting to legacy HAL devices!",
__FUNCTION__, apiVersion);
- logRejected(id, getCallingPid(), String8(opPackageName),
+ logRejected(id, getCallingPid(), String8(clientPackageName),
String8("HAL module version doesn't support legacy HAL connections"));
return INVALID_OPERATION;
}
status_t ret = NO_ERROR;
sp<Client> client = nullptr;
- ret = connectHelper<ICameraClient,Client>(cameraClient, id, halVersion, opPackageName,
+ ret = connectHelper<ICameraClient,Client>(cameraClient, id, halVersion, clientPackageName,
clientUid, API_1, true, false, /*out*/client);
if(ret != NO_ERROR) {
- logRejected(id, getCallingPid(), String8(opPackageName),
+ logRejected(id, getCallingPid(), String8(clientPackageName),
String8::format("%s (%d)", strerror(-ret), ret));
return ret;
}
@@ -1131,25 +1163,20 @@
status_t CameraService::connectDevice(
const sp<ICameraDeviceCallbacks>& cameraCb,
int cameraId,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid,
/*out*/
sp<ICameraDeviceUser>& device) {
- const status_t result = checkCameraAccess(opPackageName);
- if (result != NO_ERROR) {
- return result;
- }
-
status_t ret = NO_ERROR;
String8 id = String8::format("%d", cameraId);
sp<CameraDeviceClient> client = nullptr;
ret = connectHelper<ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
- CAMERA_HAL_API_VERSION_UNSPECIFIED, opPackageName, clientUid, API_2, false, false,
+ CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName, clientUid, API_2, false, false,
/*out*/client);
if(ret != NO_ERROR) {
- logRejected(id, getCallingPid(), String8(opPackageName),
+ logRejected(id, getCallingPid(), String8(clientPackageName),
String8::format("%s (%d)", strerror(-ret), ret));
return ret;
}
@@ -1160,12 +1187,13 @@
status_t CameraService::setTorchMode(const String16& cameraId, bool enabled,
const sp<IBinder>& clientBinder) {
- if (enabled && clientBinder == NULL) {
+ if (enabled && clientBinder == nullptr) {
ALOGE("%s: torch client binder is NULL", __FUNCTION__);
return -EINVAL;
}
String8 id = String8(cameraId.string());
+ int uid = getCallingUid();
// verify id is valid.
auto state = getCameraState(id);
@@ -1204,7 +1232,21 @@
}
}
+ {
+ // Update UID map - this is used in the torch status changed callbacks, so must be done
+ // before setTorchMode
+ Mutex::Autolock al(mTorchClientMapMutex);
+ if (mTorchUidMap.find(id) == mTorchUidMap.end()) {
+ mTorchUidMap[id].first = uid;
+ mTorchUidMap[id].second = uid;
+ } else {
+ // Set the pending UID
+ mTorchUidMap[id].first = uid;
+ }
+ }
+
status_t res = mFlashlight->setTorchMode(id, enabled);
+
if (res) {
ALOGE("%s: setting torch mode of camera %s to %d failed. %s (%d)",
__FUNCTION__, id.string(), enabled, strerror(-res), res);
@@ -1215,29 +1257,27 @@
// update the link to client's death
Mutex::Autolock al(mTorchClientMapMutex);
ssize_t index = mTorchClientMap.indexOfKey(id);
+ BatteryNotifier& notifier(BatteryNotifier::getInstance());
if (enabled) {
if (index == NAME_NOT_FOUND) {
mTorchClientMap.add(id, clientBinder);
} else {
- const sp<IBinder> oldBinder = mTorchClientMap.valueAt(index);
- oldBinder->unlinkToDeath(this);
-
+ mTorchClientMap.valueAt(index)->unlinkToDeath(this);
mTorchClientMap.replaceValueAt(index, clientBinder);
}
clientBinder->linkToDeath(this);
} else if (index != NAME_NOT_FOUND) {
- sp<IBinder> oldBinder = mTorchClientMap.valueAt(index);
- oldBinder->unlinkToDeath(this);
+ mTorchClientMap.valueAt(index)->unlinkToDeath(this);
}
}
return OK;
}
-void CameraService::notifySystemEvent(int eventId, int arg0) {
+void CameraService::notifySystemEvent(int32_t eventId, const int32_t* args, size_t length) {
switch(eventId) {
case ICameraService::USER_SWITCHED: {
- doUserSwitch(/*newUserId*/arg0);
+ doUserSwitch(/*newUserIds*/args, /*length*/length);
break;
}
case ICameraService::NO_EVENT:
@@ -1249,8 +1289,7 @@
}
}
-status_t CameraService::addListener(
- const sp<ICameraServiceListener>& listener) {
+status_t CameraService::addListener(const sp<ICameraServiceListener>& listener) {
ALOGV("%s: Add listener %p", __FUNCTION__, listener.get());
if (listener == 0) {
@@ -1480,20 +1519,30 @@
return clientDescriptorPtr->getValue();
}
-void CameraService::doUserSwitch(int newUserId) {
+void CameraService::doUserSwitch(const int32_t* newUserId, size_t length) {
// Acquire mServiceLock and prevent other clients from connecting
std::unique_ptr<AutoConditionLock> lock =
AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
- if (newUserId <= 0) {
- ALOGW("%s: Bad user ID %d given during user switch, resetting to default.", __FUNCTION__,
- newUserId);
- newUserId = DEFAULT_LAST_USER_ID;
+ std::set<userid_t> newAllowedUsers;
+ for (size_t i = 0; i < length; i++) {
+ if (newUserId[i] < 0) {
+ ALOGE("%s: Bad user ID %d given during user switch, ignoring.",
+ __FUNCTION__, newUserId[i]);
+ return;
+ }
+ newAllowedUsers.insert(static_cast<userid_t>(newUserId[i]));
}
- logUserSwitch(mLastUserId, newUserId);
- mLastUserId = newUserId;
+ if (newAllowedUsers == mAllowedUsers) {
+ ALOGW("%s: Received notification of user switch with no updated user IDs.", __FUNCTION__);
+ return;
+ }
+
+ logUserSwitch(mAllowedUsers, newAllowedUsers);
+
+ mAllowedUsers = std::move(newAllowedUsers);
// Current user has switched, evict all current clients.
std::vector<sp<BasicClient>> evicted;
@@ -1505,6 +1554,13 @@
continue;
}
+ // Don't evict clients that are still allowed.
+ uid_t clientUid = clientSp->getClientUid();
+ userid_t clientUserId = multiuser_get_user_id(clientUid);
+ if (mAllowedUsers.find(clientUserId) != mAllowedUsers.end()) {
+ continue;
+ }
+
evicted.push_back(clientSp);
String8 curTime = getFormattedCurrentTime();
@@ -1544,30 +1600,33 @@
}
void CameraService::logDisconnected(const char* cameraId, int clientPid,
- const char* opPackageName) {
+ const char* clientPackage) {
// Log the clients evicted
logEvent(String8::format("DISCONNECT device %s client for package %s (PID %d)", cameraId,
- opPackageName, clientPid));
+ clientPackage, clientPid));
}
void CameraService::logConnected(const char* cameraId, int clientPid,
- const char* opPackageName) {
+ const char* clientPackage) {
// Log the clients evicted
logEvent(String8::format("CONNECT device %s client for package %s (PID %d)", cameraId,
- opPackageName, clientPid));
+ clientPackage, clientPid));
}
void CameraService::logRejected(const char* cameraId, int clientPid,
- const char* opPackageName, const char* reason) {
+ const char* clientPackage, const char* reason) {
// Log the client rejected
logEvent(String8::format("REJECT device %s client for package %s (PID %d), reason: (%s)",
- cameraId, opPackageName, clientPid, reason));
+ cameraId, clientPackage, clientPid, reason));
}
-void CameraService::logUserSwitch(int oldUserId, int newUserId) {
+void CameraService::logUserSwitch(const std::set<userid_t>& oldUserIds,
+ const std::set<userid_t>& newUserIds) {
+ String8 newUsers = toString(newUserIds);
+ String8 oldUsers = toString(oldUserIds);
// Log the new and old users
- logEvent(String8::format("USER_SWITCH from old user: %d , to new user: %d", oldUserId,
- newUserId));
+ logEvent(String8::format("USER_SWITCH previous allowed users: %s , current allowed users: %s",
+ oldUsers.string(), newUsers.string()));
}
void CameraService::logDeviceRemoved(const char* cameraId, const char* reason) {
@@ -1598,6 +1657,21 @@
// Permission checks
switch (code) {
+ case BnCameraService::CONNECT:
+ case BnCameraService::CONNECT_DEVICE:
+ case BnCameraService::CONNECT_LEGACY: {
+ if (pid != selfPid) {
+ // we're called from a different process, do the real check
+ if (!checkCallingPermission(
+ String16("android.permission.CAMERA"))) {
+ const int uid = getCallingUid();
+ ALOGE("Permission Denial: "
+ "can't use the camera pid=%d, uid=%d", pid, uid);
+ return PERMISSION_DENIED;
+ }
+ }
+ break;
+ }
case BnCameraService::NOTIFY_SYSTEM_EVENT: {
if (pid != selfPid) {
// Ensure we're being called by system_server, or similar process with
@@ -1617,38 +1691,6 @@
return BnCameraService::onTransact(code, data, reply, flags);
}
-status_t CameraService::checkCameraAccess(const String16& opPackageName) {
- const int pid = getCallingPid();
-
- if (pid == getpid()) {
- return NO_ERROR;
- }
-
- const int uid = getCallingUid();
-
- if (!checkCallingPermission(String16("android.permission.CAMERA"))) {
- ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", pid, uid);
- return PERMISSION_DENIED;
- }
-
- AppOpsManager appOps;
- const int32_t result = appOps.noteOp(AppOpsManager::OP_CAMERA, uid, opPackageName);
-
- switch (result) {
- case AppOpsManager::MODE_ERRORED: {
- ALOGE("App op OP_CAMERA errored: can't use the camera pid=%d, uid=%d", pid, uid);
- return PERMISSION_DENIED;
- } break;
-
- case AppOpsManager::MODE_IGNORED: {
- ALOGE("App op OP_CAMERA ignored: can't use the camera pid=%d, uid=%d", pid, uid);
- return INVALID_OPERATION;
- } break;
- }
-
- return NO_ERROR;
-}
-
// We share the media players for shutter and recording sound for all clients.
// A reference count is kept to determine when we will actually release the
// media players.
@@ -1701,13 +1743,13 @@
CameraService::Client::Client(const sp<CameraService>& cameraService,
const sp<ICameraClient>& cameraClient,
- const String16& opPackageName,
+ const String16& clientPackageName,
int cameraId, int cameraFacing,
int clientPid, uid_t clientUid,
int servicePid) :
CameraService::BasicClient(cameraService,
IInterface::asBinder(cameraClient),
- opPackageName,
+ clientPackageName,
cameraId, cameraFacing,
clientPid, clientUid,
servicePid)
@@ -1734,11 +1776,11 @@
CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
const sp<IBinder>& remoteCallback,
- const String16& opPackageName,
+ const String16& clientPackageName,
int cameraId, int cameraFacing,
int clientPid, uid_t clientUid,
int servicePid):
- mOpPackageName(opPackageName), mDisconnected(false)
+ mClientPackageName(clientPackageName), mDisconnected(false)
{
mCameraService = cameraService;
mRemoteBinder = remoteCallback;
@@ -1766,7 +1808,7 @@
mCameraService->removeByClient(this);
mCameraService->logDisconnected(String8::format("%d", mCameraId), mClientPid,
- String8(mOpPackageName));
+ String8(mClientPackageName));
sp<IBinder> remote = getRemote();
if (remote != nullptr) {
@@ -1781,7 +1823,7 @@
}
String16 CameraService::BasicClient::getPackageName() const {
- return mOpPackageName;
+ return mClientPackageName;
}
@@ -1789,6 +1831,10 @@
return mClientPid;
}
+uid_t CameraService::BasicClient::getClientUid() const {
+ return mClientUid;
+}
+
bool CameraService::BasicClient::canCastToApiClient(apiLevel level) const {
// Defaults to API2.
return level == API_2;
@@ -1801,20 +1847,27 @@
{
ALOGV("%s: Start camera ops, package name = %s, client UID = %d",
- __FUNCTION__, String8(mOpPackageName).string(), mClientUid);
+ __FUNCTION__, String8(mClientPackageName).string(), mClientUid);
}
mAppOpsManager.startWatchingMode(AppOpsManager::OP_CAMERA,
- mOpPackageName, mOpsCallback);
+ mClientPackageName, mOpsCallback);
res = mAppOpsManager.startOp(AppOpsManager::OP_CAMERA,
- mClientUid, mOpPackageName);
+ mClientUid, mClientPackageName);
- if (res != AppOpsManager::MODE_ALLOWED) {
+ if (res == AppOpsManager::MODE_ERRORED) {
ALOGI("Camera %d: Access for \"%s\" has been revoked",
- mCameraId, String8(mOpPackageName).string());
+ mCameraId, String8(mClientPackageName).string());
return PERMISSION_DENIED;
}
+ if (res == AppOpsManager::MODE_IGNORED) {
+ ALOGI("Camera %d: Access for \"%s\" has been restricted",
+ mCameraId, String8(mClientPackageName).string());
+ // Return the same error as for device policy manager rejection
+ return -EACCES;
+ }
+
mOpsActive = true;
// Transition device availability listeners from PRESENT -> NOT_AVAILABLE
@@ -1829,7 +1882,7 @@
if (mOpsActive) {
// Notify app ops that the camera is available again
mAppOpsManager.finishOp(AppOpsManager::OP_CAMERA, mClientUid,
- mOpPackageName);
+ mClientPackageName);
mOpsActive = false;
auto rejected = {ICameraServiceListener::STATUS_NOT_PRESENT,
@@ -1854,7 +1907,7 @@
void CameraService::BasicClient::opChanged(int32_t op, const String16& packageName) {
String8 name(packageName);
- String8 myName(mOpPackageName);
+ String8 myName(mClientPackageName);
if (op != AppOpsManager::OP_CAMERA) {
ALOGW("Unexpected app ops notification received: %d", op);
@@ -1863,7 +1916,7 @@
int32_t res;
res = mAppOpsManager.checkOp(AppOpsManager::OP_CAMERA,
- mClientUid, mOpPackageName);
+ mClientUid, mClientPackageName);
ALOGV("checkOp returns: %d, %s ", res,
res == AppOpsManager::MODE_ALLOWED ? "ALLOWED" :
res == AppOpsManager::MODE_IGNORED ? "IGNORED" :
@@ -1958,9 +2011,40 @@
}
// ----------------------------------------------------------------------------
+// ClientEventListener
+// ----------------------------------------------------------------------------
+
+void CameraService::ClientEventListener::onClientAdded(
+ const resource_policy::ClientDescriptor<String8,
+ sp<CameraService::BasicClient>>& descriptor) {
+ auto basicClient = descriptor.getValue();
+ if (basicClient.get() != nullptr) {
+ BatteryNotifier& notifier(BatteryNotifier::getInstance());
+ notifier.noteStartCamera(descriptor.getKey(),
+ static_cast<int>(basicClient->getClientUid()));
+ }
+}
+
+void CameraService::ClientEventListener::onClientRemoved(
+ const resource_policy::ClientDescriptor<String8,
+ sp<CameraService::BasicClient>>& descriptor) {
+ auto basicClient = descriptor.getValue();
+ if (basicClient.get() != nullptr) {
+ BatteryNotifier& notifier(BatteryNotifier::getInstance());
+ notifier.noteStopCamera(descriptor.getKey(),
+ static_cast<int>(basicClient->getClientUid()));
+ }
+}
+
+
+// ----------------------------------------------------------------------------
// CameraClientManager
// ----------------------------------------------------------------------------
+CameraService::CameraClientManager::CameraClientManager() {
+ setListener(std::make_shared<ClientEventListener>());
+}
+
CameraService::CameraClientManager::~CameraClientManager() {}
sp<CameraService::BasicClient> CameraService::CameraClientManager::getCameraClient(
@@ -1985,12 +2069,18 @@
auto conflicting = i->getConflicting();
auto clientSp = i->getValue();
String8 packageName;
+ userid_t clientUserId = 0;
if (clientSp.get() != nullptr) {
packageName = String8{clientSp->getPackageName()};
+ uid_t clientUid = clientSp->getClientUid();
+ clientUserId = multiuser_get_user_id(clientUid);
}
ret.appendFormat("\n(Camera ID: %s, Cost: %" PRId32 ", PID: %" PRId32 ", Priority: %"
PRId32 ", ", key.string(), cost, pid, priority);
+ if (clientSp.get() != nullptr) {
+ ret.appendFormat("User Id: %d, ", clientUserId);
+ }
if (packageName.size() != 0) {
ret.appendFormat("Client Package Name: %s", packageName.string());
}
@@ -2073,6 +2163,7 @@
result.appendFormat("Number of camera devices: %d\n", mNumberOfCameras);
String8 activeClientString = mActiveClientManager.toString();
result.appendFormat("Active Camera Clients:\n%s", activeClientString.string());
+ result.appendFormat("Allowed users:\n%s\n", toString(mAllowedUsers).string());
sp<VendorTagDescriptor> desc = VendorTagDescriptor::getGlobalVendorTagDescriptor();
if (desc == NULL) {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 502fcfa..3298772 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -38,14 +38,15 @@
#include "CameraFlashlight.h"
#include "common/CameraModule.h"
+#include "media/RingBuffer.h"
#include "utils/AutoConditionLock.h"
#include "utils/ClientManager.h"
-#include "utils/RingBuffer.h"
#include <set>
#include <string>
#include <map>
#include <memory>
+#include <utility>
namespace android {
@@ -71,22 +72,8 @@
API_2 = 2
};
- // Process States (mirrors frameworks/base/core/java/android/app/ActivityManager.java)
+ // Process state (mirrors frameworks/base/core/java/android/app/ActivityManager.java)
static const int PROCESS_STATE_NONEXISTENT = -1;
- static const int PROCESS_STATE_PERSISTENT = 0;
- static const int PROCESS_STATE_PERSISTENT_UI = 1;
- static const int PROCESS_STATE_TOP = 2;
- static const int PROCESS_STATE_IMPORTANT_FOREGROUND = 3;
- static const int PROCESS_STATE_IMPORTANT_BACKGROUND = 4;
- static const int PROCESS_STATE_BACKUP = 5;
- static const int PROCESS_STATE_HEAVY_WEIGHT = 6;
- static const int PROCESS_STATE_SERVICE = 7;
- static const int PROCESS_STATE_RECEIVER = 8;
- static const int PROCESS_STATE_HOME = 9;
- static const int PROCESS_STATE_LAST_ACTIVITY = 10;
- static const int PROCESS_STATE_CACHED_ACTIVITY = 11;
- static const int PROCESS_STATE_CACHED_ACTIVITY_CLIENT = 12;
- static const int PROCESS_STATE_CACHED_EMPTY = 13;
// 3 second busy timeout when other clients are connecting
static const nsecs_t DEFAULT_CONNECT_TIMEOUT_NS = 3000000000;
@@ -97,11 +84,6 @@
// Default number of messages to store in eviction log
static const size_t DEFAULT_EVENT_LOG_LENGTH = 100;
- enum {
- // Default last user id
- DEFAULT_LAST_USER_ID = 0,
- };
-
// Implementation of BinderService<T>
static char const* getServiceName() { return "media.camera"; }
@@ -126,19 +108,19 @@
virtual status_t getCameraVendorTagDescriptor(/*out*/ sp<VendorTagDescriptor>& desc);
virtual status_t connect(const sp<ICameraClient>& cameraClient, int cameraId,
- const String16& opPackageName, int clientUid,
+ const String16& clientPackageName, int clientUid,
/*out*/
sp<ICamera>& device);
virtual status_t connectLegacy(const sp<ICameraClient>& cameraClient, int cameraId,
- int halVersion, const String16& opPackageName, int clientUid,
+ int halVersion, const String16& clientPackageName, int clientUid,
/*out*/
sp<ICamera>& device);
virtual status_t connectDevice(
const sp<ICameraDeviceCallbacks>& cameraCb,
int cameraId,
- const String16& opPackageName,
+ const String16& clientPackageName,
int clientUid,
/*out*/
sp<ICameraDeviceUser>& device);
@@ -155,7 +137,7 @@
virtual status_t setTorchMode(const String16& cameraId, bool enabled,
const sp<IBinder>& clientBinder);
- virtual void notifySystemEvent(int eventId, int arg0);
+ virtual void notifySystemEvent(int32_t eventId, const int32_t* args, size_t length);
// OK = supports api of that version, -EOPNOTSUPP = does not support
virtual status_t supportsCameraApi(
@@ -214,6 +196,9 @@
virtual void notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
const CaptureResultExtras& resultExtras) = 0;
+ // Get the UID of the application client using this
+ virtual uid_t getClientUid() const;
+
// Get the PID of the application client using this
virtual int getClientPid() const;
@@ -223,7 +208,7 @@
protected:
BasicClient(const sp<CameraService>& cameraService,
const sp<IBinder>& remoteCallback,
- const String16& opPackageName,
+ const String16& clientPackageName,
int cameraId,
int cameraFacing,
int clientPid,
@@ -242,7 +227,7 @@
sp<CameraService> mCameraService; // immutable after constructor
int mCameraId; // immutable after constructor
int mCameraFacing; // immutable after constructor
- const String16 mOpPackageName;
+ const String16 mClientPackageName;
pid_t mClientPid;
uid_t mClientUid; // immutable after constructor
pid_t mServicePid; // immutable after constructor
@@ -309,7 +294,7 @@
// Interface used by CameraService
Client(const sp<CameraService>& cameraService,
const sp<ICameraClient>& cameraClient,
- const String16& opPackageName,
+ const String16& clientPackageName,
int cameraId,
int cameraFacing,
int clientPid,
@@ -343,6 +328,20 @@
}; // class Client
+ /**
+ * A listener class that implements the LISTENER interface for use with a ClientManager, and
+ * implements the following methods:
+ * void onClientRemoved(const ClientDescriptor<KEY, VALUE>& descriptor);
+ * void onClientAdded(const ClientDescriptor<KEY, VALUE>& descriptor);
+ */
+ class ClientEventListener {
+ public:
+ void onClientAdded(const resource_policy::ClientDescriptor<String8,
+ sp<CameraService::BasicClient>>& descriptor);
+ void onClientRemoved(const resource_policy::ClientDescriptor<String8,
+ sp<CameraService::BasicClient>>& descriptor);
+ }; // class ClientEventListener
+
typedef std::shared_ptr<resource_policy::ClientDescriptor<String8,
sp<CameraService::BasicClient>>> DescriptorPtr;
@@ -354,9 +353,10 @@
* This class manages the eviction behavior for the camera clients. See the parent class
* implementation in utils/ClientManager for the specifics of this behavior.
*/
- class CameraClientManager :
- public resource_policy::ClientManager<String8, sp<CameraService::BasicClient>> {
+ class CameraClientManager : public resource_policy::ClientManager<String8,
+ sp<CameraService::BasicClient>, ClientEventListener> {
public:
+ CameraClientManager();
virtual ~CameraClientManager();
/**
@@ -480,10 +480,9 @@
// Single implementation shared between the various connect calls
template<class CALLBACK, class CLIENT>
status_t connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId, int halVersion,
- const String16& opPackageName, int clientUid, apiLevel effectiveApiLevel,
+ const String16& clientPackageName, int clientUid, apiLevel effectiveApiLevel,
bool legacyMode, bool shimUpdateOnly, /*out*/sp<CLIENT>& device);
-
// Lock guarding camera service state
Mutex mServiceLock;
@@ -506,8 +505,8 @@
RingBuffer<String8> mEventLog;
Mutex mLogLock;
- // UID of last user.
- int mLastUserId;
+ // Currently allowed user IDs
+ std::set<userid_t> mAllowedUsers;
/**
* Get the camera state for a given camera id.
@@ -556,7 +555,7 @@
/**
* Handle a notification that the current device user has changed.
*/
- void doUserSwitch(int newUserId);
+ void doUserSwitch(const int32_t* newUserId, size_t length);
/**
* Add an event log message.
@@ -582,7 +581,8 @@
/**
* Add an event log message that the current device user has been switched.
*/
- void logUserSwitch(int oldUserId, int newUserId);
+ void logUserSwitch(const std::set<userid_t>& oldUserIds,
+ const std::set<userid_t>& newUserIds);
/**
* Add an event log message that a device has been removed by the HAL
@@ -640,13 +640,15 @@
sp<CameraFlashlight> mFlashlight;
// guard mTorchStatusMap
Mutex mTorchStatusMutex;
- // guard mTorchClientMap
+ // guard mTorchClientMap, mTorchUidMap
Mutex mTorchClientMapMutex;
// camera id -> torch status
KeyedVector<String8, ICameraServiceListener::TorchStatus> mTorchStatusMap;
// camera id -> torch client binder
// only store the last client that turns on each camera's torch mode
- KeyedVector<String8, sp<IBinder> > mTorchClientMap;
+ KeyedVector<String8, sp<IBinder>> mTorchClientMap;
+ // camera id -> [incoming uid, current uid] pair
+ std::map<String8, std::pair<int, int>> mTorchUidMap;
// check and handle if torch client's process has died
void handleTorchClientBinderDied(const wp<IBinder> &who);
@@ -715,6 +717,11 @@
/*out*/sp<BasicClient>* client);
status_t checkCameraAccess(const String16& opPackageName);
+
+ static String8 toString(std::set<userid_t> intSet);
+
+ static void pingCameraServiceProxy();
+
};
template<class Func>
@@ -763,11 +770,11 @@
template<class CALLBACK, class CLIENT>
status_t CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
- int halVersion, const String16& opPackageName, int clientUid,
+ int halVersion, const String16& clientPackageName, int clientUid,
apiLevel effectiveApiLevel, bool legacyMode, bool shimUpdateOnly,
/*out*/sp<CLIENT>& device) {
status_t ret = NO_ERROR;
- String8 clientName8(opPackageName);
+ String8 clientName8(clientPackageName);
int clientPid = getCallingPid();
ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) for HAL version %s and "
@@ -791,15 +798,6 @@
if((ret = validateConnectLocked(cameraId, /*inout*/clientUid)) != NO_ERROR) {
return ret;
}
- int userId = multiuser_get_user_id(clientUid);
-
- if (userId != mLastUserId && clientPid != getpid() ) {
- // If no previous user ID had been set, set to the user of the caller.
- logUserSwitch(mLastUserId, userId);
- LOG_ALWAYS_FATAL_IF(mLastUserId != DEFAULT_LAST_USER_ID,
- "Invalid state: Should never update user ID here unless was default");
- mLastUserId = userId;
- }
// Check the shim parameters after acquiring lock, if they have already been updated and
// we were doing a shim update, return immediately
@@ -838,7 +836,7 @@
int facing = -1;
int deviceVersion = getDeviceVersion(id, /*out*/&facing);
sp<BasicClient> tmp = nullptr;
- if((ret = makeClient(this, cameraCb, opPackageName, cameraId, facing, clientPid,
+ if((ret = makeClient(this, cameraCb, clientPackageName, cameraId, facing, clientPid,
clientUid, getpid(), legacyMode, halVersion, deviceVersion, effectiveApiLevel,
/*out*/&tmp)) != NO_ERROR) {
return ret;
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 05ede92..e109595 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -529,7 +529,7 @@
if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
sp<IBinder> binder;
- sp<ANativeWindow> window;
+ sp<Surface> window;
if (bufferProducer != 0) {
binder = IInterface::asBinder(bufferProducer);
// Using controlledByApp flag to ensure that the buffer queue remains in
@@ -541,7 +541,7 @@
}
status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
- sp<ANativeWindow> window) {
+ sp<Surface> window) {
ATRACE_CALL();
status_t res;
@@ -666,7 +666,7 @@
status_t res;
if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
- sp<ANativeWindow> window;
+ sp<Surface> window;
if (callbackProducer != 0) {
window = new Surface(callbackProducer);
}
@@ -1559,6 +1559,9 @@
return commandPingL();
case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:
return commandSetVideoBufferCountL(arg1);
+ case CAMERA_CMD_SET_VIDEO_FORMAT:
+ return commandSetVideoFormatL(arg1,
+ static_cast<android_dataspace>(arg2));
default:
ALOGE("%s: Unknown command %d (arguments %d, %d)",
__FUNCTION__, cmd, arg1, arg2);
@@ -1710,6 +1713,17 @@
return mStreamingProcessor->setRecordingBufferCount(count);
}
+status_t Camera2Client::commandSetVideoFormatL(int format,
+ android_dataspace dataspace) {
+ if (recordingEnabledL()) {
+ ALOGE("%s: Camera %d: Error setting video format after "
+ "recording was started", __FUNCTION__, mCameraId);
+ return INVALID_OPERATION;
+ }
+
+ return mStreamingProcessor->setRecordingFormat(format, dataspace);
+}
+
void Camera2Client::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
const CaptureResultExtras& resultExtras) {
int32_t err = CAMERA_ERROR_UNKNOWN;
@@ -1881,6 +1895,16 @@
mCaptureSequencer->notifyAutoExposure(newState, triggerId);
}
+void Camera2Client::notifyShutter(const CaptureResultExtras& resultExtras,
+ nsecs_t timestamp) {
+ (void)resultExtras;
+ (void)timestamp;
+
+ ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
+ __FUNCTION__, resultExtras.requestId, timestamp);
+ mCaptureSequencer->notifyShutter(resultExtras, timestamp);
+}
+
camera2::SharedParameters& Camera2Client::getParameters() {
return mParameters;
}
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index a988037..c288313 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -106,6 +106,8 @@
virtual void notifyAutoFocus(uint8_t newState, int triggerId);
virtual void notifyAutoExposure(uint8_t newState, int triggerId);
+ virtual void notifyShutter(const CaptureResultExtras& resultExtras,
+ nsecs_t timestamp);
/**
* Interface used by independent components of Camera2Client.
@@ -148,7 +150,7 @@
typedef camera2::Parameters Parameters;
status_t setPreviewWindowL(const sp<IBinder>& binder,
- sp<ANativeWindow> window);
+ sp<Surface> window);
status_t startPreviewL(Parameters ¶ms, bool restart);
void stopPreviewL();
status_t startRecordingL(Parameters ¶ms, bool restart);
@@ -165,6 +167,7 @@
status_t commandEnableFocusMoveMsgL(bool enable);
status_t commandPingL();
status_t commandSetVideoBufferCountL(size_t count);
+ status_t commandSetVideoFormatL(int format, android_dataspace dataSpace);
// Current camera device configuration
camera2::SharedParameters mParameters;
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 88c5811..5f4fb22 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -55,7 +55,7 @@
}
status_t CallbackProcessor::setCallbackWindow(
- sp<ANativeWindow> callbackWindow) {
+ sp<Surface> callbackWindow) {
ATRACE_CALL();
status_t res;
@@ -115,7 +115,7 @@
BufferQueue::createBufferQueue(&producer, &consumer);
mCallbackConsumer = new CpuConsumer(consumer, kCallbackHeapCount);
mCallbackConsumer->setFrameAvailableListener(this);
- mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer"));
+ mCallbackConsumer->setName(String8("Camera2-CallbackConsumer"));
mCallbackWindow = new Surface(producer);
}
@@ -123,7 +123,7 @@
// Check if stream parameters have to change
uint32_t currentWidth, currentHeight, currentFormat;
res = device->getStreamInfo(mCallbackStreamId,
- ¤tWidth, ¤tHeight, ¤tFormat);
+ ¤tWidth, ¤tHeight, ¤tFormat, 0);
if (res != OK) {
ALOGE("%s: Camera %d: Error querying callback output stream info: "
"%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
index 7fdc329..a290536 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
@@ -47,7 +47,7 @@
void onFrameAvailable(const BufferItem& item);
// Set to NULL to disable the direct-to-app callback window
- status_t setCallbackWindow(sp<ANativeWindow> callbackWindow);
+ status_t setCallbackWindow(sp<Surface> callbackWindow);
status_t updateStream(const Parameters ¶ms);
status_t deleteStream();
int getStreamId() const;
@@ -73,7 +73,7 @@
int mCallbackStreamId;
static const size_t kCallbackHeapCount = 6;
sp<CpuConsumer> mCallbackConsumer;
- sp<ANativeWindow> mCallbackWindow;
+ sp<Surface> mCallbackWindow;
sp<Camera2Heap> mCallbackHeap;
int mCallbackHeapId;
size_t mCallbackHeapHead, mCallbackHeapFree;
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index 9849f4d..d847e0f 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -43,6 +43,8 @@
mNewFrameReceived(false),
mNewCaptureReceived(false),
mShutterNotified(false),
+ mHalNotifiedShutter(false),
+ mShutterCaptureId(-1),
mClient(client),
mCaptureState(IDLE),
mStateTransitionCount(0),
@@ -106,6 +108,16 @@
}
}
+void CaptureSequencer::notifyShutter(const CaptureResultExtras& resultExtras,
+ nsecs_t timestamp) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mInputMutex);
+ if (!mHalNotifiedShutter && resultExtras.requestId == mShutterCaptureId) {
+ mHalNotifiedShutter = true;
+ mShutterNotifySignal.signal();
+ }
+}
+
void CaptureSequencer::onResultAvailable(const CaptureResult &result) {
ATRACE_CALL();
ALOGV("%s: New result available.", __FUNCTION__);
@@ -335,6 +347,11 @@
} else {
nextState = STANDARD_START;
}
+ {
+ Mutex::Autolock l(mInputMutex);
+ mShutterCaptureId = mCaptureId;
+ mHalNotifiedShutter = false;
+ }
mShutterNotified = false;
return nextState;
@@ -541,6 +558,7 @@
return DONE;
}
}
+
// TODO: Capture should be atomic with setStreamingRequest here
res = client->getCameraDevice()->capture(captureCopy);
if (res != OK) {
@@ -560,6 +578,31 @@
ATRACE_CALL();
Mutex::Autolock l(mInputMutex);
+
+ // Wait for shutter callback
+ while (!mHalNotifiedShutter) {
+ if (mTimeoutCount <= 0) {
+ break;
+ }
+ res = mShutterNotifySignal.waitRelative(mInputMutex, kWaitDuration);
+ if (res == TIMED_OUT) {
+ mTimeoutCount--;
+ return STANDARD_CAPTURE_WAIT;
+ }
+ }
+
+ if (mHalNotifiedShutter) {
+ if (!mShutterNotified) {
+ SharedParameters::Lock l(client->getParameters());
+ /* warning: this also locks a SharedCameraCallbacks */
+ shutterNotifyLocked(l.mParameters, client, mMsgType);
+ mShutterNotified = true;
+ }
+ } else if (mTimeoutCount <= 0) {
+ ALOGW("Timed out waiting for shutter notification");
+ return DONE;
+ }
+
// Wait for new metadata result (mNewFrame)
while (!mNewFrameReceived) {
res = mNewFrameSignal.waitRelative(mInputMutex, kWaitDuration);
@@ -569,15 +612,6 @@
}
}
- // Approximation of the shutter being closed
- // - TODO: use the hal3 exposure callback in Camera3Device instead
- if (mNewFrameReceived && !mShutterNotified) {
- SharedParameters::Lock l(client->getParameters());
- /* warning: this also locks a SharedCameraCallbacks */
- shutterNotifyLocked(l.mParameters, client, mMsgType);
- mShutterNotified = true;
- }
-
// Wait until jpeg was captured by JpegProcessor
while (mNewFrameReceived && !mNewCaptureReceived) {
res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration);
@@ -591,6 +625,7 @@
return DONE;
}
if (mNewFrameReceived && mNewCaptureReceived) {
+
if (mNewFrameId != mCaptureId) {
ALOGW("Mismatched capture frame IDs: Expected %d, got %d",
mCaptureId, mNewFrameId);
@@ -667,7 +702,6 @@
sp<Camera2Client> &/*client*/) {
status_t res;
ATRACE_CALL();
-
while (!mNewCaptureReceived) {
res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration);
if (res == TIMED_OUT) {
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
index d42ab13..10252fb 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
@@ -62,6 +62,10 @@
// Notifications about AE state changes
void notifyAutoExposure(uint8_t newState, int triggerId);
+ // Notifications about shutter (capture start)
+ void notifyShutter(const CaptureResultExtras& resultExtras,
+ nsecs_t timestamp);
+
// Notification from the frame processor
virtual void onResultAvailable(const CaptureResult &result);
@@ -95,7 +99,10 @@
sp<MemoryBase> mCaptureBuffer;
Condition mNewCaptureSignal;
- bool mShutterNotified;
+ bool mShutterNotified; // Has CaptureSequencer sent shutter to Client
+ bool mHalNotifiedShutter; // Has HAL sent shutter to CaptureSequencer
+ int32_t mShutterCaptureId; // The captureId which is waiting for shutter notification
+ Condition mShutterNotifySignal;
/**
* Internal to CaptureSequencer
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 34798bf..bd9786f 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -87,7 +87,7 @@
BufferQueue::createBufferQueue(&producer, &consumer);
mCaptureConsumer = new CpuConsumer(consumer, 1);
mCaptureConsumer->setFrameAvailableListener(this);
- mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
+ mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
mCaptureWindow = new Surface(producer);
}
@@ -115,7 +115,7 @@
// Check if stream parameters have to change
uint32_t currentWidth, currentHeight;
res = device->getStreamInfo(mCaptureStreamId,
- ¤tWidth, ¤tHeight, 0);
+ ¤tWidth, ¤tHeight, 0, 0);
if (res != OK) {
ALOGE("%s: Camera %d: Error querying capture output stream info: "
"%s (%d)", __FUNCTION__,
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.h b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
index 2040b30..fbdae11 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
@@ -70,8 +70,8 @@
int mCaptureStreamId;
sp<CpuConsumer> mCaptureConsumer;
- sp<ANativeWindow> mCaptureWindow;
- sp<MemoryHeapBase> mCaptureHeap;
+ sp<Surface> mCaptureWindow;
+ sp<MemoryHeapBase> mCaptureHeap;
virtual bool threadLoop();
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index b6071f6..66d7b00 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -25,11 +25,12 @@
#define ALOGVV(...) ((void)0)
#endif
+#include <cutils/properties.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/BufferItem.h>
#include <gui/Surface.h>
-#include <media/hardware/MetadataBufferType.h>
+#include <media/hardware/HardwareAPI.h>
#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
@@ -51,7 +52,10 @@
mRecordingStreamId(NO_STREAM),
mRecordingFrameAvailable(false),
mRecordingHeapCount(kDefaultRecordingHeapCount),
- mRecordingHeapFree(kDefaultRecordingHeapCount)
+ mRecordingHeapFree(kDefaultRecordingHeapCount),
+ mRecordingFormat(kDefaultRecordingFormat),
+ mRecordingDataSpace(kDefaultRecordingDataSpace),
+ mRecordingGrallocUsage(kDefaultRecordingGrallocUsage)
{
}
@@ -60,7 +64,7 @@
deleteRecordingStream();
}
-status_t StreamingProcessor::setPreviewWindow(sp<ANativeWindow> window) {
+status_t StreamingProcessor::setPreviewWindow(sp<Surface> window) {
ATRACE_CALL();
status_t res;
@@ -151,7 +155,7 @@
// Check if stream parameters have to change
uint32_t currentWidth, currentHeight;
res = device->getStreamInfo(mPreviewStreamId,
- ¤tWidth, ¤tHeight, 0);
+ ¤tWidth, ¤tHeight, 0, 0);
if (res != OK) {
ALOGE("%s: Camera %d: Error querying preview stream info: "
"%s (%d)", __FUNCTION__, mId, strerror(-res), res);
@@ -280,6 +284,46 @@
return OK;
}
+status_t StreamingProcessor::setRecordingFormat(int format,
+ android_dataspace dataSpace) {
+ ATRACE_CALL();
+
+ Mutex::Autolock m(mMutex);
+
+ ALOGV("%s: Camera %d: New recording format/dataspace from encoder: %X, %X",
+ __FUNCTION__, mId, format, dataSpace);
+
+ mRecordingFormat = format;
+ mRecordingDataSpace = dataSpace;
+ int prevGrallocUsage = mRecordingGrallocUsage;
+ if (mRecordingFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+ mRecordingGrallocUsage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
+ } else {
+ mRecordingGrallocUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ }
+
+ ALOGV("%s: Camera %d: New recording gralloc usage: %08X", __FUNCTION__, mId,
+ mRecordingGrallocUsage);
+
+ if (prevGrallocUsage != mRecordingGrallocUsage) {
+ ALOGV("%s: Camera %d: Resetting recording consumer for new usage",
+ __FUNCTION__, mId);
+
+ if (isStreamActive(mActiveStreamIds, mRecordingStreamId)) {
+ ALOGE("%s: Camera %d: Changing recording format when "
+ "recording stream is already active!", __FUNCTION__,
+ mId);
+ return INVALID_OPERATION;
+ }
+
+ releaseAllRecordingFramesLocked();
+
+ mRecordingConsumer.clear();
+ }
+
+ return OK;
+}
+
status_t StreamingProcessor::updateRecordingRequest(const Parameters ¶ms) {
ATRACE_CALL();
status_t res;
@@ -340,9 +384,10 @@
return INVALID_OPERATION;
}
- uint32_t currentWidth, currentHeight;
+ uint32_t currentWidth, currentHeight, currentFormat;
+ android_dataspace currentDataSpace;
res = device->getStreamInfo(mRecordingStreamId,
- &currentWidth, &currentHeight, 0);
+ &currentWidth, &currentHeight, &currentFormat, &currentDataSpace);
if (res != OK) {
ALOGE("%s: Camera %d: Error querying recording output stream info: "
"%s (%d)", __FUNCTION__, mId,
@@ -350,8 +395,11 @@
return res;
}
- if (mRecordingConsumer == 0 || currentWidth != (uint32_t)params.videoWidth ||
- currentHeight != (uint32_t)params.videoHeight) {
+ if (mRecordingConsumer == 0 ||
+ currentWidth != (uint32_t)params.videoWidth ||
+ currentHeight != (uint32_t)params.videoHeight ||
+ currentFormat != (uint32_t)mRecordingFormat ||
+ currentDataSpace != mRecordingDataSpace) {
*needsUpdate = true;
}
*needsUpdate = false;
@@ -380,7 +428,7 @@
sp<IGraphicBufferConsumer> consumer;
BufferQueue::createBufferQueue(&producer, &consumer);
mRecordingConsumer = new BufferItemConsumer(consumer,
- GRALLOC_USAGE_HW_VIDEO_ENCODER,
+ mRecordingGrallocUsage,
mRecordingHeapCount + 1);
mRecordingConsumer->setFrameAvailableListener(this);
mRecordingConsumer->setName(String8("Camera2-RecordingConsumer"));
@@ -392,8 +440,11 @@
if (mRecordingStreamId != NO_STREAM) {
// Check if stream parameters have to change
uint32_t currentWidth, currentHeight;
+ uint32_t currentFormat;
+ android_dataspace currentDataSpace;
res = device->getStreamInfo(mRecordingStreamId,
- &currentWidth, &currentHeight, 0);
+ &currentWidth, &currentHeight,
+ &currentFormat, &currentDataSpace);
if (res != OK) {
ALOGE("%s: Camera %d: Error querying recording output stream info: "
"%s (%d)", __FUNCTION__, mId,
@@ -401,7 +452,10 @@
return res;
}
if (currentWidth != (uint32_t)params.videoWidth ||
- currentHeight != (uint32_t)params.videoHeight || newConsumer) {
+ currentHeight != (uint32_t)params.videoHeight ||
+ currentFormat != (uint32_t)mRecordingFormat ||
+ currentDataSpace != mRecordingDataSpace ||
+ newConsumer) {
// TODO: Should wait to be sure previous recording has finished
res = device->deleteStream(mRecordingStreamId);
@@ -422,11 +476,9 @@
if (mRecordingStreamId == NO_STREAM) {
mRecordingFrameCount = 0;
- // Selecting BT.709 colorspace by default
- // TODO: Wire this in from encoder side
res = device->createStream(mRecordingWindow,
params.videoWidth, params.videoHeight,
- CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, HAL_DATASPACE_BT709,
+ mRecordingFormat, mRecordingDataSpace,
CAMERA3_STREAM_ROTATION_0, &mRecordingStreamId);
if (res != OK) {
ALOGE("%s: Camera %d: Can't create output stream for recording: "
@@ -722,12 +774,12 @@
}
if (mRecordingHeap == 0) {
- const size_t bufferSize = 4 + sizeof(buffer_handle_t);
+ size_t payloadSize = sizeof(VideoNativeMetadata);
ALOGV("%s: Camera %d: Creating recording heap with %zu buffers of "
"size %zu bytes", __FUNCTION__, mId,
- mRecordingHeapCount, bufferSize);
+ mRecordingHeapCount, payloadSize);
- mRecordingHeap = new Camera2Heap(bufferSize, mRecordingHeapCount,
+ mRecordingHeap = new Camera2Heap(payloadSize, mRecordingHeapCount,
"Camera2Client::RecordingHeap");
if (mRecordingHeap->mHeap->getSize() == 0) {
ALOGE("%s: Camera %d: Unable to allocate memory for recording",
@@ -750,7 +802,7 @@
mRecordingHeapFree = mRecordingHeapCount;
}
- if ( mRecordingHeapFree == 0) {
+ if (mRecordingHeapFree == 0) {
ALOGE("%s: Camera %d: No free recording buffers, dropping frame",
__FUNCTION__, mId);
mRecordingConsumer->releaseBuffer(imgBuffer);
@@ -770,13 +822,15 @@
mRecordingHeap->mBuffers[heapIdx]->getMemory(&offset,
&size);
- uint8_t *data = (uint8_t*)heap->getBase() + offset;
- uint32_t type = kMetadataBufferTypeGrallocSource;
- *((uint32_t*)data) = type;
- *((buffer_handle_t*)(data + 4)) = imgBuffer.mGraphicBuffer->handle;
- ALOGVV("%s: Camera %d: Sending out buffer_handle_t %p",
- __FUNCTION__, mId,
- imgBuffer.mGraphicBuffer->handle);
+ VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
+ (uint8_t*)heap->getBase() + offset);
+ payload->eType = kMetadataBufferTypeANWBuffer;
+ payload->pBuffer = imgBuffer.mGraphicBuffer->getNativeBuffer();
+ payload->nFenceFd = -1;
+
+ ALOGVV("%s: Camera %d: Sending out ANWBuffer %p",
+ __FUNCTION__, mId, payload->pBuffer);
+
mRecordingBuffers.replaceAt(imgBuffer, heapIdx);
recordingHeap = mRecordingHeap;
}
@@ -809,42 +863,42 @@
heap->getHeapID(), mRecordingHeap->mHeap->getHeapID());
return;
}
- uint8_t *data = (uint8_t*)heap->getBase() + offset;
- uint32_t type = *(uint32_t*)data;
- if (type != kMetadataBufferTypeGrallocSource) {
+
+ VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
+ (uint8_t*)heap->getBase() + offset);
+
+ if (payload->eType != kMetadataBufferTypeANWBuffer) {
ALOGE("%s: Camera %d: Recording frame type invalid (got %x, expected %x)",
- __FUNCTION__, mId, type,
- kMetadataBufferTypeGrallocSource);
+ __FUNCTION__, mId, payload->eType,
+ kMetadataBufferTypeANWBuffer);
return;
}
// Release the buffer back to the recording queue
-
- buffer_handle_t imgHandle = *(buffer_handle_t*)(data + 4);
-
size_t itemIndex;
for (itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) {
const BufferItem item = mRecordingBuffers[itemIndex];
if (item.mBuf != BufferItemConsumer::INVALID_BUFFER_SLOT &&
- item.mGraphicBuffer->handle == imgHandle) {
- break;
+ item.mGraphicBuffer->getNativeBuffer() == payload->pBuffer) {
+ break;
}
}
+
if (itemIndex == mRecordingBuffers.size()) {
- ALOGE("%s: Camera %d: Can't find buffer_handle_t %p in list of "
+ ALOGE("%s: Camera %d: Can't find returned ANW Buffer %p in list of "
"outstanding buffers", __FUNCTION__, mId,
- imgHandle);
+ payload->pBuffer);
return;
}
- ALOGVV("%s: Camera %d: Freeing buffer_handle_t %p", __FUNCTION__,
- mId, imgHandle);
+ ALOGVV("%s: Camera %d: Freeing returned ANW buffer %p index %d", __FUNCTION__,
+ mId, payload->pBuffer, itemIndex);
res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to free recording frame "
- "(buffer_handle_t: %p): %s (%d)", __FUNCTION__,
- mId, imgHandle, strerror(-res), res);
+ "(Returned ANW buffer: %p): %s (%d)", __FUNCTION__,
+ mId, payload->pBuffer, strerror(-res), res);
return;
}
mRecordingBuffers.replaceAt(itemIndex);
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
index 2474062..e0cad3a 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
@@ -43,7 +43,7 @@
StreamingProcessor(sp<Camera2Client> client);
~StreamingProcessor();
- status_t setPreviewWindow(sp<ANativeWindow> window);
+ status_t setPreviewWindow(sp<Surface> window);
bool haveValidPreviewWindow() const;
@@ -53,6 +53,8 @@
int getPreviewStreamId() const;
status_t setRecordingBufferCount(size_t count);
+ status_t setRecordingFormat(int format, android_dataspace_t dataspace);
+
status_t updateRecordingRequest(const Parameters &params);
// If needsUpdate is set to true, a updateRecordingStream call with params will recreate
// recording stream
@@ -106,7 +108,7 @@
int32_t mPreviewRequestId;
int mPreviewStreamId;
CameraMetadata mPreviewRequest;
- sp<ANativeWindow> mPreviewWindow;
+ sp<Surface> mPreviewWindow;
// Recording-related members
static const nsecs_t kWaitDuration = 50000000; // 50 ms
@@ -115,7 +117,7 @@
int mRecordingStreamId;
int mRecordingFrameCount;
sp<BufferItemConsumer> mRecordingConsumer;
- sp<ANativeWindow> mRecordingWindow;
+ sp<Surface> mRecordingWindow;
CameraMetadata mRecordingRequest;
sp<camera2::Camera2Heap> mRecordingHeap;
@@ -127,6 +129,18 @@
Vector<BufferItem> mRecordingBuffers;
size_t mRecordingHeapHead, mRecordingHeapFree;
+ static const int kDefaultRecordingFormat =
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ int mRecordingFormat;
+
+ static const android_dataspace kDefaultRecordingDataSpace =
+ HAL_DATASPACE_BT709;
+ android_dataspace mRecordingDataSpace;
+
+ static const int kDefaultRecordingGrallocUsage =
+ GRALLOC_USAGE_HW_VIDEO_ENCODER;
+ int mRecordingGrallocUsage;
+
virtual bool threadLoop();
status_t processRecordingFrame();
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index a03f9c7..0b79b31 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -139,7 +139,7 @@
GRALLOC_USAGE_HW_CAMERA_ZSL,
kZslBufferDepth);
mZslConsumer->setFrameAvailableListener(this);
- mZslConsumer->setName(String8("Camera2Client::ZslConsumer"));
+ mZslConsumer->setName(String8("Camera2-ZslConsumer"));
mZslWindow = new Surface(producer);
}
@@ -147,7 +147,7 @@
// Check if stream parameters have to change
uint32_t currentWidth, currentHeight;
res = device->getStreamInfo(mZslStreamId,
- &currentWidth, &currentHeight, 0);
+ &currentWidth, &currentHeight, 0, 0);
if (res != OK) {
ALOGE("%s: Camera %d: Error querying capture output stream info: "
"%s (%d)", __FUNCTION__,
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index 5f50d7b..5870bd3 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -101,7 +101,7 @@
int mZslStreamId;
int mZslReprocessStreamId;
sp<BufferItemConsumer> mZslConsumer;
- sp<ANativeWindow> mZslWindow;
+ sp<Surface> mZslWindow;
struct ZslPair {
BufferItem buffer;
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index 470a6d6..69620ac 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -150,7 +150,7 @@
// Check if stream parameters have to change
uint32_t currentWidth, currentHeight;
res = device->getStreamInfo(mZslStreamId,
- &currentWidth, &currentHeight, 0);
+ &currentWidth, &currentHeight, 0, 0);
if (res != OK) {
ALOGE("%s: Camera %d: Error querying capture output stream info: "
"%s (%d)", __FUNCTION__,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index bf1692d..3b83f63 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -128,7 +128,6 @@
List<const CameraMetadata> metadataRequestList;
int32_t requestId = mRequestIdCounter;
uint32_t loopCounter = 0;
- bool isReprocess = false;
for (List<sp<CaptureRequest> >::iterator it = requests.begin(); it != requests.end(); ++it) {
sp<CaptureRequest> request = *it;
@@ -136,18 +135,15 @@
ALOGE("%s: Camera %d: Sent null request.",
__FUNCTION__, mCameraId);
return BAD_VALUE;
- } else if (it == requests.begin()) {
- isReprocess = request->mIsReprocess;
- if (isReprocess && !mInputStream.configured) {
- ALOGE("%s: Camera %d: no input stream is configured.");
+ } else if (request->mIsReprocess) {
+ if (!mInputStream.configured) {
+ ALOGE("%s: Camera %d: no input stream is configured.", __FUNCTION__, mCameraId);
return BAD_VALUE;
- } else if (isReprocess && streaming) {
- ALOGE("%s: Camera %d: streaming reprocess requests not supported.");
+ } else if (streaming) {
+ ALOGE("%s: Camera %d: streaming reprocess requests not supported.", __FUNCTION__,
+ mCameraId);
return BAD_VALUE;
}
- } else if (isReprocess != request->mIsReprocess) {
- ALOGE("%s: Camera %d: Sent regular and reprocess requests.");
- return BAD_VALUE;
}
CameraMetadata metadata(request->mMetadata);
@@ -196,7 +192,7 @@
metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0],
outputStreamIds.size());
- if (isReprocess) {
+ if (request->mIsReprocess) {
metadata.update(ANDROID_REQUEST_INPUT_STREAMS, &mInputStream.id, 1);
}
@@ -273,14 +269,34 @@
status_t CameraDeviceClient::beginConfigure() {
// TODO: Implement this.
- ALOGE("%s: Not implemented yet.", __FUNCTION__);
+ ALOGV("%s: Not implemented yet.", __FUNCTION__);
return OK;
}
-status_t CameraDeviceClient::endConfigure() {
+status_t CameraDeviceClient::endConfigure(bool isConstrainedHighSpeed) {
ALOGV("%s: ending configure (%d input stream, %zu output streams)",
__FUNCTION__, mInputStream.configured ? 1 : 0, mStreamMap.size());
+ // Sanitize the high speed session against necessary capability bit.
+ if (isConstrainedHighSpeed) {
+ CameraMetadata staticInfo = mDevice->info();
+ camera_metadata_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ bool isConstrainedHighSpeedSupported = false;
+ for(size_t i = 0; i < entry.count; ++i) {
+ uint8_t capability = entry.data.u8[i];
+ if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
+ isConstrainedHighSpeedSupported = true;
+ break;
+ }
+ }
+ if (!isConstrainedHighSpeedSupported) {
+ ALOGE("%s: Camera %d: Try to create a constrained high speed configuration on a device"
+ " that doesn't support it.",
+ __FUNCTION__, mCameraId);
+ return INVALID_OPERATION;
+ }
+ }
+
status_t res;
if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
@@ -288,7 +304,7 @@
if (!mDevice.get()) return DEAD_OBJECT;
- return mDevice->configureStreams();
+ return mDevice->configureStreams(isConstrainedHighSpeed);
}
status_t CameraDeviceClient::deleteStream(int streamId) {
@@ -395,27 +411,28 @@
(consumerUsage & allowedFlags) != 0;
sp<IBinder> binder = IInterface::asBinder(bufferProducer);
- sp<ANativeWindow> anw = new Surface(bufferProducer, useAsync);
+ sp<Surface> surface = new Surface(bufferProducer, useAsync);
+ ANativeWindow *anw = surface.get();
int width, height, format;
android_dataspace dataSpace;
- if ((res = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, &width)) != OK) {
+ if ((res = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
ALOGE("%s: Camera %d: Failed to query Surface width", __FUNCTION__,
mCameraId);
return res;
}
- if ((res = anw->query(anw.get(), NATIVE_WINDOW_HEIGHT, &height)) != OK) {
+ if ((res = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
ALOGE("%s: Camera %d: Failed to query Surface height", __FUNCTION__,
mCameraId);
return res;
}
- if ((res = anw->query(anw.get(), NATIVE_WINDOW_FORMAT, &format)) != OK) {
+ if ((res = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
ALOGE("%s: Camera %d: Failed to query Surface format", __FUNCTION__,
mCameraId);
return res;
}
- if ((res = anw->query(anw.get(), NATIVE_WINDOW_DEFAULT_DATASPACE,
+ if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
reinterpret_cast<int*>(&dataSpace))) != OK) {
ALOGE("%s: Camera %d: Failed to query Surface dataSpace", __FUNCTION__,
mCameraId);
@@ -440,7 +457,7 @@
}
int streamId = -1;
- res = mDevice->createStream(anw, width, height, format, dataSpace,
+ res = mDevice->createStream(surface, width, height, format, dataSpace,
static_cast<camera3_stream_rotation_t>
(outputConfiguration.getRotation()),
&streamId);
@@ -695,15 +712,10 @@
return BAD_VALUE;
}
- // Also returns BAD_VALUE if stream ID was not valid
+ // Also returns BAD_VALUE if stream ID was not valid, or stream already
+ // has been used
res = mDevice->prepare(streamId);
- if (res == BAD_VALUE) {
- ALOGE("%s: Camera %d: Unexpected BAD_VALUE when preparing stream, but we"
- " already checked and the stream ID (%d) should be valid.",
- __FUNCTION__, mCameraId, streamId);
- }
-
return res;
}
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index b8d8bea..3bda70c 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -79,7 +79,7 @@
virtual status_t beginConfigure();
- virtual status_t endConfigure();
+ virtual status_t endConfigure(bool isConstrainedHighSpeed = false);
// Returns -EBUSY if device is not idle
virtual status_t deleteStream(int streamId);
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 9b2e143..ba0b264 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -118,7 +118,7 @@
ALOGI("Closed Camera %d. Client was: %s (PID %d, UID %u)",
TClientBase::mCameraId,
- String8(TClientBase::mOpPackageName).string(),
+ String8(TClientBase::mClientPackageName).string(),
mInitialClientPid, TClientBase::mClientUid);
}
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index f02fc32..06177e3 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -106,7 +106,7 @@
* For HAL_PIXEL_FORMAT_BLOB formats, the width and height should be the
* logical dimensions of the buffer, not the number of bytes.
*/
- virtual status_t createStream(sp<ANativeWindow> consumer,
+ virtual status_t createStream(sp<Surface> consumer,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id) = 0;
@@ -128,7 +128,8 @@
* Get information about a given stream.
*/
virtual status_t getStreamInfo(int id,
- uint32_t *width, uint32_t *height, uint32_t *format) = 0;
+ uint32_t *width, uint32_t *height,
+ uint32_t *format, android_dataspace *dataSpace) = 0;
/**
* Set stream gralloc buffer transform
@@ -157,7 +158,7 @@
* - BAD_VALUE if the set of streams was invalid (e.g. fmts or sizes)
* - INVALID_OPERATION if the device was in the wrong state
*/
- virtual status_t configureStreams() = 0;
+ virtual status_t configureStreams(bool isConstrainedHighSpeed = false) = 0;
// get the buffer producer of the input stream
virtual status_t getInputBufferProducer(
diff --git a/services/camera/libcameraservice/common/CameraModule.cpp b/services/camera/libcameraservice/common/CameraModule.cpp
index 064ff71..fcbf958 100644
--- a/services/camera/libcameraservice/common/CameraModule.cpp
+++ b/services/camera/libcameraservice/common/CameraModule.cpp
@@ -31,6 +31,7 @@
// Keys added in HAL3.3
if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_3) {
+ const size_t NUM_DERIVED_KEYS_HAL3_3 = 5;
Vector<uint8_t> controlModes;
uint8_t data = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE;
chars.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &data, /*count*/1);
@@ -78,6 +79,58 @@
}
chars.update(ANDROID_CONTROL_AVAILABLE_MODES, controlModes);
+
+ entry = chars.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ // HAL3.2 devices passing existing CTS test should all support all LSC modes and LSC map
+ bool lensShadingModeSupported = false;
+ if (entry.count > 0) {
+ for (size_t i = 0; i < entry.count; i++) {
+ if (entry.data.i32[i] == ANDROID_SHADING_MODE) {
+ lensShadingModeSupported = true;
+ break;
+ }
+ }
+ }
+ Vector<uint8_t> lscModes;
+ Vector<uint8_t> lscMapModes;
+ lscModes.push(ANDROID_SHADING_MODE_FAST);
+ lscModes.push(ANDROID_SHADING_MODE_HIGH_QUALITY);
+ lscMapModes.push(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF);
+ if (lensShadingModeSupported) {
+ lscModes.push(ANDROID_SHADING_MODE_OFF);
+ lscMapModes.push(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
+ }
+ chars.update(ANDROID_SHADING_AVAILABLE_MODES, lscModes);
+ chars.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, lscMapModes);
+
+ entry = chars.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ Vector<int32_t> availableCharsKeys;
+ availableCharsKeys.setCapacity(entry.count + NUM_DERIVED_KEYS_HAL3_3);
+ for (size_t i = 0; i < entry.count; i++) {
+ availableCharsKeys.push(entry.data.i32[i]);
+ }
+ availableCharsKeys.push(ANDROID_CONTROL_AE_LOCK_AVAILABLE);
+ availableCharsKeys.push(ANDROID_CONTROL_AWB_LOCK_AVAILABLE);
+ availableCharsKeys.push(ANDROID_CONTROL_AVAILABLE_MODES);
+ availableCharsKeys.push(ANDROID_SHADING_AVAILABLE_MODES);
+ availableCharsKeys.push(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES);
+ chars.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, availableCharsKeys);
+
+ // Need update android.control.availableHighSpeedVideoConfigurations since HAL3.3
+ // adds batch size to this array.
+ entry = chars.find(ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
+ if (entry.count > 0) {
+ Vector<int32_t> highSpeedConfig;
+ for (size_t i = 0; i < entry.count; i += 4) {
+ highSpeedConfig.add(entry.data.i32[i]); // width
+ highSpeedConfig.add(entry.data.i32[i + 1]); // height
+ highSpeedConfig.add(entry.data.i32[i + 2]); // fps_min
+ highSpeedConfig.add(entry.data.i32[i + 3]); // fps_max
+ highSpeedConfig.add(1); // batchSize_max. default to 1 for HAL3.2
+ }
+ chars.update(ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
+ highSpeedConfig);
+ }
}
return;
}
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index f6645f3..dfe5565 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
@@ -240,7 +240,7 @@
return mRequestQueue.waitForDequeue(requestId, timeout);
}
-status_t Camera2Device::createStream(sp<ANativeWindow> consumer,
+status_t Camera2Device::createStream(sp<Surface> consumer,
uint32_t width, uint32_t height, int format,
android_dataspace /*dataSpace*/, camera3_stream_rotation_t rotation, int *id) {
ATRACE_CALL();
@@ -315,7 +315,8 @@
status_t Camera2Device::getStreamInfo(int id,
- uint32_t *width, uint32_t *height, uint32_t *format) {
+ uint32_t *width, uint32_t *height,
+ uint32_t *format, android_dataspace *dataSpace) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
bool found = false;
@@ -336,6 +337,7 @@
if (width) *width = (*streamI)->getWidth();
if (height) *height = (*streamI)->getHeight();
if (format) *format = (*streamI)->getFormat();
+ if (dataSpace) *dataSpace = HAL_DATASPACE_UNKNOWN;
return OK;
}
@@ -415,7 +417,7 @@
return OK;
}
-status_t Camera2Device::configureStreams() {
+status_t Camera2Device::configureStreams(bool isConstrainedHighSpeed) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
index fd1240a..c9f3a2c 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.h
+++ b/services/camera/libcameraservice/device2/Camera2Device.h
@@ -56,19 +56,20 @@
int64_t *lastFrameNumber = NULL);
virtual status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL);
virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout);
- virtual status_t createStream(sp<ANativeWindow> consumer,
+ virtual status_t createStream(sp<Surface> consumer,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id);
virtual status_t createInputStream(
uint32_t width, uint32_t height, int format, int *id);
virtual status_t createReprocessStreamFromStream(int outputId, int *id);
virtual status_t getStreamInfo(int id,
- uint32_t *width, uint32_t *height, uint32_t *format);
+ uint32_t *width, uint32_t *height,
+ uint32_t *format, android_dataspace *dataSpace);
virtual status_t setStreamTransform(int id, int transform);
virtual status_t deleteStream(int id);
virtual status_t deleteReprocessStream(int id);
// No-op on HAL2 devices
- virtual status_t configureStreams();
+ virtual status_t configureStreams(bool isConstrainedHighSpeed = false);
virtual status_t getInputBufferProducer(
sp<IGraphicBufferProducer> *producer);
virtual status_t createDefaultRequest(int templateId, CameraMetadata *request);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index d2c2482..c28a57e 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -57,6 +57,7 @@
Camera3Device::Camera3Device(int id):
mId(id),
+ mIsConstrainedHighSpeedConfiguration(false),
mHal3Device(NULL),
mStatus(STATUS_UNINITIALIZED),
mUsePartialResult(false),
@@ -419,6 +420,8 @@
lines.appendFormat(" Error cause: %s\n", mErrorCause.string());
}
lines.appendFormat(" Stream configuration:\n");
+ lines.appendFormat(" Operation mode: %s \n", mIsConstrainedHighSpeedConfiguration ?
+ "CONSTRAINED HIGH SPEED VIDEO" : "NORMAL");
if (mInputStream != NULL) {
write(fd, lines.string(), lines.size());
@@ -814,7 +817,7 @@
return OK;
}
-status_t Camera3Device::createStream(sp<ANativeWindow> consumer,
+status_t Camera3Device::createStream(sp<Surface> consumer,
uint32_t width, uint32_t height, int format, android_dataspace dataSpace,
camera3_stream_rotation_t rotation, int *id) {
ATRACE_CALL();
@@ -902,7 +905,8 @@
status_t Camera3Device::getStreamInfo(int id,
- uint32_t *width, uint32_t *height, uint32_t *format) {
+ uint32_t *width, uint32_t *height,
+ uint32_t *format, android_dataspace *dataSpace) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
@@ -933,7 +937,7 @@
if (width) *width = mOutputStreams[idx]->getWidth();
if (height) *height = mOutputStreams[idx]->getHeight();
if (format) *format = mOutputStreams[idx]->getFormat();
-
+ if (dataSpace) *dataSpace = mOutputStreams[idx]->getDataSpace();
return OK;
}
@@ -1023,12 +1027,13 @@
return INVALID_OPERATION;
}
-status_t Camera3Device::configureStreams() {
+status_t Camera3Device::configureStreams(bool isConstrainedHighSpeed) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
+ mIsConstrainedHighSpeedConfiguration = isConstrainedHighSpeed;
return configureStreamsLocked();
}
@@ -1342,6 +1347,8 @@
status_t Camera3Device::prepare(int streamId) {
ATRACE_CALL();
ALOGV("%s: Camera %d: Preparing stream %d", __FUNCTION__, mId, streamId);
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
sp<Camera3StreamInterface> stream;
ssize_t outputStreamIdx = mOutputStreams.indexOfKey(streamId);
@@ -1353,14 +1360,12 @@
stream = mOutputStreams.editValueAt(outputStreamIdx);
if (stream->isUnpreparable() || stream->hasOutstandingBuffers() ) {
- ALOGE("%s: Camera %d: Stream %d has already been a request target",
- __FUNCTION__, mId, streamId);
+ CLOGE("Stream %d has already been a request target", streamId);
return BAD_VALUE;
}
if (mRequestThread->isStreamPending(stream)) {
- ALOGE("%s: Camera %d: Stream %d is already a target in a pending request",
- __FUNCTION__, mId, streamId);
+ CLOGE("Stream %d is already a target in a pending request", streamId);
return BAD_VALUE;
}
@@ -1528,7 +1533,9 @@
ALOGV("%s: Camera %d: Starting stream configuration", __FUNCTION__, mId);
camera3_stream_configuration config;
-
+ config.operation_mode = mIsConstrainedHighSpeedConfiguration ?
+ CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE :
+ CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
config.num_streams = (mInputStream != NULL) + mOutputStreams.size();
Vector<camera3_stream_t*> streams;
@@ -2613,6 +2620,21 @@
if (listener != NULL) {
for (RequestList::iterator it = mRequestQueue.begin();
it != mRequestQueue.end(); ++it) {
+ // Abort the input buffers for reprocess requests.
+ if ((*it)->mInputStream != NULL) {
+ camera3_stream_buffer_t inputBuffer;
+ status_t res = (*it)->mInputStream->getInputBuffer(&inputBuffer);
+ if (res != OK) {
+ ALOGW("%s: %d: couldn't get input buffer while clearing the request "
+ "list: %s (%d)", __FUNCTION__, __LINE__, strerror(-res), res);
+ } else {
+ res = (*it)->mInputStream->returnInputBuffer(inputBuffer);
+ if (res != OK) {
+ ALOGE("%s: %d: couldn't return input buffer while clearing the request "
+ "list: %s (%d)", __FUNCTION__, __LINE__, strerror(-res), res);
+ }
+ }
+ }
// Set the frame number this request would have had, if it
// had been submitted; this frame number will not be reused.
// The requestId and burstId fields were set when the request was
@@ -2752,29 +2774,11 @@
__FUNCTION__);
}
- camera3_stream_buffer_t inputBuffer;
uint32_t totalNumBuffers = 0;
// Fill in buffers
-
if (nextRequest->mInputStream != NULL) {
- res = nextRequest->mInputStream->getInputBuffer(&inputBuffer);
- if (res != OK) {
- // Can't get input buffer from gralloc queue - this could be due to
- // disconnected queue or other producer misbehavior, so not a fatal
- // error
- ALOGE("RequestThread: Can't get input buffer, skipping request:"
- " %s (%d)", strerror(-res), res);
- Mutex::Autolock l(mRequestLock);
- if (mListener != NULL) {
- mListener->notifyError(
- ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
- nextRequest->mResultExtras);
- }
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
- return true;
- }
- request.input_buffer = &inputBuffer;
+ request.input_buffer = &nextRequest->mInputBuffer;
totalNumBuffers += 1;
} else {
request.input_buffer = NULL;
@@ -2792,11 +2796,13 @@
// error
ALOGE("RequestThread: Can't get output buffer, skipping request:"
" %s (%d)", strerror(-res), res);
- Mutex::Autolock l(mRequestLock);
- if (mListener != NULL) {
- mListener->notifyError(
- ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
- nextRequest->mResultExtras);
+ {
+ Mutex::Autolock l(mRequestLock);
+ if (mListener != NULL) {
+ mListener->notifyError(
+ ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
+ nextRequest->mResultExtras);
+ }
}
cleanUpFailedRequest(request, nextRequest, outputBuffers);
return true;
@@ -2865,6 +2871,12 @@
nextRequest->mSettings.unlock(request.settings);
}
+ // Unset as current request
+ {
+ Mutex::Autolock l(mRequestLock);
+ mNextRequest.clear();
+ }
+
// Remove any previously queued triggers (after unlock)
res = removeTriggers(mPrevRequest);
if (res != OK) {
@@ -2890,6 +2902,13 @@
sp<Camera3StreamInterface>& stream) {
Mutex::Autolock l(mRequestLock);
+ if (mNextRequest != nullptr) {
+ for (const auto& s : mNextRequest->mOutputStreams) {
+ if (stream == s) return true;
+ }
+ if (stream == mNextRequest->mInputStream) return true;
+ }
+
for (const auto& request : mRequestQueue) {
for (const auto& s : request->mOutputStreams) {
if (stream == s) return true;
@@ -2915,15 +2934,18 @@
if (request.settings != NULL) {
nextRequest->mSettings.unlock(request.settings);
}
- if (request.input_buffer != NULL) {
- request.input_buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
- nextRequest->mInputStream->returnInputBuffer(*(request.input_buffer));
+ if (nextRequest->mInputStream != NULL) {
+ nextRequest->mInputBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ nextRequest->mInputStream->returnInputBuffer(nextRequest->mInputBuffer);
}
for (size_t i = 0; i < request.num_output_buffers; i++) {
outputBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
nextRequest->mOutputStreams.editItemAt(i)->returnBuffer(
outputBuffers[i], 0);
}
+
+ Mutex::Autolock l(mRequestLock);
+ mNextRequest.clear();
}
sp<Camera3Device::CaptureRequest>
@@ -3006,7 +3028,28 @@
nextRequest->mResultExtras.frameNumber = mFrameNumber++;
nextRequest->mResultExtras.afTriggerId = mCurrentAfTriggerId;
nextRequest->mResultExtras.precaptureTriggerId = mCurrentPreCaptureTriggerId;
+
+ // Since RequestThread::clear() removes buffers from the input stream,
+ // get the right buffer here before unlocking mRequestLock
+ if (nextRequest->mInputStream != NULL) {
+ res = nextRequest->mInputStream->getInputBuffer(&nextRequest->mInputBuffer);
+ if (res != OK) {
+ // Can't get input buffer from gralloc queue - this could be due to
+ // disconnected queue or other producer misbehavior, so not a fatal
+ // error
+ ALOGE("%s: Can't get input buffer, skipping request:"
+ " %s (%d)", __FUNCTION__, strerror(-res), res);
+ if (mListener != NULL) {
+ mListener->notifyError(
+ ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
+ nextRequest->mResultExtras);
+ }
+ return NULL;
+ }
+ }
}
+ mNextRequest = nextRequest;
+
return nextRequest;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 4fbcb2e..e2fd8d4 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -94,7 +94,7 @@
// Actual stream creation/deletion is delayed until first request is submitted
// If adding streams while actively capturing, will pause device before adding
// stream, reconfiguring device, and unpausing.
- virtual status_t createStream(sp<ANativeWindow> consumer,
+ virtual status_t createStream(sp<Surface> consumer,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id);
virtual status_t createInputStream(
@@ -109,13 +109,14 @@
virtual status_t createReprocessStreamFromStream(int outputId, int *id);
virtual status_t getStreamInfo(int id,
- uint32_t *width, uint32_t *height, uint32_t *format);
+ uint32_t *width, uint32_t *height,
+ uint32_t *format, android_dataspace *dataSpace);
virtual status_t setStreamTransform(int id, int transform);
virtual status_t deleteStream(int id);
virtual status_t deleteReprocessStream(int id);
- virtual status_t configureStreams();
+ virtual status_t configureStreams(bool isConstrainedHighSpeed = false);
virtual status_t getInputBufferProducer(
sp<IGraphicBufferProducer> *producer);
@@ -173,6 +174,9 @@
// Camera device ID
const int mId;
+ // Flag indicating if the current active stream configuration is constrained high speed.
+ bool mIsConstrainedHighSpeedConfiguration;
+
/**** Scope for mLock ****/
camera3_device_t *mHal3Device;
@@ -234,6 +238,7 @@
public:
CameraMetadata mSettings;
sp<camera3::Camera3Stream> mInputStream;
+ camera3_stream_buffer_t mInputBuffer;
Vector<sp<camera3::Camera3OutputStreamInterface> >
mOutputStreams;
CaptureResultExtras mResultExtras;
@@ -501,6 +506,10 @@
Condition mRequestSignal;
RequestList mRequestQueue;
RequestList mRepeatingRequests;
+ // The next request being prepped for submission to the HAL, no longer
+ // on the request queue. Read-only even with mRequestLock held, outside
+ // of threadLoop
+ sp<const CaptureRequest> mNextRequest;
bool mReconfigured;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 84c5754..2504bfd 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -187,6 +187,8 @@
assert(mBuffersInFlight.size() == 0);
+ mConsumer->abandon();
+
/**
* no-op since we can't disconnect the producer from the consumer-side
*/
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 7a0331b..8c611d5 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -32,7 +32,7 @@
namespace camera3 {
Camera3OutputStream::Camera3OutputStream(int id,
- sp<ANativeWindow> consumer,
+ sp<Surface> consumer,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation) :
Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height,
@@ -48,7 +48,7 @@
}
Camera3OutputStream::Camera3OutputStream(int id,
- sp<ANativeWindow> consumer,
+ sp<Surface> consumer,
uint32_t width, uint32_t height, size_t maxSize, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation) :
Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, maxSize,
@@ -229,6 +229,7 @@
(void) args;
String8 lines;
lines.appendFormat(" Stream[%d]: Output\n", mId);
+ lines.appendFormat(" Consumer name: %s\n", mConsumerName.string());
write(fd, lines.string(), lines.size());
Camera3IOStreamBase::dump(fd, args);
@@ -278,6 +279,8 @@
return res;
}
+ mConsumerName = mConsumer->getConsumerName();
+
res = native_window_set_usage(mConsumer.get(), camera3_stream::usage);
if (res != OK) {
ALOGE("%s: Unable to configure usage %08x for stream %d",
@@ -326,7 +329,8 @@
}
int maxConsumerBuffers;
- res = mConsumer->query(mConsumer.get(),
+ res = static_cast<ANativeWindow*>(mConsumer.get())->query(
+ mConsumer.get(),
NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
if (res != OK) {
ALOGE("%s: Unable to query consumer undequeued"
@@ -401,7 +405,7 @@
status_t res;
int32_t u = 0;
- res = mConsumer->query(mConsumer.get(),
+ res = static_cast<ANativeWindow*>(mConsumer.get())->query(mConsumer.get(),
NATIVE_WINDOW_CONSUMER_USAGE_BITS, &u);
// If an opaque output stream's endpoint is ImageReader, add
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 513b695..941d693 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -38,7 +38,7 @@
/**
* Set up a stream for formats that have 2 dimensions, such as RAW and YUV.
*/
- Camera3OutputStream(int id, sp<ANativeWindow> consumer,
+ Camera3OutputStream(int id, sp<Surface> consumer,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation);
@@ -46,7 +46,7 @@
* Set up a stream for formats that have a variable buffer size for the same
* dimensions, such as compressed JPEG.
*/
- Camera3OutputStream(int id, sp<ANativeWindow> consumer,
+ Camera3OutputStream(int id, sp<Surface> consumer,
uint32_t width, uint32_t height, size_t maxSize, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation);
@@ -81,7 +81,7 @@
virtual status_t disconnectLocked();
- sp<ANativeWindow> mConsumer;
+ sp<Surface> mConsumer;
private:
int mTransform;
@@ -89,6 +89,9 @@
bool mTraceFirstBuffer;
+ // Name of Surface consumer
+ String8 mConsumerName;
+
/**
* Internal Camera3Stream interface
*/
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index d177b57..6c87a45 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -45,6 +45,7 @@
virtual uint32_t getWidth() const = 0;
virtual uint32_t getHeight() const = 0;
virtual int getFormat() const = 0;
+ virtual android_dataspace getDataSpace() const = 0;
/**
* Start the stream configuration process. Returns a handle to the stream's
diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
index 10d7f2e..eefcb44 100644
--- a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
@@ -122,6 +122,7 @@
sp<IGraphicBufferConsumer> consumer;
BufferQueue::createBufferQueue(&producer, &consumer);
mProducer = new RingBufferConsumer(consumer, GRALLOC_USAGE_HW_CAMERA_ZSL, bufferCount);
+ mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
mConsumer = new Surface(producer);
}
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index aa40a2d..7ae58d5 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -172,6 +172,26 @@
// --------------------------------------------------------------------------------
/**
+ * A default class implementing the LISTENER interface used by ClientManager.
+ */
+template<class KEY, class VALUE>
+class DefaultEventListener {
+public:
+ void onClientAdded(const ClientDescriptor<KEY, VALUE>& descriptor);
+ void onClientRemoved(const ClientDescriptor<KEY, VALUE>& descriptor);
+};
+
+template<class KEY, class VALUE>
+void DefaultEventListener<KEY, VALUE>::onClientAdded(
+ const ClientDescriptor<KEY, VALUE>& /*descriptor*/) {}
+
+template<class KEY, class VALUE>
+void DefaultEventListener<KEY, VALUE>::onClientRemoved(
+ const ClientDescriptor<KEY, VALUE>& /*descriptor*/) {}
+
+// --------------------------------------------------------------------------------
+
+/**
* The ClientManager class wraps an LRU-ordered list of active clients and implements eviction
* behavior for handling shared resource access.
*
@@ -189,7 +209,7 @@
* incoming descriptor has the highest priority. Otherwise, the incoming descriptor is
* removed instead.
*/
-template<class KEY, class VALUE>
+template<class KEY, class VALUE, class LISTENER=DefaultEventListener<KEY, VALUE>>
class ClientManager {
public:
// The default maximum "cost" allowed before evicting
@@ -275,6 +295,24 @@
status_t waitUntilRemoved(const std::shared_ptr<ClientDescriptor<KEY, VALUE>> client,
nsecs_t timeout) const;
+ /**
+ * Set the current listener for client add/remove events.
+ *
+ * The listener instance must inherit from the LISTENER class and implement the following
+ * methods:
+ * void onClientRemoved(const ClientDescriptor<KEY, VALUE>& descriptor);
+ * void onClientAdded(const ClientDescriptor<KEY, VALUE>& descriptor);
+ *
+ * These callback methods will be called with the ClientManager's lock held, and should
+ * not call any further ClientManager methods.
+ *
+ * The onClientRemoved method will be called when the client has been removed or evicted
+ * from the ClientManager that this event listener has been added to. The onClientAdded
+ * method will be called when the client has been added to the ClientManager that this
+ * event listener has been added to.
+ */
+ void setListener(const std::shared_ptr<LISTENER>& listener);
+
protected:
~ClientManager();
@@ -300,36 +338,38 @@
int32_t mMaxCost;
// LRU ordered, most recent at end
std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> mClients;
+ std::shared_ptr<LISTENER> mListener;
}; // class ClientManager
-template<class KEY, class VALUE>
-ClientManager<KEY, VALUE>::ClientManager() :
+template<class KEY, class VALUE, class LISTENER>
+ClientManager<KEY, VALUE, LISTENER>::ClientManager() :
ClientManager(DEFAULT_MAX_COST) {}
-template<class KEY, class VALUE>
-ClientManager<KEY, VALUE>::ClientManager(int32_t totalCost) : mMaxCost(totalCost) {}
+template<class KEY, class VALUE, class LISTENER>
+ClientManager<KEY, VALUE, LISTENER>::ClientManager(int32_t totalCost) : mMaxCost(totalCost) {}
-template<class KEY, class VALUE>
-ClientManager<KEY, VALUE>::~ClientManager() {}
+template<class KEY, class VALUE, class LISTENER>
+ClientManager<KEY, VALUE, LISTENER>::~ClientManager() {}
-template<class KEY, class VALUE>
-std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> ClientManager<KEY, VALUE>::wouldEvict(
+template<class KEY, class VALUE, class LISTENER>
+std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>>
+ClientManager<KEY, VALUE, LISTENER>::wouldEvict(
const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client) const {
Mutex::Autolock lock(mLock);
return wouldEvictLocked(client);
}
-template<class KEY, class VALUE>
+template<class KEY, class VALUE, class LISTENER>
std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>>
-ClientManager<KEY, VALUE>::getIncompatibleClients(
+ClientManager<KEY, VALUE, LISTENER>::getIncompatibleClients(
const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client) const {
Mutex::Autolock lock(mLock);
return wouldEvictLocked(client, /*returnIncompatibleClients*/true);
}
-template<class KEY, class VALUE>
+template<class KEY, class VALUE, class LISTENER>
std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>>
-ClientManager<KEY, VALUE>::wouldEvictLocked(
+ClientManager<KEY, VALUE, LISTENER>::wouldEvictLocked(
const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client,
bool returnIncompatibleClients) const {
@@ -420,8 +460,9 @@
}
-template<class KEY, class VALUE>
-std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> ClientManager<KEY, VALUE>::addAndEvict(
+template<class KEY, class VALUE, class LISTENER>
+std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>>
+ClientManager<KEY, VALUE, LISTENER>::addAndEvict(
const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client) {
Mutex::Autolock lock(mLock);
auto evicted = wouldEvictLocked(client);
@@ -432,31 +473,37 @@
auto iter = evicted.cbegin();
- // Remove evicted clients from list
- mClients.erase(std::remove_if(mClients.begin(), mClients.end(),
- [&iter] (std::shared_ptr<ClientDescriptor<KEY, VALUE>>& curClientPtr) {
- if (curClientPtr->getKey() == (*iter)->getKey()) {
- iter++;
- return true;
- }
- return false;
- }), mClients.end());
+ if (iter != evicted.cend()) {
+ if (mListener != nullptr) mListener->onClientRemoved(**iter);
+
+ // Remove evicted clients from list
+ mClients.erase(std::remove_if(mClients.begin(), mClients.end(),
+ [&iter] (std::shared_ptr<ClientDescriptor<KEY, VALUE>>& curClientPtr) {
+ if (curClientPtr->getKey() == (*iter)->getKey()) {
+ iter++;
+ return true;
+ }
+ return false;
+ }), mClients.end());
+ }
+
+ if (mListener != nullptr) mListener->onClientAdded(*client);
mClients.push_back(client);
mRemovedCondition.broadcast();
return evicted;
}
-template<class KEY, class VALUE>
+template<class KEY, class VALUE, class LISTENER>
std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>>
-ClientManager<KEY, VALUE>::getAll() const {
+ClientManager<KEY, VALUE, LISTENER>::getAll() const {
Mutex::Autolock lock(mLock);
return mClients;
}
-template<class KEY, class VALUE>
-std::vector<KEY> ClientManager<KEY, VALUE>::getAllKeys() const {
+template<class KEY, class VALUE, class LISTENER>
+std::vector<KEY> ClientManager<KEY, VALUE, LISTENER>::getAllKeys() const {
Mutex::Autolock lock(mLock);
std::vector<KEY> keys(mClients.size());
for (const auto& i : mClients) {
@@ -465,8 +512,8 @@
return keys;
}
-template<class KEY, class VALUE>
-std::vector<int32_t> ClientManager<KEY, VALUE>::getAllOwners() const {
+template<class KEY, class VALUE, class LISTENER>
+std::vector<int32_t> ClientManager<KEY, VALUE, LISTENER>::getAllOwners() const {
Mutex::Autolock lock(mLock);
std::set<int32_t> owners;
for (const auto& i : mClients) {
@@ -475,8 +522,8 @@
return std::vector<int32_t>(owners.begin(), owners.end());
}
-template<class KEY, class VALUE>
-void ClientManager<KEY, VALUE>::updatePriorities(
+template<class KEY, class VALUE, class LISTENER>
+void ClientManager<KEY, VALUE, LISTENER>::updatePriorities(
const std::map<int32_t,int32_t>& ownerPriorityList) {
Mutex::Autolock lock(mLock);
for (auto& i : mClients) {
@@ -487,8 +534,8 @@
}
}
-template<class KEY, class VALUE>
-std::shared_ptr<ClientDescriptor<KEY, VALUE>> ClientManager<KEY, VALUE>::get(
+template<class KEY, class VALUE, class LISTENER>
+std::shared_ptr<ClientDescriptor<KEY, VALUE>> ClientManager<KEY, VALUE, LISTENER>::get(
const KEY& key) const {
Mutex::Autolock lock(mLock);
for (const auto& i : mClients) {
@@ -497,23 +544,30 @@
return std::shared_ptr<ClientDescriptor<KEY, VALUE>>(nullptr);
}
-template<class KEY, class VALUE>
-void ClientManager<KEY, VALUE>::removeAll() {
+template<class KEY, class VALUE, class LISTENER>
+void ClientManager<KEY, VALUE, LISTENER>::removeAll() {
Mutex::Autolock lock(mLock);
+ if (mListener != nullptr) {
+ for (const auto& i : mClients) {
+ mListener->onClientRemoved(*i);
+ }
+ }
mClients.clear();
mRemovedCondition.broadcast();
}
-template<class KEY, class VALUE>
-std::shared_ptr<ClientDescriptor<KEY, VALUE>> ClientManager<KEY, VALUE>::remove(const KEY& key) {
+template<class KEY, class VALUE, class LISTENER>
+std::shared_ptr<ClientDescriptor<KEY, VALUE>> ClientManager<KEY, VALUE, LISTENER>::remove(
+ const KEY& key) {
Mutex::Autolock lock(mLock);
std::shared_ptr<ClientDescriptor<KEY, VALUE>> ret;
// Remove evicted clients from list
mClients.erase(std::remove_if(mClients.begin(), mClients.end(),
- [&key, &ret] (std::shared_ptr<ClientDescriptor<KEY, VALUE>>& curClientPtr) {
+ [this, &key, &ret] (std::shared_ptr<ClientDescriptor<KEY, VALUE>>& curClientPtr) {
if (curClientPtr->getKey() == key) {
+ if (mListener != nullptr) mListener->onClientRemoved(*curClientPtr);
ret = curClientPtr;
return true;
}
@@ -524,8 +578,8 @@
return ret;
}
-template<class KEY, class VALUE>
-status_t ClientManager<KEY, VALUE>::waitUntilRemoved(
+template<class KEY, class VALUE, class LISTENER>
+status_t ClientManager<KEY, VALUE, LISTENER>::waitUntilRemoved(
const std::shared_ptr<ClientDescriptor<KEY, VALUE>> client,
nsecs_t timeout) const {
status_t ret = NO_ERROR;
@@ -556,14 +610,21 @@
return ret;
}
-template<class KEY, class VALUE>
-void ClientManager<KEY, VALUE>::remove(
+template<class KEY, class VALUE, class LISTENER>
+void ClientManager<KEY, VALUE, LISTENER>::setListener(const std::shared_ptr<LISTENER>& listener) {
+ Mutex::Autolock lock(mLock);
+ mListener = listener;
+}
+
+template<class KEY, class VALUE, class LISTENER>
+void ClientManager<KEY, VALUE, LISTENER>::remove(
const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& value) {
Mutex::Autolock lock(mLock);
// Remove evicted clients from list
mClients.erase(std::remove_if(mClients.begin(), mClients.end(),
- [&value] (std::shared_ptr<ClientDescriptor<KEY, VALUE>>& curClientPtr) {
+ [this, &value] (std::shared_ptr<ClientDescriptor<KEY, VALUE>>& curClientPtr) {
if (curClientPtr == value) {
+ if (mListener != nullptr) mListener->onClientRemoved(*curClientPtr);
return true;
}
return false;
@@ -571,8 +632,8 @@
mRemovedCondition.broadcast();
}
-template<class KEY, class VALUE>
-int64_t ClientManager<KEY, VALUE>::getCurrentCostLocked() const {
+template<class KEY, class VALUE, class LISTENER>
+int64_t ClientManager<KEY, VALUE, LISTENER>::getCurrentCostLocked() const {
int64_t totalCost = 0;
for (const auto& x : mClients) {
totalCost += x->getCost();
diff --git a/services/mediaresourcemanager/Android.mk b/services/mediaresourcemanager/Android.mk
index 84218cf..b72230f 100644
--- a/services/mediaresourcemanager/Android.mk
+++ b/services/mediaresourcemanager/Android.mk
@@ -2,7 +2,7 @@
include $(CLEAR_VARS)
-LOCAL_SRC_FILES := ResourceManagerService.cpp
+LOCAL_SRC_FILES := ResourceManagerService.cpp ServiceLog.cpp
LOCAL_SHARED_LIBRARIES := libmedia libstagefright libbinder libutils liblog
@@ -13,6 +13,9 @@
LOCAL_C_INCLUDES += \
$(TOPDIR)frameworks/av/include
+LOCAL_CFLAGS += -Werror -Wall
+LOCAL_CLANG := true
+
include $(BUILD_SHARED_LIBRARY)
include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 17aac4e..e2b6695 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -29,6 +29,7 @@
#include <unistd.h>
#include "ResourceManagerService.h"
+#include "ServiceLog.h"
namespace android {
@@ -88,7 +89,7 @@
return infos.editItemAt(infos.size() - 1);
}
-status_t ResourceManagerService::dump(int fd, const Vector<String16>& args) {
+status_t ResourceManagerService::dump(int fd, const Vector<String16>& /* args */) {
Mutex::Autolock lock(mLock);
String8 result;
@@ -103,16 +104,14 @@
snprintf(buffer, SIZE, " SupportsSecureWithNonSecureCodec: %d\n", mSupportsSecureWithNonSecureCodec);
result.append(buffer);
- snprintf(buffer, SIZE, " Processes:\n");
- result.append(buffer);
+ result.append(" Processes:\n");
for (size_t i = 0; i < mMap.size(); ++i) {
snprintf(buffer, SIZE, " Pid: %d\n", mMap.keyAt(i));
result.append(buffer);
const ResourceInfos &infos = mMap.valueAt(i);
for (size_t j = 0; j < infos.size(); ++j) {
- snprintf(buffer, SIZE, " Client:\n");
- result.append(buffer);
+ result.append(" Client:\n");
snprintf(buffer, SIZE, " Id: %lld\n", (long long)infos[j].clientId);
result.append(buffer);
@@ -120,14 +119,15 @@
result.append(buffer);
Vector<MediaResource> resources = infos[j].resources;
- snprintf(buffer, SIZE, " Resources:\n");
- result.append(buffer);
+ result.append(" Resources:\n");
for (size_t k = 0; k < resources.size(); ++k) {
snprintf(buffer, SIZE, " %s\n", resources[k].toString().string());
result.append(buffer);
}
}
}
+ result.append(" Events logs (most recent at top):\n");
+ result.append(mServiceLog->toString(" " /* linePrefix */));
write(fd, result.string(), result.size());
return OK;
@@ -135,27 +135,30 @@
ResourceManagerService::ResourceManagerService()
: mProcessInfo(new ProcessInfo()),
+ mServiceLog(new ServiceLog()),
mSupportsMultipleSecureCodecs(true),
mSupportsSecureWithNonSecureCodec(true) {}
ResourceManagerService::ResourceManagerService(sp<ProcessInfoInterface> processInfo)
: mProcessInfo(processInfo),
+ mServiceLog(new ServiceLog()),
mSupportsMultipleSecureCodecs(true),
mSupportsSecureWithNonSecureCodec(true) {}
ResourceManagerService::~ResourceManagerService() {}
void ResourceManagerService::config(const Vector<MediaResourcePolicy> &policies) {
- ALOGV("config(%s)", getString(policies).string());
+ String8 log = String8::format("config(%s)", getString(policies).string());
+ mServiceLog->add(log);
Mutex::Autolock lock(mLock);
for (size_t i = 0; i < policies.size(); ++i) {
String8 type = policies[i].mType;
- uint64_t value = policies[i].mValue;
+ String8 value = policies[i].mValue;
if (type == kPolicySupportsMultipleSecureCodecs) {
- mSupportsMultipleSecureCodecs = (value != 0);
+ mSupportsMultipleSecureCodecs = (value == "true");
} else if (type == kPolicySupportsSecureWithNonSecureCodec) {
- mSupportsSecureWithNonSecureCodec = (value != 0);
+ mSupportsSecureWithNonSecureCodec = (value == "true");
}
}
}
@@ -165,8 +168,9 @@
int64_t clientId,
const sp<IResourceManagerClient> client,
const Vector<MediaResource> &resources) {
- ALOGV("addResource(pid %d, clientId %lld, resources %s)",
+ String8 log = String8::format("addResource(pid %d, clientId %lld, resources %s)",
pid, (long long) clientId, getString(resources).string());
+ mServiceLog->add(log);
Mutex::Autolock lock(mLock);
ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
@@ -176,7 +180,8 @@
}
void ResourceManagerService::removeResource(int64_t clientId) {
- ALOGV("removeResource(%lld)", (long long) clientId);
+ String8 log = String8::format("removeResource(%lld)", (long long) clientId);
+ mServiceLog->add(log);
Mutex::Autolock lock(mLock);
bool found = false;
@@ -201,8 +206,9 @@
bool ResourceManagerService::reclaimResource(
int callingPid, const Vector<MediaResource> &resources) {
- ALOGV("reclaimResource(callingPid %d, resources %s)",
+ String8 log = String8::format("reclaimResource(callingPid %d, resources %s)",
callingPid, getString(resources).string());
+ mServiceLog->add(log);
Vector<sp<IResourceManagerClient>> clients;
{
@@ -265,7 +271,8 @@
sp<IResourceManagerClient> failedClient;
for (size_t i = 0; i < clients.size(); ++i) {
- ALOGV("reclaimResource from client %p", clients[i].get());
+ log = String8::format("reclaimResource from client %p", clients[i].get());
+ mServiceLog->add(log);
if (!clients[i]->reclaimResource()) {
failedClient = clients[i];
break;
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 0c3d694..0d9d878 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -30,6 +30,7 @@
namespace android {
+class ServiceLog;
struct ProcessInfoInterface;
struct ResourceInfo {
@@ -96,6 +97,7 @@
mutable Mutex mLock;
sp<ProcessInfoInterface> mProcessInfo;
+ sp<ServiceLog> mServiceLog;
PidResourceInfosMap mMap;
bool mSupportsMultipleSecureCodecs;
bool mSupportsSecureWithNonSecureCodec;
diff --git a/services/mediaresourcemanager/ServiceLog.cpp b/services/mediaresourcemanager/ServiceLog.cpp
new file mode 100644
index 0000000..791e797
--- /dev/null
+++ b/services/mediaresourcemanager/ServiceLog.cpp
@@ -0,0 +1,63 @@
+/*
+**
+** Copyright 2015, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ServiceLog"
+#include <utils/Log.h>
+
+#include <time.h>
+
+#include "ServiceLog.h"
+
+static const size_t kDefaultMaxNum = 100;
+
+namespace android {
+
+ServiceLog::ServiceLog() : mMaxNum(kDefaultMaxNum), mLogs(mMaxNum) {}
+ServiceLog::ServiceLog(size_t maxNum) : mMaxNum(maxNum), mLogs(mMaxNum) {}
+
+void ServiceLog::add(const String8 &log) {
+ Mutex::Autolock lock(mLock);
+ time_t now = time(0);
+ char buf[64];
+ strftime(buf, sizeof(buf), "%m-%d %T", localtime(&now));
+ mLogs.add(String8::format("%s %s", buf, log.string()));
+}
+
+String8 ServiceLog::toString(const char *linePrefix) const {
+ Mutex::Autolock lock(mLock);
+ String8 result;
+ for (const auto& log : mLogs) {
+ addLine(log.string(), linePrefix, &result);
+ }
+ if (mLogs.size() == mMaxNum) {
+ addLine("...", linePrefix, &result);
+ } else if (mLogs.size() == 0) {
+ addLine("[no events yet]", linePrefix, &result);
+ }
+ return result;
+}
+
+void ServiceLog::addLine(const char *log, const char *prefix, String8 *result) const {
+ if (prefix != NULL) {
+ result->append(prefix);
+ }
+ result->append(log);
+ result->append("\n");
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/ServiceLog.h b/services/mediaresourcemanager/ServiceLog.h
new file mode 100644
index 0000000..a6f16eb
--- /dev/null
+++ b/services/mediaresourcemanager/ServiceLog.h
@@ -0,0 +1,50 @@
+/*
+**
+** Copyright 2015, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_SERVICELOG_H
+#define ANDROID_SERVICELOG_H
+
+#include <utils/Errors.h>
+#include <utils/String8.h>
+#include <utils/threads.h>
+#include <utils/Vector.h>
+
+#include "media/RingBuffer.h"
+
+namespace android {
+
+class ServiceLog : public RefBase {
+public:
+ ServiceLog();
+ ServiceLog(size_t maxNum);
+
+ void add(const String8 &log);
+ String8 toString(const char *linePrefix = NULL) const;
+
+private:
+ size_t mMaxNum;
+ mutable Mutex mLock;
+ RingBuffer<String8> mLogs;
+
+ void addLine(const char *log, const char *prefix, String8 *result) const;
+};
+
+// ----------------------------------------------------------------------------
+
+}; // namespace android
+
+#endif // ANDROID_SERVICELOG_H
diff --git a/services/mediaresourcemanager/test/Android.mk b/services/mediaresourcemanager/test/Android.mk
index 228b62a..3b4ef0d 100644
--- a/services/mediaresourcemanager/test/Android.mk
+++ b/services/mediaresourcemanager/test/Android.mk
@@ -20,6 +20,35 @@
frameworks/av/include \
frameworks/av/services/mediaresourcemanager \
+LOCAL_CFLAGS += -Werror -Wall
+LOCAL_CLANG := true
+
+LOCAL_32_BIT_ONLY := true
+
+include $(BUILD_NATIVE_TEST)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := ServiceLog_test
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := \
+ ServiceLog_test.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ liblog \
+ libmedia \
+ libresourcemanagerservice \
+ libutils \
+
+LOCAL_C_INCLUDES := \
+ frameworks/av/include \
+ frameworks/av/services/mediaresourcemanager \
+
+LOCAL_CFLAGS += -Werror -Wall
+LOCAL_CLANG := true
+
LOCAL_32_BIT_ONLY := true
include $(BUILD_NATIVE_TEST)
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index bccc7fa..3d53f1f 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -180,17 +180,27 @@
EXPECT_TRUE(mService->mSupportsSecureWithNonSecureCodec);
Vector<MediaResourcePolicy> policies1;
- policies1.push_back(MediaResourcePolicy(String8(kPolicySupportsMultipleSecureCodecs), 1));
policies1.push_back(
- MediaResourcePolicy(String8(kPolicySupportsSecureWithNonSecureCodec), 0));
+ MediaResourcePolicy(
+ String8(kPolicySupportsMultipleSecureCodecs),
+ String8("true")));
+ policies1.push_back(
+ MediaResourcePolicy(
+ String8(kPolicySupportsSecureWithNonSecureCodec),
+ String8("false")));
mService->config(policies1);
EXPECT_TRUE(mService->mSupportsMultipleSecureCodecs);
EXPECT_FALSE(mService->mSupportsSecureWithNonSecureCodec);
Vector<MediaResourcePolicy> policies2;
- policies2.push_back(MediaResourcePolicy(String8(kPolicySupportsMultipleSecureCodecs), 0));
policies2.push_back(
- MediaResourcePolicy(String8(kPolicySupportsSecureWithNonSecureCodec), 1));
+ MediaResourcePolicy(
+ String8(kPolicySupportsMultipleSecureCodecs),
+ String8("false")));
+ policies2.push_back(
+ MediaResourcePolicy(
+ String8(kPolicySupportsSecureWithNonSecureCodec),
+ String8("true")));
mService->config(policies2);
EXPECT_FALSE(mService->mSupportsMultipleSecureCodecs);
EXPECT_TRUE(mService->mSupportsSecureWithNonSecureCodec);
diff --git a/services/mediaresourcemanager/test/ServiceLog_test.cpp b/services/mediaresourcemanager/test/ServiceLog_test.cpp
new file mode 100644
index 0000000..9172499
--- /dev/null
+++ b/services/mediaresourcemanager/test/ServiceLog_test.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ServiceLog_test"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include "ServiceLog.h"
+
+namespace android {
+
+class ServiceLogTest : public ::testing::Test {
+public:
+ ServiceLogTest() : mServiceLog(new ServiceLog(3)) {
+ }
+
+protected:
+ sp<ServiceLog> mServiceLog;
+};
+
+TEST_F(ServiceLogTest, addThenToString) {
+ String8 logString;
+
+ mServiceLog->add(String8("log1"));
+ logString = mServiceLog->toString();
+ EXPECT_TRUE(logString.contains("log1"));
+ ALOGV("toString:\n%s", logString.string());
+
+ static const char kTestLogPrefix[] = "testlogprefix: ";
+ logString = mServiceLog->toString(kTestLogPrefix);
+ EXPECT_TRUE(logString.contains(kTestLogPrefix));
+ EXPECT_TRUE(logString.contains("log1"));
+ ALOGV("toString:\n%s", logString.string());
+
+ mServiceLog->add(String8("log2"));
+ logString = mServiceLog->toString();
+ EXPECT_TRUE(logString.contains("log1"));
+ EXPECT_TRUE(logString.contains("log2"));
+ ALOGV("toString:\n%s", logString.string());
+
+ mServiceLog->add(String8("log3"));
+ logString = mServiceLog->toString();
+ EXPECT_TRUE(logString.contains("log1"));
+ EXPECT_TRUE(logString.contains("log2"));
+ EXPECT_TRUE(logString.contains("log3"));
+ ALOGV("toString:\n%s", logString.string());
+
+ mServiceLog->add(String8("log4"));
+ logString = mServiceLog->toString();
+ EXPECT_FALSE(logString.contains("log1"));
+ EXPECT_TRUE(logString.contains("log2"));
+ EXPECT_TRUE(logString.contains("log3"));
+ EXPECT_TRUE(logString.contains("log4"));
+ ALOGV("toString:\n%s", logString.string());
+
+ mServiceLog->add(String8("log5"));
+ logString = mServiceLog->toString();
+ EXPECT_FALSE(logString.contains("log1"));
+ EXPECT_FALSE(logString.contains("log2"));
+ EXPECT_TRUE(logString.contains("log3"));
+ EXPECT_TRUE(logString.contains("log4"));
+ EXPECT_TRUE(logString.contains("log5"));
+ ALOGV("toString:\n%s", logString.string());
+}
+
+} // namespace android
diff --git a/services/soundtrigger/SoundTriggerHwService.cpp b/services/soundtrigger/SoundTriggerHwService.cpp
index 081aff7..9de6fe2 100644
--- a/services/soundtrigger/SoundTriggerHwService.cpp
+++ b/services/soundtrigger/SoundTriggerHwService.cpp
@@ -787,6 +787,7 @@
mHwDevice->stop_recognition(mHwDevice, model->mHandle);
// keep model in ACTIVE state so that event is processed by onCallbackEvent()
struct sound_trigger_phrase_recognition_event phraseEvent;
+ memset(&phraseEvent, 0, sizeof(struct sound_trigger_phrase_recognition_event));
switch (model->mType) {
case SOUND_MODEL_TYPE_KEYPHRASE:
phraseEvent.num_phrases = model->mConfig.num_phrases;