Merge "Camera: add buffer freed notification for output streams" into oc-dev
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index bf9904c..c6c35ef 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -240,6 +240,14 @@
c->releaseRecordingFrameHandle(handle);
}
+void Camera::releaseRecordingFrameHandleBatch(
+ const std::vector<native_handle_t*> handles) {
+ ALOGV("releaseRecordingFrameHandleBatch");
+ sp <::android::hardware::ICamera> c = mCamera;
+ if (c == 0) return;
+ c->releaseRecordingFrameHandleBatch(handles);
+}
+
// get preview state
bool Camera::previewEnabled()
{
@@ -418,6 +426,37 @@
}
}
+void Camera::recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<nsecs_t>& timestamps,
+ const std::vector<native_handle_t*>& handles)
+{
+ // If recording proxy listener is registered, forward the frame and return.
+ // The other listener (mListener) is ignored because the receiver needs to
+ // call releaseRecordingFrameHandle.
+ sp<ICameraRecordingProxyListener> proxylistener;
+ {
+ Mutex::Autolock _l(mLock);
+ proxylistener = mRecordingProxyListener;
+ }
+ if (proxylistener != NULL) {
+ proxylistener->recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
+ return;
+ }
+
+ sp<CameraListener> listener;
+ {
+ Mutex::Autolock _l(mLock);
+ listener = mListener;
+ }
+
+ if (listener != NULL) {
+ listener->postRecordingFrameHandleTimestampBatch(timestamps, handles);
+ } else {
+ ALOGW("No listener was set. Drop a batch of recording frames.");
+ releaseRecordingFrameHandleBatch(handles);
+ }
+}
+
sp<ICameraRecordingProxy> Camera::getRecordingProxy() {
ALOGV("getProxy");
return new RecordingProxy(this);
@@ -448,6 +487,12 @@
mCamera->releaseRecordingFrameHandle(handle);
}
+void Camera::RecordingProxy::releaseRecordingFrameHandleBatch(
+ const std::vector<native_handle_t*>& handles) {
+ ALOGV("RecordingProxy::releaseRecordingFrameHandleBatch");
+ mCamera->releaseRecordingFrameHandleBatch(handles);
+}
+
Camera::RecordingProxy::RecordingProxy(const sp<Camera>& camera)
{
mCamera = camera;
diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp
index 2bf956d..f0945c7 100644
--- a/camera/ICamera.cpp
+++ b/camera/ICamera.cpp
@@ -55,6 +55,7 @@
SET_VIDEO_BUFFER_MODE,
SET_VIDEO_BUFFER_TARGET,
RELEASE_RECORDING_FRAME_HANDLE,
+ RELEASE_RECORDING_FRAME_HANDLE_BATCH,
};
class BpCamera: public BpInterface<ICamera>
@@ -172,6 +173,24 @@
native_handle_delete(handle);
}
+ void releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
+ ALOGV("releaseRecordingFrameHandleBatch");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+ uint32_t n = handles.size();
+ data.writeUint32(n);
+ for (auto& handle : handles) {
+ data.writeNativeHandle(handle);
+ }
+ remote()->transact(RELEASE_RECORDING_FRAME_HANDLE_BATCH, data, &reply);
+
+ // Close the native handles because the camera received dup copies.
+ for (auto& handle : handles) {
+ native_handle_close(handle);
+ native_handle_delete(handle);
+ }
+ }
+
status_t setVideoBufferMode(int32_t videoBufferMode)
{
ALOGV("setVideoBufferMode: %d", videoBufferMode);
@@ -378,6 +397,19 @@
releaseRecordingFrameHandle(data.readNativeHandle());
return NO_ERROR;
} break;
+ case RELEASE_RECORDING_FRAME_HANDLE_BATCH: {
+ ALOGV("RELEASE_RECORDING_FRAME_HANDLE_BATCH");
+ CHECK_INTERFACE(ICamera, data, reply);
+ // releaseRecordingFrameHandleBatch will be responsible for closing the native handles.
+ uint32_t n = data.readUint32();
+ std::vector<native_handle_t*> handles;
+ handles.reserve(n);
+ for (uint32_t i = 0; i < n; i++) {
+ handles.push_back(data.readNativeHandle());
+ }
+ releaseRecordingFrameHandleBatch(handles);
+ return NO_ERROR;
+ } break;
case SET_VIDEO_BUFFER_MODE: {
ALOGV("SET_VIDEO_BUFFER_MODE");
CHECK_INTERFACE(ICamera, data, reply);
diff --git a/camera/ICameraClient.cpp b/camera/ICameraClient.cpp
index 1b6fac4..7e6297c 100644
--- a/camera/ICameraClient.cpp
+++ b/camera/ICameraClient.cpp
@@ -32,6 +32,7 @@
DATA_CALLBACK,
DATA_CALLBACK_TIMESTAMP,
RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP,
+ RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH,
};
class BpCameraClient: public BpInterface<ICameraClient>
@@ -91,6 +92,29 @@
remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP, data, &reply,
IBinder::FLAG_ONEWAY);
}
+
+ void recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<nsecs_t>& timestamps,
+ const std::vector<native_handle_t*>& handles) {
+ ALOGV("recordingFrameHandleCallbackTimestampBatch");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICameraClient::getInterfaceDescriptor());
+ uint32_t n = timestamps.size();
+ if (n != handles.size()) {
+ ALOGE("%s: size of timestamps(%zu) and handles(%zu) mismatch!",
+ __FUNCTION__, timestamps.size(), handles.size());
+ return;
+ }
+ data.writeUint32(n);
+ for (auto ts : timestamps) {
+ data.writeInt64(ts);
+ }
+ for (auto& handle : handles) {
+ data.writeNativeHandle(handle);
+ }
+ remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH, data, &reply,
+ IBinder::FLAG_ONEWAY);
+ }
};
IMPLEMENT_META_INTERFACE(CameraClient, "android.hardware.ICameraClient");
@@ -154,6 +178,41 @@
recordingFrameHandleCallbackTimestamp(timestamp, handle);
return NO_ERROR;
} break;
+ case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH: {
+ ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH");
+ CHECK_INTERFACE(ICameraClient, data, reply);
+ uint32_t n = 0;
+ status_t res = data.readUint32(&n);
+ if (res != OK) {
+ ALOGE("%s: Failed to read batch size: %s (%d)", __FUNCTION__, strerror(-res), res);
+ return BAD_VALUE;
+ }
+ std::vector<nsecs_t> timestamps;
+ std::vector<native_handle_t*> handles;
+ timestamps.resize(n);
+ handles.reserve(n);
+ for (uint32_t i = 0; i < n; i++) {
+ res = data.readInt64(&timestamps[i]);
+ if (res != OK) {
+ ALOGE("%s: Failed to read timestamp[%d]: %s (%d)",
+ __FUNCTION__, i, strerror(-res), res);
+ return BAD_VALUE;
+ }
+ }
+ for (uint32_t i = 0; i < n; i++) {
+ native_handle_t* handle = data.readNativeHandle();
+ if (handle == nullptr) {
+ ALOGE("%s: Received a null native handle at handles[%d]",
+ __FUNCTION__, i);
+ return BAD_VALUE;
+ }
+ handles.push_back(handle);
+ }
+
+ // The native handles will be freed in BpCamera::releaseRecordingFrameHandleBatch.
+ recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
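The batch transaction marshals a frame count, then all timestamps, then all native handles; the receiver is expected to hand every handle back through ICamera::releaseRecordingFrameHandleBatch, whose proxy closes and deletes the caller-side handles once the transaction returns (the camera service works on dup copies). A minimal sketch of a receiver, with hypothetical names (MyCameraClient, consumeFrame) and assuming mCamera holds the originating sp<hardware::ICamera>, illustrative only:

    void MyCameraClient::recordingFrameHandleCallbackTimestampBatch(
            const std::vector<nsecs_t>& timestamps,
            const std::vector<native_handle_t*>& handles) {
        for (size_t i = 0; i < handles.size(); i++) {
            consumeFrame(timestamps[i], handles[i]);  // e.g. queue the frame to an encoder
        }
        // Return the whole batch in one binder call instead of one call per frame.
        mCamera->releaseRecordingFrameHandleBatch(handles);
    }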
diff --git a/camera/ICameraRecordingProxy.cpp b/camera/ICameraRecordingProxy.cpp
index c9f8b5c..bd6af75 100644
--- a/camera/ICameraRecordingProxy.cpp
+++ b/camera/ICameraRecordingProxy.cpp
@@ -32,6 +32,7 @@
STOP_RECORDING,
RELEASE_RECORDING_FRAME,
RELEASE_RECORDING_FRAME_HANDLE,
+ RELEASE_RECORDING_FRAME_HANDLE_BATCH,
};
@@ -82,6 +83,24 @@
native_handle_close(handle);
native_handle_delete(handle);
}
+
+ void releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
+ ALOGV("releaseRecordingFrameHandleBatch");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
+ uint32_t n = handles.size();
+ data.writeUint32(n);
+ for (auto& handle : handles) {
+ data.writeNativeHandle(handle);
+ }
+ remote()->transact(RELEASE_RECORDING_FRAME_HANDLE_BATCH, data, &reply);
+
+ // Close the native handles because the camera received dup copies.
+ for (auto& handle : handles) {
+ native_handle_close(handle);
+ native_handle_delete(handle);
+ }
+ }
};
IMPLEMENT_META_INTERFACE(CameraRecordingProxy, "android.hardware.ICameraRecordingProxy");
@@ -121,6 +140,31 @@
releaseRecordingFrameHandle(data.readNativeHandle());
return NO_ERROR;
} break;
+ case RELEASE_RECORDING_FRAME_HANDLE_BATCH: {
+ ALOGV("RELEASE_RECORDING_FRAME_HANDLE_BATCH");
+ CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
+ uint32_t n = 0;
+ status_t res = data.readUint32(&n);
+ if (res != OK) {
+ ALOGE("%s: Failed to read batch size: %s (%d)", __FUNCTION__, strerror(-res), res);
+ return BAD_VALUE;
+ }
+ std::vector<native_handle_t*> handles;
+ handles.reserve(n);
+ for (uint32_t i = 0; i < n; i++) {
+ native_handle_t* handle = data.readNativeHandle();
+ if (handle == nullptr) {
+ ALOGE("%s: Received a null native handle at handles[%d]",
+ __FUNCTION__, i);
+ return BAD_VALUE;
+ }
+ handles.push_back(handle);
+ }
+
+ // releaseRecordingFrameHandleBatch will be responsible for closing the native handles.
+ releaseRecordingFrameHandleBatch(handles);
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/camera/ICameraRecordingProxyListener.cpp b/camera/ICameraRecordingProxyListener.cpp
index 8529d3e..c954241 100644
--- a/camera/ICameraRecordingProxyListener.cpp
+++ b/camera/ICameraRecordingProxyListener.cpp
@@ -28,6 +28,7 @@
enum {
DATA_CALLBACK_TIMESTAMP = IBinder::FIRST_CALL_TRANSACTION,
RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP,
+ RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH
};
class BpCameraRecordingProxyListener: public BpInterface<ICameraRecordingProxyListener>
@@ -62,6 +63,36 @@
native_handle_close(handle);
native_handle_delete(handle);
}
+
+ void recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<nsecs_t>& timestamps,
+ const std::vector<native_handle_t*>& handles) {
+ ALOGV("recordingFrameHandleCallbackTimestampBatch");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
+
+ uint32_t n = timestamps.size();
+ if (n != handles.size()) {
+ ALOGE("%s: size of timestamps(%zu) and handles(%zu) mismatch!",
+ __FUNCTION__, timestamps.size(), handles.size());
+ return;
+ }
+ data.writeUint32(n);
+ for (auto ts : timestamps) {
+ data.writeInt64(ts);
+ }
+ for (auto& handle : handles) {
+ data.writeNativeHandle(handle);
+ }
+ remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH, data, &reply,
+ IBinder::FLAG_ONEWAY);
+
+ // The native handles are dupped in ICameraClient so we need to free them here.
+ for (auto& handle : handles) {
+ native_handle_close(handle);
+ native_handle_delete(handle);
+ }
+ }
};
IMPLEMENT_META_INTERFACE(CameraRecordingProxyListener, "android.hardware.ICameraRecordingProxyListener");
@@ -101,6 +132,41 @@
recordingFrameHandleCallbackTimestamp(timestamp, handle);
return NO_ERROR;
} break;
+ case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH: {
+ ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP_BATCH");
+ CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
+ uint32_t n = 0;
+ status_t res = data.readUint32(&n);
+ if (res != OK) {
+ ALOGE("%s: Failed to read batch size: %s (%d)", __FUNCTION__, strerror(-res), res);
+ return BAD_VALUE;
+ }
+ std::vector<nsecs_t> timestamps;
+ std::vector<native_handle_t*> handles;
+ timestamps.resize(n);
+ handles.reserve(n);
+ for (uint32_t i = 0; i < n; i++) {
+ res = data.readInt64(&timestamps[i]);
+ if (res != OK) {
+ ALOGE("%s: Failed to read timestamp[%d]: %s (%d)",
+ __FUNCTION__, i, strerror(-res), res);
+ return BAD_VALUE;
+ }
+ }
+ for (uint32_t i = 0; i < n; i++) {
+ native_handle_t* handle = data.readNativeHandle();
+ if (handle == nullptr) {
+ ALOGE("%s: Received a null native handle at handles[%d]",
+ __FUNCTION__, i);
+ return BAD_VALUE;
+ }
+ handles.push_back(handle);
+ }
+ // The native handles will be freed in
+ // BpCameraRecordingProxy::releaseRecordingFrameHandleBatch.
+ recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 57dc228..430aa1c 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -44,6 +44,9 @@
camera_frame_metadata_t *metadata) = 0;
virtual void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) = 0;
virtual void postRecordingFrameHandleTimestamp(nsecs_t timestamp, native_handle_t* handle) = 0;
+ virtual void postRecordingFrameHandleTimestampBatch(
+ const std::vector<nsecs_t>& timestamps,
+ const std::vector<native_handle_t*>& handles) = 0;
};
class Camera;
@@ -118,6 +121,10 @@
// release a recording frame handle
void releaseRecordingFrameHandle(native_handle_t *handle);
+ // release a batch of recording frame handles
+ void releaseRecordingFrameHandleBatch(
+ const std::vector<native_handle_t*> handles);
+
// autoFocus - status returned from callback
status_t autoFocus();
@@ -166,6 +173,10 @@
camera_frame_metadata_t *metadata);
virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
virtual void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle);
+ virtual void recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<nsecs_t>& timestamps,
+ const std::vector<native_handle_t*>& handles);
+
class RecordingProxy : public BnCameraRecordingProxy
{
@@ -177,6 +188,8 @@
virtual void stopRecording();
virtual void releaseRecordingFrame(const sp<IMemory>& mem);
virtual void releaseRecordingFrameHandle(native_handle_t* handle);
+ virtual void releaseRecordingFrameHandleBatch(
+ const std::vector<native_handle_t*>& handles);
private:
sp<Camera> mCamera;
diff --git a/camera/include/camera/ICameraRecordingProxy.h b/camera/include/camera/ICameraRecordingProxy.h
index cb6824a..02af2f3 100644
--- a/camera/include/camera/ICameraRecordingProxy.h
+++ b/camera/include/camera/ICameraRecordingProxy.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_H
#define ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_H
+#include <vector>
#include <binder/IInterface.h>
#include <cutils/native_handle.h>
#include <utils/RefBase.h>
@@ -85,6 +86,8 @@
virtual void stopRecording() = 0;
virtual void releaseRecordingFrame(const sp<IMemory>& mem) = 0;
virtual void releaseRecordingFrameHandle(native_handle_t *handle) = 0;
+ virtual void releaseRecordingFrameHandleBatch(
+ const std::vector<native_handle_t*>& handles) = 0;
};
// ----------------------------------------------------------------------------
diff --git a/camera/include/camera/ICameraRecordingProxyListener.h b/camera/include/camera/ICameraRecordingProxyListener.h
index 1fee5b9..da03c56 100644
--- a/camera/include/camera/ICameraRecordingProxyListener.h
+++ b/camera/include/camera/ICameraRecordingProxyListener.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_LISTENER_H
#define ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_LISTENER_H
+#include <vector>
#include <binder/IInterface.h>
#include <cutils/native_handle.h>
#include <stdint.h>
@@ -38,6 +39,10 @@
virtual void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
native_handle_t* handle) = 0;
+
+ virtual void recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<nsecs_t>& timestamps,
+ const std::vector<native_handle_t*>& handles) = 0;
};
// ----------------------------------------------------------------------------
diff --git a/camera/include/camera/android/hardware/ICamera.h b/camera/include/camera/android/hardware/ICamera.h
index 315669e..80823d6 100644
--- a/camera/include/camera/android/hardware/ICamera.h
+++ b/camera/include/camera/android/hardware/ICamera.h
@@ -101,6 +101,11 @@
// ICameraClient::recordingFrameHandleCallbackTimestamp.
virtual void releaseRecordingFrameHandle(native_handle_t *handle) = 0;
+ // Release a batch of recording frame handles that were received via
+ // ICameraClient::recordingFrameHandleCallbackTimestampBatch.
+ virtual void releaseRecordingFrameHandleBatch(
+ const std::vector<native_handle_t*>& handles) = 0;
+
// auto focus
virtual status_t autoFocus() = 0;
diff --git a/camera/include/camera/android/hardware/ICameraClient.h b/camera/include/camera/android/hardware/ICameraClient.h
index f6ee311..8e46d17 100644
--- a/camera/include/camera/android/hardware/ICameraClient.h
+++ b/camera/include/camera/android/hardware/ICameraClient.h
@@ -41,6 +41,13 @@
// ICamera::releaseRecordingFrameHandle to release the frame handle.
virtual void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
native_handle_t* handle) = 0;
+
+ // Invoked to send a batch of recording frame handles with timestamps. Call
+ // ICamera::releaseRecordingFrameHandleBatch to release the frame handles.
+ // The sizes of timestamps and handles must match.
+ virtual void recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<nsecs_t>& timestamps,
+ const std::vector<native_handle_t*>& handles) = 0;
};
// ----------------------------------------------------------------------------
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 6c91fdc..ecca354 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -51,6 +51,9 @@
const sp<IMemory>&) override {};
void recordingFrameHandleCallbackTimestamp(nsecs_t,
native_handle_t*) override {};
+ void recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<nsecs_t>&,
+ const std::vector<native_handle_t*>&) override {};
status_t waitForPreviewStart();
status_t waitForEvent(Mutex &mutex, Condition &condition, bool &flag);
diff --git a/drm/libmediadrm/Android.mk b/drm/libmediadrm/Android.mk
index 590622e..5b56501 100644
--- a/drm/libmediadrm/Android.mk
+++ b/drm/libmediadrm/Android.mk
@@ -18,6 +18,7 @@
LOCAL_SRC_FILES += \
CasImpl.cpp \
DescramblerImpl.cpp \
+ DrmPluginPath.cpp \
DrmSessionManager.cpp \
ICrypto.cpp \
IDrm.cpp \
diff --git a/drm/libmediadrm/Crypto.cpp b/drm/libmediadrm/Crypto.cpp
index d93dad6..a5d7346 100644
--- a/drm/libmediadrm/Crypto.cpp
+++ b/drm/libmediadrm/Crypto.cpp
@@ -22,6 +22,7 @@
#include <binder/IMemory.h>
#include <media/Crypto.h>
+#include <media/DrmPluginPath.h>
#include <media/hardware/CryptoAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AString.h>
@@ -102,7 +103,7 @@
}
// no luck, have to search
- String8 dirPath("/vendor/lib/mediadrm");
+ String8 dirPath(getDrmPluginPath());
String8 pluginPath;
DIR* pDir = opendir(dirPath.string());
diff --git a/drm/libmediadrm/Drm.cpp b/drm/libmediadrm/Drm.cpp
index e3176e3..1004eb8 100644
--- a/drm/libmediadrm/Drm.cpp
+++ b/drm/libmediadrm/Drm.cpp
@@ -21,6 +21,7 @@
#include <dirent.h>
#include <dlfcn.h>
+#include <media/DrmPluginPath.h>
#include <media/DrmSessionClientInterface.h>
#include <media/DrmSessionManager.h>
#include <media/Drm.h>
@@ -220,7 +221,7 @@
}
// no luck, have to search
- String8 dirPath("/vendor/lib/mediadrm");
+ String8 dirPath(getDrmPluginPath());
DIR* pDir = opendir(dirPath.string());
if (pDir == NULL) {
diff --git a/drm/libmediadrm/DrmPluginPath.cpp b/drm/libmediadrm/DrmPluginPath.cpp
new file mode 100644
index 0000000..c760825
--- /dev/null
+++ b/drm/libmediadrm/DrmPluginPath.cpp
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DrmPluginPath"
+#include <utils/Log.h>
+
+#include <cutils/properties.h>
+#include <media/DrmPluginPath.h>
+
+namespace android {
+
+const char* getDrmPluginPath() {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("drm.64bit.enabled", value, NULL) == 0) {
+ return "/vendor/lib/mediadrm";
+ } else {
+ return "/vendor/lib64/mediadrm";
+ }
+}
+
+} // namespace android
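getDrmPluginPath() keys off the drm.64bit.enabled system property: when the property is unset it returns the 32-bit /vendor/lib/mediadrm, otherwise /vendor/lib64/mediadrm. The Crypto.cpp and Drm.cpp hunks above swap their hard-coded path for this call; the surrounding discovery loop stays as before, roughly (sketch of the call-site pattern, per-plugin loading elided):

    String8 dirPath(getDrmPluginPath());
    DIR* pDir = opendir(dirPath.string());
    if (pDir != NULL) {
        struct dirent* pEntry;
        while ((pEntry = readdir(pDir)) != NULL) {
            // ... try loading the plugin library named pEntry->d_name under dirPath ...
        }
        closedir(pDir);
    }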
diff --git a/include/media/DrmPluginPath.h b/include/media/DrmPluginPath.h
new file mode 120000
index 0000000..06b12cf
--- /dev/null
+++ b/include/media/DrmPluginPath.h
@@ -0,0 +1 @@
+../../media/libmedia/include/DrmPluginPath.h
\ No newline at end of file
diff --git a/media/libaaudio/examples/write_sine/src/write_sine.cpp b/media/libaaudio/examples/write_sine/src/write_sine.cpp
index 511fe94..80b6252 100644
--- a/media/libaaudio/examples/write_sine/src/write_sine.cpp
+++ b/media/libaaudio/examples/write_sine/src/write_sine.cpp
@@ -27,6 +27,7 @@
#define NUM_SECONDS 10
#define NANOS_PER_MICROSECOND ((int64_t)1000)
#define NANOS_PER_MILLISECOND (NANOS_PER_MICROSECOND * 1000)
+#define NANOS_PER_SECOND (NANOS_PER_MILLISECOND * 1000)
static const char *getSharingModeText(aaudio_sharing_mode_t mode) {
const char *modeText = "unknown";
@@ -43,6 +44,15 @@
return modeText;
}
+static int64_t getNanoseconds(clockid_t clockId = CLOCK_MONOTONIC) {
+ struct timespec time;
+ int result = clock_gettime(clockId, &time);
+ if (result < 0) {
+ return -errno;
+ }
+ return (time.tv_sec * NANOS_PER_SECOND) + time.tv_nsec;
+}
+
int main(int argc, char **argv)
{
(void)argc; // unused
@@ -56,7 +66,8 @@
const aaudio_audio_format_t requestedDataFormat = AAUDIO_FORMAT_PCM_I16;
aaudio_audio_format_t actualDataFormat = AAUDIO_FORMAT_PCM_I16;
- const aaudio_sharing_mode_t requestedSharingMode = AAUDIO_SHARING_MODE_EXCLUSIVE;
+ //const aaudio_sharing_mode_t requestedSharingMode = AAUDIO_SHARING_MODE_EXCLUSIVE;
+ const aaudio_sharing_mode_t requestedSharingMode = AAUDIO_SHARING_MODE_SHARED;
aaudio_sharing_mode_t actualSharingMode = AAUDIO_SHARING_MODE_SHARED;
AAudioStreamBuilder *aaudioBuilder = nullptr;
@@ -172,6 +183,26 @@
goto finish;
}
framesLeft -= actual;
+
+ // Use timestamp to estimate latency.
+ {
+ int64_t presentationFrame;
+ int64_t presentationTime;
+ result = AAudioStream_getTimestamp(aaudioStream,
+ CLOCK_MONOTONIC,
+ &presentationFrame,
+ &presentationTime
+ );
+ if (result == AAUDIO_OK) {
+ int64_t elapsedNanos = getNanoseconds() - presentationTime;
+ int64_t elapsedFrames = actualSampleRate * elapsedNanos / NANOS_PER_SECOND;
+ int64_t currentFrame = presentationFrame + elapsedFrames;
+ int64_t framesWritten = AAudioStream_getFramesWritten(aaudioStream);
+ int64_t estimatedLatencyFrames = framesWritten - currentFrame;
+ int64_t estimatedLatencyMillis = estimatedLatencyFrames * 1000 / actualSampleRate;
+ printf("estimatedLatencyMillis %d\n", (int)estimatedLatencyMillis);
+ }
+ }
}
xRunCount = AAudioStream_getXRunCount(aaudioStream);
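The added block estimates output latency by extrapolating the last reported presentation position forward by the elapsed wall-clock time and comparing it against the frames written so far. A worked example with assumed numbers (not taken from the change): at actualSampleRate = 48000, if the timestamp reports presentationFrame = 100000 and 2 ms have elapsed since presentationTime, then elapsedFrames = 48000 * 2000000 / 1e9 = 96, currentFrame = 100096, and with framesWritten = 104896 the estimate is 4800 frames, i.e. 4800 * 1000 / 48000 = 100 ms.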
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index af0593d..6ac8554 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -50,7 +50,6 @@
virtual aaudio_result_t requestFlush() = 0;
virtual aaudio_result_t requestStop() = 0;
- // TODO use aaudio_clockid_t all the way down to AudioClock
virtual aaudio_result_t getTimestamp(clockid_t clockId,
int64_t *framePosition,
int64_t *timeNanoseconds) = 0;
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index dd040a0..d380eb8 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -25,6 +25,7 @@
#include "AudioClock.h"
#include "AudioStreamRecord.h"
+#include "utility/AAudioUtilities.h"
using namespace android;
using namespace aaudio;
@@ -224,5 +225,28 @@
return 192; // TODO add query to AudioRecord.cpp
}
-// TODO implement getTimestamp
-
+aaudio_result_t AudioStreamRecord::getTimestamp(clockid_t clockId,
+ int64_t *framePosition,
+ int64_t *timeNanoseconds) {
+ ExtendedTimestamp extendedTimestamp;
+ status_t status = mAudioRecord->getTimestamp(&extendedTimestamp);
+ if (status != NO_ERROR) {
+ return AAudioConvert_androidToAAudioResult(status);
+ }
+ // TODO Merge common code into AudioStreamLegacy after rebasing.
+ int timebase;
+ switch(clockId) {
+ case CLOCK_BOOTTIME:
+ timebase = ExtendedTimestamp::TIMEBASE_BOOTTIME;
+ break;
+ case CLOCK_MONOTONIC:
+ timebase = ExtendedTimestamp::TIMEBASE_MONOTONIC;
+ break;
+ default:
+ ALOGE("getTimestamp() - Unrecognized clock type %d", (int) clockId);
+ return AAUDIO_ERROR_UNEXPECTED_VALUE;
+ break;
+ }
+ status = extendedTimestamp.getBestTimestamp(framePosition, timeNanoseconds, timebase);
+ return AAudioConvert_androidToAAudioResult(status);
+}
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index c8d389b..4667f05 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -44,10 +44,8 @@
virtual aaudio_result_t requestStop() override;
virtual aaudio_result_t getTimestamp(clockid_t clockId,
- int64_t *framePosition,
- int64_t *timeNanoseconds) override {
- return AAUDIO_ERROR_UNIMPLEMENTED; // TODO
- }
+ int64_t *framePosition,
+ int64_t *timeNanoseconds) override;
virtual aaudio_result_t read(void *buffer,
int32_t numFrames,
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index e0a04c3..8bb6aee 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -20,11 +20,11 @@
#include <stdint.h>
#include <media/AudioTrack.h>
-
#include <aaudio/AAudio.h>
-#include "AudioClock.h"
-#include "AudioStreamTrack.h"
+#include "utility/AudioClock.h"
+#include "AudioStreamTrack.h"
+#include "utility/AAudioUtilities.h"
using namespace android;
using namespace aaudio;
@@ -292,3 +292,29 @@
}
return AudioStream::getFramesRead();
}
+
+aaudio_result_t AudioStreamTrack::getTimestamp(clockid_t clockId,
+ int64_t *framePosition,
+ int64_t *timeNanoseconds) {
+ ExtendedTimestamp extendedTimestamp;
+ status_t status = mAudioTrack->getTimestamp(&extendedTimestamp);
+ if (status != NO_ERROR) {
+ return AAudioConvert_androidToAAudioResult(status);
+ }
+ // TODO Merge common code into AudioStreamLegacy after rebasing.
+ int timebase;
+ switch(clockId) {
+ case CLOCK_BOOTTIME:
+ timebase = ExtendedTimestamp::TIMEBASE_BOOTTIME;
+ break;
+ case CLOCK_MONOTONIC:
+ timebase = ExtendedTimestamp::TIMEBASE_MONOTONIC;
+ break;
+ default:
+ ALOGE("getTimestamp() - Unrecognized clock type %d", (int) clockId);
+ return AAUDIO_ERROR_UNEXPECTED_VALUE;
+ break;
+ }
+ status = extendedTimestamp.getBestTimestamp(framePosition, timeNanoseconds, timebase);
+ return AAudioConvert_androidToAAudioResult(status);
+}
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 1de07ce..7a53022 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -47,9 +47,7 @@
virtual aaudio_result_t getTimestamp(clockid_t clockId,
int64_t *framePosition,
- int64_t *timeNanoseconds) override {
- return AAUDIO_ERROR_UNIMPLEMENTED; // TODO call getTimestamp(ExtendedTimestamp *timestamp);
- }
+ int64_t *timeNanoseconds) override;
virtual aaudio_result_t write(const void *buffer,
int32_t numFrames,
diff --git a/media/libmedia/include/DrmPluginPath.h b/media/libmedia/include/DrmPluginPath.h
new file mode 100644
index 0000000..51ba26e
--- /dev/null
+++ b/media/libmedia/include/DrmPluginPath.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DRM_PLUGIN_PATH_H_
+
+#define DRM_PLUGIN_PATH_H_
+
+namespace android {
+
+const char* getDrmPluginPath();
+
+} // namespace android
+
+#endif // DRM_PLUGIN_PATH_H_
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index f689ac9..95f378f 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -397,13 +397,13 @@
// Attempt to parse an float literal optionally surrounded by whitespace,
// returns true on success, false otherwise.
-static bool safe_strtof(const char *s, float *val) {
+static bool safe_strtod(const char *s, double *val) {
char *end;
// It is lame, but according to man page, we have to set errno to 0
- // before calling strtof().
+ // before calling strtod().
errno = 0;
- *val = strtof(s, &end);
+ *val = strtod(s, &end);
if (end == s || errno == ERANGE) {
return false;
@@ -706,13 +706,23 @@
return OK;
}
-status_t StagefrightRecorder::setParamCaptureFps(float fps) {
+status_t StagefrightRecorder::setParamCaptureFps(double fps) {
ALOGV("setParamCaptureFps: %.2f", fps);
- int64_t timeUs = (int64_t) (1000000.0 / fps + 0.5f);
+ constexpr int64_t k1E12 = 1000000000000ll;
+ int64_t fpsx1e12 = k1E12 * fps;
+ if (fpsx1e12 == 0) {
+ ALOGE("FPS is zero or too small");
+ return BAD_VALUE;
+ }
- // Not allowing time more than a day
- if (timeUs <= 0 || timeUs > 86400*1E6) {
+ // This does not overflow since 10^6 * 10^12 < 2^63
+ int64_t timeUs = 1000000ll * k1E12 / fpsx1e12;
+
+ // Not allowing time more than a day and a millisecond for error margin.
+ // Note: 1e12 / 86400 = 11574074.(074) and 1e18 / 11574074 = 86400000553;
+ // therefore 1 ms of margin should be sufficient.
+ if (timeUs <= 0 || timeUs > 86400001000ll) {
ALOGE("Time between frame capture (%lld) is out of range [0, 1 Day]", (long long)timeUs);
return BAD_VALUE;
}
@@ -846,8 +856,8 @@
return setParamCaptureFpsEnable(captureFpsEnable);
}
} else if (key == "time-lapse-fps") {
- float fps;
- if (safe_strtof(value.string(), &fps)) {
+ double fps;
+ if (safe_strtod(value.string(), &fps)) {
return setParamCaptureFps(fps);
}
} else {
@@ -2073,7 +2083,7 @@
mMaxFileSizeBytes = 0;
mTrackEveryTimeDurationUs = 0;
mCaptureFpsEnable = false;
- mCaptureFps = 0.0f;
+ mCaptureFps = 0.0;
mTimeBetweenCaptureUs = -1;
mCameraSourceTimeLapse = NULL;
mMetaDataStoredInVideoBuffers = kMetadataBufferTypeInvalid;
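setParamCaptureFps() now derives the frame interval through a 10^12 fixed-point intermediate so fractional time-lapse rates are handled in integer math. Checking the new formula with assumed inputs: fps = 0.1 gives fpsx1e12 = 1e11 and timeUs = 1e6 * 1e12 / 1e11 = 10,000,000 us (ten seconds between captures); fps = 240 gives 1e18 / 2.4e14 = 4166 us; and the slowest permitted rate of one frame per day (fpsx1e12 = 11574074) yields 86,400,000,553 us, which is why the cap is 86,400,001,000 us rather than exactly one day.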
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 38377d2..9a6c4da 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -122,7 +122,7 @@
int32_t mTotalBitRate;
bool mCaptureFpsEnable;
- float mCaptureFps;
+ double mCaptureFps;
int64_t mTimeBetweenCaptureUs;
sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
@@ -172,7 +172,7 @@
status_t setParamAudioSamplingRate(int32_t sampleRate);
status_t setParamAudioTimeScale(int32_t timeScale);
status_t setParamCaptureFpsEnable(int32_t timeLapseEnable);
- status_t setParamCaptureFps(float fps);
+ status_t setParamCaptureFps(double fps);
status_t setParamVideoEncodingBitRate(int32_t bitRate);
status_t setParamVideoIFramesInterval(int32_t seconds);
status_t setParamVideoEncoderProfile(int32_t profile);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
index cb668e4..9a4bc8c 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
@@ -294,6 +294,9 @@
return;
}
+ if (streamErr != ERROR_END_OF_STREAM) {
+ handleError(streamErr);
+ }
mReachedEOS = true;
if (mRenderer != NULL) {
mRenderer->queueEOS(true /* audio */, ERROR_END_OF_STREAM);
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 0fe44eb..a569f5d 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -58,6 +58,10 @@
virtual void postRecordingFrameHandleTimestamp(nsecs_t timestamp, native_handle_t* handle);
+ virtual void postRecordingFrameHandleTimestampBatch(
+ const std::vector<nsecs_t>& timestamps,
+ const std::vector<native_handle_t*>& handles);
+
protected:
virtual ~CameraSourceListener();
@@ -110,6 +114,20 @@
}
}
+void CameraSourceListener::postRecordingFrameHandleTimestampBatch(
+ const std::vector<nsecs_t>& timestamps,
+ const std::vector<native_handle_t*>& handles) {
+ sp<CameraSource> source = mSource.promote();
+ if (source.get() != nullptr) {
+ int n = timestamps.size();
+ std::vector<nsecs_t> modifiedTimestamps(n);
+ for (int i = 0; i < n; i++) {
+ modifiedTimestamps[i] = timestamps[i] / 1000;
+ }
+ source->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
+ }
+}
+
static int32_t getColorFormat(const char* colorFormat) {
if (!colorFormat) {
ALOGE("Invalid color format");
@@ -952,10 +970,35 @@
}
if (handle != nullptr) {
- // Frame contains a VideoNativeHandleMetadata. Send the handle back to camera.
- releaseRecordingFrameHandle(handle);
- mMemoryBases.push_back(frame);
- mMemoryBaseAvailableCond.signal();
+ uint32_t batchSize = 0;
+ {
+ Mutex::Autolock autoLock(mBatchLock);
+ if (mInflightBatchSizes.size() > 0) {
+ batchSize = mInflightBatchSizes[0];
+ }
+ }
+ if (batchSize == 0) { // return buffers one by one
+ // Frame contains a VideoNativeHandleMetadata. Send the handle back to camera.
+ releaseRecordingFrameHandle(handle);
+ mMemoryBases.push_back(frame);
+ mMemoryBaseAvailableCond.signal();
+ } else { // Group buffers in batch then return
+ Mutex::Autolock autoLock(mBatchLock);
+ mInflightReturnedHandles.push_back(handle);
+ mInflightReturnedMemorys.push_back(frame);
+ if (mInflightReturnedHandles.size() == batchSize) {
+ releaseRecordingFrameHandleBatch(mInflightReturnedHandles);
+
+ mInflightBatchSizes.pop_front();
+ mInflightReturnedHandles.clear();
+ for (const auto& mem : mInflightReturnedMemorys) {
+ mMemoryBases.push_back(mem);
+ mMemoryBaseAvailableCond.signal();
+ }
+ mInflightReturnedMemorys.clear();
+ }
+ }
+
} else if (mCameraRecordingProxy != nullptr) {
// mCamera is created by application. Return the frame back to camera via camera
// recording proxy.
@@ -1126,6 +1169,21 @@
}
}
+void CameraSource::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
+ if (mCameraRecordingProxy != nullptr) {
+ mCameraRecordingProxy->releaseRecordingFrameHandleBatch(handles);
+ } else if (mCamera != nullptr) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ mCamera->releaseRecordingFrameHandleBatch(handles);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ } else {
+ for (auto& handle : handles) {
+ native_handle_close(handle);
+ native_handle_delete(handle);
+ }
+ }
+}
+
void CameraSource::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
native_handle_t* handle) {
ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
@@ -1163,6 +1221,62 @@
mFrameAvailableCondition.signal();
}
+void CameraSource::recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<int64_t>& timestampsUs,
+ const std::vector<native_handle_t*>& handles) {
+ size_t n = timestampsUs.size();
+ if (n != handles.size()) {
+ ALOGE("%s: timestampsUs(%zu) and handles(%zu) size mismatch!",
+ __FUNCTION__, timestampsUs.size(), handles.size());
+ return;
+ }
+
+ Mutex::Autolock autoLock(mLock);
+ int batchSize = 0;
+ for (size_t i = 0; i < n; i++) {
+ int64_t timestampUs = timestampsUs[i];
+ native_handle_t* handle = handles[i];
+
+ ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
+ if (handle == nullptr) continue;
+
+ if (shouldSkipFrameLocked(timestampUs)) {
+ releaseRecordingFrameHandle(handle);
+ continue;
+ }
+
+ bool dropped = false;
+ while (mMemoryBases.empty()) {
+ if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
+ TIMED_OUT) {
+ ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
+ releaseRecordingFrameHandle(handle);
+ dropped = true;
+ break;
+ }
+ }
+ if (dropped) continue;
+ ++batchSize;
+ ++mNumFramesReceived;
+ sp<IMemory> data = *mMemoryBases.begin();
+ mMemoryBases.erase(mMemoryBases.begin());
+
+ // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
+ VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->pointer());
+ metadata->eType = kMetadataBufferTypeNativeHandleSource;
+ metadata->pHandle = handle;
+
+ mFramesReceived.push_back(data);
+ int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
+ mFrameTimes.push_back(timeUs);
+ ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
+
+ }
+ if (batchSize > 0) {
+ Mutex::Autolock autoLock(mBatchLock);
+ mInflightBatchSizes.push_back(batchSize);
+ }
+ for (int i = 0; i < batchSize; i++) {
+ mFrameAvailableCondition.signal();
+ }
+}
+
CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
const sp<CameraSource>& cameraSource) {
mConsumer = consumer;
@@ -1279,6 +1393,17 @@
mSource->recordingFrameHandleCallbackTimestamp(timestamp / 1000, handle);
}
+void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<int64_t>& timestampsUs,
+ const std::vector<native_handle_t*>& handles) {
+ int n = timestampsUs.size();
+ std::vector<nsecs_t> modifiedTimestamps(n);
+ for (int i = 0; i < n; i++) {
+ modifiedTimestamps[i] = timestampsUs[i] / 1000;
+ }
+ mSource->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
+}
+
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
ALOGI("Camera recording proxy died");
}
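CameraSource now records, under mBatchLock, how many frames each incoming batch carried (mInflightBatchSizes) and withholds returned handles until a whole batch has come back, so the downstream release is also a single call per batch. The pattern, reduced to a self-contained sketch with simplified types, no locking, and plain ints standing in for native_handle_t* (illustrative only):

    #include <cstdio>
    #include <deque>
    #include <vector>

    struct BatchedReturner {
        std::deque<size_t> inflightBatchSizes;  // one entry per batch received
        std::vector<int> returned;              // handles waiting to go back

        void onBatchReceived(size_t frameCount) { inflightBatchSizes.push_back(frameCount); }

        void onFrameReturned(int handle) {
            if (inflightBatchSizes.empty()) {   // no batch in flight: release one by one
                std::printf("release handle %d\n", handle);
                return;
            }
            returned.push_back(handle);
            if (returned.size() == inflightBatchSizes.front()) {
                std::printf("release batch of %zu handles\n", returned.size());
                inflightBatchSizes.pop_front();
                returned.clear();
            }
        }
    };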
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 390c556..970526a 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -315,6 +315,17 @@
CameraSource::recordingFrameHandleCallbackTimestamp(timestampUs, handle);
}
+void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<int64_t>& timestampsUs,
+ const std::vector<native_handle_t*>& handles) {
+ ALOGV("recordingFrameHandleCallbackTimestampBatch");
+ int n = timestampsUs.size();
+ for (int i = 0; i < n; i++) {
+ // Don't do batching for CameraSourceTimeLapse for now
+ recordingFrameHandleCallbackTimestamp(timestampsUs[i], handles[i]);
+ }
+}
+
void CameraSourceTimeLapse::processBufferQueueFrame(BufferItem& buffer) {
ALOGV("processBufferQueueFrame");
int64_t timestampUs = buffer.mTimestamp / 1000;
diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp
index 82e7a26..22df522 100644
--- a/media/libstagefright/MP3Extractor.cpp
+++ b/media/libstagefright/MP3Extractor.cpp
@@ -538,7 +538,7 @@
buffer->release();
buffer = NULL;
- return ERROR_END_OF_STREAM;
+ return (n < 0 ? n : ERROR_END_OF_STREAM);
}
uint32_t header = U32_AT((const uint8_t *)buffer->data());
@@ -582,7 +582,7 @@
buffer->release();
buffer = NULL;
- return ERROR_END_OF_STREAM;
+ return (n < 0 ? n : ERROR_END_OF_STREAM);
}
buffer->set_range(0, frame_size);
diff --git a/media/libstagefright/include/CameraSource.h b/media/libstagefright/include/CameraSource.h
index c604f2d..aa56d27 100644
--- a/media/libstagefright/include/CameraSource.h
+++ b/media/libstagefright/include/CameraSource.h
@@ -18,6 +18,7 @@
#define CAMERA_SOURCE_H_
+#include <deque>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
#include <camera/android/hardware/ICamera.h>
@@ -141,6 +142,9 @@
const sp<IMemory> &data);
virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
native_handle_t* handle);
+ virtual void recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<int64_t>& timestampsUs,
+ const std::vector<native_handle_t*>& handles);
private:
sp<CameraSource> mSource;
@@ -213,6 +217,8 @@
virtual status_t startCameraRecording();
virtual void releaseRecordingFrame(const sp<IMemory>& frame);
virtual void releaseRecordingFrameHandle(native_handle_t* handle);
+ // StagefrightRecorder does not use this for now.
+ virtual void releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles);
// Returns true if need to skip the current frame.
// Called from dataCallbackTimestamp.
@@ -227,6 +233,10 @@
virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
native_handle_t* handle);
+ virtual void recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<int64_t>& timestampsUs,
+ const std::vector<native_handle_t*>& handles);
+
// Process a buffer item received in BufferQueueListener.
virtual void processBufferQueueFrame(BufferItem& buffer);
@@ -271,6 +281,13 @@
KeyedVector<ANativeWindowBuffer*, BufferItem> mReceivedBufferItemMap;
sp<BufferQueueListener> mBufferQueueListener;
+ Mutex mBatchLock; // protecting access to mInflightXXXXX members below
+ // Start of members protected by mBatchLock
+ std::deque<uint32_t> mInflightBatchSizes;
+ std::vector<native_handle_t*> mInflightReturnedHandles;
+ std::vector<sp<IMemory>> mInflightReturnedMemorys;
+ // End of members protected by mBatchLock
+
void releaseQueuedFrames();
void releaseOneRecordingFrame(const sp<IMemory>& frame);
void createVideoBufferMemoryHeap(size_t size, uint32_t bufferCount);
diff --git a/media/libstagefright/include/CameraSourceTimeLapse.h b/media/libstagefright/include/CameraSourceTimeLapse.h
index 871c1d9..b066f9a 100644
--- a/media/libstagefright/include/CameraSourceTimeLapse.h
+++ b/media/libstagefright/include/CameraSourceTimeLapse.h
@@ -147,12 +147,23 @@
// In the video camera case calls skipFrameAndModifyTimeStamp() to modify
// timestamp and set mSkipCurrentFrame.
- // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp()
+ // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp() or
+ // CameraSource::recordingFrameHandleCallbackTimestampBatch()
// This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode when
// the metadata is VideoNativeHandleMetadata.
virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
native_handle_t* handle);
+ // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
+ // timestamp and set mSkipCurrentFrame.
+ // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp() or
+ // CameraSource::recordingFrameHandleCallbackTimestampBatch()
+ // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode when
+ // the metadata is VideoNativeHandleMetadata.
+ virtual void recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<int64_t>& timestampsUs,
+ const std::vector<native_handle_t*>& handles);
+
// Process a buffer item received in CameraSource::BufferQueueListener.
// This will be called in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
virtual void processBufferQueueFrame(BufferItem& buffer);
diff --git a/media/ndk/Android.mk b/media/ndk/Android.mk
index 0984ca4..2c070af 100644
--- a/media/ndk/Android.mk
+++ b/media/ndk/Android.mk
@@ -47,6 +47,9 @@
LOCAL_CFLAGS += -Werror -Wall
+LOCAL_STATIC_LIBRARIES := \
+ libgrallocusage \
+
LOCAL_SHARED_LIBRARIES := \
libbinder \
libmedia \
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index c0aee90..c449611 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -27,6 +27,7 @@
#include <android_media_Utils.h>
#include <android_runtime/android_view_Surface.h>
#include <android_runtime/android_hardware_HardwareBuffer.h>
+#include <grallocusage/GrallocUsageConversion.h>
using namespace android;
@@ -260,7 +261,8 @@
uint64_t consumerUsage;
android_hardware_HardwareBuffer_convertToGrallocUsageBits(
&producerUsage, &consumerUsage, mUsage0, mUsage1);
- mHalUsage = consumerUsage;
+ // Strip out producerUsage here.
+ mHalUsage = android_convertGralloc1To0Usage(0, consumerUsage);
sp<IGraphicBufferProducer> gbProducer;
sp<IGraphicBufferConsumer> gbConsumer;
@@ -411,11 +413,9 @@
}
// Check if the producer buffer configurations match what ImageReader configured.
- if ((bufferFmt != HAL_PIXEL_FORMAT_BLOB) && (readerFmt != HAL_PIXEL_FORMAT_BLOB) &&
- (readerWidth != bufferWidth || readerHeight != bufferHeight)) {
- ALOGW("%s: Buffer size: %dx%d, doesn't match AImageReader configured size: %dx%d",
- __FUNCTION__, bufferWidth, bufferHeight, readerWidth, readerHeight);
- }
+ ALOGV_IF(readerWidth != bufferWidth || readerHeight != bufferHeight,
+ "%s: Buffer size: %dx%d, doesn't match AImageReader configured size: %dx%d",
+ __FUNCTION__, bufferWidth, bufferHeight, readerWidth, readerHeight);
// Check if the buffer usage is a super set of reader's usage bits, aka all usage bits that
// ImageReader requested has been supported from the producer side.
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 2be9362..8a7bb86 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -57,6 +57,9 @@
utils/AutoConditionLock.cpp \
utils/TagMonitor.cpp
+LOCAL_STATIC_LIBRARIES:= \
+ libgrallocusage
+
LOCAL_SHARED_LIBRARIES:= \
libui \
liblog \
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index 0ff9314..4537ae6 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -944,7 +944,7 @@
}
// need to set __get_memory in set_callbacks().
- device->setCallbacks(NULL, NULL, NULL, NULL);
+ device->setCallbacks(NULL, NULL, NULL, NULL, NULL);
mParameters = device->getParameters();
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 3aec562..335e999 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1261,6 +1261,13 @@
ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
}
+void Camera2Client::releaseRecordingFrameHandleBatch(
+ const std::vector<native_handle_t*>& handles) {
+ (void)handles;
+ ATRACE_CALL();
+ ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
+}
+
status_t Camera2Client::autoFocus() {
ATRACE_CALL();
Mutex::Autolock icl(mBinderSerializationLock);
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 87c91a0..9738aca 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -72,6 +72,8 @@
virtual bool recordingEnabled();
virtual void releaseRecordingFrame(const sp<IMemory>& mem);
virtual void releaseRecordingFrameHandle(native_handle_t *handle);
+ virtual void releaseRecordingFrameHandleBatch(
+ const std::vector<native_handle_t*>& handles);
virtual status_t autoFocus();
virtual status_t cancelAutoFocus();
virtual status_t takePicture(int msgType);
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index ffb657e..df8726e 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -98,6 +98,7 @@
mHardware->setCallbacks(notifyCallback,
dataCallback,
dataCallbackTimestamp,
+ handleCallbackTimestampBatch,
(void *)(uintptr_t)mCameraId);
// Enable zoom, error, focus, and metadata messages by default
@@ -533,6 +534,50 @@
mHardware->releaseRecordingFrame(dataPtr);
}
+void CameraClient::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
+ size_t n = handles.size();
+ std::vector<sp<IMemory>> frames;
+ frames.reserve(n);
+ bool error = false;
+ for (auto& handle : handles) {
+ sp<IMemory> dataPtr;
+ {
+ Mutex::Autolock l(mAvailableCallbackBuffersLock);
+ if (!mAvailableCallbackBuffers.empty()) {
+ dataPtr = mAvailableCallbackBuffers.back();
+ mAvailableCallbackBuffers.pop_back();
+ }
+ }
+
+ if (dataPtr == nullptr) {
+ ALOGE("%s: %d: No callback buffer available. Dropping frames.", __FUNCTION__,
+ __LINE__);
+ error = true;
+ break;
+ } else if (dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
+ ALOGE("%s: %d: Callback buffer must be VideoNativeHandleMetadata", __FUNCTION__,
+ __LINE__);
+ error = true;
+ break;
+ }
+
+ VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(dataPtr->pointer());
+ metadata->eType = kMetadataBufferTypeNativeHandleSource;
+ metadata->pHandle = handle;
+ frames.push_back(dataPtr);
+ }
+
+ if (error) {
+ for (auto& handle : handles) {
+ native_handle_close(handle);
+ native_handle_delete(handle);
+ }
+ } else {
+ mHardware->releaseRecordingFrameBatch(frames);
+ }
+ return;
+}
+
status_t CameraClient::setVideoBufferMode(int32_t videoBufferMode) {
LOG1("setVideoBufferMode: %d", videoBufferMode);
bool enableMetadataInBuffers = false;
@@ -855,6 +900,49 @@
client->handleGenericDataTimestamp(timestamp, msgType, dataPtr);
}
+void CameraClient::handleCallbackTimestampBatch(
+ int32_t msgType, const std::vector<HandleTimestampMessage>& msgs, void* user) {
+ LOG2("dataCallbackTimestampBatch");
+ sp<CameraClient> client = getClientFromCookie(user);
+ if (client.get() == nullptr) return;
+ if (!client->lockIfMessageWanted(msgType)) return;
+
+ sp<hardware::ICameraClient> c = client->mRemoteCallback;
+ client->mLock.unlock();
+ if (c != 0 && msgs.size() > 0) {
+ size_t n = msgs.size();
+ std::vector<nsecs_t> timestamps;
+ std::vector<native_handle_t*> handles;
+ timestamps.reserve(n);
+ handles.reserve(n);
+ for (auto& msg : msgs) {
+ native_handle_t* handle = nullptr;
+ if (msg.dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
+ ALOGE("%s: dataPtr does not contain VideoNativeHandleMetadata!", __FUNCTION__);
+ return;
+ }
+ VideoNativeHandleMetadata *metadata =
+ (VideoNativeHandleMetadata*)(msg.dataPtr->pointer());
+ if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
+ handle = metadata->pHandle;
+ }
+
+ if (handle == nullptr) {
+ ALOGE("%s: VideoNativeHandleMetadata type mismatch or null handle passed!",
+ __FUNCTION__);
+ return;
+ }
+ {
+ Mutex::Autolock l(client->mAvailableCallbackBuffersLock);
+ client->mAvailableCallbackBuffers.push_back(msg.dataPtr);
+ }
+ timestamps.push_back(msg.timestamp);
+ handles.push_back(handle);
+ }
+ c->recordingFrameHandleCallbackTimestampBatch(timestamps, handles);
+ }
+}
+
// snapshot taken callback
void CameraClient::handleShutter(void) {
if (mPlayShutterSound) {
diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h
index 91f00e3..1073384 100644
--- a/services/camera/libcameraservice/api1/CameraClient.h
+++ b/services/camera/libcameraservice/api1/CameraClient.h
@@ -50,6 +50,8 @@
virtual bool recordingEnabled();
virtual void releaseRecordingFrame(const sp<IMemory>& mem);
virtual void releaseRecordingFrameHandle(native_handle_t *handle);
+ virtual void releaseRecordingFrameHandleBatch(
+ const std::vector<native_handle_t*>& handles);
virtual status_t autoFocus();
virtual status_t cancelAutoFocus();
virtual status_t takePicture(int msgType);
@@ -109,6 +111,8 @@
static void dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
camera_frame_metadata_t *metadata, void* user);
static void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr, void* user);
+ static void handleCallbackTimestampBatch(
+ int32_t msgType, const std::vector<HandleTimestampMessage>&, void* user);
// handlers for messages
void handleShutter(void);
void handlePreviewData(int32_t msgType, const sp<IMemory>& mem,
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index e03ec66..bfae9e9 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -304,7 +304,7 @@
sp<IGraphicBufferProducer> producer;
sp<IGraphicBufferConsumer> consumer;
BufferQueue::createBufferQueue(&producer, &consumer);
- mProducer = new RingBufferConsumer(consumer, GRALLOC_USAGE_HW_CAMERA_ZSL,
+ mProducer = new RingBufferConsumer(consumer, GRALLOC_USAGE_HW_CAMERA_READ,
mBufferQueueDepth);
mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
sp<Surface> outSurface = new Surface(producer);
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
index b52c0d8..0b57c50 100644
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
+++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.cpp
@@ -17,6 +17,8 @@
//#define LOG_NDEBUG 0
#include <inttypes.h>
+#include <grallocusage/GrallocUsageConversion.h>
+
#include "CameraHardwareInterface.h"
namespace android {
@@ -193,6 +195,36 @@
return hardware::Void();
}
+hardware::Return<void> CameraHardwareInterface::handleCallbackTimestampBatch(
+ DataCallbackMsg msgType,
+ const hardware::hidl_vec<hardware::camera::device::V1_0::HandleTimestampMessage>& messages) {
+ std::vector<android::HandleTimestampMessage> msgs;
+ msgs.reserve(messages.size());
+
+ for (const auto& hidl_msg : messages) {
+ if (mHidlMemPoolMap.count(hidl_msg.data) == 0) {
+ ALOGE("%s: memory pool ID %d not found", __FUNCTION__, hidl_msg.data);
+ return hardware::Void();
+ }
+ sp<CameraHeapMemory> mem(
+ static_cast<CameraHeapMemory *>(mHidlMemPoolMap.at(hidl_msg.data)->handle));
+
+ if (hidl_msg.bufferIndex >= mem->mNumBufs) {
+ ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__,
+ hidl_msg.bufferIndex, mem->mNumBufs);
+ return hardware::Void();
+ }
+ VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*)
+ mem->mBuffers[hidl_msg.bufferIndex]->pointer();
+ md->pHandle = const_cast<native_handle_t*>(hidl_msg.frameData.getNativeHandle());
+
+ msgs.push_back({hidl_msg.timestamp, mem->mBuffers[hidl_msg.bufferIndex]});
+ }
+
+ mDataCbTimestampBatch((int32_t) msgType, msgs, mCbUser);
+ return hardware::Void();
+}
+
std::pair<bool, uint64_t> CameraHardwareInterface::getBufferId(
ANativeWindowBuffer* anb) {
std::lock_guard<std::mutex> lock(mBufferIdMapLock);
@@ -365,14 +397,14 @@
}
hardware::Return<Status>
-CameraHardwareInterface::setUsage(hardware::graphics::allocator::V2_0::ProducerUsage usage) {
+CameraHardwareInterface::setUsage(hardware::camera::device::V1_0::ProducerUsageFlags usage) {
Status s = Status::INTERNAL_ERROR;
ANativeWindow *a = mPreviewWindow.get();
if (a == nullptr) {
ALOGE("%s: preview window is null", __FUNCTION__);
return s;
}
- mPreviewUsage = (int) usage;
+ mPreviewUsage = ::android_convertGralloc1To0Usage(usage, /*consumerUsage*/ 0);
int rc = native_window_set_usage(a, mPreviewUsage);
if (rc == OK) {
cleanupCirculatingBuffers();
@@ -468,11 +500,13 @@
void CameraHardwareInterface::setCallbacks(notify_callback notify_cb,
data_callback data_cb,
data_callback_timestamp data_cb_timestamp,
+ data_callback_timestamp_batch data_cb_timestamp_batch,
void* user)
{
mNotifyCb = notify_cb;
mDataCb = data_cb;
mDataCbTimestamp = data_cb_timestamp;
+ mDataCbTimestampBatch = data_cb_timestamp_batch;
mCbUser = user;
ALOGV("%s(%s)", __FUNCTION__, mName.string());
@@ -628,6 +662,44 @@
}
}
+void CameraHardwareInterface::releaseRecordingFrameBatch(const std::vector<sp<IMemory>>& frames)
+{
+ ALOGV("%s(%s)", __FUNCTION__, mName.string());
+ size_t n = frames.size();
+ std::vector<VideoFrameMessage> msgs;
+ msgs.reserve(n);
+ for (auto& mem : frames) {
+ if (CC_LIKELY(mHidlDevice != nullptr)) {
+ ssize_t offset;
+ size_t size;
+ sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
+ if (size == sizeof(VideoNativeHandleMetadata)) {
+ uint32_t heapId = heap->getHeapID();
+ uint32_t bufferIndex = offset / size;
+ VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*) mem->pointer();
+ // Cache the handle here because md->pHandle may be overwritten by the HAL.
+ native_handle_t* nh = md->pHandle;
+ msgs.push_back({nh, heapId, bufferIndex});
+ } else {
+ ALOGE("%s only supports VideoNativeHandleMetadata mode", __FUNCTION__);
+ return;
+ }
+ } else {
+ ALOGE("Non HIDL mode do not support %s", __FUNCTION__);
+ return;
+ }
+ }
+
+ mHidlDevice->releaseRecordingFrameHandleBatch(msgs);
+
+ for (auto& msg : msgs) {
+ native_handle_t* nh = const_cast<native_handle_t*>(msg.frameData.getNativeHandle());
+ native_handle_close(nh);
+ native_handle_delete(nh);
+ }
+}
+
status_t CameraHardwareInterface::autoFocus()
{
ALOGV("%s(%s)", __FUNCTION__, mName.string());
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
index 88ab2e9..caeacef 100644
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.h
+++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
@@ -48,6 +48,15 @@
const sp<IMemory> &dataPtr,
void *user);
+struct HandleTimestampMessage {
+ nsecs_t timestamp;
+ const sp<IMemory> dataPtr;
+};
+
+typedef void (*data_callback_timestamp_batch)(
+ int32_t msgType,
+ const std::vector<HandleTimestampMessage>&, void* user);
+
/**
* CameraHardwareInterface.h defines the interface to the
* camera hardware abstraction layer, used for setting and getting
@@ -112,6 +121,7 @@
void setCallbacks(notify_callback notify_cb,
data_callback data_cb,
data_callback_timestamp data_cb_timestamp,
+ data_callback_timestamp_batch data_cb_timestamp_batch,
void* user);
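With the extra parameter, a client registers the batched path alongside the existing callbacks in one call. Sketch only; hwi and the handler names are placeholders, with exampleBatchCallback being the hypothetical handler sketched earlier.

// Registering all four data paths at once; the caller is assumed to implement
// the FrameSink interface from the earlier sketch, hence /*user*/ this.
hwi->setCallbacks(exampleNotify, exampleData, exampleDataTimestamp,
        exampleBatchCallback, /*user*/ this);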
/**
@@ -227,6 +237,20 @@
void releaseRecordingFrame(const sp<IMemory>& mem);
/**
+ * Release a batch of recording frames previously returned by
+ * CAMERA_MSG_VIDEO_FRAME. This method only supports frames that are
+ * stored as VideoNativeHandleMetadata.
+ *
+ * It is the camera HAL client's responsibility to release video recording
+ * frames sent out by the camera HAL before the camera HAL receives
+ * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After that call is
+ * received, the camera HAL is responsible for managing the life-cycle of
+ * the video recording frames.
+ */
+ void releaseRecordingFrameBatch(const std::vector<sp<IMemory>>& frames);
+
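A hypothetical caller, sketched below, would accumulate finished frames and hand them back in one call rather than one HIDL transaction per frame; hwi, framesDoneEncoding, and kBatchSize are placeholders, not names from this patch.

// Batch finished recording frames and release them together to amortize the
// per-call HIDL overhead.
const size_t kBatchSize = 4;  // assumed tuning value
std::vector<sp<IMemory>> pending;
pending.reserve(kBatchSize);
for (const sp<IMemory>& frame : framesDoneEncoding) {
    pending.push_back(frame);
    if (pending.size() >= kBatchSize) {
        hwi->releaseRecordingFrameBatch(pending);
        pending.clear();
    }
}
if (!pending.empty()) {
    hwi->releaseRecordingFrameBatch(pending);
}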
+ /**
* Start auto focus, the notification callback routine is called
* with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus()
* will be called again if another auto focus is needed.
@@ -416,6 +440,10 @@
hardware::camera::device::V1_0::DataCallbackMsg msgType,
const hardware::hidl_handle& frameData, uint32_t data,
uint32_t bufferIndex, int64_t timestamp) override;
+ hardware::Return<void> handleCallbackTimestampBatch(
+ hardware::camera::device::V1_0::DataCallbackMsg msgType,
+ const hardware::hidl_vec<
+ hardware::camera::device::V1_0::HandleTimestampMessage>&) override;
/**
* Implementation of android::hardware::camera::device::V1_0::ICameraDevicePreviewCallback
@@ -433,7 +461,7 @@
hardware::Return<hardware::camera::common::V1_0::Status>
setCrop(int32_t left, int32_t top, int32_t right, int32_t bottom) override;
hardware::Return<hardware::camera::common::V1_0::Status>
- setUsage(hardware::graphics::allocator::V2_0::ProducerUsage usage) override;
+ setUsage(hardware::camera::device::V1_0::ProducerUsageFlags usage) override;
hardware::Return<hardware::camera::common::V1_0::Status>
setSwapInterval(int32_t interval) override;
hardware::Return<void> getMinUndequeuedBufferCount(
@@ -450,9 +478,10 @@
struct camera_preview_window mHalPreviewWindow;
- notify_callback mNotifyCb;
- data_callback mDataCb;
- data_callback_timestamp mDataCbTimestamp;
+ notify_callback mNotifyCb;
+ data_callback mDataCb;
+ data_callback_timestamp mDataCbTimestamp;
+ data_callback_timestamp_batch mDataCbTimestampBatch;
void *mCbUser;
// Cached values for preview stream parameters
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 8aa73c6..4502612 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -44,6 +44,8 @@
#include <utils/Timers.h>
#include <cutils/properties.h>
+#include <grallocusage/GrallocUsageConversion.h>
+
#include <android/hardware/camera2/ICameraDeviceUser.h>
#include "utils/CameraTraces.h"
@@ -495,11 +497,6 @@
return dataSpace;
}
-ConsumerUsageFlags Camera3Device::mapToConsumerUsage(
- uint32_t usage) {
- return usage;
-}
-
StreamRotation Camera3Device::mapToStreamRotation(camera3_stream_rotation_t rotation) {
switch (rotation) {
case CAMERA3_STREAM_ROTATION_0:
@@ -549,16 +546,6 @@
return static_cast<uint32_t>(pixelFormat);
}
-uint32_t Camera3Device::mapConsumerToFrameworkUsage(
- ConsumerUsageFlags usage) {
- return usage;
-}
-
-uint32_t Camera3Device::mapProducerToFrameworkUsage(
- ProducerUsageFlags usage) {
- return usage;
-}
-
ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
// Get max jpeg size (area-wise).
Size maxJpegResolution = getMaxJpegResolution();
@@ -3149,7 +3136,9 @@
dst.width = src->width;
dst.height = src->height;
dst.format = mapToPixelFormat(src->format);
- dst.usage = mapToConsumerUsage(src->usage);
+ uint64_t consumerUsage, producerUsage;
+ ::android_convertGralloc0To1Usage(src->usage, &producerUsage, &consumerUsage);
+ dst.usage = consumerUsage;
dst.dataSpace = mapToHidlDataspace(src->data_space);
dst.rotation = mapToStreamRotation((camera3_stream_rotation_t) src->rotation);
@@ -3231,7 +3220,6 @@
__FUNCTION__, streamId);
return INVALID_OPERATION;
}
- dst->usage = mapConsumerToFrameworkUsage(src.consumerUsage);
} else {
// OUTPUT
if (src.consumerUsage != 0) {
@@ -3239,8 +3227,8 @@
__FUNCTION__, streamId);
return INVALID_OPERATION;
}
- dst->usage = mapProducerToFrameworkUsage(src.producerUsage);
}
+ dst->usage = ::android_convertGralloc1To0Usage(src.producerUsage, src.consumerUsage);
dst->max_buffers = src.maxBuffers;
}
}
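The hand-written identity mappings are replaced by the libgrallocusage helpers, which actually split and recombine the flag sets. Roughly, the round trip looks like the sketch below; roundTripUsage is a hypothetical wrapper, not part of the patch.

#include <grallocusage/GrallocUsageConversion.h>
#include <hardware/gralloc.h>

// Sketch: split a legacy gralloc0 usage mask into gralloc1 producer/consumer
// flags for the HIDL stream configuration, then fold flags reported back by
// the HAL into a single framework mask, mirroring the calls in the hunks above.
static uint32_t roundTripUsage(uint32_t legacyUsage) {
    uint64_t producerUsage = 0, consumerUsage = 0;
    ::android_convertGralloc0To1Usage(legacyUsage, &producerUsage, &consumerUsage);
    // ... producerUsage/consumerUsage would be placed into the HIDL Stream ...
    return ::android_convertGralloc1To0Usage(producerUsage, consumerUsage);
}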
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 692a97d..d4b92cf 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -597,7 +597,6 @@
static hardware::graphics::common::V1_0::PixelFormat mapToPixelFormat(int frameworkFormat);
static hardware::camera::device::V3_2::DataspaceFlags mapToHidlDataspace(
android_dataspace dataSpace);
- static hardware::camera::device::V3_2::ConsumerUsageFlags mapToConsumerUsage(uint32_t usage);
static hardware::camera::device::V3_2::StreamRotation mapToStreamRotation(
camera3_stream_rotation_t rotation);
// Returns a negative error code if the passed-in operation mode is not valid.
@@ -605,10 +604,6 @@
/*out*/ hardware::camera::device::V3_2::StreamConfigurationMode *mode);
static camera3_buffer_status_t mapHidlBufferStatus(hardware::camera::device::V3_2::BufferStatus status);
static int mapToFrameworkFormat(hardware::graphics::common::V1_0::PixelFormat pixelFormat);
- static uint32_t mapConsumerToFrameworkUsage(
- hardware::camera::device::V3_2::ConsumerUsageFlags usage);
- static uint32_t mapProducerToFrameworkUsage(
- hardware::camera::device::V3_2::ProducerUsageFlags usage);
struct RequestTrigger {
// Metadata tag number, e.g. android.control.aePrecaptureTrigger
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index e46d55e..c60a68c 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -634,7 +634,7 @@
NATIVE_WINDOW_CONSUMER_USAGE_BITS, &u);
// If an opaque output stream's endpoint is ImageReader, add
- // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so HAL knows it will be used
+ // GRALLOC_USAGE_HW_CAMERA_READ to the usage so HAL knows it will be used
// for the ZSL use case.
// Assume it's for ImageReader if the consumer usage doesn't have any of these bits set:
// 1. GRALLOC_USAGE_HW_TEXTURE
@@ -644,7 +644,7 @@
if (camera3_stream::format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
(u & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
- u |= GRALLOC_USAGE_HW_CAMERA_ZSL;
+ u |= GRALLOC_USAGE_HW_CAMERA_READ;
}
*usage = u;
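Read as a predicate, the check above says: an implementation-defined stream whose consumer sets none of the HW texture/render/composer/encoder bits is assumed to feed an ImageReader, so the camera-read bit is added. A standalone restatement (sketch; the helper name is hypothetical):

#include <hardware/gralloc.h>
#include <system/graphics.h>

// Restates the heuristic above: true when the consumer usage suggests an
// ImageReader endpoint rather than a GPU, compositor, or encoder consumer.
static bool looksLikeImageReaderEndpoint(int format, uint32_t consumerUsage) {
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) return false;
    const uint32_t hwConsumerBits = GRALLOC_USAGE_HW_TEXTURE |
            GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_COMPOSER |
            GRALLOC_USAGE_HW_VIDEO_ENCODER;
    return (consumerUsage & hwConsumerBits) == 0;
}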
diff --git a/services/mediadrm/Android.mk b/services/mediadrm/Android.mk
index 87fddd4..1d5fa07 100644
--- a/services/mediadrm/Android.mk
+++ b/services/mediadrm/Android.mk
@@ -40,7 +40,13 @@
endif
LOCAL_MODULE:= mediadrmserver
+
+# TODO: Some legacy DRM plugins only support 32-bit. They need to be migrated to
+# 64-bit. (b/18948909) Once all of a device's legacy DRM plugins support 64-bit,
+# that device can turn on ENABLE_MEDIADRM_64 to build this service as 64-bit.
+ifneq ($(ENABLE_MEDIADRM_64), true)
LOCAL_32_BIT_ONLY := true
+endif
LOCAL_INIT_RC := mediadrmserver.rc