Merge "Make the framework treat getModelState recognition events the same as regular recognition events"
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 321eb08..4e9b27d 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -167,19 +167,23 @@
}
OutputConfiguration::OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation,
+ const String16& physicalId,
int surfaceSetID, bool isShared) {
mGbps.push_back(gbp);
mRotation = rotation;
mSurfaceSetID = surfaceSetID;
mIsDeferred = false;
mIsShared = isShared;
+ mPhysicalCameraId = physicalId;
}
OutputConfiguration::OutputConfiguration(
const std::vector<sp<IGraphicBufferProducer>>& gbps,
- int rotation, int surfaceSetID, int surfaceType, int width, int height, bool isShared)
+ int rotation, const String16& physicalCameraId, int surfaceSetID, int surfaceType,
+ int width, int height, bool isShared)
: mGbps(gbps), mRotation(rotation), mSurfaceSetID(surfaceSetID), mSurfaceType(surfaceType),
- mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared) { }
+ mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
+ mPhysicalCameraId(physicalCameraId) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 5b117fb..95c4f39 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -65,10 +65,12 @@
OutputConfiguration(const android::Parcel& parcel);
OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation,
+ const String16& physicalCameraId,
int surfaceSetID = INVALID_SET_ID, bool isShared = false);
OutputConfiguration(const std::vector<sp<IGraphicBufferProducer>>& gbps,
- int rotation, int surfaceSetID = INVALID_SET_ID,
+ int rotation, const String16& physicalCameraId,
+ int surfaceSetID = INVALID_SET_ID,
int surfaceType = OutputConfiguration::SURFACE_TYPE_UNKNOWN, int width = 0,
int height = 0, bool isShared = false);
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index fd95296..540d84e 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -28,6 +28,8 @@
#include <camera/NdkCameraCaptureSession.h>
#include "impl/ACameraCaptureSession.h"
+#include "impl/ACameraCaptureSession.inc"
+
using namespace android;
EXPORT
@@ -82,7 +84,31 @@
return ACAMERA_ERROR_SESSION_CLOSED;
}
- return session->capture(cbs, numRequests, requests, captureSequenceId);
+ return session->capture(
+ cbs, numRequests, requests, captureSequenceId);
+}
+
+EXPORT
+camera_status_t ACameraCaptureSession_logicalCamera_capture(
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ ATRACE_CALL();
+ if (session == nullptr || requests == nullptr || numRequests < 1) {
+ ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
+ __FUNCTION__, session, numRequests, requests);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (session->isClosed()) {
+ ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+ *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+
+ return session->capture(
+ lcbs, numRequests, requests, captureSequenceId);
}
EXPORT
@@ -107,6 +133,28 @@
}
EXPORT
+camera_status_t ACameraCaptureSession_logicalCamera_setRepeatingRequest(
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ ATRACE_CALL();
+ if (session == nullptr || requests == nullptr || numRequests < 1) {
+ ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
+ __FUNCTION__, session, numRequests, requests);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (session->isClosed()) {
+ ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+ *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+
+ return session->setRepeatingRequest(lcbs, numRequests, requests, captureSequenceId);
+}
+
+EXPORT
camera_status_t ACameraCaptureSession_stopRepeating(ACameraCaptureSession* session) {
ATRACE_CALL();
if (session == nullptr) {
diff --git a/camera/ndk/NdkCameraDevice.cpp b/camera/ndk/NdkCameraDevice.cpp
index ef05e0b..98608da 100644
--- a/camera/ndk/NdkCameraDevice.cpp
+++ b/camera/ndk/NdkCameraDevice.cpp
@@ -129,6 +129,20 @@
}
EXPORT
+camera_status_t ACaptureSessionPhysicalOutput_create(
+ ACameraWindowType* window, const char* physicalId,
+ /*out*/ACaptureSessionOutput** out) {
+ ATRACE_CALL();
+ if (window == nullptr || physicalId == nullptr || out == nullptr) {
+ ALOGE("%s: Error: bad argument. window %p, physicalId %p, out %p",
+ __FUNCTION__, window, physicalId, out);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ *out = new ACaptureSessionOutput(window, false, physicalId);
+ return ACAMERA_OK;
+}
+
+EXPORT
camera_status_t ACaptureSessionSharedOutput_add(ACaptureSessionOutput *out,
ACameraWindowType* window) {
ATRACE_CALL();
diff --git a/camera/ndk/NdkCameraMetadata.cpp b/camera/ndk/NdkCameraMetadata.cpp
index 34ec2da..9a39ed8 100644
--- a/camera/ndk/NdkCameraMetadata.cpp
+++ b/camera/ndk/NdkCameraMetadata.cpp
@@ -69,3 +69,20 @@
metadata->decStrong((void*) ACameraMetadata_free);
}
}
+
+EXPORT
+bool ACameraMetadata_isLogicalMultiCamera(const ACameraMetadata* staticMetadata,
+ /*out*/size_t* numPhysicalCameras, /*out*/const char*const** physicalCameraIds) {
+ ATRACE_CALL();
+ if (numPhysicalCameras == nullptr || physicalCameraIds == nullptr) {
+ ALOGE("%s: Invalid input: numPhysicalCameras %p, physicalCameraIds %p",
+ __FUNCTION__, numPhysicalCameras, physicalCameraIds);
+ return false;
+ }
+ if (staticMetadata == nullptr) {
+ ALOGE("%s: Invalid input: staticMetadata is null.", __FUNCTION__);
+ return false;
+ }
+
+ return staticMetadata->isLogicalMultiCamera(numPhysicalCameras, physicalCameraIds);
+}
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index fb72bdb..d6f1412 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -107,47 +107,6 @@
return ret;
}
-camera_status_t
-ACameraCaptureSession::setRepeatingRequest(
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
- int numRequests, ACaptureRequest** requests,
- /*optional*/int* captureSequenceId) {
- sp<acam::CameraDevice> dev = getDeviceSp();
- if (dev == nullptr) {
- ALOGE("Error: Device associated with session %p has been closed!", this);
- return ACAMERA_ERROR_SESSION_CLOSED;
- }
-
- camera_status_t ret;
- dev->lockDeviceForSessionOps();
- {
- Mutex::Autolock _l(mSessionLock);
- ret = dev->setRepeatingRequestsLocked(
- this, cbs, numRequests, requests, captureSequenceId);
- }
- dev->unlockDevice();
- return ret;
-}
-
-camera_status_t ACameraCaptureSession::capture(
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
- int numRequests, ACaptureRequest** requests,
- /*optional*/int* captureSequenceId) {
- sp<acam::CameraDevice> dev = getDeviceSp();
- if (dev == nullptr) {
- ALOGE("Error: Device associated with session %p has been closed!", this);
- return ACAMERA_ERROR_SESSION_CLOSED;
- }
- camera_status_t ret;
- dev->lockDeviceForSessionOps();
- {
- Mutex::Autolock _l(mSessionLock);
- ret = dev->captureLocked(this, cbs, numRequests, requests, captureSequenceId);
- }
- dev->unlockDevice();
- return ret;
-}
-
camera_status_t ACameraCaptureSession::updateOutputConfiguration(ACaptureSessionOutput *output) {
sp<acam::CameraDevice> dev = getDeviceSp();
if (dev == nullptr) {
diff --git a/camera/ndk/impl/ACameraCaptureSession.h b/camera/ndk/impl/ACameraCaptureSession.h
index 133c2c8..08a9226 100644
--- a/camera/ndk/impl/ACameraCaptureSession.h
+++ b/camera/ndk/impl/ACameraCaptureSession.h
@@ -17,6 +17,7 @@
#define _ACAMERA_CAPTURE_SESSION_H
#include <set>
+#include <string>
#include <hardware/camera3.h>
#include <camera/NdkCameraDevice.h>
@@ -29,8 +30,9 @@
using namespace android;
struct ACaptureSessionOutput {
- explicit ACaptureSessionOutput(ACameraWindowType* window, bool isShared = false) :
- mWindow(window), mIsShared(isShared) {};
+ explicit ACaptureSessionOutput(ACameraWindowType* window, bool isShared = false,
+ const char* physicalCameraId = "") :
+ mWindow(window), mIsShared(isShared), mPhysicalCameraId(physicalCameraId) {};
bool operator == (const ACaptureSessionOutput& other) const {
return mWindow == other.mWindow;
@@ -49,6 +51,7 @@
std::set<ACameraWindowType *> mSharedWindows;
bool mIsShared;
int mRotation = CAMERA3_STREAM_ROTATION_0;
+ std::string mPhysicalCameraId;
};
#endif
@@ -88,13 +91,15 @@
camera_status_t abortCaptures();
+ template<class T>
camera_status_t setRepeatingRequest(
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
+ /*optional*/T* cbs,
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId);
+ template<class T>
camera_status_t capture(
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
+ /*optional*/T* cbs,
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId);
diff --git a/camera/ndk/impl/ACameraCaptureSession.inc b/camera/ndk/impl/ACameraCaptureSession.inc
new file mode 100644
index 0000000..86bf8a5
--- /dev/null
+++ b/camera/ndk/impl/ACameraCaptureSession.inc
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ACameraCaptureSession.h"
+
+#ifdef __ANDROID_VNDK__
+#include "ndk_vendor/impl/ACameraDeviceVendor.inc"
+#else
+#include "ACameraDevice.inc"
+#endif
+
+using namespace android;
+
+template <class T>
+camera_status_t
+ACameraCaptureSession::setRepeatingRequest(
+ /*optional*/T* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ sp<acam::CameraDevice> dev = getDeviceSp();
+ if (dev == nullptr) {
+ ALOGE("Error: Device associated with session %p has been closed!", this);
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+
+ camera_status_t ret;
+ dev->lockDeviceForSessionOps();
+ {
+ Mutex::Autolock _l(mSessionLock);
+ ret = dev->setRepeatingRequestsLocked(
+ this, cbs, numRequests, requests, captureSequenceId);
+ }
+ dev->unlockDevice();
+ return ret;
+}
+
+template <class T>
+camera_status_t ACameraCaptureSession::capture(
+ /*optional*/T* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ sp<acam::CameraDevice> dev = getDeviceSp();
+ if (dev == nullptr) {
+ ALOGE("Error: Device associated with session %p has been closed!", this);
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+ camera_status_t ret;
+ dev->lockDeviceForSessionOps();
+ {
+ Mutex::Autolock _l(mSessionLock);
+ ret = dev->captureLocked(this, cbs, numRequests, requests, captureSequenceId);
+ }
+ dev->unlockDevice();
+ return ret;
+}
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 657d41f..d8a5765 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -20,13 +20,14 @@
#include <vector>
#include <inttypes.h>
#include <android/hardware/ICameraService.h>
-#include <camera2/SubmitInfo.h>
#include <gui/Surface.h>
#include "ACameraDevice.h"
#include "ACameraMetadata.h"
#include "ACaptureRequest.h"
#include "ACameraCaptureSession.h"
+#include "ACameraCaptureSession.inc"
+
namespace android {
namespace acam {
@@ -39,6 +40,7 @@
const char* CameraDevice::kCaptureRequestKey = "CaptureRequest";
const char* CameraDevice::kTimeStampKey = "TimeStamp";
const char* CameraDevice::kCaptureResultKey = "CaptureResult";
+const char* CameraDevice::kPhysicalCaptureResultKey = "PhysicalCaptureResult";
const char* CameraDevice::kCaptureFailureKey = "CaptureFailure";
const char* CameraDevice::kSequenceIdKey = "SequenceId";
const char* CameraDevice::kFrameNumberKey = "FrameNumber";
@@ -190,106 +192,6 @@
return ACAMERA_OK;
}
-camera_status_t
-CameraDevice::captureLocked(
- sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
- int numRequests, ACaptureRequest** requests,
- /*optional*/int* captureSequenceId) {
- return submitRequestsLocked(
- session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/false);
-}
-
-camera_status_t
-CameraDevice::setRepeatingRequestsLocked(
- sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
- int numRequests, ACaptureRequest** requests,
- /*optional*/int* captureSequenceId) {
- return submitRequestsLocked(
- session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/true);
-}
-
-camera_status_t
-CameraDevice::submitRequestsLocked(
- sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
- int numRequests, ACaptureRequest** requests,
- /*optional*/int* captureSequenceId,
- bool isRepeating) {
- camera_status_t ret = checkCameraClosedOrErrorLocked();
- if (ret != ACAMERA_OK) {
- ALOGE("Camera %s submit capture request failed! ret %d", getId(), ret);
- return ret;
- }
-
- // Form two vectors of capture request, one for internal tracking
- std::vector<hardware::camera2::CaptureRequest> requestList;
- Vector<sp<CaptureRequest> > requestsV;
- requestsV.setCapacity(numRequests);
- for (int i = 0; i < numRequests; i++) {
- sp<CaptureRequest> req;
- ret = allocateCaptureRequest(requests[i], req);
- if (ret != ACAMERA_OK) {
- ALOGE("Convert capture request to internal format failure! ret %d", ret);
- return ret;
- }
- if (req->mSurfaceList.empty()) {
- ALOGE("Capture request without output target cannot be submitted!");
- return ACAMERA_ERROR_INVALID_PARAMETER;
- }
- requestList.push_back(*(req.get()));
- requestsV.push_back(req);
- }
-
- if (isRepeating) {
- ret = stopRepeatingLocked();
- if (ret != ACAMERA_OK) {
- ALOGE("Camera %s stop repeating failed! ret %d", getId(), ret);
- return ret;
- }
- }
-
- binder::Status remoteRet;
- hardware::camera2::utils::SubmitInfo info;
- remoteRet = mRemote->submitRequestList(requestList, isRepeating, &info);
- int sequenceId = info.mRequestId;
- int64_t lastFrameNumber = info.mLastFrameNumber;
- if (sequenceId < 0) {
- ALOGE("Camera %s submit request remote failure: ret %d", getId(), sequenceId);
- return ACAMERA_ERROR_UNKNOWN;
- }
-
- CallbackHolder cbHolder(session, requestsV, isRepeating, cbs);
- mSequenceCallbackMap.insert(std::make_pair(sequenceId, cbHolder));
-
- if (isRepeating) {
- // stopRepeating above should have cleanup repeating sequence id
- if (mRepeatingSequenceId != REQUEST_ID_NONE) {
- setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_DEVICE);
- return ACAMERA_ERROR_CAMERA_DEVICE;
- }
- mRepeatingSequenceId = sequenceId;
- } else {
- mSequenceLastFrameNumberMap.insert(std::make_pair(sequenceId, lastFrameNumber));
- }
-
- if (mIdle) {
- sp<AMessage> msg = new AMessage(kWhatSessionStateCb, mHandler);
- msg->setPointer(kContextKey, session->mUserSessionCallback.context);
- msg->setObject(kSessionSpKey, session);
- msg->setPointer(kCallbackFpKey, (void*) session->mUserSessionCallback.onActive);
- postSessionMsgAndCleanup(msg);
- }
- mIdle = false;
- mBusySession = session;
-
- if (captureSequenceId) {
- *captureSequenceId = sequenceId;
- }
- return ACAMERA_OK;
-}
-
camera_status_t CameraDevice::updateOutputConfigurationLocked(ACaptureSessionOutput *output) {
camera_status_t ret = checkCameraClosedOrErrorLocked();
if (ret != ACAMERA_OK) {
@@ -325,8 +227,9 @@
return ret;
}
- OutputConfiguration outConfig(iGBP, output->mRotation, OutputConfiguration::INVALID_SET_ID,
- true);
+ String16 physicalId16(output->mPhysicalCameraId.c_str());
+ OutputConfiguration outConfig(iGBP, output->mRotation, physicalId16,
+ OutputConfiguration::INVALID_SET_ID, true);
for (auto& anw : output->mSharedWindows) {
ret = getIGBPfromAnw(anw, iGBP);
@@ -640,8 +543,9 @@
if (ret != ACAMERA_OK) {
return ret;
}
+ String16 physicalId16(outConfig.mPhysicalCameraId.c_str());
outputSet.insert(std::make_pair(
- anw, OutputConfiguration(iGBP, outConfig.mRotation,
+ anw, OutputConfiguration(iGBP, outConfig.mRotation, physicalId16,
OutputConfiguration::INVALID_SET_ID, outConfig.mIsShared)));
}
auto addSet = outputSet;
@@ -829,7 +733,7 @@
if (errorCode == hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER) {
int32_t streamId = resultExtras.errorStreamId;
ACameraCaptureSession_captureCallback_bufferLost onBufferLost =
- cbh.mCallbacks.onCaptureBufferLost;
+ cbh.mOnCaptureBufferLost;
auto outputPairIt = mConfiguredOutputs.find(streamId);
if (outputPairIt == mConfiguredOutputs.end()) {
ALOGE("%s: Error: stream id %d does not exist", __FUNCTION__, streamId);
@@ -846,7 +750,7 @@
getId(), anw, frameNumber);
sp<AMessage> msg = new AMessage(kWhatCaptureBufferLost, mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, session);
msg->setPointer(kCallbackFpKey, (void*) onBufferLost);
msg->setObject(kCaptureRequestKey, request);
@@ -858,7 +762,7 @@
}
} else { // Handle other capture failures
// Fire capture failure callback if there is one registered
- ACameraCaptureSession_captureCallback_failed onError = cbh.mCallbacks.onCaptureFailed;
+ ACameraCaptureSession_captureCallback_failed onError = cbh.mOnCaptureFailed;
sp<CameraCaptureFailure> failure(new CameraCaptureFailure());
failure->frameNumber = frameNumber;
// TODO: refine this when implementing flush
@@ -868,7 +772,7 @@
hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT);
sp<AMessage> msg = new AMessage(kWhatCaptureFail, mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, session);
msg->setPointer(kCallbackFpKey, (void*) onError);
msg->setObject(kCaptureRequestKey, request);
@@ -890,6 +794,7 @@
case kWhatSessionStateCb:
case kWhatCaptureStart:
case kWhatCaptureResult:
+ case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
case kWhatCaptureSeqEnd:
case kWhatCaptureSeqAbort:
@@ -960,6 +865,7 @@
case kWhatSessionStateCb:
case kWhatCaptureStart:
case kWhatCaptureResult:
+ case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
case kWhatCaptureSeqEnd:
case kWhatCaptureSeqAbort:
@@ -977,6 +883,7 @@
switch (msg->what()) {
case kWhatCaptureStart:
case kWhatCaptureResult:
+ case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
case kWhatCaptureBufferLost:
found = msg->findObject(kCaptureRequestKey, &obj);
@@ -1048,6 +955,64 @@
freeACaptureRequest(request);
break;
}
+ case kWhatLogicalCaptureResult:
+ {
+ ACameraCaptureSession_logicalCamera_captureCallback_result onResult;
+ found = msg->findPointer(kCallbackFpKey, (void**) &onResult);
+ if (!found) {
+ ALOGE("%s: Cannot find logicalCamera capture result callback!",
+ __FUNCTION__);
+ return;
+ }
+ if (onResult == nullptr) {
+ return;
+ }
+
+ found = msg->findObject(kCaptureResultKey, &obj);
+ if (!found) {
+ ALOGE("%s: Cannot find capture result!", __FUNCTION__);
+ return;
+ }
+ sp<ACameraMetadata> result(static_cast<ACameraMetadata*>(obj.get()));
+
+ found = msg->findObject(kPhysicalCaptureResultKey, &obj);
+ if (!found) {
+ ALOGE("%s: Cannot find physical capture result!", __FUNCTION__);
+ return;
+ }
+ sp<ACameraPhysicalCaptureResultInfo> physicalResult(
+ static_cast<ACameraPhysicalCaptureResultInfo*>(obj.get()));
+ std::vector<PhysicalCaptureResultInfo>& physicalResultInfo =
+ physicalResult->mPhysicalResultInfo;
+
+ std::vector<std::string> physicalCameraIds;
+ std::vector<sp<ACameraMetadata>> physicalMetadataCopy;
+ for (size_t i = 0; i < physicalResultInfo.size(); i++) {
+ String8 physicalId8(physicalResultInfo[i].mPhysicalCameraId);
+ physicalCameraIds.push_back(physicalId8.c_str());
+
+ CameraMetadata clone = physicalResultInfo[i].mPhysicalCameraMetadata;
+ clone.update(ANDROID_SYNC_FRAME_NUMBER,
+ &physicalResult->mFrameNumber, /*data_count*/1);
+ sp<ACameraMetadata> metadata =
+ new ACameraMetadata(clone.release(), ACameraMetadata::ACM_RESULT);
+ physicalMetadataCopy.push_back(metadata);
+ }
+
+ std::vector<const char*> physicalCameraIdPtrs;
+ std::vector<const ACameraMetadata*> physicalMetadataCopyPtrs;
+ for (size_t i = 0; i < physicalResultInfo.size(); i++) {
+ physicalCameraIdPtrs.push_back(physicalCameraIds[i].c_str());
+ physicalMetadataCopyPtrs.push_back(physicalMetadataCopy[i].get());
+ }
+
+ ACaptureRequest* request = allocateACaptureRequest(requestSp);
+ (*onResult)(context, session.get(), request, result.get(),
+ physicalResultInfo.size(), physicalCameraIdPtrs.data(),
+ physicalMetadataCopyPtrs.data());
+ freeACaptureRequest(request);
+ break;
+ }
case kWhatCaptureFail:
{
ACameraCaptureSession_captureCallback_failed onFail;
@@ -1158,12 +1123,34 @@
}
CameraDevice::CallbackHolder::CallbackHolder(
- sp<ACameraCaptureSession> session,
- const Vector<sp<CaptureRequest> >& requests,
- bool isRepeating,
- ACameraCaptureSession_captureCallbacks* cbs) :
- mSession(session), mRequests(requests),
- mIsRepeating(isRepeating), mCallbacks(fillCb(cbs)) {}
+ sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest> >& requests,
+ bool isRepeating,
+ ACameraCaptureSession_captureCallbacks* cbs) :
+ mSession(session), mRequests(requests),
+ mIsRepeating(isRepeating),
+ mIsLogicalCameraCallback(false) {
+ initCaptureCallbacks(cbs);
+
+ if (cbs != nullptr) {
+ mOnCaptureCompleted = cbs->onCaptureCompleted;
+ }
+}
+
+CameraDevice::CallbackHolder::CallbackHolder(
+ sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest> >& requests,
+ bool isRepeating,
+ ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs) :
+ mSession(session), mRequests(requests),
+ mIsRepeating(isRepeating),
+ mIsLogicalCameraCallback(true) {
+ initCaptureCallbacks(lcbs);
+
+ if (lcbs != nullptr) {
+ mOnLogicalCameraCaptureCompleted = lcbs->onLogicalCameraCaptureCompleted;
+ }
+}
void
CameraDevice::checkRepeatingSequenceCompleteLocked(
@@ -1180,9 +1167,9 @@
mSequenceCallbackMap.erase(cbIt);
// send seq aborted callback
sp<AMessage> msg = new AMessage(kWhatCaptureSeqAbort, mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, cbh.mSession);
- msg->setPointer(kCallbackFpKey, (void*) cbh.mCallbacks.onCaptureSequenceAborted);
+ msg->setPointer(kCallbackFpKey, (void*) cbh.mOnCaptureSequenceAborted);
msg->setInt32(kSequenceIdKey, sequenceId);
postSessionMsgAndCleanup(msg);
} else {
@@ -1230,9 +1217,9 @@
mSequenceCallbackMap.erase(cbIt);
// send seq complete callback
sp<AMessage> msg = new AMessage(kWhatCaptureSeqEnd, mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, cbh.mSession);
- msg->setPointer(kCallbackFpKey, (void*) cbh.mCallbacks.onCaptureSequenceCompleted);
+ msg->setPointer(kCallbackFpKey, (void*) cbh.mOnCaptureSequenceCompleted);
msg->setInt32(kSequenceIdKey, sequenceId);
msg->setInt64(kFrameNumberKey, lastFrameNumber);
@@ -1389,7 +1376,7 @@
auto it = dev->mSequenceCallbackMap.find(sequenceId);
if (it != dev->mSequenceCallbackMap.end()) {
CallbackHolder cbh = (*it).second;
- ACameraCaptureSession_captureCallback_start onStart = cbh.mCallbacks.onCaptureStarted;
+ ACameraCaptureSession_captureCallback_start onStart = cbh.mOnCaptureStarted;
sp<ACameraCaptureSession> session = cbh.mSession;
if ((size_t) burstId >= cbh.mRequests.size()) {
ALOGE("%s: Error: request index %d out of bound (size %zu)",
@@ -1398,7 +1385,7 @@
}
sp<CaptureRequest> request = cbh.mRequests[burstId];
sp<AMessage> msg = new AMessage(kWhatCaptureStart, dev->mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, session);
msg->setPointer(kCallbackFpKey, (void*) onStart);
msg->setObject(kCaptureRequestKey, request);
@@ -1413,7 +1400,6 @@
const CameraMetadata& metadata,
const CaptureResultExtras& resultExtras,
const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) {
- (void) physicalResultInfos;
binder::Status ret = binder::Status::ok();
sp<CameraDevice> dev = mDevice.promote();
@@ -1449,9 +1435,6 @@
auto it = dev->mSequenceCallbackMap.find(sequenceId);
if (it != dev->mSequenceCallbackMap.end()) {
CallbackHolder cbh = (*it).second;
- ACameraCaptureSession_captureCallback_result onResult = isPartialResult ?
- cbh.mCallbacks.onCaptureProgressed :
- cbh.mCallbacks.onCaptureCompleted;
sp<ACameraCaptureSession> session = cbh.mSession;
if ((size_t) burstId >= cbh.mRequests.size()) {
ALOGE("%s: Error: request index %d out of bound (size %zu)",
@@ -1461,13 +1444,27 @@
sp<CaptureRequest> request = cbh.mRequests[burstId];
sp<ACameraMetadata> result(new ACameraMetadata(
metadataCopy.release(), ACameraMetadata::ACM_RESULT));
+ sp<ACameraPhysicalCaptureResultInfo> physicalResult(
+ new ACameraPhysicalCaptureResultInfo(physicalResultInfos, frameNumber));
- sp<AMessage> msg = new AMessage(kWhatCaptureResult, dev->mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ sp<AMessage> msg = new AMessage(
+ cbh.mIsLogicalCameraCallback ? kWhatLogicalCaptureResult : kWhatCaptureResult,
+ dev->mHandler);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, session);
- msg->setPointer(kCallbackFpKey, (void*) onResult);
msg->setObject(kCaptureRequestKey, request);
msg->setObject(kCaptureResultKey, result);
+ if (isPartialResult) {
+ msg->setPointer(kCallbackFpKey,
+ (void *)cbh.mOnCaptureProgressed);
+ } else if (cbh.mIsLogicalCameraCallback) {
+ msg->setPointer(kCallbackFpKey,
+ (void *)cbh.mOnLogicalCameraCaptureCompleted);
+ msg->setObject(kPhysicalCaptureResultKey, physicalResult);
+ } else {
+ msg->setPointer(kCallbackFpKey,
+ (void *)cbh.mOnCaptureCompleted);
+ }
dev->postSessionMsgAndCleanup(msg);
}
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 8f56d3f..d0f363b 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -21,6 +21,7 @@
#include <set>
#include <atomic>
#include <utility>
+#include <vector>
#include <utils/StrongPointer.h>
#include <utils/Mutex.h>
#include <utils/String8.h>
@@ -46,6 +47,16 @@
// Wrap ACameraCaptureFailure so it can be ref-counted
struct CameraCaptureFailure : public RefBase, public ACameraCaptureFailure {};
+// Wrap PhysicalCaptureResultInfo so that it can be ref-counted
+struct ACameraPhysicalCaptureResultInfo: public RefBase {
+ ACameraPhysicalCaptureResultInfo(const std::vector<PhysicalCaptureResultInfo>& info,
+ int64_t frameNumber) :
+ mPhysicalResultInfo(info), mFrameNumber(frameNumber) {}
+
+ std::vector<PhysicalCaptureResultInfo> mPhysicalResultInfo;
+ int64_t mFrameNumber;
+};
+
class CameraDevice final : public RefBase {
public:
CameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
@@ -109,19 +120,22 @@
camera_status_t waitUntilIdleLocked();
+ template<class T>
camera_status_t captureLocked(sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
+ /*optional*/T* cbs,
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId);
+ template<class T>
camera_status_t setRepeatingRequestsLocked(sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
+ /*optional*/T* cbs,
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId);
+ template<class T>
camera_status_t submitRequestsLocked(
sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
+ /*optional*/T* cbs,
int numRequests, ACaptureRequest** requests,
/*out*/int* captureSequenceId,
bool isRepeating);
@@ -192,6 +206,7 @@
// Capture callbacks
kWhatCaptureStart, // onCaptureStarted
kWhatCaptureResult, // onCaptureProgressed, onCaptureCompleted
+ kWhatLogicalCaptureResult, // onLogicalCameraCaptureCompleted
kWhatCaptureFail, // onCaptureFailed
kWhatCaptureSeqEnd, // onCaptureSequenceCompleted
kWhatCaptureSeqAbort, // onCaptureSequenceAborted
@@ -207,6 +222,7 @@
static const char* kCaptureRequestKey;
static const char* kTimeStampKey;
static const char* kCaptureResultKey;
+ static const char* kPhysicalCaptureResultKey;
static const char* kCaptureFailureKey;
static const char* kSequenceIdKey;
static const char* kFrameNumberKey;
@@ -245,19 +261,46 @@
const Vector<sp<CaptureRequest> >& requests,
bool isRepeating,
ACameraCaptureSession_captureCallbacks* cbs);
+ CallbackHolder(sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest> >& requests,
+ bool isRepeating,
+ ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs);
- static ACameraCaptureSession_captureCallbacks fillCb(
- ACameraCaptureSession_captureCallbacks* cbs) {
+ template <class T>
+ void initCaptureCallbacks(T* cbs) {
+ mContext = nullptr;
+ mOnCaptureStarted = nullptr;
+ mOnCaptureProgressed = nullptr;
+ mOnCaptureCompleted = nullptr;
+ mOnLogicalCameraCaptureCompleted = nullptr;
+ mOnCaptureFailed = nullptr;
+ mOnCaptureSequenceCompleted = nullptr;
+ mOnCaptureSequenceAborted = nullptr;
+ mOnCaptureBufferLost = nullptr;
if (cbs != nullptr) {
- return *cbs;
+ mContext = cbs->context;
+ mOnCaptureStarted = cbs->onCaptureStarted;
+ mOnCaptureProgressed = cbs->onCaptureProgressed;
+ mOnCaptureFailed = cbs->onCaptureFailed;
+ mOnCaptureSequenceCompleted = cbs->onCaptureSequenceCompleted;
+ mOnCaptureSequenceAborted = cbs->onCaptureSequenceAborted;
+ mOnCaptureBufferLost = cbs->onCaptureBufferLost;
}
- return { nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr };
}
-
sp<ACameraCaptureSession> mSession;
Vector<sp<CaptureRequest> > mRequests;
const bool mIsRepeating;
- ACameraCaptureSession_captureCallbacks mCallbacks;
+ const bool mIsLogicalCameraCallback;
+
+ void* mContext;
+ ACameraCaptureSession_captureCallback_start mOnCaptureStarted;
+ ACameraCaptureSession_captureCallback_result mOnCaptureProgressed;
+ ACameraCaptureSession_captureCallback_result mOnCaptureCompleted;
+ ACameraCaptureSession_logicalCamera_captureCallback_result mOnLogicalCameraCaptureCompleted;
+ ACameraCaptureSession_captureCallback_failed mOnCaptureFailed;
+ ACameraCaptureSession_captureCallback_sequenceEnd mOnCaptureSequenceCompleted;
+ ACameraCaptureSession_captureCallback_sequenceAbort mOnCaptureSequenceAborted;
+ ACameraCaptureSession_captureCallback_bufferLost mOnCaptureBufferLost;
};
// sequence id -> callbacks map
std::map<int, CallbackHolder> mSequenceCallbackMap;
diff --git a/camera/ndk/impl/ACameraDevice.inc b/camera/ndk/impl/ACameraDevice.inc
new file mode 100644
index 0000000..1fc5352
--- /dev/null
+++ b/camera/ndk/impl/ACameraDevice.inc
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <vector>
+#include <inttypes.h>
+#include "ACameraDevice.h"
+#include "ACameraMetadata.h"
+#include "ACaptureRequest.h"
+#include "ACameraCaptureSession.h"
+
+namespace android {
+namespace acam {
+
+template<class T>
+camera_status_t
+CameraDevice::captureLocked(
+ sp<ACameraCaptureSession> session,
+ /*optional*/T* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ return submitRequestsLocked(
+ session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/false);
+}
+
+template<class T>
+camera_status_t
+CameraDevice::setRepeatingRequestsLocked(
+ sp<ACameraCaptureSession> session,
+ /*optional*/T* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ return submitRequestsLocked(
+ session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/true);
+}
+
+template<class T>
+camera_status_t CameraDevice::submitRequestsLocked(
+ sp<ACameraCaptureSession> session,
+ /*optional*/T* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId,
+ bool isRepeating) {
+ camera_status_t ret = checkCameraClosedOrErrorLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("Camera %s submit capture request failed! ret %d", getId(), ret);
+ return ret;
+ }
+
+ // Form two vectors of capture request, one for internal tracking
+ std::vector<hardware::camera2::CaptureRequest> requestList;
+ Vector<sp<CaptureRequest> > requestsV;
+ requestsV.setCapacity(numRequests);
+ for (int i = 0; i < numRequests; i++) {
+ sp<CaptureRequest> req;
+ ret = allocateCaptureRequest(requests[i], req);
+ if (ret != ACAMERA_OK) {
+ ALOGE("Convert capture request to internal format failure! ret %d", ret);
+ return ret;
+ }
+ if (req->mSurfaceList.empty()) {
+ ALOGE("Capture request without output target cannot be submitted!");
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ requestList.push_back(*(req.get()));
+ requestsV.push_back(req);
+ }
+
+ if (isRepeating) {
+ ret = stopRepeatingLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("Camera %s stop repeating failed! ret %d", getId(), ret);
+ return ret;
+ }
+ }
+
+ binder::Status remoteRet;
+ hardware::camera2::utils::SubmitInfo info;
+ remoteRet = mRemote->submitRequestList(requestList, isRepeating, &info);
+ int sequenceId = info.mRequestId;
+ int64_t lastFrameNumber = info.mLastFrameNumber;
+ if (sequenceId < 0) {
+ ALOGE("Camera %s submit request remote failure: ret %d", getId(), sequenceId);
+ return ACAMERA_ERROR_UNKNOWN;
+ }
+
+ CallbackHolder cbHolder(session, requestsV, isRepeating, cbs);
+ mSequenceCallbackMap.insert(std::make_pair(sequenceId, cbHolder));
+
+ if (isRepeating) {
+ // stopRepeating above should have cleanup repeating sequence id
+ if (mRepeatingSequenceId != REQUEST_ID_NONE) {
+ setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_DEVICE);
+ return ACAMERA_ERROR_CAMERA_DEVICE;
+ }
+ mRepeatingSequenceId = sequenceId;
+ } else {
+ mSequenceLastFrameNumberMap.insert(std::make_pair(sequenceId, lastFrameNumber));
+ }
+
+ if (mIdle) {
+ sp<AMessage> msg = new AMessage(kWhatSessionStateCb, mHandler);
+ msg->setPointer(kContextKey, session->mUserSessionCallback.context);
+ msg->setObject(kSessionSpKey, session);
+ msg->setPointer(kCallbackFpKey, (void*) session->mUserSessionCallback.onActive);
+ postSessionMsgAndCleanup(msg);
+ }
+ mIdle = false;
+ mBusySession = session;
+
+ if (captureSequenceId) {
+ *captureSequenceId = sequenceId;
+ }
+ return ACAMERA_OK;
+}
+
+} // namespace acam
+} // namespace android
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 94b5713..c661233 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -50,6 +50,7 @@
case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS:
case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE:
case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT:
+ case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA:
return true;
case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING:
case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING:
@@ -79,11 +80,41 @@
uint8_t capability = entry.data.u8[i];
if (isNdkSupportedCapability(capability)) {
capabilities.push(capability);
+
+ if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
+ derivePhysicalCameraIds();
+ }
}
}
mData.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities);
}
+void
+ACameraMetadata::derivePhysicalCameraIds() {
+ ACameraMetadata_const_entry entry;
+ auto ret = getConstEntry(ACAMERA_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS, &entry);
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: Get ACAMERA_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS key failed. ret %d",
+ __FUNCTION__, ret);
+ return;
+ }
+
+ const uint8_t* ids = entry.data.u8;
+ size_t start = 0;
+ for (size_t i = 0; i < entry.count; ++i) {
+ if (ids[i] == '\0') {
+ if (start != i) {
+ mStaticPhysicalCameraIds.push_back((const char*)ids+start);
+ }
+ start = i+1;
+ }
+ }
+
+ if (mStaticPhysicalCameraIds.size() < 2) {
+ ALOGW("%s: Logical multi-camera device only has %zu physical cameras",
+ __FUNCTION__, mStaticPhysicalCameraIds.size());
+ }
+}
void
ACameraMetadata::filterDurations(uint32_t tag) {
@@ -309,6 +340,27 @@
return mData;
}
+bool
+ACameraMetadata::isLogicalMultiCamera(size_t* count, const char*const** physicalCameraIds) const {
+ if (mType != ACM_CHARACTERISTICS) {
+ ALOGE("%s must be called for a static metadata!", __FUNCTION__);
+ return false;
+ }
+ if (count == nullptr || physicalCameraIds == nullptr) {
+ ALOGE("%s: Invalid input count: %p, physicalCameraIds: %p", __FUNCTION__,
+ count, physicalCameraIds);
+ return false;
+ }
+
+ if (mStaticPhysicalCameraIds.size() >= 2) {
+ *count = mStaticPhysicalCameraIds.size();
+ *physicalCameraIds = mStaticPhysicalCameraIds.data();
+ return true;
+ }
+
+ return false;
+}
+
// TODO: some of key below should be hidden from user
// ex: ACAMERA_REQUEST_ID and ACAMERA_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR
/*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index f21dbaf..7049c4b 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -17,6 +17,7 @@
#define _ACAMERA_METADATA_H
#include <unordered_set>
+#include <vector>
#include <sys/types.h>
#include <utils/Mutex.h>
@@ -65,6 +66,7 @@
/*out*/const uint32_t** tags) const;
const CameraMetadata& getInternalData() const;
+ bool isLogicalMultiCamera(size_t* count, const char* const** physicalCameraIds) const;
private:
@@ -74,6 +76,7 @@
void filterUnsupportedFeatures(); // Hide features not yet supported by NDK
void filterStreamConfigurations(); // Hide input streams, translate hal format to NDK formats
void filterDurations(uint32_t tag); // translate hal format to NDK formats
+ void derivePhysicalCameraIds(); // Derive array of physical ids.
template<typename INTERNAL_T, typename NDK_T>
camera_status_t updateImpl(uint32_t tag, uint32_t count, const NDK_T* data) {
@@ -112,6 +115,8 @@
const ACAMERA_METADATA_TYPE mType;
static std::unordered_set<uint32_t> sSystemTags;
+
+ std::vector<const char*> mStaticPhysicalCameraIds;
};
#endif // _ACAMERA_METADATA_H
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 1244582..d13a818 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -643,6 +643,103 @@
ACaptureSessionOutput* output) __INTRODUCED_IN(28);
#endif /* __ANDROID_API__ >= 28 */
+#if __ANDROID_API__ >= 29
+/**
+ * The definition of final capture result callback with logical multi-camera support.
+ *
+ * This has the same functionality as the final ACameraCaptureSession_captureCallback_result, with
+ * the added ability to return physical camera result metadata within a logical multi-camera.
+ *
+ * For a logical multi-camera, this function will be called with the Ids and result metadata
+ * of the underlying physical cameras for which the corresponding capture request contains targets.
+ * If the capture request doesn't contain targets specific to any physical camera, or the current
+ * camera device isn't a logical multi-camera, physicalResultCount will be 0.
+ *
+ * @param context The optional application context provided by user in
+ * {@link ACameraCaptureSession_captureCallbacks}.
+ * @param session The camera capture session of interest.
+ * @param request The capture request of interest. Note that this pointer points to a copy of
+ * capture request sent by application, so the address is different to what
+ * application sent but the content will match. This request will be freed by
+ * framework immediately after this callback returns.
+ * @param result The capture result metadata reported by camera device. The memory is managed by
+ * camera framework. Do not access this pointer after this callback returns.
+ * @param physicalResultCount The number of physical camera result metadata
+ * @param physicalCameraIds The array of physical camera IDs on which the
+ * physical result metadata are reported.
+ * @param physicalResults The array of capture result metadata reported by the
+ * physical camera devices.
+ */
+typedef void (*ACameraCaptureSession_logicalCamera_captureCallback_result)(
+ void* context, ACameraCaptureSession* session,
+ ACaptureRequest* request, const ACameraMetadata* result,
+ size_t physicalResultCount, const char** physicalCameraIds,
+ const ACameraMetadata** physicalResults);
+
+/**
+ * This has the same functionality as ACameraCaptureSession_captureCallbacks,
+ * with the exception that an onLogicalCameraCaptureCompleted callback is
+ * used, instead of onCaptureCompleted, to support logical multi-camera.
+ */
+typedef struct ACameraCaptureSession_logicalCamera_captureCallbacks {
+ /**
+ * Same as ACameraCaptureSession_captureCallbacks
+ */
+ void* context;
+ ACameraCaptureSession_captureCallback_start onCaptureStarted;
+ ACameraCaptureSession_captureCallback_result onCaptureProgressed;
+
+ /**
+ * This callback is called when an image capture has fully completed and all the
+     * result metadata is available. For a logical multi-camera, this callback
+     * also returns the result metadata for all physical cameras that were
+     * explicitly targeted by the request.
+ *
+ * <p>This callback will always fire after the last {@link onCaptureProgressed};
+ * in other words, no more partial results will be delivered once the completed result
+ * is available.</p>
+ *
+ * <p>For performance-intensive use-cases where latency is a factor, consider
+ * using {@link onCaptureProgressed} instead.</p>
+ *
+     * <p>Note that the ACaptureRequest pointer in the callback will not match what the application
+     * submitted, but the contents of the ACaptureRequest will match what the application submitted.</p>
+ */
+ ACameraCaptureSession_logicalCamera_captureCallback_result onLogicalCameraCaptureCompleted;
+
+ /**
+ * Same as ACameraCaptureSession_captureCallbacks
+ */
+ ACameraCaptureSession_captureCallback_failed onCaptureFailed;
+ ACameraCaptureSession_captureCallback_sequenceEnd onCaptureSequenceCompleted;
+ ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+ ACameraCaptureSession_captureCallback_bufferLost onCaptureBufferLost;
+} ACameraCaptureSession_logicalCamera_captureCallbacks;
+
+/**
+ * This has the same functionality as ACameraCaptureSession_capture, with added
+ * support for logical multi-camera where the capture callbacks supports result metadata for
+ * physical cameras.
+ */
+camera_status_t ACameraCaptureSession_logicalCamera_capture(
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacks* callbacks,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) __INTRODUCED_IN(29);
+
+/**
+ * This has the same functionality as ACameraCaptureSession_setRepeatingRequest, with added
+ * support for logical multi-camera, where the capture callbacks support result metadata for
+ * physical cameras.
+ */
+camera_status_t ACameraCaptureSession_logicalCamera_setRepeatingRequest(
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacks* callbacks,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) __INTRODUCED_IN(29);
+
+#endif /* __ANDROID_API__ >= 29 */
+
__END_DECLS
#endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
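A minimal usage sketch of the new logical-camera capture path follows. It is not part of the change;
the session and request variables are assumed to have been created through the usual NDK calls
(ACameraDevice_createCaptureSession, ACameraDevice_createCaptureRequest).

    // Completed-capture callback that also receives per-physical-camera results.
    static void onLogicalResult(void* context, ACameraCaptureSession* session,
            ACaptureRequest* request, const ACameraMetadata* result,
            size_t physicalResultCount, const char** physicalCameraIds,
            const ACameraMetadata** physicalResults) {
        for (size_t i = 0; i < physicalResultCount; i++) {
            // physicalCameraIds[i] and physicalResults[i] are only valid inside this callback.
        }
    }

    ACameraCaptureSession_logicalCamera_captureCallbacks cbs = {0};
    cbs.onLogicalCameraCaptureCompleted = onLogicalResult;  // other callbacks may stay NULL
    int sequenceId = 0;
    camera_status_t status = ACameraCaptureSession_logicalCamera_capture(
            session, &cbs, /*numRequests*/ 1, &request, &sequenceId);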
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 4fe43d5..26af4f8 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -765,6 +765,36 @@
#endif /* __ANDROID_API__ >= 28 */
+#if __ANDROID_API__ >= 29
+
+/**
+ * Create a ACaptureSessionOutput object used for streaming from a physical
+ * camera as part of a logical camera device.
+ *
+ * <p>The ACaptureSessionOutput is used in {@link ACaptureSessionOutputContainer_add} method to add
+ * an output {@link ANativeWindow} to ACaptureSessionOutputContainer. Use
+ * {@link ACaptureSessionOutput_free} to free the object and its memory after application no longer
+ * needs the {@link ACaptureSessionOutput}.</p>
+ *
+ * @param anw the {@link ANativeWindow} to be associated with the {@link ACaptureSessionOutput}
+ * @param physicalId the Id of the physical camera this output is associated
+ * with.
+ * @param output the output {@link ACaptureSessionOutput} will be stored here if the
+ * method call succeeds.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds. The created ACaptureSessionOutput will be
+ * filled in the output argument.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if anw, physicalId or output is NULL.</li></ul>
+ *
+ * @see ACaptureSessionOutputContainer_add
+ */
+camera_status_t ACaptureSessionPhysicalOutput_create(
+ ACameraWindowType* anw, const char* physicalId,
+ /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(29);
+
+#endif /* __ANDROID_API__ >= 29 */
+
__END_DECLS
#endif /* _NDK_CAMERA_DEVICE_H */
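A short sketch of how an application might use the new physical output entry point. It is not part
of the change; the window and outputContainer variables, and the physical camera id "2", are
illustrative assumptions.

    ACaptureSessionOutput* physicalOutput = NULL;
    camera_status_t status = ACaptureSessionPhysicalOutput_create(window, "2", &physicalOutput);
    if (status == ACAMERA_OK) {
        // The physical output is added to the container like any other session output.
        ACaptureSessionOutputContainer_add(outputContainer, physicalOutput);
    }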
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index 611e270..9bbfb83 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -233,6 +233,28 @@
#endif /* __ANDROID_API__ >= 24 */
+#if __ANDROID_API__ >= 29
+
+/**
+ * Helper function to check if a camera is a logical multi-camera.
+ *
+ * <p>Check whether a camera device is a logical multi-camera based on its
+ * static metadata. If it is, the Ids of its physical sub-cameras are also returned.</p>
+ *
+ * @param staticMetadata the static metadata of the camera being checked.
+ * @param numPhysicalCameras returns the number of physical cameras.
+ * @param physicalCameraIds returns the array of physical camera Ids backing this logical
+ * camera device. Note that this pointer is only valid
+ * during the lifetime of the staticMetadata object.
+ *
+ * @return true if this is a logical multi-camera, false otherwise.
+ */
+bool ACameraMetadata_isLogicalMultiCamera(const ACameraMetadata* staticMetadata,
+ /*out*/size_t* numPhysicalCameras, /*out*/const char* const** physicalCameraIds)
+ __INTRODUCED_IN(29);
+
+#endif /* __ANDROID_API__ >= 29 */
+
__END_DECLS
#endif /* _NDK_CAMERA_METADATA_H */
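A minimal sketch of the new metadata helper, assuming chars was obtained from
ACameraManager_getCameraCharacteristics (the variable names are illustrative, not part of the
change):

    size_t numPhysicalCameras = 0;
    const char* const* physicalIds = NULL;
    if (ACameraMetadata_isLogicalMultiCamera(chars, &numPhysicalCameras, &physicalIds)) {
        for (size_t i = 0; i < numPhysicalCameras; i++) {
            // physicalIds[i] stays valid only for the lifetime of chars.
        }
    }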
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index cb474f4..4bb74cb 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -5552,6 +5552,25 @@
ACAMERA_DEPTH_END,
/**
+ * <p>String containing the ids of the underlying physical cameras.</p>
+ *
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>For a logical camera, this is the concatenation of all underlying physical camera ids.
+ * The null terminator for physical camera id must be preserved so that the whole string
+ * can be tokenized using '\0' to generate list of physical camera ids.</p>
+ * <p>For example, if the physical camera ids of the logical camera are "2" and "3", the
+ * value of this tag will be ['2', '\0', '3', '\0'].</p>
+ * <p>The number of physical camera ids must be no less than 2.</p>
+ */
+ ACAMERA_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS = // byte[n]
+ ACAMERA_LOGICAL_MULTI_CAMERA_START,
+ /**
* <p>The accuracy of frame timestamp synchronization between physical cameras</p>
*
* <p>Type: byte (acamera_metadata_enum_android_logical_multi_camera_sensor_sync_type_t)</p>
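A sketch of how an application could tokenize ACAMERA_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS directly,
mirroring the '\0'-separated encoding documented above (the chars variable is an assumed
ACameraMetadata pointer; most applications can simply use ACameraMetadata_isLogicalMultiCamera):

    ACameraMetadata_const_entry entry;
    if (ACameraMetadata_getConstEntry(chars, ACAMERA_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
            &entry) == ACAMERA_OK) {
        size_t start = 0;
        for (size_t i = 0; i < entry.count; i++) {
            if (entry.data.u8[i] == '\0') {
                if (start != i) {
                    const char* physicalId = (const char*) entry.data.u8 + start;
                    // physicalId now points at one '\0'-terminated physical camera id.
                }
                start = i + 1;
            }
        }
    }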
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index a29e96d..5a00022 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -2,9 +2,11 @@
global:
ACameraCaptureSession_abortCaptures;
ACameraCaptureSession_capture;
+ ACameraCaptureSession_logicalCamera_capture; # introduced=29
ACameraCaptureSession_close;
ACameraCaptureSession_getDevice;
ACameraCaptureSession_setRepeatingRequest;
+ ACameraCaptureSession_logicalCamera_setRepeatingRequest; # introduced=29
ACameraCaptureSession_stopRepeating;
ACameraCaptureSession_updateSharedOutput; # introduced=28
ACameraDevice_close;
@@ -24,6 +26,7 @@
ACameraMetadata_free;
ACameraMetadata_getAllTags;
ACameraMetadata_getConstEntry;
+ ACameraMetadata_isLogicalMultiCamera; # introduced=29
ACameraOutputTarget_create;
ACameraOutputTarget_free;
ACaptureRequest_addTarget;
@@ -48,6 +51,7 @@
ACaptureSessionSharedOutput_create; # introduced=28
ACaptureSessionSharedOutput_add; # introduced=28
ACaptureSessionSharedOutput_remove; # introduced=28
+ ACaptureSessionPhysicalOutput_create; # introduced=29
ACaptureSessionOutput_free;
local:
*;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
index 8d9e90c..e1af8c1 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
@@ -14,11 +14,13 @@
* limitations under the License.
*/
+#include <string>
#include "utils.h"
struct ACaptureSessionOutput {
- explicit ACaptureSessionOutput(native_handle_t* window, bool isShared = false) :
- mWindow(window), mIsShared(isShared) {};
+ explicit ACaptureSessionOutput(native_handle_t* window, bool isShared = false,
+ const char* physicalCameraId = "") :
+ mWindow(window), mIsShared(isShared), mPhysicalCameraId(physicalCameraId) {};
bool operator == (const ACaptureSessionOutput& other) const {
return (mWindow == other.mWindow);
@@ -40,6 +42,7 @@
std::set<android::acam::utils::native_handle_ptr_wrapper> mSharedWindows;
bool mIsShared;
int mRotation = CAMERA3_STREAM_ROTATION_0;
+ std::string mPhysicalCameraId;
};
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 26e6b3c..f7863a5 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -29,6 +29,8 @@
#include "ACaptureRequest.h"
#include "utils.h"
+#include "ACameraCaptureSession.inc"
+
using namespace android;
namespace android {
@@ -47,6 +49,7 @@
const char* CameraDevice::kCaptureRequestKey = "CaptureRequest";
const char* CameraDevice::kTimeStampKey = "TimeStamp";
const char* CameraDevice::kCaptureResultKey = "CaptureResult";
+const char* CameraDevice::kPhysicalCaptureResultKey = "PhysicalCaptureResult";
const char* CameraDevice::kCaptureFailureKey = "CaptureFailure";
const char* CameraDevice::kSequenceIdKey = "SequenceId";
const char* CameraDevice::kFrameNumberKey = "FrameNumber";
@@ -206,28 +209,8 @@
return ACAMERA_OK;
}
-camera_status_t
-CameraDevice::captureLocked(
- sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
- int numRequests, ACaptureRequest** requests,
- /*optional*/int* captureSequenceId) {
- return submitRequestsLocked(
- session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/false);
-}
-
-camera_status_t
-CameraDevice::setRepeatingRequestsLocked(
- sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
- int numRequests, ACaptureRequest** requests,
- /*optional*/int* captureSequenceId) {
- return submitRequestsLocked(
- session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/true);
-}
-
-void addRequestSettingsMetadata(ACaptureRequest *aCaptureRequest,
- sp<CaptureRequest> &req) {
+void CameraDevice::addRequestSettingsMetadata(ACaptureRequest *aCaptureRequest,
+ sp<CaptureRequest> &req) {
CameraMetadata metadataCopy = aCaptureRequest->settings->getInternalData();
const camera_metadata_t *camera_metadata = metadataCopy.getAndLock();
HCameraMetadata hCameraMetadata;
@@ -237,101 +220,6 @@
req->mPhysicalCameraSettings[0].settings.metadata(std::move(hCameraMetadata));
}
-camera_status_t
-CameraDevice::submitRequestsLocked(
- sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
- int numRequests, ACaptureRequest** requests,
- /*optional*/int* captureSequenceId,
- bool isRepeating) {
- camera_status_t ret = checkCameraClosedOrErrorLocked();
- if (ret != ACAMERA_OK) {
- ALOGE("Camera %s submit capture request failed! ret %d", getId(), ret);
- return ret;
- }
-
- // Form two vectors of capture request, one for internal tracking
- std::vector<frameworks::cameraservice::device::V2_0::CaptureRequest> requestList;
- Vector<sp<CaptureRequest>> requestsV;
- requestsV.setCapacity(numRequests);
- for (int i = 0; i < numRequests; i++) {
- sp<CaptureRequest> req;
- ret = allocateCaptureRequest(requests[i], req);
- // We need to call this method since after submitRequestList is called,
- // the request metadata queue might have removed the capture request
- // metadata. Therefore we simply add the metadata to its wrapper class,
- // so that it can be retrived later.
- addRequestSettingsMetadata(requests[i], req);
- if (ret != ACAMERA_OK) {
- ALOGE("Convert capture request to internal format failure! ret %d", ret);
- return ret;
- }
- if (req->mCaptureRequest.streamAndWindowIds.size() == 0) {
- ALOGE("Capture request without output target cannot be submitted!");
- return ACAMERA_ERROR_INVALID_PARAMETER;
- }
- requestList.push_back(utils::convertToHidl(req.get()));
- requestsV.push_back(req);
- }
- if (isRepeating) {
- ret = stopRepeatingLocked();
- if (ret != ACAMERA_OK) {
- ALOGE("Camera %s stop repeating failed! ret %d", getId(), ret);
- return ret;
- }
- }
-
- SubmitInfo info;
- Status status;
- auto remoteRet = mRemote->submitRequestList(requestList, isRepeating,
- [&status, &info](auto s, auto &submitInfo) {
- status = s;
- info = submitInfo;
- });
- if (!remoteRet.isOk()) {
- ALOGE("%s: Transaction error for submitRequestList call: %s", __FUNCTION__,
- remoteRet.description().c_str());
- }
- if (status != Status::NO_ERROR) {
- return utils::convertFromHidl(status);
- }
- int32_t sequenceId = info.requestId;
- int64_t lastFrameNumber = info.lastFrameNumber;
- if (sequenceId < 0) {
- ALOGE("Camera %s submit request remote failure: ret %d", getId(), sequenceId);
- return ACAMERA_ERROR_UNKNOWN;
- }
-
- CallbackHolder cbHolder(session, requestsV, isRepeating, cbs);
- mSequenceCallbackMap.insert(std::make_pair(sequenceId, cbHolder));
-
- if (isRepeating) {
- // stopRepeating above should have cleanup repeating sequence id
- if (mRepeatingSequenceId != REQUEST_ID_NONE) {
- setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_DEVICE);
- return ACAMERA_ERROR_CAMERA_DEVICE;
- }
- mRepeatingSequenceId = sequenceId;
- } else {
- mSequenceLastFrameNumberMap.insert(std::make_pair(sequenceId, lastFrameNumber));
- }
-
- if (mIdle) {
- sp<AMessage> msg = new AMessage(kWhatSessionStateCb, mHandler);
- msg->setPointer(kContextKey, session->mUserSessionCallback.context);
- msg->setObject(kSessionSpKey, session);
- msg->setPointer(kCallbackFpKey, (void*) session->mUserSessionCallback.onActive);
- postSessionMsgAndCleanup(msg);
- }
- mIdle = false;
- mBusySession = session;
-
- if (captureSequenceId) {
- *captureSequenceId = sequenceId;
- }
- return ACAMERA_OK;
-}
-
camera_status_t CameraDevice::updateOutputConfigurationLocked(ACaptureSessionOutput *output) {
camera_status_t ret = checkCameraClosedOrErrorLocked();
if (ret != ACAMERA_OK) {
@@ -365,6 +253,7 @@
outConfig.windowGroupId = -1; // ndk doesn't support inter OutputConfiguration buffer sharing.
outConfig.windowHandles.resize(output->mSharedWindows.size() + 1);
outConfig.windowHandles[0] = output->mWindow;
+ outConfig.physicalCameraId = output->mPhysicalCameraId;
int i = 1;
for (auto& anw : output->mSharedWindows) {
outConfig.windowHandles[i++] = anw;
@@ -668,6 +557,7 @@
outConfigInsert.windowGroupId = -1;
outConfigInsert.windowHandles.resize(outConfig.mSharedWindows.size() + 1);
outConfigInsert.windowHandles[0] = anw;
+ outConfigInsert.physicalCameraId = outConfig.mPhysicalCameraId;
native_handle_ptr_wrapper wrap(anw);
outputSet.insert(std::make_pair(anw, outConfigInsertW));
}
@@ -894,7 +784,7 @@
if (errorCode == ErrorCode::CAMERA_BUFFER) {
int32_t streamId = resultExtras.errorStreamId;
ACameraCaptureSession_captureCallback_bufferLost onBufferLost =
- cbh.mCallbacks.onCaptureBufferLost;
+ cbh.mOnCaptureBufferLost;
auto outputPairIt = mConfiguredOutputs.find(streamId);
if (outputPairIt == mConfiguredOutputs.end()) {
ALOGE("%s: Error: stream id %d does not exist", __FUNCTION__, streamId);
@@ -913,7 +803,7 @@
getId(), anw, frameNumber);
sp<AMessage> msg = new AMessage(kWhatCaptureBufferLost, mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, session);
msg->setPointer(kCallbackFpKey, (void*) onBufferLost);
msg->setObject(kCaptureRequestKey, request);
@@ -925,7 +815,7 @@
}
} else { // Handle other capture failures
// Fire capture failure callback if there is one registered
- ACameraCaptureSession_captureCallback_failed onError = cbh.mCallbacks.onCaptureFailed;
+ ACameraCaptureSession_captureCallback_failed onError = cbh.mOnCaptureFailed;
sp<CameraCaptureFailure> failure(new CameraCaptureFailure());
failure->frameNumber = frameNumber;
// TODO: refine this when implementing flush
@@ -934,7 +824,7 @@
failure->wasImageCaptured = (errorCode == ErrorCode::CAMERA_RESULT);
sp<AMessage> msg = new AMessage(kWhatCaptureFail, mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, session);
msg->setPointer(kCallbackFpKey, (void*) onError);
msg->setObject(kCaptureRequestKey, request);
@@ -956,6 +846,7 @@
case kWhatSessionStateCb:
case kWhatCaptureStart:
case kWhatCaptureResult:
+ case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
case kWhatCaptureSeqEnd:
case kWhatCaptureSeqAbort:
@@ -1026,6 +917,7 @@
case kWhatSessionStateCb:
case kWhatCaptureStart:
case kWhatCaptureResult:
+ case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
case kWhatCaptureSeqEnd:
case kWhatCaptureSeqAbort:
@@ -1043,6 +935,7 @@
switch (msg->what()) {
case kWhatCaptureStart:
case kWhatCaptureResult:
+ case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
case kWhatCaptureBufferLost:
found = msg->findObject(kCaptureRequestKey, &obj);
@@ -1114,6 +1007,62 @@
freeACaptureRequest(request);
break;
}
+ case kWhatLogicalCaptureResult:
+ {
+ ACameraCaptureSession_logicalCamera_captureCallback_result onResult;
+ found = msg->findPointer(kCallbackFpKey, (void**) &onResult);
+ if (!found) {
+ ALOGE("%s: Cannot find capture result callback!", __FUNCTION__);
+ return;
+ }
+ if (onResult == nullptr) {
+ return;
+ }
+
+ found = msg->findObject(kCaptureResultKey, &obj);
+ if (!found) {
+ ALOGE("%s: Cannot find capture result!", __FUNCTION__);
+ return;
+ }
+ sp<ACameraMetadata> result(static_cast<ACameraMetadata*>(obj.get()));
+
+ found = msg->findObject(kPhysicalCaptureResultKey, &obj);
+ if (!found) {
+ ALOGE("%s: Cannot find physical capture result!", __FUNCTION__);
+ return;
+ }
+ sp<ACameraPhysicalCaptureResultInfo> physicalResult(
+ static_cast<ACameraPhysicalCaptureResultInfo*>(obj.get()));
+ std::vector<PhysicalCaptureResultInfoLocal>& physicalResultInfo =
+ physicalResult->mPhysicalResultInfo;
+
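+ // Build parallel vectors of physical camera ids and per-camera result metadata;
+ // each metadata clone gets the frame number injected so the callback's arrays
+ // line up with the logical result.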
+ std::vector<std::string> physicalCameraIds;
+ std::vector<sp<ACameraMetadata>> physicalMetadataCopy;
+ for (size_t i = 0; i < physicalResultInfo.size(); i++) {
+ physicalCameraIds.push_back(physicalResultInfo[i].physicalCameraId);
+
+ CameraMetadata clone = physicalResultInfo[i].physicalMetadata;
+ clone.update(ANDROID_SYNC_FRAME_NUMBER,
+ &physicalResult->mFrameNumber, /*data_count*/1);
+ sp<ACameraMetadata> metadata =
+ new ACameraMetadata(clone.release(), ACameraMetadata::ACM_RESULT);
+ physicalMetadataCopy.push_back(metadata);
+ }
+ std::vector<const char*> physicalCameraIdPtrs;
+ std::vector<const ACameraMetadata*> physicalMetadataCopyPtrs;
+ for (size_t i = 0; i < physicalResultInfo.size(); i++) {
+ physicalCameraIdPtrs.push_back(physicalCameraIds[i].c_str());
+ physicalMetadataCopyPtrs.push_back(physicalMetadataCopy[i].get());
+ }
+
+ ACaptureRequest* request = allocateACaptureRequest(requestSp);
+ (*onResult)(context, session.get(), request, result.get(),
+ physicalResultInfo.size(), physicalCameraIdPtrs.data(),
+ physicalMetadataCopyPtrs.data());
+ freeACaptureRequest(request);
+ break;
+ }
+
case kWhatCaptureFail:
{
ACameraCaptureSession_captureCallback_failed onFail;
@@ -1224,12 +1173,34 @@
}
CameraDevice::CallbackHolder::CallbackHolder(
- sp<ACameraCaptureSession> session,
- const Vector<sp<CaptureRequest> >& requests,
- bool isRepeating,
- ACameraCaptureSession_captureCallbacks* cbs) :
- mSession(session), mRequests(requests),
- mIsRepeating(isRepeating), mCallbacks(fillCb(cbs)) {}
+ sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest> >& requests,
+ bool isRepeating,
+ ACameraCaptureSession_captureCallbacks* cbs) :
+ mSession(session), mRequests(requests),
+ mIsRepeating(isRepeating),
+ mIsLogicalCameraCallback(false) {
+ initCaptureCallbacks(cbs);
+
+ if (cbs != nullptr) {
+ mOnCaptureCompleted = cbs->onCaptureCompleted;
+ }
+}
+
+CameraDevice::CallbackHolder::CallbackHolder(
+ sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest> >& requests,
+ bool isRepeating,
+ ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs) :
+ mSession(session), mRequests(requests),
+ mIsRepeating(isRepeating),
+ mIsLogicalCameraCallback(true) {
+ initCaptureCallbacks(lcbs);
+
+ if (lcbs != nullptr) {
+ mOnLogicalCameraCaptureCompleted = lcbs->onLogicalCameraCaptureCompleted;
+ }
+}
void
CameraDevice::checkRepeatingSequenceCompleteLocked(
@@ -1246,9 +1217,9 @@
mSequenceCallbackMap.erase(cbIt);
// send seq aborted callback
sp<AMessage> msg = new AMessage(kWhatCaptureSeqAbort, mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, cbh.mSession);
- msg->setPointer(kCallbackFpKey, (void*) cbh.mCallbacks.onCaptureSequenceAborted);
+ msg->setPointer(kCallbackFpKey, (void*) cbh.mOnCaptureSequenceAborted);
msg->setInt32(kSequenceIdKey, sequenceId);
postSessionMsgAndCleanup(msg);
} else {
@@ -1295,9 +1266,9 @@
mSequenceCallbackMap.erase(cbIt);
// send seq complete callback
sp<AMessage> msg = new AMessage(kWhatCaptureSeqEnd, mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, cbh.mSession);
- msg->setPointer(kCallbackFpKey, (void*) cbh.mCallbacks.onCaptureSequenceCompleted);
+ msg->setPointer(kCallbackFpKey, (void*) cbh.mOnCaptureSequenceCompleted);
msg->setInt32(kSequenceIdKey, sequenceId);
msg->setInt64(kFrameNumberKey, lastFrameNumber);
@@ -1454,7 +1425,7 @@
auto it = dev->mSequenceCallbackMap.find(sequenceId);
if (it != dev->mSequenceCallbackMap.end()) {
CallbackHolder cbh = (*it).second;
- ACameraCaptureSession_captureCallback_start onStart = cbh.mCallbacks.onCaptureStarted;
+ ACameraCaptureSession_captureCallback_start onStart = cbh.mOnCaptureStarted;
sp<ACameraCaptureSession> session = cbh.mSession;
if ((size_t) burstId >= cbh.mRequests.size()) {
ALOGE("%s: Error: request index %d out of bound (size %zu)",
@@ -1463,7 +1434,7 @@
}
sp<CaptureRequest> request = cbh.mRequests[burstId];
sp<AMessage> msg = new AMessage(kWhatCaptureStart, dev->mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, session);
msg->setPointer(kCallbackFpKey, (void*) onStart);
msg->setObject(kCaptureRequestKey, request);
@@ -1478,7 +1449,6 @@
const FmqSizeOrMetadata& resultMetadata,
const CaptureResultExtras& resultExtras,
const hidl_vec<PhysicalCaptureResultInfo>& physicalResultInfos) {
- (void) physicalResultInfos;
auto ret = Void();
sp<CameraDevice> dev = mDevice.promote();
@@ -1508,27 +1478,10 @@
}
CameraMetadata metadataCopy;
- HCameraMetadata hCameraMetadata;
- bool converted = false;
- if (resultMetadata.getDiscriminator() ==
- FmqSizeOrMetadata::hidl_discriminator::fmqMetadataSize) {
- hCameraMetadata.resize(resultMetadata.fmqMetadataSize());
- bool read = dev->mCaptureResultMetadataQueue->read(hCameraMetadata.data(),
- resultMetadata.fmqMetadataSize());
- if (!read) {
- ALOGE("%s capture request settings could't be read from fmq",
- __FUNCTION__);
- return ret;
- }
- // TODO: Do we actually need to clone here ?
- converted = utils::convertFromHidlCloned(hCameraMetadata, &metadataCopy);
-
- } else {
- converted = utils::convertFromHidlCloned(resultMetadata.metadata(), &metadataCopy);
- }
-
- if (!converted) {
- ALOGE("%s result metadata couldn't be converted", __FUNCTION__);
+ camera_status_t status = readOneResultMetadata(resultMetadata,
+ dev->mCaptureResultMetadataQueue.get(), &metadataCopy);
+ if (status != ACAMERA_OK) {
+ ALOGE("%s: result metadata couldn't be converted", __FUNCTION__);
return ret;
}
@@ -1538,9 +1491,6 @@
auto it = dev->mSequenceCallbackMap.find(sequenceId);
if (it != dev->mSequenceCallbackMap.end()) {
CallbackHolder cbh = (*it).second;
- ACameraCaptureSession_captureCallback_result onResult = isPartialResult ?
- cbh.mCallbacks.onCaptureProgressed :
- cbh.mCallbacks.onCaptureCompleted;
sp<ACameraCaptureSession> session = cbh.mSession;
if ((size_t) burstId >= cbh.mRequests.size()) {
ALOGE("%s: Error: request index %d out of bound (size %zu)",
@@ -1551,12 +1501,39 @@
sp<ACameraMetadata> result(new ACameraMetadata(
metadataCopy.release(), ACameraMetadata::ACM_RESULT));
- sp<AMessage> msg = new AMessage(kWhatCaptureResult, dev->mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ std::vector<PhysicalCaptureResultInfoLocal> localPhysicalResult;
+ localPhysicalResult.resize(physicalResultInfos.size());
+ for (size_t i = 0; i < physicalResultInfos.size(); i++) {
+ localPhysicalResult[i].physicalCameraId = physicalResultInfos[i].physicalCameraId;
+ status = readOneResultMetadata(physicalResultInfos[i].physicalCameraMetadata,
+ dev->mCaptureResultMetadataQueue.get(),
+ &localPhysicalResult[i].physicalMetadata);
+ if (status != ACAMERA_OK) {
+ ALOGE("%s: physical camera result metadata couldn't be converted", __FUNCTION__);
+ return ret;
+ }
+ }
+ sp<ACameraPhysicalCaptureResultInfo> physicalResult(
+ new ACameraPhysicalCaptureResultInfo(localPhysicalResult, frameNumber));
+
+ sp<AMessage> msg = new AMessage(
+ cbh.mIsLogicalCameraCallback ? kWhatLogicalCaptureResult : kWhatCaptureResult,
+ dev->mHandler);
+ msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, session);
- msg->setPointer(kCallbackFpKey, (void*) onResult);
msg->setObject(kCaptureRequestKey, request);
msg->setObject(kCaptureResultKey, result);
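+ // Partial results always go to onCaptureProgressed; final results are routed to
+ // either the logical-camera or the regular completion callback, and only the
+ // logical path attaches the physical result object to the message.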
+ if (isPartialResult) {
+ msg->setPointer(kCallbackFpKey,
+ (void *)cbh.mOnCaptureProgressed);
+ } else if (cbh.mIsLogicalCameraCallback) {
+ msg->setPointer(kCallbackFpKey,
+ (void *)cbh.mOnLogicalCameraCaptureCompleted);
+ msg->setObject(kPhysicalCaptureResultKey, physicalResult);
+ } else {
+ msg->setPointer(kCallbackFpKey,
+ (void *)cbh.mOnCaptureCompleted);
+ }
dev->postSessionMsgAndCleanup(msg);
}
@@ -1590,5 +1567,31 @@
return ret;
}
+camera_status_t CameraDevice::ServiceCallback::readOneResultMetadata(
+ const FmqSizeOrMetadata& fmqSizeOrMetadata, ResultMetadataQueue* metadataQueue,
+ CameraMetadata* metadata) {
+ if (metadataQueue == nullptr || metadata == nullptr) {
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
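+ // The metadata arrives either inline in the FmqSizeOrMetadata union or as a
+ // byte count to be read out of the capture result fast message queue (FMQ).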
+ bool converted;
+ HCameraMetadata hCameraMetadata;
+ if (fmqSizeOrMetadata.getDiscriminator() ==
+ FmqSizeOrMetadata::hidl_discriminator::fmqMetadataSize) {
+ hCameraMetadata.resize(fmqSizeOrMetadata.fmqMetadataSize());
+ bool read = metadataQueue->read(
+ hCameraMetadata.data(), fmqSizeOrMetadata.fmqMetadataSize());
+ if (!read) {
+ ALOGE("%s capture request settings could't be read from fmq", __FUNCTION__);
+ return ACAMERA_ERROR_UNKNOWN;
+ }
+ // TODO: Do we actually need to clone here ?
+ converted = utils::convertFromHidlCloned(hCameraMetadata, metadata);
+ } else {
+ converted = utils::convertFromHidlCloned(fmqSizeOrMetadata.metadata(), metadata);
+ }
+
+ return converted ? ACAMERA_OK : ACAMERA_ERROR_UNKNOWN;
+}
+
} // namespace acam
} // namespace android
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index 01a219f..c63b97f 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -21,6 +21,7 @@
#include <set>
#include <atomic>
#include <utility>
+#include <vector>
#include <utils/StrongPointer.h>
#include <utils/Mutex.h>
#include <utils/List.h>
@@ -65,6 +66,21 @@
// Wrap ACameraCaptureFailure so it can be ref-counted
struct CameraCaptureFailure : public RefBase, public ACameraCaptureFailure { };
+// Wrap PhysicalCaptureResultInfo so that it can be ref-counted
+struct PhysicalCaptureResultInfoLocal {
+ std::string physicalCameraId;
+ CameraMetadata physicalMetadata;
+};
+
+struct ACameraPhysicalCaptureResultInfo: public RefBase {
+ ACameraPhysicalCaptureResultInfo(const std::vector<PhysicalCaptureResultInfoLocal>& info,
+ int64_t frameNumber) :
+ mPhysicalResultInfo(info), mFrameNumber(frameNumber) {}
+
+ std::vector<PhysicalCaptureResultInfoLocal> mPhysicalResultInfo;
+ int64_t mFrameNumber;
+};
+
class CameraDevice final : public RefBase {
public:
CameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
@@ -99,6 +115,8 @@
android::hardware::Return<void> onRepeatingRequestError(uint64_t lastFrameNumber,
int32_t stoppedSequenceId) override;
private:
+ camera_status_t readOneResultMetadata(const FmqSizeOrMetadata& fmqSizeOrMetadata,
+ ResultMetadataQueue* metadataQueue, CameraMetadata* metadata);
const wp<CameraDevice> mDevice;
};
inline sp<ICameraDeviceCallback> getServiceCallback() {
@@ -127,24 +145,28 @@
camera_status_t waitUntilIdleLocked();
-
+ template<class T>
camera_status_t captureLocked(sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
+ /*optional*/T* cbs,
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId);
+ template<class T>
camera_status_t setRepeatingRequestsLocked(sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
+ /*optional*/T* cbs,
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId);
+ template<class T>
camera_status_t submitRequestsLocked(
sp<ACameraCaptureSession> session,
- /*optional*/ACameraCaptureSession_captureCallbacks* cbs,
+ /*optional*/T* cbs,
int numRequests, ACaptureRequest** requests,
/*out*/int* captureSequenceId,
bool isRepeating);
+ void addRequestSettingsMetadata(ACaptureRequest *aCaptureRequest, sp<CaptureRequest> &req);
+
camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
camera_status_t allocateCaptureRequest(
@@ -206,6 +228,7 @@
// Capture callbacks
kWhatCaptureStart, // onCaptureStarted
kWhatCaptureResult, // onCaptureProgressed, onCaptureCompleted
+ kWhatLogicalCaptureResult, // onLogicalCameraCaptureCompleted
kWhatCaptureFail, // onCaptureFailed
kWhatCaptureSeqEnd, // onCaptureSequenceCompleted
kWhatCaptureSeqAbort, // onCaptureSequenceAborted
@@ -221,6 +244,7 @@
static const char* kCaptureRequestKey;
static const char* kTimeStampKey;
static const char* kCaptureResultKey;
+ static const char* kPhysicalCaptureResultKey;
static const char* kCaptureFailureKey;
static const char* kSequenceIdKey;
static const char* kFrameNumberKey;
@@ -259,19 +283,47 @@
const Vector<sp<CaptureRequest>>& requests,
bool isRepeating,
ACameraCaptureSession_captureCallbacks* cbs);
+ CallbackHolder(sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest>>& requests,
+ bool isRepeating,
+ ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs);
- static ACameraCaptureSession_captureCallbacks fillCb(
- ACameraCaptureSession_captureCallbacks* cbs) {
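+ // Shared initializer for both callback flavors: T is either
+ // ACameraCaptureSession_captureCallbacks or
+ // ACameraCaptureSession_logicalCamera_captureCallbacks; the variant-specific
+ // completion callback is assigned by the matching CallbackHolder constructor.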
+ template <class T>
+ void initCaptureCallbacks(T* cbs) {
+ mContext = nullptr;
+ mOnCaptureStarted = nullptr;
+ mOnCaptureProgressed = nullptr;
+ mOnCaptureCompleted = nullptr;
+ mOnLogicalCameraCaptureCompleted = nullptr;
+ mOnCaptureFailed = nullptr;
+ mOnCaptureSequenceCompleted = nullptr;
+ mOnCaptureSequenceAborted = nullptr;
+ mOnCaptureBufferLost = nullptr;
if (cbs != nullptr) {
- return *cbs;
+ mContext = cbs->context;
+ mOnCaptureStarted = cbs->onCaptureStarted;
+ mOnCaptureProgressed = cbs->onCaptureProgressed;
+ mOnCaptureFailed = cbs->onCaptureFailed;
+ mOnCaptureSequenceCompleted = cbs->onCaptureSequenceCompleted;
+ mOnCaptureSequenceAborted = cbs->onCaptureSequenceAborted;
+ mOnCaptureBufferLost = cbs->onCaptureBufferLost;
}
- return { nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr };
}
sp<ACameraCaptureSession> mSession;
- Vector<sp<CaptureRequest>> mRequests;
+ Vector<sp<CaptureRequest>> mRequests;
const bool mIsRepeating;
- ACameraCaptureSession_captureCallbacks mCallbacks;
+ const bool mIsLogicalCameraCallback;
+
+ void* mContext;
+ ACameraCaptureSession_captureCallback_start mOnCaptureStarted;
+ ACameraCaptureSession_captureCallback_result mOnCaptureProgressed;
+ ACameraCaptureSession_captureCallback_result mOnCaptureCompleted;
+ ACameraCaptureSession_logicalCamera_captureCallback_result mOnLogicalCameraCaptureCompleted;
+ ACameraCaptureSession_captureCallback_failed mOnCaptureFailed;
+ ACameraCaptureSession_captureCallback_sequenceEnd mOnCaptureSequenceCompleted;
+ ACameraCaptureSession_captureCallback_sequenceAbort mOnCaptureSequenceAborted;
+ ACameraCaptureSession_captureCallback_bufferLost mOnCaptureBufferLost;
};
// sequence id -> callbacks map
std::map<int, CallbackHolder> mSequenceCallbackMap;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
new file mode 100644
index 0000000..7d2304e
--- /dev/null
+++ b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
@@ -0,0 +1,152 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <vector>
+#include <inttypes.h>
+#include <android/frameworks/cameraservice/service/2.0/ICameraService.h>
+#include <android/frameworks/cameraservice/device/2.0/types.h>
+#include <CameraMetadata.h>
+
+#include "ndk_vendor/impl/ACameraDevice.h"
+#include "ACameraCaptureSession.h"
+#include "ACameraMetadata.h"
+#include "ACaptureRequest.h"
+#include "utils.h"
+
+using namespace android;
+
+namespace android {
+namespace acam {
+
+template<class T>
+camera_status_t
+CameraDevice::captureLocked(
+ sp<ACameraCaptureSession> session,
+ /*optional*/T* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ return submitRequestsLocked(
+ session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/false);
+}
+
+template<class T>
+camera_status_t
+CameraDevice::setRepeatingRequestsLocked(
+ sp<ACameraCaptureSession> session,
+ /*optional*/T* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ return submitRequestsLocked(
+ session, cbs, numRequests, requests, captureSequenceId, /*isRepeating*/true);
+}
+
+template<class T>
+camera_status_t CameraDevice::submitRequestsLocked(
+ sp<ACameraCaptureSession> session,
+ /*optional*/T* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*out*/int* captureSequenceId,
+ bool isRepeating)
+{
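+ // T is either ACameraCaptureSession_captureCallbacks or
+ // ACameraCaptureSession_logicalCamera_captureCallbacks; the CallbackHolder
+ // constructor overloads record which flavor was supplied.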
+ camera_status_t ret = checkCameraClosedOrErrorLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("Camera %s submit capture request failed! ret %d", getId(), ret);
+ return ret;
+ }
+
+ // Form two vectors of capture requests: one to send over HIDL and one for internal tracking
+ std::vector<frameworks::cameraservice::device::V2_0::CaptureRequest> requestList;
+ Vector<sp<CaptureRequest>> requestsV;
+ requestsV.setCapacity(numRequests);
+ for (int i = 0; i < numRequests; i++) {
+ sp<CaptureRequest> req;
+ ret = allocateCaptureRequest(requests[i], req);
+ if (ret != ACAMERA_OK) {
+ ALOGE("Failed to convert capture request to internal format! ret %d", ret);
+ return ret;
+ }
+ // Keep a copy of the request settings in the wrapper class: once
+ // submitRequestList is called, the request metadata queue may already have
+ // dropped the capture request metadata, and it must remain retrievable later.
+ addRequestSettingsMetadata(requests[i], req);
+ if (req->mCaptureRequest.streamAndWindowIds.size() == 0) {
+ ALOGE("Capture request without output target cannot be submitted!");
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ requestList.push_back(utils::convertToHidl(req.get()));
+ requestsV.push_back(req);
+ }
+ if (isRepeating) {
+ ret = stopRepeatingLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("Camera %s stop repeating failed! ret %d", getId(), ret);
+ return ret;
+ }
+ }
+
+ SubmitInfo info;
+ Status status;
+ auto remoteRet = mRemote->submitRequestList(requestList, isRepeating,
+ [&status, &info](auto s, auto &submitInfo) {
+ status = s;
+ info = submitInfo;
+ });
+ if (!remoteRet.isOk()) {
+ ALOGE("%s: Transaction error for submitRequestList call: %s", __FUNCTION__,
+ remoteRet.description().c_str());
+ }
+ if (status != Status::NO_ERROR) {
+ return utils::convertFromHidl(status);
+ }
+ int32_t sequenceId = info.requestId;
+ int64_t lastFrameNumber = info.lastFrameNumber;
+ if (sequenceId < 0) {
+ ALOGE("Camera %s submit request remote failure: ret %d", getId(), sequenceId);
+ return ACAMERA_ERROR_UNKNOWN;
+ }
+
+ CallbackHolder cbHolder(session, requestsV, isRepeating, cbs);
+ mSequenceCallbackMap.insert(std::make_pair(sequenceId, cbHolder));
+ if (isRepeating) {
+ // stopRepeatingLocked() above should have cleaned up the repeating sequence id
+ if (mRepeatingSequenceId != REQUEST_ID_NONE) {
+ setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_DEVICE);
+ return ACAMERA_ERROR_CAMERA_DEVICE;
+ }
+ mRepeatingSequenceId = sequenceId;
+ } else {
+ mSequenceLastFrameNumberMap.insert(std::make_pair(sequenceId, lastFrameNumber));
+ }
+
+ if (mIdle) {
+ sp<AMessage> msg = new AMessage(kWhatSessionStateCb, mHandler);
+ msg->setPointer(kContextKey, session->mUserSessionCallback.context);
+ msg->setObject(kSessionSpKey, session);
+ msg->setPointer(kCallbackFpKey, (void*) session->mUserSessionCallback.onActive);
+ postSessionMsgAndCleanup(msg);
+ }
+ mIdle = false;
+ mBusySession = session;
+
+ if (captureSequenceId) {
+ *captureSequenceId = sequenceId;
+ }
+ return ACAMERA_OK;
+}
+
+} // namespace acam
+} // namespace android
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 579412e..f9bb3ac 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -55,38 +55,27 @@
class CameraHelper {
public:
- CameraHelper(native_handle_t* imgReaderAnw) : mImgReaderAnw(imgReaderAnw) {}
+ CameraHelper(const char* id, ACameraManager *manager) :
+ mImgReaderAnw(nullptr), mCameraId(id), mCameraManager(manager) {}
~CameraHelper() { closeCamera(); }
- int initCamera() {
- if (mImgReaderAnw == nullptr) {
+ struct PhysicalImgReaderInfo {
+ const char* physicalCameraId;
+ native_handle_t* anw;
+ };
+ int initCamera(native_handle_t* imgReaderAnw,
+ const std::vector<PhysicalImgReaderInfo>& physicalImgReaders) {
+ if (imgReaderAnw == nullptr) {
ALOGE("Cannot initialize camera before image reader get initialized.");
return -1;
}
+ if (mIsCameraReady) {
+ ALOGE("initCamera should only be called once.");
+ return -1;
+ }
+
int ret;
-
- mCameraManager = ACameraManager_create();
- if (mCameraManager == nullptr) {
- ALOGE("Failed to create ACameraManager.");
- return -1;
- }
-
- ret = ACameraManager_getCameraIdList(mCameraManager, &mCameraIdList);
- if (ret != AMEDIA_OK) {
- ALOGE("Failed to get cameraIdList: ret=%d", ret);
- return ret;
- }
- if (mCameraIdList->numCameras < 1) {
- ALOGW("Device has no camera on board.");
- return 0;
- }
-
- // We always use the first camera.
- mCameraId = mCameraIdList->cameraIds[0];
- if (mCameraId == nullptr) {
- ALOGE("Failed to get cameraId.");
- return -1;
- }
+ mImgReaderAnw = imgReaderAnw;
ret = ACameraManager_openCamera(mCameraManager, mCameraId, &mDeviceCb, &mDevice);
if (ret != AMEDIA_OK || mDevice == nullptr) {
@@ -94,18 +83,6 @@
return -1;
}
- ret = ACameraManager_getCameraCharacteristics(mCameraManager, mCameraId, &mCameraMetadata);
- if (ret != ACAMERA_OK || mCameraMetadata == nullptr) {
- ALOGE("Get camera %s characteristics failure. ret %d, metadata %p", mCameraId, ret,
- mCameraMetadata);
- return -1;
- }
-
- if (!isCapabilitySupported(ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE)) {
- ALOGW("Camera does not support BACKWARD_COMPATIBLE.");
- return 0;
- }
-
// Create capture session
ret = ACaptureSessionOutputContainer_create(&mOutputs);
if (ret != AMEDIA_OK) {
@@ -122,6 +99,25 @@
ALOGE("ACaptureSessionOutputContainer_add failed, ret=%d", ret);
return ret;
}
+
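+ // Each physical stream gets its own session output bound to a physical camera
+ // id and is added to the same output container as the logical stream.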
+ for (auto& physicalStream : physicalImgReaders) {
+ ACaptureSessionOutput* sessionOutput = nullptr;
+ ret = ACaptureSessionPhysicalOutput_create(physicalStream.anw,
+ physicalStream.physicalCameraId, &sessionOutput);
+ if (ret != ACAMERA_OK) {
+ ALOGE("ACaptureSessionPhysicalOutput_create failed, ret=%d", ret);
+ return ret;
+ }
+ ret = ACaptureSessionOutputContainer_add(mOutputs, sessionOutput);
+ if (ret != AMEDIA_OK) {
+ ALOGE("ACaptureSessionOutputContainer_add failed, ret=%d", ret);
+ return ret;
+ }
+ mExtraOutputs.push_back(sessionOutput);
+ // Assume at most one physical stream per physical camera.
+ mPhysicalCameraIds.push_back(physicalStream.physicalCameraId);
+ }
+
ret = ACameraDevice_createCaptureSession(mDevice, mOutputs, &mSessionCb, &mSession);
if (ret != AMEDIA_OK) {
ALOGE("ACameraDevice_createCaptureSession failed, ret=%d", ret);
@@ -145,21 +141,25 @@
return ret;
}
+ for (auto& physicalStream : physicalImgReaders) {
+ ACameraOutputTarget* outputTarget = nullptr;
+ ret = ACameraOutputTarget_create(physicalStream.anw, &outputTarget);
+ if (ret != AMEDIA_OK) {
+ ALOGE("ACameraOutputTarget_create failed, ret=%d", ret);
+ return ret;
+ }
+ ret = ACaptureRequest_addTarget(mStillRequest, outputTarget);
+ if (ret != AMEDIA_OK) {
+ ALOGE("ACaptureRequest_addTarget failed, ret=%d", ret);
+ return ret;
+ }
+ mReqExtraOutputs.push_back(outputTarget);
+ }
+
mIsCameraReady = true;
return 0;
}
- bool isCapabilitySupported(acamera_metadata_enum_android_request_available_capabilities_t cap) {
- ACameraMetadata_const_entry entry;
- ACameraMetadata_getConstEntry(
- mCameraMetadata, ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, &entry);
- for (uint32_t i = 0; i < entry.count; i++) {
- if (entry.data.u8[i] == cap) {
- return true;
- }
- }
- return false;
- }
bool isCameraReady() { return mIsCameraReady; }
@@ -169,6 +169,10 @@
ACameraOutputTarget_free(mReqImgReaderOutput);
mReqImgReaderOutput = nullptr;
}
+ for (auto& outputTarget : mReqExtraOutputs) {
+ ACameraOutputTarget_free(outputTarget);
+ }
+ mReqExtraOutputs.clear();
if (mStillRequest) {
ACaptureRequest_free(mStillRequest);
mStillRequest = nullptr;
@@ -182,6 +186,10 @@
ACaptureSessionOutput_free(mImgReaderOutput);
mImgReaderOutput = nullptr;
}
+ for (auto& extraOutput : mExtraOutputs) {
+ ACaptureSessionOutput_free(extraOutput);
+ }
+ mExtraOutputs.clear();
if (mOutputs) {
ACaptureSessionOutputContainer_free(mOutputs);
mOutputs = nullptr;
@@ -191,19 +199,6 @@
ACameraDevice_close(mDevice);
mDevice = nullptr;
}
- if (mCameraMetadata) {
- ACameraMetadata_free(mCameraMetadata);
- mCameraMetadata = nullptr;
- }
- // Destroy camera manager
- if (mCameraIdList) {
- ACameraManager_deleteCameraIdList(mCameraIdList);
- mCameraIdList = nullptr;
- }
- if (mCameraManager) {
- ACameraManager_delete(mCameraManager);
- mCameraManager = nullptr;
- }
mIsCameraReady = false;
}
@@ -213,6 +208,12 @@
&seqId);
}
+ int takeLogicalCameraPicture() {
+ int seqId;
+ return ACameraCaptureSession_logicalCamera_capture(mSession, &mLogicalCaptureCallbacks,
+ 1, &mStillRequest, &seqId);
+ }
+
bool checkCallbacks(int pictureCount) {
std::lock_guard<std::mutex> lock(mMutex);
if (mCompletedCaptureCallbackCount != pictureCount) {
@@ -241,22 +242,22 @@
native_handle_t* mImgReaderAnw = nullptr; // not owned by us.
- // Camera manager
- ACameraManager* mCameraManager = nullptr;
- ACameraIdList* mCameraIdList = nullptr;
// Camera device
- ACameraMetadata* mCameraMetadata = nullptr;
ACameraDevice* mDevice = nullptr;
// Capture session
ACaptureSessionOutputContainer* mOutputs = nullptr;
ACaptureSessionOutput* mImgReaderOutput = nullptr;
+ std::vector<ACaptureSessionOutput*> mExtraOutputs;
+
ACameraCaptureSession* mSession = nullptr;
// Capture request
ACaptureRequest* mStillRequest = nullptr;
ACameraOutputTarget* mReqImgReaderOutput = nullptr;
+ std::vector<ACameraOutputTarget*> mReqExtraOutputs;
bool mIsCameraReady = false;
const char* mCameraId;
+ ACameraManager* mCameraManager;
int mCompletedCaptureCallbackCount = 0;
std::mutex mMutex;
ACameraCaptureSession_captureCallbacks mCaptureCallbacks = {
@@ -264,7 +265,6 @@
this, // context
nullptr, // onCaptureStarted
nullptr, // onCaptureProgressed
- // onCaptureCompleted, called serially, so no lock needed.
[](void* ctx , ACameraCaptureSession *, ACaptureRequest *,
const ACameraMetadata *) {
CameraHelper *ch = static_cast<CameraHelper *>(ctx);
@@ -275,8 +275,44 @@
nullptr, // onCaptureSequenceCompleted
nullptr, // onCaptureSequenceAborted
nullptr, // onCaptureBufferLost
- };
+ };
+ std::vector<std::string> mPhysicalCameraIds;
+ ACameraCaptureSession_logicalCamera_captureCallbacks mLogicalCaptureCallbacks = {
+ // TODO: Add tests for other callbacks
+ this, // context
+ nullptr, // onCaptureStarted
+ nullptr, // onCaptureProgressed
+ [](void* ctx , ACameraCaptureSession *, ACaptureRequest *,
+ const ACameraMetadata *, size_t physicalResultCount,
+ const char** physicalCameraIds, const ACameraMetadata** physicalResults) {
+ CameraHelper *ch = static_cast<CameraHelper *>(ctx);
+ std::lock_guard<std::mutex> lock(ch->mMutex);
+ ASSERT_EQ(physicalResultCount, ch->mPhysicalCameraIds.size());
+ for (size_t i = 0; i < physicalResultCount; i++) {
+ ASSERT_TRUE(physicalCameraIds[i] != nullptr);
+ ASSERT_TRUE(physicalResults[i] != nullptr);
+ ASSERT_NE(std::find(ch->mPhysicalCameraIds.begin(),
+ ch->mPhysicalCameraIds.end(), physicalCameraIds[i]),
+ ch->mPhysicalCameraIds.end());
+
+ // Verify frameNumber and sensorTimestamp exist in physical
+ // result metadata
+ ACameraMetadata_const_entry entry;
+ ACameraMetadata_getConstEntry(
+ physicalResults[i], ACAMERA_SYNC_FRAME_NUMBER, &entry);
+ ASSERT_EQ(entry.count, 1);
+ ACameraMetadata_getConstEntry(
+ physicalResults[i], ACAMERA_SENSOR_TIMESTAMP, &entry);
+ ASSERT_EQ(entry.count, 1);
+ }
+ ch->mCompletedCaptureCallbackCount++;
+ },
+ nullptr, // onCaptureFailed
+ nullptr, // onCaptureSequenceCompleted
+ nullptr, // onCaptureSequenceAborted
+ nullptr, // onCaptureBufferLost
+ };
};
class ImageReaderTestCase {
@@ -476,84 +512,276 @@
AImageReader_BufferRemovedListener mReaderDetachedCb{this, onBufferRemoved};
};
-bool takePictures(uint64_t readerUsage, int readerMaxImages, bool readerAsync, int pictureCount) {
- int ret = 0;
- ImageReaderTestCase testCase(
- kTestImageWidth, kTestImageHeight, kTestImageFormat, readerUsage, readerMaxImages,
- readerAsync);
- ret = testCase.initImageReader();
- if (ret < 0) {
- ALOGE("Unable to initialize ImageReader");
+
+class AImageReaderVendorTest : public ::testing::Test {
+ public:
+ void SetUp() override {
+ mCameraManager = ACameraManager_create();
+ if (mCameraManager == nullptr) {
+ ALOGE("Failed to create ACameraManager.");
+ return;
+ }
+
+ camera_status_t ret = ACameraManager_getCameraIdList(mCameraManager, &mCameraIdList);
+ if (ret != ACAMERA_OK) {
+ ALOGE("Failed to get cameraIdList: ret=%d", ret);
+ return;
+ }
+ if (mCameraIdList->numCameras < 1) {
+ ALOGW("Device has no camera on board.");
+ return;
+ }
+ }
+ void TearDown() override {
+ // Destroy camera manager
+ if (mCameraIdList) {
+ ACameraManager_deleteCameraIdList(mCameraIdList);
+ mCameraIdList = nullptr;
+ }
+ if (mCameraManager) {
+ ACameraManager_delete(mCameraManager);
+ mCameraManager = nullptr;
+ }
+ }
+
+ bool takePictures(const char* id, uint64_t readerUsage, int readerMaxImages,
+ bool readerAsync, int pictureCount) {
+ int ret = 0;
+
+ ImageReaderTestCase testCase(
+ kTestImageWidth, kTestImageHeight, kTestImageFormat, readerUsage, readerMaxImages,
+ readerAsync);
+ ret = testCase.initImageReader();
+ if (ret < 0) {
+ ALOGE("Unable to initialize ImageReader");
+ return false;
+ }
+
+ CameraHelper cameraHelper(id, mCameraManager);
+ ret = cameraHelper.initCamera(testCase.getNativeWindow(), {});
+ if (ret < 0) {
+ ALOGE("Unable to initialize camera helper");
+ return false;
+ }
+
+ if (!cameraHelper.isCameraReady()) {
+ ALOGW("Camera is not ready after successful initialization. It's either due to camera "
+ "on board lacks BACKWARDS_COMPATIBLE capability or the device does not have "
+ "camera on board.");
+ return true;
+ }
+
+ for (int i = 0; i < pictureCount; i++) {
+ ret = cameraHelper.takePicture();
+ if (ret < 0) {
+ ALOGE("Unable to take picture");
+ return false;
+ }
+ }
+
+ // Sleep until all capture finished
+ for (int i = 0; i < kCaptureWaitRetry * pictureCount; i++) {
+ usleep(kCaptureWaitUs);
+ if (testCase.getAcquiredImageCount() == pictureCount) {
+ ALOGI("Session take ~%d ms to capture %d images", i * kCaptureWaitUs / 1000,
+ pictureCount);
+ break;
+ }
+ }
+ return testCase.getAcquiredImageCount() == pictureCount &&
+ cameraHelper.checkCallbacks(pictureCount);
+ }
+
+ bool testTakePicturesNative(const char* id) {
+ for (auto& readerUsage :
+ {AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}) {
+ for (auto& readerMaxImages : {1, 4, 8}) {
+ for (auto& readerAsync : {true, false}) {
+ for (auto& pictureCount : {1, 4, 8}) {
+ if (!takePictures(id, readerUsage, readerMaxImages,
+ readerAsync, pictureCount)) {
+ ALOGE("Test takePictures failed for test case usage=%" PRIu64
+ ", maxImages=%d, async=%d, pictureCount=%d",
+ readerUsage, readerMaxImages, readerAsync, pictureCount);
+ return false;
+ }
+ }
+ }
+ }
+ }
+ return true;
+ }
+
+ // Camera manager
+ ACameraManager* mCameraManager = nullptr;
+ ACameraIdList* mCameraIdList = nullptr;
+
+ bool isCapabilitySupported(ACameraMetadata* staticInfo,
+ acamera_metadata_enum_android_request_available_capabilities_t cap) {
+ ACameraMetadata_const_entry entry;
+ ACameraMetadata_getConstEntry(
+ staticInfo, ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, &entry);
+ for (uint32_t i = 0; i < entry.count; i++) {
+ if (entry.data.u8[i] == cap) {
+ return true;
+ }
+ }
return false;
}
- CameraHelper cameraHelper(testCase.getNativeWindow());
- ret = cameraHelper.initCamera();
- if (ret < 0) {
- ALOGE("Unable to initialize camera helper");
+ bool isSizeSupportedForFormat(ACameraMetadata* staticInfo,
+ int32_t format, int32_t width, int32_t height) {
+ ACameraMetadata_const_entry entry;
+ ACameraMetadata_getConstEntry(staticInfo,
+ ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry);
+ for (uint32_t i = 0; i < entry.count; i += 4) {
+ if (entry.data.i32[i] == format &&
+ entry.data.i32[i+3] == ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+ entry.data.i32[i+1] == width &&
+ entry.data.i32[i+2] == height) {
+ return true;
+ }
+ }
return false;
}
+ void findCandidateLogicalCamera(const char **cameraId,
+ ACameraMetadata** staticMetadata,
+ std::vector<const char*>* candidatePhysicalIds) {
+ // Find first available logical camera
+ for (int i = 0; i < mCameraIdList->numCameras; i++) {
+ camera_status_t ret;
+ ret = ACameraManager_getCameraCharacteristics(
+ mCameraManager, mCameraIdList->cameraIds[i], staticMetadata);
+ ASSERT_EQ(ret, ACAMERA_OK);
+ ASSERT_NE(*staticMetadata, nullptr);
+
+ if (!isCapabilitySupported(*staticMetadata,
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)) {
+ ACameraMetadata_free(*staticMetadata);
+ *staticMetadata = nullptr;
+ continue;
+ }
+
+ // Check returned physical camera Ids are valid
+ size_t physicalCameraIdCnt = 0;
+ const char*const* physicalCameraIds = nullptr;
+ bool isLogicalCamera = ACameraMetadata_isLogicalMultiCamera(*staticMetadata,
+ &physicalCameraIdCnt, &physicalCameraIds);
+ ASSERT_TRUE(isLogicalCamera);
+ ASSERT_GE(physicalCameraIdCnt, 2);
+ ACameraMetadata* physicalCameraMetadata = nullptr;
+ candidatePhysicalIds->clear();
+ for (size_t j = 0; j < physicalCameraIdCnt && candidatePhysicalIds->size() < 2; j++) {
+ ASSERT_GT(strlen(physicalCameraIds[j]), 0);
+ ret = ACameraManager_getCameraCharacteristics(
+ mCameraManager, physicalCameraIds[j], &physicalCameraMetadata);
+ ASSERT_EQ(ret, ACAMERA_OK);
+ ASSERT_NE(physicalCameraMetadata, nullptr);
+
+ if (isSizeSupportedForFormat(physicalCameraMetadata, kTestImageFormat,
+ kTestImageWidth, kTestImageHeight)) {
+ candidatePhysicalIds->push_back(physicalCameraIds[j]);
+ }
+ ACameraMetadata_free(physicalCameraMetadata);
+ }
+ if (candidatePhysicalIds->size() == 2) {
+ *cameraId = mCameraIdList->cameraIds[i];
+ return;
+ } else {
+ ACameraMetadata_free(*staticMetadata);
+ *staticMetadata = nullptr;
+ }
+ }
+ *cameraId = nullptr;
+ return;
+ }
+};
+
+TEST_F(AImageReaderVendorTest, CreateWindowNativeHandle) {
+ // We always use the first camera.
+ const char* cameraId = mCameraIdList->cameraIds[0];
+ ASSERT_TRUE(cameraId != nullptr);
+
+ ACameraMetadata* staticMetadata = nullptr;
+ camera_status_t ret = ACameraManager_getCameraCharacteristics(
+ mCameraManager, cameraId, &staticMetadata);
+ ASSERT_EQ(ret, ACAMERA_OK);
+ ASSERT_NE(staticMetadata, nullptr);
+
+ bool isBC = isCapabilitySupported(staticMetadata,
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+
+ ACameraMetadata_free(staticMetadata);
+
+ if (!isBC) {
+ ALOGW("Camera does not support BACKWARD_COMPATIBLE.");
+ return;
+ }
+
+ EXPECT_TRUE(testTakePicturesNative(cameraId));
+}
+
+TEST_F(AImageReaderVendorTest, LogicalCameraPhysicalStream) {
+ const char* cameraId = nullptr;
+ ACameraMetadata* staticMetadata = nullptr;
+ std::vector<const char*> physicalCameraIds;
+
+ findCandidateLogicalCamera(&cameraId, &staticMetadata, &physicalCameraIds);
+ if (cameraId == nullptr) {
+ // Couldn't find logical camera to test
+ return;
+ }
+
+ // Test streaming the logical multi-camera
+ uint64_t readerUsage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN;
+ int32_t readerMaxImages = 8;
+ bool readerAsync = false;
+ const int pictureCount = 6;
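+ // Three image readers: testCases[0] consumes the logical stream, while
+ // testCases[1] and testCases[2] consume the two physical camera streams.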
+ std::vector<ImageReaderTestCase*> testCases;
+ for (size_t i = 0; i < 3; i++) {
+ ImageReaderTestCase* testCase = new ImageReaderTestCase(
+ kTestImageWidth, kTestImageHeight, kTestImageFormat, readerUsage, readerMaxImages,
+ readerAsync);
+ ASSERT_EQ(testCase->initImageReader(), 0);
+ testCases.push_back(testCase);
+ }
+
+ CameraHelper cameraHelper(cameraId, mCameraManager);
+ std::vector<CameraHelper::PhysicalImgReaderInfo> physicalImgReaderInfo;
+ physicalImgReaderInfo.push_back({physicalCameraIds[0], testCases[1]->getNativeWindow()});
+ physicalImgReaderInfo.push_back({physicalCameraIds[1], testCases[2]->getNativeWindow()});
+
+ int ret = cameraHelper.initCamera(testCases[0]->getNativeWindow(), physicalImgReaderInfo);
+ ASSERT_EQ(ret, 0);
+
if (!cameraHelper.isCameraReady()) {
ALOGW("Camera is not ready after successful initialization. It's either due to camera on "
"board lacks BACKWARDS_COMPATIBLE capability or the device does not have camera on "
"board.");
- return true;
+ return;
}
for (int i = 0; i < pictureCount; i++) {
- ret = cameraHelper.takePicture();
- if (ret < 0) {
- ALOGE("Unable to take picture");
- return false;
- }
+ ret = cameraHelper.takeLogicalCameraPicture();
+ ASSERT_EQ(ret, 0);
}
// Sleep until all capture finished
for (int i = 0; i < kCaptureWaitRetry * pictureCount; i++) {
usleep(kCaptureWaitUs);
- if (testCase.getAcquiredImageCount() == pictureCount) {
+ if (testCases[0]->getAcquiredImageCount() == pictureCount) {
ALOGI("Session take ~%d ms to capture %d images", i * kCaptureWaitUs / 1000,
pictureCount);
break;
}
}
- return testCase.getAcquiredImageCount() == pictureCount &&
- cameraHelper.checkCallbacks(pictureCount);
-}
+ ASSERT_EQ(testCases[0]->getAcquiredImageCount(), pictureCount);
+ ASSERT_EQ(testCases[1]->getAcquiredImageCount(), pictureCount);
+ ASSERT_EQ(testCases[2]->getAcquiredImageCount(), pictureCount);
+ ASSERT_TRUE(cameraHelper.checkCallbacks(pictureCount));
-class AImageReaderWindowHandleTest : public ::testing::Test {
- public:
- void SetUp() override {
- }
- void TearDown() override {
-
- }
-
-};
-
-bool testTakePicturesNative() {
- for (auto& readerUsage :
- {AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}) {
- for (auto& readerMaxImages : {1, 4, 8}) {
- for (auto& readerAsync : {true, false}) {
- for (auto& pictureCount : {1, 4, 8}) {
- if (!takePictures(readerUsage, readerMaxImages, readerAsync, pictureCount)) {
- ALOGE("Test takePictures failed for test case usage=%" PRIu64 ", maxImages=%d, "
- "async=%d, pictureCount=%d",
- readerUsage, readerMaxImages, readerAsync, pictureCount);
- return false;
- }
- }
- }
- }
- }
- return true;
-}
-
-
-TEST_F(AImageReaderWindowHandleTest, CreateWindowNativeHandle) {
- EXPECT_TRUE(testTakePicturesNative());
+ ACameraMetadata_free(staticMetadata);
}
} // namespace
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index fa8a7a3..8534b28 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -480,7 +480,8 @@
sp<Surface> surface(new Surface(gbProducer, /*controlledByApp*/false));
- OutputConfiguration output(gbProducer, /*rotation*/0);
+ String16 noPhysicalId;
+ OutputConfiguration output(gbProducer, /*rotation*/0, noPhysicalId);
// Can we configure?
res = device->beginConfigure();
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 86e9040..34a9a40 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -224,11 +224,15 @@
player->setSource(rawSource);
rawSource.clear();
- player->start(true /* sourceAlreadyStarted */);
+ err = player->start(true /* sourceAlreadyStarted */);
- status_t finalStatus;
- while (!player->reachedEOS(&finalStatus)) {
- usleep(100000ll);
+ if (err == OK) {
+ status_t finalStatus;
+ while (!player->reachedEOS(&finalStatus)) {
+ usleep(100000ll);
+ }
+ } else {
+ fprintf(stderr, "unable to start playback err=%d (0x%08x)\n", err, err);
}
delete player;
@@ -651,7 +655,8 @@
MEDIA_MIMETYPE_AUDIO_G711_ALAW, MEDIA_MIMETYPE_AUDIO_VORBIS,
MEDIA_MIMETYPE_VIDEO_VP8, MEDIA_MIMETYPE_VIDEO_VP9,
MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, MEDIA_MIMETYPE_VIDEO_HEVC,
- MEDIA_MIMETYPE_AUDIO_EAC3, MEDIA_MIMETYPE_AUDIO_AC4
+ MEDIA_MIMETYPE_AUDIO_EAC3, MEDIA_MIMETYPE_AUDIO_AC4,
+ MEDIA_MIMETYPE_VIDEO_AV1
};
const char *codecType = queryDecoders? "decoder" : "encoder";
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
index 28a78aa..bb9d7ec 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
@@ -29,8 +29,7 @@
srcs: ["src/FwdLockEngine.cpp"],
shared_libs: [
- "libicui18n",
- "libicuuc",
+ "libandroidicu",
"libutils",
"liblog",
"libdl",
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 41d1833..01efb22 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -27,7 +27,6 @@
"libmediadrmmetrics_lite",
"libmediametrics",
"libmediautils",
- "libprotobuf-cpp-lite",
"libstagefright_foundation",
"libutils",
"android.hardware.drm@1.0",
@@ -60,13 +59,11 @@
shared_libs: [
"android.hardware.drm@1.0",
"android.hardware.drm@1.1",
- "libbase",
"libbinder",
"libhidlbase",
"liblog",
"libmediametrics",
"libprotobuf-cpp-lite",
- "libstagefright_foundation",
"libutils",
],
cflags: [
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index 14ff493..66c509f 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -553,12 +553,14 @@
const String8& appPackageName) {
Mutex::Autolock autoLock(mLock);
- for (size_t i = 0; i < mFactories.size(); i++) {
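+ // Iterate from the last factory so that, when several factories support the
+ // same scheme, the later-registered plugin is preferred; stop at the first
+ // factory that yields a plugin.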
+ for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
- mPlugin = makeDrmPlugin(mFactories[i], uuid, appPackageName);
- if (mPlugin != NULL) {
+ auto plugin = makeDrmPlugin(mFactories[i], uuid, appPackageName);
+ if (plugin != NULL) {
+ mPlugin = plugin;
mPluginV1_1 = drm::V1_1::IDrmPlugin::castFrom(mPlugin);
mPluginV1_2 = drm::V1_2::IDrmPlugin::castFrom(mPlugin);
+ break;
}
}
}
@@ -567,6 +569,9 @@
mInitCheck = ERROR_UNSUPPORTED;
} else {
if (!mPlugin->setListener(this).isOk()) {
+ mPlugin = NULL;
+ mPluginV1_1 = NULL;
+ mPluginV1_2 = NULL;
mInitCheck = DEAD_OBJECT;
} else {
mInitCheck = OK;
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index aeefbdb..87730ae 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -468,7 +468,8 @@
if (outargs.numOutBytes > 0) {
mInputSize = 0;
- int consumed = ((capacity / sizeof(int16_t)) - inargs.numInSamples);
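+ // (capacity / sizeof(int16_t)) - inargs.numInSamples only counts samples consumed
+ // in earlier iterations of this loop; outargs.numInSamples adds the samples the
+ // encoder consumed in this aacEncEncode() call, which have not yet been
+ // subtracted from inargs.numInSamples at this point.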
+ int consumed = (capacity / sizeof(int16_t)) - inargs.numInSamples
+ + outargs.numInSamples;
mInputTimeUs = work->input.ordinal.timestamp
+ (consumed * 1000000ll / channelCount / sampleRate);
buffer = createLinearBuffer(block, 0, outargs.numOutBytes);
diff --git a/media/codec2/components/aom/Android.bp b/media/codec2/components/aom/Android.bp
new file mode 100644
index 0000000..0fabf5c
--- /dev/null
+++ b/media/codec2/components/aom/Android.bp
@@ -0,0 +1,14 @@
+cc_library_shared {
+ name: "libcodec2_soft_av1dec",
+ defaults: [
+ "libcodec2_soft-defaults",
+ "libcodec2_soft_sanitize_all-defaults",
+ ],
+
+ srcs: ["C2SoftAomDec.cpp"],
+ static_libs: ["libaom"],
+
+ include_dirs: [
+ "external/libaom/",
+ ],
+}
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
new file mode 100644
index 0000000..6be1807
--- /dev/null
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -0,0 +1,750 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAomDec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftAomDec.h"
+
+namespace android {
+
+constexpr char COMPONENT_NAME[] = "c2.android.av1.decoder";
+
+class C2SoftAomDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+ public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+ : SimpleInterface<void>::BaseParams(
+ helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+ C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
+ noPrivateBuffers(); // TODO: account for our buffers here
+ noInputReferences();
+ noOutputReferences();
+ noInputLatency();
+ noTimeStretch();
+
+ addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+ .withConstValue(new C2ComponentAttributesSetting(
+ C2Component::ATTRIB_IS_TEMPORAL))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 2048, 2),
+ C2F(mSize, height).inRange(2, 2048, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ C2Config::PROFILE_AV1_0,
+ C2Config::PROFILE_AV1_1}),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_AV1_2,
+ C2Config::LEVEL_AV1_2_1,
+ C2Config::LEVEL_AV1_2_2,
+ C2Config::LEVEL_AV1_3,
+ C2Config::LEVEL_AV1_3_1,
+ C2Config::LEVEL_AV1_3_2,
+ })
+ })
+ .withSetter(ProfileLevelSetter, mSize)
+ .build());
+
+ addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+ .withDefault(new C2StreamMaxPictureSizeTuning::output(
+ 0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 2048, 2),
+ C2F(mSize, height).inRange(2, 2048, 2),
+ })
+ .withSetter(MaxPictureSizeSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withDefault(
+ new C2StreamMaxBufferSizeInfo::input(0u, 320 * 240 * 3 / 4))
+ .withFields({
+ C2F(mMaxInputSize, value).any(),
+ })
+ .calculatedAs(MaxInputSizeSetter, mMaxSize)
+ .build());
+
+ C2ChromaOffsetStruct locations[1] = {
+ C2ChromaOffsetStruct::ITU_YUV_420_0()};
+ std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */,
+ C2Color::YUV_420);
+ memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+ defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+ {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */,
+ C2Color::YUV_420);
+ helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+ addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+ .withConstValue(defaultColorInfo)
+ .build());
+
+ addParameter(
+ DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsTuning::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mDefaultColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mDefaultColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mDefaultColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mDefaultColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(DefaultColorAspectsSetter)
+ .build());
+
+ // TODO: support more formats?
+ addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+ .withConstValue(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .build());
+ }
+
+ static C2R SizeSetter(bool mayBlock,
+ const C2P<C2StreamPictureSizeInfo::output>& oldMe,
+ C2P<C2VideoSizeStreamInfo::output>& me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R MaxPictureSizeSetter(
+ bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
+ const C2P<C2StreamPictureSizeInfo::output>& size) {
+ (void)mayBlock;
+ // TODO: get max width/height from the size's field helpers vs.
+ // hardcoding
+ me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+ me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+ return C2R::Ok();
+ }
+
+ static C2R MaxInputSizeSetter(
+ bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
+ const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
+ (void)mayBlock;
+ // assume compression ratio of 2
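+ // e.g. the default 320x240 gives ((320 + 63) / 64) * ((240 + 63) / 64) * 3072
+ // = 5 * 4 * 3072 = 61440 bytes, i.e. half of a 4:2:0 frame rounded up to
+ // 64x64 superblocks.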
+ me.set().value = (((maxSize.v.width + 63) / 64) *
+ ((maxSize.v.height + 63) / 64) * 3072);
+ return C2R::Ok();
+ }
+ static C2R DefaultColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ (void)size;
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
+ std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
+ return mDefaultColorAspects;
+ }
+
+ private:
+ std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+ std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+ std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+ std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+ std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+ std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+};
+
+C2SoftAomDec::C2SoftAomDec(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl)
+ : SimpleC2Component(
+ std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mCodecCtx(nullptr) {
+
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+ CREATE_DUMP_FILE(mOutFile);
+
+ gettimeofday(&mTimeStart, nullptr);
+ gettimeofday(&mTimeEnd, nullptr);
+}
+
+C2SoftAomDec::~C2SoftAomDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftAomDec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftAomDec::onStop() {
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ return C2_OK;
+}
+
+void C2SoftAomDec::onReset() {
+ (void)onStop();
+ c2_status_t err = onFlush_sm();
+ if (err != C2_OK) {
+ ALOGW("Failed to flush decoder. Try to hard reset decoder.");
+ destroyDecoder();
+ (void)initDecoder();
+ }
+}
+
+void C2SoftAomDec::onRelease() {
+ destroyDecoder();
+}
+
+c2_status_t C2SoftAomDec::onFlush_sm() {
+ if (aom_codec_decode(mCodecCtx, nullptr, 0, nullptr)) {
+ ALOGE("Failed to flush av1 decoder.");
+ return C2_CORRUPTED;
+ }
+
+ aom_codec_iter_t iter = nullptr;
+ while (aom_codec_get_frame(mCodecCtx, &iter)) {
+ }
+
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+
+ return C2_OK;
+}
+
+static int GetCPUCoreCount() {
+ int cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %d", cpuCoreCount);
+ return cpuCoreCount;
+}
+
+status_t C2SoftAomDec::initDecoder() {
+ mSignalledError = false;
+ mSignalledOutputEos = false;
+ if (!mCodecCtx) {
+ mCodecCtx = new aom_codec_ctx_t;
+ }
+
+ if (!mCodecCtx) {
+ ALOGE("mCodecCtx is null");
+ return NO_MEMORY;
+ }
+
+ aom_codec_dec_cfg_t cfg;
+ memset(&cfg, 0, sizeof(aom_codec_dec_cfg_t));
+ cfg.threads = GetCPUCoreCount();
+ cfg.allow_lowbitdepth = 1;
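+ // allow_lowbitdepth lets libaom use its 8-bit decode path for 8-bit streams,
+ // which reduces memory use and CPU cost.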
+
+ aom_codec_flags_t flags;
+ memset(&flags, 0, sizeof(aom_codec_flags_t));
+
+ aom_codec_err_t err;
+ if ((err = aom_codec_dec_init(mCodecCtx, aom_codec_av1_dx(), &cfg, 0))) {
+ ALOGE("av1 decoder failed to initialize. (%d)", err);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftAomDec::destroyDecoder() {
+ if (mCodecCtx) {
+ aom_codec_destroy(mCodecCtx);
+ delete mCodecCtx;
+ mCodecCtx = nullptr;
+ }
+ return OK;
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+void C2SoftAomDec::finishWork(uint64_t index,
+ const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2GraphicBlock>& block) {
+ std::shared_ptr<C2Buffer> buffer =
+ createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+ auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
+ uint32_t flags = 0;
+ if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
+ (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
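+    // Fill the current work item directly when the decoded frame matches its
+    // frame index; otherwise hand the filler to finish(), which looks up the
+    // pending work entry for that index.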
+ if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+ fillWork(work);
+ } else {
+ finish(index, fillWork);
+ }
+}
+
+void C2SoftAomDec::process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ work->result = C2_OK;
+ work->workletsProcessed = 0u;
+ work->worklets.front()->output.configUpdate.clear();
+ work->worklets.front()->output.flags = work->input.flags;
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView =
+ work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+
+ bool codecConfig =
+ ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x",
+ inSize, (int)work->input.ordinal.timestamp.peeku(),
+ (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+
+ if (codecConfig) {
+ fillEmptyWork(work);
+ return;
+ }
+
+ int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
+ if (inSize) {
+ uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
+ int32_t decodeTime = 0;
+ int32_t delay = 0;
+
+ DUMP_TO_FILE(mOutFile, bitstream, inSize);
+ GETTIME(&mTimeStart, nullptr);
+ TIME_DIFF(mTimeEnd, mTimeStart, delay);
+
+ aom_codec_err_t err =
+ aom_codec_decode(mCodecCtx, bitstream, inSize, &frameIndex);
+
+ GETTIME(&mTimeEnd, nullptr);
+ TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
+ ALOGV("decodeTime=%4d delay=%4d\n", decodeTime, delay);
+
+ if (err != AOM_CODEC_OK) {
+ ALOGE("av1 decoder failed to decode frame err: %d", err);
+ work->result = C2_CORRUPTED;
+ work->workletsProcessed = 1u;
+ mSignalledError = true;
+ return;
+ }
+
+ } else {
+ if (aom_codec_decode(mCodecCtx, nullptr, 0, nullptr)) {
+ ALOGE("Failed to flush av1 decoder.");
+ work->result = C2_CORRUPTED;
+ work->workletsProcessed = 1u;
+ mSignalledError = true;
+ return;
+ }
+ }
+
+ (void)outputBuffer(pool, work);
+
+ if (eos) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+ mSignalledOutputEos = true;
+ } else if (!inSize) {
+ fillEmptyWork(work);
+ }
+}
+
+static void copyOutputBufferToYV12Frame(uint8_t *dst,
+ const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ uint32_t width, uint32_t height) {
+ size_t dstYStride = align(width, 16);
+ size_t dstUVStride = align(dstYStride / 2, 16);
+ uint8_t* dstStart = dst;
+
+ for (size_t i = 0; i < height; ++i) {
+ memcpy(dst, srcY, width);
+ srcY += srcYStride;
+ dst += dstYStride;
+ }
+
+ dst = dstStart + dstYStride * height;
+ for (size_t i = 0; i < height / 2; ++i) {
+ memcpy(dst, srcV, width / 2);
+ srcV += srcVStride;
+ dst += dstUVStride;
+ }
+
+ dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
+ for (size_t i = 0; i < height / 2; ++i) {
+ memcpy(dst, srcU, width / 2);
+ srcU += srcUStride;
+ dst += dstUVStride;
+ }
+}
+
+static void convertYUV420Planar16ToY410(uint32_t *dst,
+ const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstStride, size_t width, size_t height) {
+
+ // Converting two lines at a time, slightly faster
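+    // Each output pixel is packed Y410-style: alpha (fixed at 3) in bits 31:30,
+    // V in bits 29:20, Y in bits 19:10, and U in bits 9:0.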
+ for (size_t y = 0; y < height; y += 2) {
+ uint32_t *dstTop = (uint32_t *) dst;
+ uint32_t *dstBot = (uint32_t *) (dst + dstStride);
+ uint16_t *ySrcTop = (uint16_t*) srcY;
+ uint16_t *ySrcBot = (uint16_t*) (srcY + srcYStride);
+ uint16_t *uSrc = (uint16_t*) srcU;
+ uint16_t *vSrc = (uint16_t*) srcV;
+
+ uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
+ size_t x = 0;
+ for (; x < width - 3; x += 4) {
+
+ u01 = *((uint32_t*)uSrc); uSrc += 2;
+ v01 = *((uint32_t*)vSrc); vSrc += 2;
+
+ y01 = *((uint32_t*)ySrcTop); ySrcTop += 2;
+ y23 = *((uint32_t*)ySrcTop); ySrcTop += 2;
+ y45 = *((uint32_t*)ySrcBot); ySrcBot += 2;
+ y67 = *((uint32_t*)ySrcBot); ySrcBot += 2;
+
+ uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+ uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
+
+ *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
+ *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
+ *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
+ *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
+
+ *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
+ *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
+ *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
+ *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
+ }
+
+        // At most two more pixels remain to be processed. The odd-width case
+        // never occurs because the buffer width is always aligned to an even value.
+ if (x < width) {
+ u01 = *uSrc;
+ v01 = *vSrc;
+ y01 = *((uint32_t*)ySrcTop);
+ y45 = *((uint32_t*)ySrcBot);
+ uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+ *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
+ *dstTop++ = ((y01 >> 16) << 10) | uv0;
+ *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
+ *dstBot++ = ((y45 >> 16) << 10) | uv0;
+ }
+
+ srcY += srcYStride * 2;
+ srcU += srcUStride;
+ srcV += srcVStride;
+ dst += dstStride * 2;
+ }
+
+ return;
+}
+
+static void convertYUV420Planar16ToYUV420Planar(uint8_t *dst,
+ const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstStride, size_t width, size_t height) {
+
+ uint8_t *dstY = (uint8_t *)dst;
+ size_t dstYSize = dstStride * height;
+ size_t dstUVStride = align(dstStride / 2, 16);
+ size_t dstUVSize = dstUVStride * height / 2;
+ uint8_t *dstV = dstY + dstYSize;
+ uint8_t *dstU = dstV + dstUVSize;
+
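+    // Drop the two least-significant bits of each 10-bit sample to produce 8-bit
+    // output; planes are written Y first, then V, then U (YV12 order).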
+ for (size_t y = 0; y < height; ++y) {
+ for (size_t x = 0; x < width; ++x) {
+ dstY[x] = (uint8_t)(srcY[x] >> 2);
+ }
+
+ srcY += srcYStride;
+ dstY += dstStride;
+ }
+
+ for (size_t y = 0; y < (height + 1) / 2; ++y) {
+ for (size_t x = 0; x < (width + 1) / 2; ++x) {
+ dstU[x] = (uint8_t)(srcU[x] >> 2);
+ dstV[x] = (uint8_t)(srcV[x] >> 2);
+ }
+
+ srcU += srcUStride;
+ srcV += srcVStride;
+ dstU += dstUVStride;
+ dstV += dstUVStride;
+ }
+ return;
+}
+bool C2SoftAomDec::outputBuffer(
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work)
+{
+ if (!(work && pool)) return false;
+
+ aom_codec_iter_t iter = nullptr;
+ aom_image_t* img = aom_codec_get_frame(mCodecCtx, &iter);
+
+ if (!img) return false;
+
+ if (img->d_w != mWidth || img->d_h != mHeight) {
+ mWidth = img->d_w;
+ mHeight = img->d_h;
+
+ C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+ if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(size));
+ } else {
+ ALOGE("Config update size failed");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ work->workletsProcessed = 1u;
+ return false;
+ }
+ }
+
+ CHECK(img->fmt == AOM_IMG_FMT_I420 || img->fmt == AOM_IMG_FMT_I42016);
+
+ std::shared_ptr<C2GraphicBlock> block;
+ uint32_t format = HAL_PIXEL_FORMAT_YV12;
+ if (img->fmt == AOM_IMG_FMT_I42016) {
+ IntfImpl::Lock lock = mIntf->lock();
+        std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
+                mIntf->getDefaultColorAspects_l();
+
+ if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+ defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+ defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+ format = HAL_PIXEL_FORMAT_RGBA_1010102;
+ }
+ }
+ C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+ c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight,
+ format, usage, &block);
+
+ if (err != C2_OK) {
+ ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+ work->result = err;
+ return false;
+ }
+
+ C2GraphicView wView = block->map().get();
+
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ work->result = C2_CORRUPTED;
+ return false;
+ }
+
+ ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d",
+ block->width(), block->height(), mWidth, mHeight,
+ (int)*(int64_t*)img->user_priv);
+
+ uint8_t* dst = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+ size_t srcYStride = img->stride[AOM_PLANE_Y];
+ size_t srcUStride = img->stride[AOM_PLANE_U];
+ size_t srcVStride = img->stride[AOM_PLANE_V];
+
+ if (img->fmt == AOM_IMG_FMT_I42016) {
+ const uint16_t *srcY = (const uint16_t *)img->planes[AOM_PLANE_Y];
+ const uint16_t *srcU = (const uint16_t *)img->planes[AOM_PLANE_U];
+ const uint16_t *srcV = (const uint16_t *)img->planes[AOM_PLANE_V];
+
+ if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+ convertYUV420Planar16ToY410((uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2,
+ align(mWidth, 16),
+ mWidth, mHeight);
+ } else {
+ convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2,
+ align(mWidth, 16),
+ mWidth, mHeight);
+ }
+ } else {
+ const uint8_t *srcY = (const uint8_t *)img->planes[AOM_PLANE_Y];
+ const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
+ const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
+ copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV,
+ srcYStride, srcUStride, srcVStride, mWidth, mHeight);
+ }
+ finishWork(*(int64_t*)img->user_priv, work, std::move(block));
+ block = nullptr;
+ return true;
+}
+
+c2_status_t C2SoftAomDec::drainInternal(
+ uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work) {
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ if (aom_codec_decode(mCodecCtx, nullptr, 0, nullptr)) {
+ ALOGE("Failed to flush av1 decoder.");
+ return C2_CORRUPTED;
+ }
+
+    while (outputBuffer(pool, work)) {
+ }
+
+ if (drainMode == DRAIN_COMPONENT_WITH_EOS && work &&
+ work->workletsProcessed == 0u) {
+ fillEmptyWork(work);
+ }
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftAomDec::drain(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool) {
+ return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftAomFactory : public C2ComponentFactory {
+ public:
+ C2SoftAomFactory()
+ : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id, std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftAomDec(COMPONENT_NAME, id,
+ std::make_shared<C2SoftAomDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftAomDec::IntfImpl>(
+ COMPONENT_NAME, id,
+ std::make_shared<C2SoftAomDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAomFactory() override = default;
+
+ private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAomFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/aom/C2SoftAomDec.h b/media/codec2/components/aom/C2SoftAomDec.h
new file mode 100644
index 0000000..4c82647
--- /dev/null
+++ b/media/codec2/components/aom/C2SoftAomDec.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AV1_DEC_H_
+#define ANDROID_C2_SOFT_AV1_DEC_H_
+
+#include <SimpleC2Component.h>
+#include "aom/aom_decoder.h"
+#include "aom/aomdx.h"
+
+#define GETTIME(a, b) gettimeofday(a, b);
+#define TIME_DIFF(start, end, diff) \
+ diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+ ((end).tv_usec - (start).tv_usec);
+
+namespace android {
+
+struct C2SoftAomDec : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftAomDec(const char* name, c2_node_id_t id,
+ const std::shared_ptr<IntfImpl>& intfImpl);
+ virtual ~C2SoftAomDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2BlockPool>& pool) override;
+ c2_status_t drain(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool) override;
+
+ private:
+ std::shared_ptr<IntfImpl> mIntf;
+ aom_codec_ctx_t* mCodecCtx;
+
+ uint32_t mWidth;
+ uint32_t mHeight;
+ bool mSignalledOutputEos;
+ bool mSignalledError;
+
+ #ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+ char mOutFile[200];
+ #endif /* FILE_DUMP_ENABLE */
+
+ struct timeval mTimeStart; // Time at the start of decode()
+ struct timeval mTimeEnd; // Time at the end of decode()
+
+ status_t initDecoder();
+ status_t destroyDecoder();
+ void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+ const std::shared_ptr<C2GraphicBlock>& block);
+ bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work);
+
+ c2_status_t drainInternal(uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool>& pool,
+ const std::unique_ptr<C2Work>& work);
+
+ C2_DO_NOT_COPY(C2SoftAomDec);
+};
+
+#ifdef FILE_DUMP_ENABLE
+
+#define INPUT_DUMP_PATH "/data/local/tmp/temp/av1"
+#define INPUT_DUMP_EXT "webm"
+#define OUTPUT_DUMP_PATH "/data/local/tmp/temp/av1"
+#define OUTPUT_DUMP_EXT "av1"
+#define GENERATE_FILE_NAMES() \
+ { \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ ALOGD("GENERATE_FILE_NAMES"); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, mTimeStart.tv_sec, \
+ mTimeStart.tv_usec, INPUT_DUMP_EXT); \
+ strcpy(mOutFile, ""); \
+ sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, OUTPUT_DUMP_EXT); \
+ }
+
+#define CREATE_DUMP_FILE(m_filename) \
+ { \
+ FILE* fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ ALOGD("Opened file %s", m_filename); \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+ }
+#define DUMP_TO_FILE(m_filename, m_buf, m_size) \
+ { \
+ FILE* fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL) { \
+ int i; \
+ ALOGD("Dump to file!"); \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ if (i != (int)m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not write to file %s", m_filename); \
+ if (fp != NULL) fclose(fp); \
+ } \
+ }
+#else /* FILE_DUMP_ENABLE */
+#define INPUT_DUMP_PATH
+#define INPUT_DUMP_EXT
+#define OUTPUT_DUMP_PATH
+#define OUTPUT_DUMP_EXT
+#define GENERATE_FILE_NAMES()
+#define CREATE_DUMP_FILE(m_filename)
+#define DUMP_TO_FILE(m_filename, m_buf, m_size)
+#endif /* FILE_DUMP_ENABLE */
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_AV1_DEC_H_
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 50b4d20..b8baec8 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -132,6 +132,56 @@
}
}
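+// Wraps a C2BlockPool and retries any fetch that returns C2_TIMED_OUT, so the
+// component blocks until a buffer becomes available instead of failing the work.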
+class SimpleC2Component::BlockingBlockPool : public C2BlockPool {
+public:
+ BlockingBlockPool(const std::shared_ptr<C2BlockPool>& base): mBase{base} {}
+
+ virtual local_id_t getLocalId() const override {
+ return mBase->getLocalId();
+ }
+
+ virtual C2Allocator::id_t getAllocatorId() const override {
+ return mBase->getAllocatorId();
+ }
+
+ virtual c2_status_t fetchLinearBlock(
+ uint32_t capacity,
+ C2MemoryUsage usage,
+ std::shared_ptr<C2LinearBlock>* block) {
+ c2_status_t status;
+ do {
+ status = mBase->fetchLinearBlock(capacity, usage, block);
+ } while (status == C2_TIMED_OUT);
+ return status;
+ }
+
+ virtual c2_status_t fetchCircularBlock(
+ uint32_t capacity,
+ C2MemoryUsage usage,
+ std::shared_ptr<C2CircularBlock>* block) {
+ c2_status_t status;
+ do {
+ status = mBase->fetchCircularBlock(capacity, usage, block);
+ } while (status == C2_TIMED_OUT);
+ return status;
+ }
+
+ virtual c2_status_t fetchGraphicBlock(
+ uint32_t width, uint32_t height, uint32_t format,
+ C2MemoryUsage usage,
+ std::shared_ptr<C2GraphicBlock>* block) {
+ c2_status_t status;
+ do {
+ status = mBase->fetchGraphicBlock(width, height, format, usage,
+ block);
+ } while (status == C2_TIMED_OUT);
+ return status;
+ }
+
+private:
+ std::shared_ptr<C2BlockPool> mBase;
+};
+
////////////////////////////////////////////////////////////////////////////////
namespace {
@@ -446,12 +496,16 @@
}
}
- err = GetCodec2BlockPool(poolId, shared_from_this(), &mOutputBlockPool);
+ std::shared_ptr<C2BlockPool> blockPool;
+ err = GetCodec2BlockPool(poolId, shared_from_this(), &blockPool);
ALOGD("Using output block pool with poolID %llu => got %llu - %d",
(unsigned long long)poolId,
(unsigned long long)(
- mOutputBlockPool ? mOutputBlockPool->getLocalId() : 111000111),
+ blockPool ? blockPool->getLocalId() : 111000111),
err);
+ if (err == C2_OK) {
+ mOutputBlockPool = std::make_shared<BlockingBlockPool>(blockPool);
+ }
return err;
}();
if (err != C2_OK) {
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index b3a98f4..43029a9 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -234,7 +234,8 @@
typedef std::unordered_map<uint64_t, std::unique_ptr<C2Work>> PendingWork;
Mutexed<PendingWork> mPendingWork;
- std::shared_ptr<C2BlockPool> mOutputBlockPool;
+ class BlockingBlockPool;
+ std::shared_ptr<BlockingBlockPool> mOutputBlockPool;
SimpleC2Component() = delete;
};
diff --git a/media/codec2/components/flac/Android.bp b/media/codec2/components/flac/Android.bp
index d485964..e5eb51d 100644
--- a/media/codec2/components/flac/Android.bp
+++ b/media/codec2/components/flac/Android.bp
@@ -23,5 +23,8 @@
srcs: ["C2SoftFlacEnc.cpp"],
- static_libs: ["libFLAC"],
+ static_libs: [
+ "libaudioutils",
+ "libFLAC",
+ ],
}
diff --git a/media/codec2/components/flac/C2SoftFlacDec.cpp b/media/codec2/components/flac/C2SoftFlacDec.cpp
index f1e2f51..86b16e8 100644
--- a/media/codec2/components/flac/C2SoftFlacDec.cpp
+++ b/media/codec2/components/flac/C2SoftFlacDec.cpp
@@ -83,8 +83,21 @@
DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
.withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 32768))
.build());
+
+ addParameter(
+ DefineParam(mPcmEncodingInfo, C2_PARAMKEY_PCM_ENCODING)
+ .withDefault(new C2StreamPcmEncodingInfo::output(0u, C2Config::PCM_16))
+ .withFields({C2F(mPcmEncodingInfo, value).oneOf({
+ C2Config::PCM_16,
+ // C2Config::PCM_8,
+ C2Config::PCM_FLOAT})
+ })
+ .withSetter((Setter<decltype(*mPcmEncodingInfo)>::StrictValueWithNoDeps))
+ .build());
}
+ int32_t getPcmEncodingInfo() const { return mPcmEncodingInfo->value; }
+
private:
std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
@@ -94,6 +107,7 @@
std::shared_ptr<C2StreamChannelCountInfo::output> mChannelCount;
std::shared_ptr<C2BitrateTuning::input> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+ std::shared_ptr<C2StreamPcmEncodingInfo::output> mPcmEncodingInfo;
};
C2SoftFlacDec::C2SoftFlacDec(
@@ -263,11 +277,11 @@
return;
}
- size_t outSize;
- if (mHasStreamInfo)
- outSize = mStreamInfo.max_blocksize * mStreamInfo.channels * sizeof(short);
- else
- outSize = kMaxBlockSize * FLACDecoder::kMaxChannels * sizeof(short);
+ const bool outputFloat = mIntf->getPcmEncodingInfo() == C2Config::PCM_FLOAT;
+ const size_t sampleSize = outputFloat ? sizeof(float) : sizeof(short);
+ size_t outSize = mHasStreamInfo ?
+ mStreamInfo.max_blocksize * mStreamInfo.channels * sampleSize
+ : kMaxBlockSize * FLACDecoder::kMaxChannels * sampleSize;
std::shared_ptr<C2LinearBlock> block;
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
@@ -284,9 +298,8 @@
return;
}
- short *output = reinterpret_cast<short *>(wView.data());
status_t decoderErr = mFLACDecoder->decodeOneFrame(
- input, inSize, output, &outSize);
+ input, inSize, wView.data(), &outSize, outputFloat);
if (decoderErr != OK) {
ALOGE("process: FLACDecoder decodeOneFrame returns error %d", decoderErr);
mSignalledError = true;
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index e4192c7..4ea35c2 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "C2SoftFlacEnc"
#include <log/log.h>
+#include <audio_utils/primitives.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <C2PlatformSupport.h>
@@ -72,11 +73,23 @@
DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
.withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 4608))
.build());
+
+ addParameter(
+ DefineParam(mPcmEncodingInfo, C2_PARAMKEY_PCM_ENCODING)
+ .withDefault(new C2StreamPcmEncodingInfo::input(0u, C2Config::PCM_16))
+ .withFields({C2F(mPcmEncodingInfo, value).oneOf({
+ C2Config::PCM_16,
+ // C2Config::PCM_8,
+ C2Config::PCM_FLOAT})
+ })
+ .withSetter((Setter<decltype(*mPcmEncodingInfo)>::StrictValueWithNoDeps))
+ .build());
}
uint32_t getSampleRate() const { return mSampleRate->value; }
uint32_t getChannelCount() const { return mChannelCount->value; }
uint32_t getBitrate() const { return mBitrate->value; }
+ int32_t getPcmEncodingInfo() const { return mPcmEncodingInfo->value; }
private:
std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
@@ -87,6 +100,7 @@
std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
std::shared_ptr<C2BitrateTuning::output> mBitrate;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
+ std::shared_ptr<C2StreamPcmEncodingInfo::input> mPcmEncodingInfo;
};
constexpr char COMPONENT_NAME[] = "c2.android.flac.encoder";
@@ -224,12 +238,15 @@
mWroteHeader = true;
}
- uint32_t sampleRate = mIntf->getSampleRate();
- uint32_t channelCount = mIntf->getChannelCount();
- uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
+ const uint32_t sampleRate = mIntf->getSampleRate();
+ const uint32_t channelCount = mIntf->getChannelCount();
+ const bool inputFloat = mIntf->getPcmEncodingInfo() == C2Config::PCM_FLOAT;
+ const unsigned sampleSize = inputFloat ? sizeof(float) : sizeof(int16_t);
+ const unsigned frameSize = channelCount * sampleSize;
+ const uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
size_t outCapacity = inSize;
- outCapacity += mBlockSize * channelCount * sizeof(int16_t);
+ outCapacity += mBlockSize * frameSize;
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
c2_status_t err = pool->fetchLinearBlock(outCapacity, usage, &mOutputBlock);
@@ -250,14 +267,19 @@
size_t inPos = 0;
while (inPos < inSize) {
const uint8_t *inPtr = rView.data() + inOffset;
- size_t processSize = MIN(kInBlockSize * channelCount * sizeof(int16_t), (inSize - inPos));
- const unsigned nbInputFrames = processSize / (channelCount * sizeof(int16_t));
- const unsigned nbInputSamples = processSize / sizeof(int16_t);
- const int16_t *pcm16 = reinterpret_cast<const int16_t *>(inPtr + inPos);
- ALOGV("about to encode %zu bytes", processSize);
+ const size_t processSize = MIN(kInBlockSize * frameSize, (inSize - inPos));
+ const unsigned nbInputFrames = processSize / frameSize;
+ const unsigned nbInputSamples = processSize / sampleSize;
- for (unsigned i = 0; i < nbInputSamples; i++) {
- mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
+ ALOGV("about to encode %zu bytes", processSize);
+ if (inputFloat) {
+ const float * const pcmFloat = reinterpret_cast<const float *>(inPtr + inPos);
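+            // Clamp and convert float samples to signed Q8.23 (24 significant
+            // bits), matching the 24 bits/sample configured on the encoder for
+            // float input.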
+ memcpy_to_q8_23_from_float_with_clamp(mInputBufferPcm32, pcmFloat, nbInputSamples);
+ } else {
+ const int16_t * const pcm16 = reinterpret_cast<const int16_t *>(inPtr + inPos);
+ for (unsigned i = 0; i < nbInputSamples; i++) {
+ mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
+ }
}
FLAC__bool ok = FLAC__stream_encoder_process_interleaved(
@@ -342,10 +364,12 @@
return UNKNOWN_ERROR;
}
+ const bool inputFloat = mIntf->getPcmEncodingInfo() == C2Config::PCM_FLOAT;
+ const int bitsPerSample = inputFloat ? 24 : 16;
FLAC__bool ok = true;
ok = ok && FLAC__stream_encoder_set_channels(mFlacStreamEncoder, mIntf->getChannelCount());
ok = ok && FLAC__stream_encoder_set_sample_rate(mFlacStreamEncoder, mIntf->getSampleRate());
- ok = ok && FLAC__stream_encoder_set_bits_per_sample(mFlacStreamEncoder, 16);
+ ok = ok && FLAC__stream_encoder_set_bits_per_sample(mFlacStreamEncoder, bitsPerSample);
ok = ok && FLAC__stream_encoder_set_compression_level(mFlacStreamEncoder, mCompressionLevel);
ok = ok && FLAC__stream_encoder_set_verify(mFlacStreamEncoder, false);
if (!ok) {
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 27aa064..cf1f6cf 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -392,6 +392,7 @@
_C2_PL_HEVC_BASE = 0x6000,
_C2_PL_VP9_BASE = 0x7000,
_C2_PL_DV_BASE = 0x8000,
+ _C2_PL_AV1_BASE = 0x9000,
C2_PROFILE_LEVEL_VENDOR_START = 0x70000000,
};
@@ -539,6 +540,11 @@
PROFILE_DV_HE_07 = _C2_PL_DV_BASE + 7, ///< Dolby Vision dvhe.07 profile
PROFILE_DV_HE_08 = _C2_PL_DV_BASE + 8, ///< Dolby Vision dvhe.08 profile
PROFILE_DV_AV_09 = _C2_PL_DV_BASE + 9, ///< Dolby Vision dvav.09 profile
+
+ // AV1 profiles
+ PROFILE_AV1_0 = _C2_PL_AV1_BASE, ///< AV1 Profile 0 (4:2:0, 8 to 10 bit)
+ PROFILE_AV1_1, ///< AV1 Profile 1 (8 to 10 bit)
+ PROFILE_AV1_2, ///< AV1 Profile 2 (8 to 12 bit)
};
enum C2Config::level_t : uint32_t {
@@ -652,6 +658,31 @@
LEVEL_DV_HIGH_UHD_30, ///< Dolby Vision high tier uhd30
LEVEL_DV_HIGH_UHD_48, ///< Dolby Vision high tier uhd48
LEVEL_DV_HIGH_UHD_60, ///< Dolby Vision high tier uhd60
+
+    LEVEL_AV1_2 = _C2_PL_AV1_BASE,                          ///< AV1 Level 2
+ LEVEL_AV1_2_1, ///< AV1 Level 2.1
+ LEVEL_AV1_2_2, ///< AV1 Level 2.2
+ LEVEL_AV1_2_3, ///< AV1 Level 2.3
+ LEVEL_AV1_3, ///< AV1 Level 3
+ LEVEL_AV1_3_1, ///< AV1 Level 3.1
+ LEVEL_AV1_3_2, ///< AV1 Level 3.2
+ LEVEL_AV1_3_3, ///< AV1 Level 3.3
+ LEVEL_AV1_4, ///< AV1 Level 4
+ LEVEL_AV1_4_1, ///< AV1 Level 4.1
+ LEVEL_AV1_4_2, ///< AV1 Level 4.2
+ LEVEL_AV1_4_3, ///< AV1 Level 4.3
+ LEVEL_AV1_5, ///< AV1 Level 5
+ LEVEL_AV1_5_1, ///< AV1 Level 5.1
+ LEVEL_AV1_5_2, ///< AV1 Level 5.2
+ LEVEL_AV1_5_3, ///< AV1 Level 5.3
+ LEVEL_AV1_6, ///< AV1 Level 6
+ LEVEL_AV1_6_1, ///< AV1 Level 6.1
+ LEVEL_AV1_6_2, ///< AV1 Level 6.2
+ LEVEL_AV1_6_3, ///< AV1 Level 6.3
+ LEVEL_AV1_7, ///< AV1 Level 7
+ LEVEL_AV1_7_1, ///< AV1 Level 7.1
+ LEVEL_AV1_7_2, ///< AV1 Level 7.2
+ LEVEL_AV1_7_3, ///< AV1 Level 7.3
};
struct C2ProfileLevelStruct {
diff --git a/media/codec2/hidl/1.0/vts/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp
index 1e87f38..d4b973f 100644
--- a/media/codec2/hidl/1.0/vts/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/audio/VtsHidlC2V1_0TargetAudioDecTest.cpp
@@ -330,57 +330,72 @@
}
}
+// number of elementary streams per component
+#define STREAM_COUNT 2
+
// LookUpTable of clips and metadata for component testing
void GetURLForComponent(Codec2AudioDecHidlTest::standardComp comp, char* mURL,
- char* info) {
+ char* info, size_t streamIndex = 0) {
struct CompToURL {
Codec2AudioDecHidlTest::standardComp comp;
- const char* mURL;
- const char* info;
+ const char mURL[STREAM_COUNT][512];
+ const char info[STREAM_COUNT][512];
};
+ ASSERT_TRUE(streamIndex < STREAM_COUNT);
+
static const CompToURL kCompToURL[] = {
{Codec2AudioDecHidlTest::standardComp::xaac,
- "bbb_aac_stereo_128kbps_48000hz.aac",
- "bbb_aac_stereo_128kbps_48000hz.info"},
+ {"bbb_aac_stereo_128kbps_48000hz.aac",
+ "bbb_aac_stereo_128kbps_48000hz.aac"},
+ {"bbb_aac_stereo_128kbps_48000hz.info",
+ "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"}},
{Codec2AudioDecHidlTest::standardComp::mp3,
- "bbb_mp3_stereo_192kbps_48000hz.mp3",
- "bbb_mp3_stereo_192kbps_48000hz.info"},
+ {"bbb_mp3_stereo_192kbps_48000hz.mp3",
+ "bbb_mp3_stereo_192kbps_48000hz.mp3"},
+ {"bbb_mp3_stereo_192kbps_48000hz.info",
+ "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"}},
{Codec2AudioDecHidlTest::standardComp::aac,
- "bbb_aac_stereo_128kbps_48000hz.aac",
- "bbb_aac_stereo_128kbps_48000hz.info"},
+ {"bbb_aac_stereo_128kbps_48000hz.aac",
+ "bbb_aac_stereo_128kbps_48000hz.aac"},
+ {"bbb_aac_stereo_128kbps_48000hz.info",
+ "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"}},
{Codec2AudioDecHidlTest::standardComp::amrnb,
- "sine_amrnb_1ch_12kbps_8000hz.amrnb",
- "sine_amrnb_1ch_12kbps_8000hz.info"},
+ {"sine_amrnb_1ch_12kbps_8000hz.amrnb",
+ "sine_amrnb_1ch_12kbps_8000hz.amrnb"},
+ {"sine_amrnb_1ch_12kbps_8000hz.info",
+ "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"}},
{Codec2AudioDecHidlTest::standardComp::amrwb,
- "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
- "bbb_amrwb_1ch_14kbps_16000hz.info"},
+ {"bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+ "bbb_amrwb_1ch_14kbps_16000hz.amrwb"},
+ {"bbb_amrwb_1ch_14kbps_16000hz.info",
+ "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"}},
{Codec2AudioDecHidlTest::standardComp::vorbis,
- "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
- "bbb_vorbis_stereo_128kbps_48000hz.info"},
+ {"bbb_vorbis_stereo_128kbps_48000hz.vorbis", ""},
+ {"bbb_vorbis_stereo_128kbps_48000hz.info", ""}},
{Codec2AudioDecHidlTest::standardComp::opus,
- "bbb_opus_stereo_128kbps_48000hz.opus",
- "bbb_opus_stereo_128kbps_48000hz.info"},
+ {"bbb_opus_stereo_128kbps_48000hz.opus", ""},
+ {"bbb_opus_stereo_128kbps_48000hz.info", ""}},
{Codec2AudioDecHidlTest::standardComp::g711alaw,
- "bbb_g711alaw_1ch_8khz.raw",
- "bbb_g711alaw_1ch_8khz.info"},
+ {"bbb_g711alaw_1ch_8khz.raw", ""},
+ {"bbb_g711alaw_1ch_8khz.info", ""}},
{Codec2AudioDecHidlTest::standardComp::g711mlaw,
- "bbb_g711mulaw_1ch_8khz.raw",
- "bbb_g711mulaw_1ch_8khz.info"},
+ {"bbb_g711mulaw_1ch_8khz.raw", ""},
+ {"bbb_g711mulaw_1ch_8khz.info", ""}},
{Codec2AudioDecHidlTest::standardComp::gsm,
- "bbb_gsm_1ch_8khz_13kbps.raw",
- "bbb_gsm_1ch_8khz_13kbps.info"},
+ {"bbb_gsm_1ch_8khz_13kbps.raw", ""},
+ {"bbb_gsm_1ch_8khz_13kbps.info", ""}},
{Codec2AudioDecHidlTest::standardComp::raw,
- "bbb_raw_1ch_8khz_s32le.raw",
- "bbb_raw_1ch_8khz_s32le.info"},
+ {"bbb_raw_1ch_8khz_s32le.raw", ""},
+ {"bbb_raw_1ch_8khz_s32le.info", ""}},
{Codec2AudioDecHidlTest::standardComp::flac,
- "bbb_flac_stereo_680kbps_48000hz.flac",
- "bbb_flac_stereo_680kbps_48000hz.info"},
+ {"bbb_flac_stereo_680kbps_48000hz.flac", ""},
+ {"bbb_flac_stereo_680kbps_48000hz.info", ""}},
};
for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
if (kCompToURL[i].comp == comp) {
- strcat(mURL, kCompToURL[i].mURL);
- strcat(info, kCompToURL[i].info);
+ strcat(mURL, kCompToURL[i].mURL[streamIndex]);
+ strcat(info, kCompToURL[i].info[streamIndex]);
return;
}
}
@@ -491,10 +506,15 @@
ASSERT_EQ(mComponent->stop(), C2_OK);
}
-TEST_F(Codec2AudioDecHidlTest, DecodeTest) {
+class Codec2AudioDecDecodeTest : public Codec2AudioDecHidlTest,
+ public ::testing::WithParamInterface<int32_t> {
+};
+
+TEST_P(Codec2AudioDecDecodeTest, DecodeTest) {
description("Decodes input file");
if (mDisableTest) return;
+ uint32_t streamIndex = GetParam();
ASSERT_EQ(mComponent->start(), C2_OK);
mTimestampDevTest = true;
char mURL[512], info[512];
@@ -502,7 +522,12 @@
strcpy(mURL, gEnv->getRes().c_str());
strcpy(info, gEnv->getRes().c_str());
- GetURLForComponent(mCompName, mURL, info);
+ GetURLForComponent(mCompName, mURL, info, streamIndex);
+ if (!strcmp(mURL, gEnv->getRes().c_str())) {
+ ALOGV("EMPTY INPUT gEnv->getRes().c_str() %s mURL %s ",
+ gEnv->getRes().c_str(), mURL);
+ return;
+ }
eleInfo.open(info);
ASSERT_EQ(eleInfo.is_open(), true);
@@ -521,6 +546,9 @@
Info.push_back({bytesCount, flags, timestamp});
}
eleInfo.close();
+    // Reset total number of frames received
+ mFramesReceived = 0;
+ mTimestampUs = 0;
int32_t bitStreamInfo[2] = {0};
if (mCompName == raw) {
bitStreamInfo[0] = 8000;
@@ -577,6 +605,9 @@
ASSERT_EQ(mComponent->stop(), C2_OK);
}
+INSTANTIATE_TEST_CASE_P(StreamIndexes, Codec2AudioDecDecodeTest,
+ ::testing::Values(0, 1));
+
// thumbnail test
TEST_F(Codec2AudioDecHidlTest, ThumbnailTest) {
description("Test Request for thumbnail");
diff --git a/media/codec2/hidl/1.0/vts/res/bbb_aac_stereo_128kbps_48000hz_multi_frame.info b/media/codec2/hidl/1.0/vts/res/bbb_aac_stereo_128kbps_48000hz_multi_frame.info
new file mode 100644
index 0000000..182af20
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/res/bbb_aac_stereo_128kbps_48000hz_multi_frame.info
@@ -0,0 +1,443 @@
+5 32 0
+5 32 0
+337 1 0
+322 1 21333
+279 1 42666
+563 1 64000
+635 1 106666
+634 1 149333
+629 1 192000
+680 1 234666
+688 1 277333
+1036 1 320000
+1040 1 384000
+1009 1 448000
+1020 1 512000
+1357 1 576000
+1353 1 661333
+1351 1 746666
+1351 1 832000
+343 1 917333
+335 1 938666
+339 1 960000
+342 1 981333
+348 1 1002666
+350 1 1024000
+351 1 1045333
+342 1 1066666
+366 1 1088000
+340 1 1109333
+354 1 1130666
+340 1 1152000
+334 1 1173333
+338 1 1194666
+340 1 1216000
+351 1 1237333
+346 1 1258666
+331 1 1280000
+321 1 1301333
+343 1 1322666
+342 1 1344000
+345 1 1365333
+326 1 1386666
+342 1 1408000
+356 1 1429333
+351 1 1450666
+343 1 1472000
+347 1 1493333
+349 1 1514666
+350 1 1536000
+330 1 1557333
+341 1 1578666
+340 1 1600000
+330 1 1621333
+340 1 1642666
+335 1 1664000
+344 1 1685333
+359 1 1706666
+337 1 1728000
+346 1 1749333
+330 1 1770666
+351 1 1792000
+355 1 1813333
+352 1 1834666
+325 1 1856000
+342 1 1877333
+327 1 1898666
+349 1 1920000
+326 1 1941333
+337 1 1962666
+378 1 1984000
+321 1 2005333
+319 1 2026666
+346 1 2048000
+352 1 2069333
+349 1 2090666
+331 1 2112000
+330 1 2133333
+329 1 2154666
+333 1 2176000
+367 1 2197333
+362 1 2218666
+337 1 2240000
+337 1 2261333
+360 1 2282666
+333 1 2304000
+317 1 2325333
+344 1 2346666
+335 1 2368000
+337 1 2389333
+349 1 2410666
+336 1 2432000
+348 1 2453333
+349 1 2474666
+342 1 2496000
+359 1 2517333
+340 1 2538666
+340 1 2560000
+348 1 2581333
+334 1 2602666
+328 1 2624000
+341 1 2645333
+339 1 2666666
+337 1 2688000
+350 1 2709333
+326 1 2730666
+360 1 2752000
+344 1 2773333
+340 1 2794666
+343 1 2816000
+361 1 2837333
+329 1 2858666
+345 1 2880000
+345 1 2901333
+330 1 2922666
+342 1 2944000
+344 1 2965333
+330 1 2986666
+329 1 3008000
+335 1 3029333
+366 1 3050666
+328 1 3072000
+349 1 3093333
+339 1 3114666
+340 1 3136000
+335 1 3157333
+327 1 3178666
+348 1 3200000
+339 1 3221333
+334 1 3242666
+350 1 3264000
+325 1 3285333
+361 1 3306666
+338 1 3328000
+350 1 3349333
+353 1 3370666
+327 1 3392000
+346 1 3413333
+348 1 3434666
+339 1 3456000
+342 1 3477333
+334 1 3498666
+350 1 3520000
+354 1 3541333
+363 1 3562666
+322 1 3584000
+337 1 3605333
+355 1 3626666
+329 1 3648000
+324 1 3669333
+338 1 3690666
+356 1 3712000
+330 1 3733333
+321 1 3754666
+337 1 3776000
+345 1 3797333
+335 1 3818666
+348 1 3840000
+342 1 3861333
+348 1 3882666
+335 1 3904000
+344 1 3925333
+357 1 3946666
+368 1 3968000
+324 1 3989333
+343 1 4010666
+341 1 4032000
+329 1 4053333
+356 1 4074666
+317 1 4096000
+351 1 4117333
+340 1 4138666
+340 1 4160000
+332 1 4181333
+355 1 4202666
+357 1 4224000
+327 1 4245333
+338 1 4266666
+323 1 4288000
+346 1 4309333
+352 1 4330666
+347 1 4352000
+343 1 4373333
+311 1 4394666
+338 1 4416000
+365 1 4437333
+349 1 4458666
+327 1 4480000
+355 1 4501333
+319 1 4522666
+349 1 4544000
+351 1 4565333
+337 1 4586666
+340 1 4608000
+349 1 4629333
+316 1 4650666
+344 1 4672000
+334 1 4693333
+344 1 4714666
+347 1 4736000
+348 1 4757333
+334 1 4778666
+338 1 4800000
+331 1 4821333
+344 1 4842666
+342 1 4864000
+336 1 4885333
+326 1 4906666
+364 1 4928000
+350 1 4949333
+350 1 4970666
+363 1 4992000
+358 1 5013333
+305 1 5034666
+344 1 5056000
+346 1 5077333
+342 1 5098666
+330 1 5120000
+318 1 5141333
+361 1 5162666
+354 1 5184000
+313 1 5205333
+330 1 5226666
+350 1 5248000
+347 1 5269333
+346 1 5290666
+357 1 5312000
+325 1 5333333
+335 1 5354666
+331 1 5376000
+366 1 5397333
+329 1 5418666
+349 1 5440000
+371 1 5461333
+326 1 5482666
+333 1 5504000
+319 1 5525333
+327 1 5546666
+353 1 5568000
+356 1 5589333
+348 1 5610666
+338 1 5632000
+331 1 5653333
+341 1 5674666
+362 1 5696000
+326 1 5717333
+359 1 5738666
+315 1 5760000
+376 1 5781333
+343 1 5802666
+354 1 5824000
+353 1 5845333
+344 1 5866666
+334 1 5888000
+345 1 5909333
+355 1 5930666
+322 1 5952000
+334 1 5973333
+353 1 5994666
+338 1 6016000
+351 1 6037333
+334 1 6058666
+339 1 6080000
+345 1 6101333
+347 1 6122666
+355 1 6144000
+312 1 6165333
+352 1 6186666
+354 1 6208000
+318 1 6229333
+344 1 6250666
+363 1 6272000
+321 1 6293333
+339 1 6314666
+356 1 6336000
+334 1 6357333
+354 1 6378666
+325 1 6400000
+321 1 6421333
+341 1 6442666
+337 1 6464000
+351 1 6485333
+343 1 6506666
+341 1 6528000
+344 1 6549333
+341 1 6570666
+364 1 6592000
+319 1 6613333
+348 1 6634666
+332 1 6656000
+333 1 6677333
+343 1 6698666
+348 1 6720000
+347 1 6741333
+350 1 6762666
+342 1 6784000
+341 1 6805333
+326 1 6826666
+351 1 6848000
+329 1 6869333
+323 1 6890666
+350 1 6912000
+361 1 6933333
+326 1 6954666
+345 1 6976000
+345 1 6997333
+311 1 7018666
+349 1 7040000
+358 1 7061333
+352 1 7082666
+347 1 7104000
+364 1 7125333
+328 1 7146666
+318 1 7168000
+351 1 7189333
+340 1 7210666
+341 1 7232000
+355 1 7253333
+336 1 7274666
+352 1 7296000
+341 1 7317333
+334 1 7338666
+348 1 7360000
+342 1 7381333
+335 1 7402666
+342 1 7424000
+359 1 7445333
+349 1 7466666
+329 1 7488000
+356 1 7509333
+292 1 7530666
+316 1 7552000
+318 1 7573333
+320 1 7594666
+342 1 7616000
+285 1 7637333
+326 1 7658666
+352 1 7680000
+392 1 7701333
+364 1 7722666
+384 1 7744000
+334 1 7765333
+317 1 7786666
+326 1 7808000
+373 1 7829333
+354 1 7850666
+329 1 7872000
+347 1 7893333
+353 1 7914666
+338 1 7936000
+317 1 7957333
+354 1 7978666
+345 1 8000000
+350 1 8021333
+351 1 8042666
+332 1 8064000
+358 1 8085333
+315 1 8106666
+336 1 8128000
+358 1 8149333
+343 1 8170666
+319 1 8192000
+370 1 8213333
+344 1 8234666
+361 1 8256000
+343 1 8277333
+337 1 8298666
+354 1 8320000
+332 1 8341333
+348 1 8362666
+328 1 8384000
+345 1 8405333
+340 1 8426666
+346 1 8448000
+341 1 8469333
+344 1 8490666
+342 1 8512000
+341 1 8533333
+345 1 8554666
+337 1 8576000
+335 1 8597333
+335 1 8618666
+340 1 8640000
+345 1 8661333
+341 1 8682666
+342 1 8704000
+338 1 8725333
+343 1 8746666
+336 1 8768000
+338 1 8789333
+353 1 8810666
+339 1 8832000
+329 1 8853333
+349 1 8874666
+323 1 8896000
+351 1 8917333
+359 1 8938666
+357 1 8960000
+341 1 8981333
+333 1 9002666
+335 1 9024000
+328 1 9045333
+347 1 9066666
+343 1 9088000
+369 1 9109333
+331 1 9130666
+344 1 9152000
+330 1 9173333
+346 1 9194666
+337 1 9216000
+341 1 9237333
+338 1 9258666
+329 1 9280000
+360 1 9301333
+336 1 9322666
+341 1 9344000
+341 1 9365333
+345 1 9386666
+351 1 9408000
+349 1 9429333
+336 1 9450666
+326 1 9472000
+349 1 9493333
+343 1 9514666
+357 1 9536000
+342 1 9557333
+325 1 9578666
+346 1 9600000
+326 1 9621333
+402 1 9642666
+331 1 9664000
+339 1 9685333
+371 1 9706666
+314 1 9728000
+310 1 9749333
+364 1 9770666
+338 1 9792000
+339 1 9813333
+337 1 9834666
+355 1 9856000
+351 1 9877333
+332 1 9898666
+316 1 9920000
+474 1 9941333
diff --git a/media/codec2/hidl/1.0/vts/res/bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info b/media/codec2/hidl/1.0/vts/res/bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info
new file mode 100644
index 0000000..c420009
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/res/bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info
@@ -0,0 +1,460 @@
+41 1 0
+41 1 20000
+82 1 40000
+82 1 80000
+82 1 120000
+82 1 160000
+82 1 200000
+82 1 240000
+82 1 280000
+82 1 320000
+82 1 360000
+123 1 400000
+123 1 460000
+123 1 520000
+123 1 580000
+123 1 640000
+164 1 700000
+164 1 780000
+164 1 860000
+164 1 940000
+164 1 1020000
+41 1 1100000
+41 1 1120000
+41 1 1140000
+41 1 1160000
+41 1 1180000
+41 1 1200000
+41 1 1220000
+41 1 1240000
+41 1 1260000
+41 1 1280000
+41 1 1300000
+41 1 1320000
+41 1 1340000
+41 1 1360000
+41 1 1380000
+41 1 1400000
+41 1 1420000
+41 1 1440000
+41 1 1460000
+41 1 1480000
+41 1 1500000
+41 1 1520000
+41 1 1540000
+41 1 1560000
+41 1 1580000
+41 1 1600000
+41 1 1620000
+41 1 1640000
+41 1 1660000
+41 1 1680000
+41 1 1700000
+41 1 1720000
+41 1 1740000
+41 1 1760000
+41 1 1780000
+41 1 1800000
+41 1 1820000
+41 1 1840000
+41 1 1860000
+41 1 1880000
+41 1 1900000
+41 1 1920000
+41 1 1940000
+41 1 1960000
+41 1 1980000
+41 1 2000000
+41 1 2020000
+41 1 2040000
+41 1 2060000
+41 1 2080000
+41 1 2100000
+41 1 2120000
+41 1 2140000
+41 1 2160000
+41 1 2180000
+41 1 2200000
+41 1 2220000
+41 1 2240000
+41 1 2260000
+41 1 2280000
+41 1 2300000
+41 1 2320000
+41 1 2340000
+41 1 2360000
+41 1 2380000
+41 1 2400000
+41 1 2420000
+41 1 2440000
+41 1 2460000
+41 1 2480000
+41 1 2500000
+41 1 2520000
+41 1 2540000
+41 1 2560000
+41 1 2580000
+41 1 2600000
+41 1 2620000
+41 1 2640000
+41 1 2660000
+41 1 2680000
+41 1 2700000
+41 1 2720000
+41 1 2740000
+41 1 2760000
+41 1 2780000
+41 1 2800000
+41 1 2820000
+41 1 2840000
+41 1 2860000
+41 1 2880000
+41 1 2900000
+41 1 2920000
+41 1 2940000
+41 1 2960000
+41 1 2980000
+41 1 3000000
+41 1 3020000
+41 1 3040000
+41 1 3060000
+41 1 3080000
+41 1 3100000
+41 1 3120000
+41 1 3140000
+41 1 3160000
+41 1 3180000
+41 1 3200000
+41 1 3220000
+41 1 3240000
+41 1 3260000
+41 1 3280000
+41 1 3300000
+41 1 3320000
+41 1 3340000
+41 1 3360000
+41 1 3380000
+41 1 3400000
+41 1 3420000
+41 1 3440000
+41 1 3460000
+41 1 3480000
+41 1 3500000
+41 1 3520000
+41 1 3540000
+41 1 3560000
+41 1 3580000
+41 1 3600000
+41 1 3620000
+41 1 3640000
+41 1 3660000
+41 1 3680000
+41 1 3700000
+41 1 3720000
+41 1 3740000
+41 1 3760000
+41 1 3780000
+41 1 3800000
+41 1 3820000
+41 1 3840000
+41 1 3860000
+41 1 3880000
+41 1 3900000
+41 1 3920000
+41 1 3940000
+41 1 3960000
+41 1 3980000
+41 1 4000000
+41 1 4020000
+41 1 4040000
+41 1 4060000
+41 1 4080000
+41 1 4100000
+41 1 4120000
+41 1 4140000
+41 1 4160000
+41 1 4180000
+41 1 4200000
+41 1 4220000
+41 1 4240000
+41 1 4260000
+41 1 4280000
+41 1 4300000
+41 1 4320000
+41 1 4340000
+41 1 4360000
+41 1 4380000
+41 1 4400000
+41 1 4420000
+41 1 4440000
+41 1 4460000
+41 1 4480000
+41 1 4500000
+41 1 4520000
+41 1 4540000
+41 1 4560000
+41 1 4580000
+41 1 4600000
+41 1 4620000
+41 1 4640000
+41 1 4660000
+41 1 4680000
+41 1 4700000
+41 1 4720000
+41 1 4740000
+41 1 4760000
+41 1 4780000
+41 1 4800000
+41 1 4820000
+41 1 4840000
+41 1 4860000
+41 1 4880000
+41 1 4900000
+41 1 4920000
+41 1 4940000
+41 1 4960000
+41 1 4980000
+41 1 5000000
+41 1 5020000
+41 1 5040000
+41 1 5060000
+41 1 5080000
+41 1 5100000
+41 1 5120000
+41 1 5140000
+41 1 5160000
+41 1 5180000
+41 1 5200000
+41 1 5220000
+41 1 5240000
+41 1 5260000
+41 1 5280000
+41 1 5300000
+41 1 5320000
+41 1 5340000
+41 1 5360000
+41 1 5380000
+41 1 5400000
+41 1 5420000
+41 1 5440000
+41 1 5460000
+41 1 5480000
+41 1 5500000
+41 1 5520000
+41 1 5540000
+41 1 5560000
+41 1 5580000
+41 1 5600000
+41 1 5620000
+41 1 5640000
+41 1 5660000
+41 1 5680000
+41 1 5700000
+41 1 5720000
+41 1 5740000
+41 1 5760000
+41 1 5780000
+41 1 5800000
+41 1 5820000
+41 1 5840000
+41 1 5860000
+41 1 5880000
+41 1 5900000
+41 1 5920000
+41 1 5940000
+41 1 5960000
+41 1 5980000
+41 1 6000000
+41 1 6020000
+41 1 6040000
+41 1 6060000
+41 1 6080000
+41 1 6100000
+41 1 6120000
+41 1 6140000
+41 1 6160000
+41 1 6180000
+41 1 6200000
+41 1 6220000
+41 1 6240000
+41 1 6260000
+41 1 6280000
+41 1 6300000
+41 1 6320000
+41 1 6340000
+41 1 6360000
+41 1 6380000
+41 1 6400000
+41 1 6420000
+41 1 6440000
+41 1 6460000
+41 1 6480000
+41 1 6500000
+41 1 6520000
+41 1 6540000
+41 1 6560000
+41 1 6580000
+41 1 6600000
+41 1 6620000
+41 1 6640000
+41 1 6660000
+41 1 6680000
+41 1 6700000
+41 1 6720000
+41 1 6740000
+41 1 6760000
+41 1 6780000
+41 1 6800000
+41 1 6820000
+41 1 6840000
+41 1 6860000
+41 1 6880000
+41 1 6900000
+41 1 6920000
+41 1 6940000
+41 1 6960000
+41 1 6980000
+41 1 7000000
+41 1 7020000
+41 1 7040000
+41 1 7060000
+41 1 7080000
+41 1 7100000
+41 1 7120000
+41 1 7140000
+41 1 7160000
+41 1 7180000
+41 1 7200000
+41 1 7220000
+41 1 7240000
+41 1 7260000
+41 1 7280000
+41 1 7300000
+41 1 7320000
+41 1 7340000
+41 1 7360000
+41 1 7380000
+41 1 7400000
+41 1 7420000
+41 1 7440000
+41 1 7460000
+41 1 7480000
+41 1 7500000
+41 1 7520000
+41 1 7540000
+41 1 7560000
+41 1 7580000
+41 1 7600000
+41 1 7620000
+41 1 7640000
+41 1 7660000
+41 1 7680000
+41 1 7700000
+41 1 7720000
+41 1 7740000
+41 1 7760000
+41 1 7780000
+41 1 7800000
+41 1 7820000
+41 1 7840000
+41 1 7860000
+41 1 7880000
+41 1 7900000
+41 1 7920000
+41 1 7940000
+41 1 7960000
+41 1 7980000
+41 1 8000000
+41 1 8020000
+41 1 8040000
+41 1 8060000
+41 1 8080000
+41 1 8100000
+41 1 8120000
+41 1 8140000
+41 1 8160000
+41 1 8180000
+41 1 8200000
+41 1 8220000
+41 1 8240000
+41 1 8260000
+41 1 8280000
+41 1 8300000
+41 1 8320000
+41 1 8340000
+41 1 8360000
+41 1 8380000
+41 1 8400000
+41 1 8420000
+41 1 8440000
+41 1 8460000
+41 1 8480000
+41 1 8500000
+41 1 8520000
+41 1 8540000
+41 1 8560000
+41 1 8580000
+41 1 8600000
+41 1 8620000
+41 1 8640000
+41 1 8660000
+41 1 8680000
+41 1 8700000
+41 1 8720000
+41 1 8740000
+41 1 8760000
+41 1 8780000
+41 1 8800000
+41 1 8820000
+41 1 8840000
+41 1 8860000
+41 1 8880000
+41 1 8900000
+41 1 8920000
+41 1 8940000
+41 1 8960000
+41 1 8980000
+41 1 9000000
+41 1 9020000
+41 1 9040000
+41 1 9060000
+41 1 9080000
+41 1 9100000
+41 1 9120000
+41 1 9140000
+41 1 9160000
+41 1 9180000
+41 1 9200000
+41 1 9220000
+41 1 9240000
+41 1 9260000
+41 1 9280000
+41 1 9300000
+41 1 9320000
+41 1 9340000
+41 1 9360000
+41 1 9380000
+41 1 9400000
+41 1 9420000
+41 1 9440000
+41 1 9460000
+41 1 9480000
+41 1 9500000
+41 1 9520000
+41 1 9540000
+41 1 9560000
+41 1 9580000
+41 1 9600000
+41 1 9620000
+41 1 9640000
+41 1 9660000
+41 1 9680000
+41 1 9700000
+41 1 9720000
+41 1 9740000
+41 1 9760000
+41 1 9780000
+41 1 9800000
+41 1 9820000
+41 1 9840000
+41 1 9860000
diff --git a/media/codec2/hidl/1.0/vts/res/bbb_mp3_stereo_192kbps_48000hz_multi_frame.info b/media/codec2/hidl/1.0/vts/res/bbb_mp3_stereo_192kbps_48000hz_multi_frame.info
new file mode 100644
index 0000000..575c75f
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/res/bbb_mp3_stereo_192kbps_48000hz_multi_frame.info
@@ -0,0 +1,385 @@
+576 1 0
+576 1 24000
+1152 1 48000
+1152 1 96000
+1152 1 144000
+1152 1 192000
+1728 1 240000
+1728 1 312000
+1728 1 384000
+1728 1 456000
+1728 1 528000
+2304 1 600000
+2304 1 696000
+2304 1 792000
+2304 1 888000
+2304 1 984000
+576 1 1080000
+576 1 1104000
+576 1 1128000
+576 1 1152000
+576 1 1176000
+576 1 1200000
+576 1 1224000
+576 1 1248000
+576 1 1272000
+576 1 1296000
+576 1 1320000
+576 1 1344000
+576 1 1368000
+576 1 1392000
+576 1 1416000
+576 1 1440000
+576 1 1464000
+576 1 1488000
+576 1 1512000
+576 1 1536000
+576 1 1560000
+576 1 1584000
+576 1 1608000
+576 1 1632000
+576 1 1656000
+576 1 1680000
+576 1 1704000
+576 1 1728000
+576 1 1752000
+576 1 1776000
+576 1 1800000
+576 1 1824000
+576 1 1848000
+576 1 1872000
+576 1 1896000
+576 1 1920000
+576 1 1944000
+576 1 1968000
+576 1 1992000
+576 1 2016000
+576 1 2040000
+576 1 2064000
+576 1 2088000
+576 1 2112000
+576 1 2136000
+576 1 2160000
+576 1 2184000
+576 1 2208000
+576 1 2232000
+576 1 2256000
+576 1 2280000
+576 1 2304000
+576 1 2328000
+576 1 2352000
+576 1 2376000
+576 1 2400000
+576 1 2424000
+576 1 2448000
+576 1 2472000
+576 1 2496000
+576 1 2520000
+576 1 2544000
+576 1 2568000
+576 1 2592000
+576 1 2616000
+576 1 2640000
+576 1 2664000
+576 1 2688000
+576 1 2712000
+576 1 2736000
+576 1 2760000
+576 1 2784000
+576 1 2808000
+576 1 2832000
+576 1 2856000
+576 1 2880000
+576 1 2904000
+576 1 2928000
+576 1 2952000
+576 1 2976000
+576 1 3000000
+576 1 3024000
+576 1 3048000
+576 1 3072000
+576 1 3096000
+576 1 3120000
+576 1 3144000
+576 1 3168000
+576 1 3192000
+576 1 3216000
+576 1 3240000
+576 1 3264000
+576 1 3288000
+576 1 3312000
+576 1 3336000
+576 1 3360000
+576 1 3384000
+576 1 3408000
+576 1 3432000
+576 1 3456000
+576 1 3480000
+576 1 3504000
+576 1 3528000
+576 1 3552000
+576 1 3576000
+576 1 3600000
+576 1 3624000
+576 1 3648000
+576 1 3672000
+576 1 3696000
+576 1 3720000
+576 1 3744000
+576 1 3768000
+576 1 3792000
+576 1 3816000
+576 1 3840000
+576 1 3864000
+576 1 3888000
+576 1 3912000
+576 1 3936000
+576 1 3960000
+576 1 3984000
+576 1 4008000
+576 1 4032000
+576 1 4056000
+576 1 4080000
+576 1 4104000
+576 1 4128000
+576 1 4152000
+576 1 4176000
+576 1 4200000
+576 1 4224000
+576 1 4248000
+576 1 4272000
+576 1 4296000
+576 1 4320000
+576 1 4344000
+576 1 4368000
+576 1 4392000
+576 1 4416000
+576 1 4440000
+576 1 4464000
+576 1 4488000
+576 1 4512000
+576 1 4536000
+576 1 4560000
+576 1 4584000
+576 1 4608000
+576 1 4632000
+576 1 4656000
+576 1 4680000
+576 1 4704000
+576 1 4728000
+576 1 4752000
+576 1 4776000
+576 1 4800000
+576 1 4824000
+576 1 4848000
+576 1 4872000
+576 1 4896000
+576 1 4920000
+576 1 4944000
+576 1 4968000
+576 1 4992000
+576 1 5016000
+576 1 5040000
+576 1 5064000
+576 1 5088000
+576 1 5112000
+576 1 5136000
+576 1 5160000
+576 1 5184000
+576 1 5208000
+576 1 5232000
+576 1 5256000
+576 1 5280000
+576 1 5304000
+576 1 5328000
+576 1 5352000
+576 1 5376000
+576 1 5400000
+576 1 5424000
+576 1 5448000
+576 1 5472000
+576 1 5496000
+576 1 5520000
+576 1 5544000
+576 1 5568000
+576 1 5592000
+576 1 5616000
+576 1 5640000
+576 1 5664000
+576 1 5688000
+576 1 5712000
+576 1 5736000
+576 1 5760000
+576 1 5784000
+576 1 5808000
+576 1 5832000
+576 1 5856000
+576 1 5880000
+576 1 5904000
+576 1 5928000
+576 1 5952000
+576 1 5976000
+576 1 6000000
+576 1 6024000
+576 1 6048000
+576 1 6072000
+576 1 6096000
+576 1 6120000
+576 1 6144000
+576 1 6168000
+576 1 6192000
+576 1 6216000
+576 1 6240000
+576 1 6264000
+576 1 6288000
+576 1 6312000
+576 1 6336000
+576 1 6360000
+576 1 6384000
+576 1 6408000
+576 1 6432000
+576 1 6456000
+576 1 6480000
+576 1 6504000
+576 1 6528000
+576 1 6552000
+576 1 6576000
+576 1 6600000
+576 1 6624000
+576 1 6648000
+576 1 6672000
+576 1 6696000
+576 1 6720000
+576 1 6744000
+576 1 6768000
+576 1 6792000
+576 1 6816000
+576 1 6840000
+576 1 6864000
+576 1 6888000
+576 1 6912000
+576 1 6936000
+576 1 6960000
+576 1 6984000
+576 1 7008000
+576 1 7032000
+576 1 7056000
+576 1 7080000
+576 1 7104000
+576 1 7128000
+576 1 7152000
+576 1 7176000
+576 1 7200000
+576 1 7224000
+576 1 7248000
+576 1 7272000
+576 1 7296000
+576 1 7320000
+576 1 7344000
+576 1 7368000
+576 1 7392000
+576 1 7416000
+576 1 7440000
+576 1 7464000
+576 1 7488000
+576 1 7512000
+576 1 7536000
+576 1 7560000
+576 1 7584000
+576 1 7608000
+576 1 7632000
+576 1 7656000
+576 1 7680000
+576 1 7704000
+576 1 7728000
+576 1 7752000
+576 1 7776000
+576 1 7800000
+576 1 7824000
+576 1 7848000
+576 1 7872000
+576 1 7896000
+576 1 7920000
+576 1 7944000
+576 1 7968000
+576 1 7992000
+576 1 8016000
+576 1 8040000
+576 1 8064000
+576 1 8088000
+576 1 8112000
+576 1 8136000
+576 1 8160000
+576 1 8184000
+576 1 8208000
+576 1 8232000
+576 1 8256000
+576 1 8280000
+576 1 8304000
+576 1 8328000
+576 1 8352000
+576 1 8376000
+576 1 8400000
+576 1 8424000
+576 1 8448000
+576 1 8472000
+576 1 8496000
+576 1 8520000
+576 1 8544000
+576 1 8568000
+576 1 8592000
+576 1 8616000
+576 1 8640000
+576 1 8664000
+576 1 8688000
+576 1 8712000
+576 1 8736000
+576 1 8760000
+576 1 8784000
+576 1 8808000
+576 1 8832000
+576 1 8856000
+576 1 8880000
+576 1 8904000
+576 1 8928000
+576 1 8952000
+576 1 8976000
+576 1 9000000
+576 1 9024000
+576 1 9048000
+576 1 9072000
+576 1 9096000
+576 1 9120000
+576 1 9144000
+576 1 9168000
+576 1 9192000
+576 1 9216000
+576 1 9240000
+576 1 9264000
+576 1 9288000
+576 1 9312000
+576 1 9336000
+576 1 9360000
+576 1 9384000
+576 1 9408000
+576 1 9432000
+576 1 9456000
+576 1 9480000
+576 1 9504000
+576 1 9528000
+576 1 9552000
+576 1 9576000
+576 1 9600000
+576 1 9624000
+576 1 9648000
+576 1 9672000
+576 1 9696000
+576 1 9720000
+576 1 9744000
+576 1 9768000
+576 1 9792000
+576 1 9816000
+576 1 9840000
+576 1 9864000
+576 1 9888000
+576 1 9912000
diff --git a/media/codec2/hidl/1.0/vts/res/sine_amrnb_1ch_12kbps_8000hz_multi_frame.info b/media/codec2/hidl/1.0/vts/res/sine_amrnb_1ch_12kbps_8000hz_multi_frame.info
new file mode 100644
index 0000000..0176eaf4
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/res/sine_amrnb_1ch_12kbps_8000hz_multi_frame.info
@@ -0,0 +1,807 @@
+32 1 0
+32 1 20000
+64 1 40000
+64 1 80000
+64 1 120000
+96 1 160000
+96 1 220000
+96 1 280000
+96 1 340000
+128 1 400000
+128 1 480000
+128 1 560000
+128 1 640000
+128 1 720000
+32 1 800000
+32 1 820000
+32 1 840000
+32 1 860000
+32 1 880000
+32 1 900000
+32 1 920000
+32 1 940000
+32 1 960000
+32 1 980000
+32 1 1000000
+32 1 1020000
+32 1 1040000
+32 1 1060000
+32 1 1080000
+32 1 1100000
+32 1 1120000
+32 1 1140000
+32 1 1160000
+32 1 1180000
+32 1 1200000
+32 1 1220000
+32 1 1240000
+32 1 1260000
+32 1 1280000
+32 1 1300000
+32 1 1320000
+32 1 1340000
+32 1 1360000
+32 1 1380000
+32 1 1400000
+32 1 1420000
+32 1 1440000
+32 1 1460000
+32 1 1480000
+32 1 1500000
+32 1 1520000
+32 1 1540000
+32 1 1560000
+32 1 1580000
+32 1 1600000
+32 1 1620000
+32 1 1640000
+32 1 1660000
+32 1 1680000
+32 1 1700000
+32 1 1720000
+32 1 1740000
+32 1 1760000
+32 1 1780000
+32 1 1800000
+32 1 1820000
+32 1 1840000
+32 1 1860000
+32 1 1880000
+32 1 1900000
+32 1 1920000
+32 1 1940000
+32 1 1960000
+32 1 1980000
+32 1 2000000
+32 1 2020000
+32 1 2040000
+32 1 2060000
+32 1 2080000
+32 1 2100000
+32 1 2120000
+32 1 2140000
+32 1 2160000
+32 1 2180000
+32 1 2200000
+32 1 2220000
+32 1 2240000
+32 1 2260000
+32 1 2280000
+32 1 2300000
+32 1 2320000
+32 1 2340000
+32 1 2360000
+32 1 2380000
+32 1 2400000
+32 1 2420000
+32 1 2440000
+32 1 2460000
+32 1 2480000
+32 1 2500000
+32 1 2520000
+32 1 2540000
+32 1 2560000
+32 1 2580000
+32 1 2600000
+32 1 2620000
+32 1 2640000
+32 1 2660000
+32 1 2680000
+32 1 2700000
+32 1 2720000
+32 1 2740000
+32 1 2760000
+32 1 2780000
+32 1 2800000
+32 1 2820000
+32 1 2840000
+32 1 2860000
+32 1 2880000
+32 1 2900000
+32 1 2920000
+32 1 2940000
+32 1 2960000
+32 1 2980000
+32 1 3000000
+32 1 3020000
+32 1 3040000
+32 1 3060000
+32 1 3080000
+32 1 3100000
+32 1 3120000
+32 1 3140000
+32 1 3160000
+32 1 3180000
+32 1 3200000
+32 1 3220000
+32 1 3240000
+32 1 3260000
+32 1 3280000
+32 1 3300000
+32 1 3320000
+32 1 3340000
+32 1 3360000
+32 1 3380000
+32 1 3400000
+32 1 3420000
+32 1 3440000
+32 1 3460000
+32 1 3480000
+32 1 3500000
+32 1 3520000
+32 1 3540000
+32 1 3560000
+32 1 3580000
+32 1 3600000
+32 1 3620000
+32 1 3640000
+32 1 3660000
+32 1 3680000
+32 1 3700000
+32 1 3720000
+32 1 3740000
+32 1 3760000
+32 1 3780000
+32 1 3800000
+32 1 3820000
+32 1 3840000
+32 1 3860000
+32 1 3880000
+32 1 3900000
+32 1 3920000
+32 1 3940000
+32 1 3960000
+32 1 3980000
+32 1 4000000
+32 1 4020000
+32 1 4040000
+32 1 4060000
+32 1 4080000
+32 1 4100000
+32 1 4120000
+32 1 4140000
+32 1 4160000
+32 1 4180000
+32 1 4200000
+32 1 4220000
+32 1 4240000
+32 1 4260000
+32 1 4280000
+32 1 4300000
+32 1 4320000
+32 1 4340000
+32 1 4360000
+32 1 4380000
+32 1 4400000
+32 1 4420000
+32 1 4440000
+32 1 4460000
+32 1 4480000
+32 1 4500000
+32 1 4520000
+32 1 4540000
+32 1 4560000
+32 1 4580000
+32 1 4600000
+32 1 4620000
+32 1 4640000
+32 1 4660000
+32 1 4680000
+32 1 4700000
+32 1 4720000
+32 1 4740000
+32 1 4760000
+32 1 4780000
+32 1 4800000
+32 1 4820000
+32 1 4840000
+32 1 4860000
+32 1 4880000
+32 1 4900000
+32 1 4920000
+32 1 4940000
+32 1 4960000
+32 1 4980000
+32 1 5000000
+32 1 5020000
+32 1 5040000
+32 1 5060000
+32 1 5080000
+32 1 5100000
+32 1 5120000
+32 1 5140000
+32 1 5160000
+32 1 5180000
+32 1 5200000
+32 1 5220000
+32 1 5240000
+32 1 5260000
+32 1 5280000
+32 1 5300000
+32 1 5320000
+32 1 5340000
+32 1 5360000
+32 1 5380000
+32 1 5400000
+32 1 5420000
+32 1 5440000
+32 1 5460000
+32 1 5480000
+32 1 5500000
+32 1 5520000
+32 1 5540000
+32 1 5560000
+32 1 5580000
+32 1 5600000
+32 1 5620000
+32 1 5640000
+32 1 5660000
+32 1 5680000
+32 1 5700000
+32 1 5720000
+32 1 5740000
+32 1 5760000
+32 1 5780000
+32 1 5800000
+32 1 5820000
+32 1 5840000
+32 1 5860000
+32 1 5880000
+32 1 5900000
+32 1 5920000
+32 1 5940000
+32 1 5960000
+32 1 5980000
+32 1 6000000
+32 1 6020000
+32 1 6040000
+32 1 6060000
+32 1 6080000
+32 1 6100000
+32 1 6120000
+32 1 6140000
+32 1 6160000
+32 1 6180000
+32 1 6200000
+32 1 6220000
+32 1 6240000
+32 1 6260000
+32 1 6280000
+32 1 6300000
+32 1 6320000
+32 1 6340000
+32 1 6360000
+32 1 6380000
+32 1 6400000
+32 1 6420000
+32 1 6440000
+32 1 6460000
+32 1 6480000
+32 1 6500000
+32 1 6520000
+32 1 6540000
+32 1 6560000
+32 1 6580000
+32 1 6600000
+32 1 6620000
+32 1 6640000
+32 1 6660000
+32 1 6680000
+32 1 6700000
+32 1 6720000
+32 1 6740000
+32 1 6760000
+32 1 6780000
+32 1 6800000
+32 1 6820000
+32 1 6840000
+32 1 6860000
+32 1 6880000
+32 1 6900000
+32 1 6920000
+32 1 6940000
+32 1 6960000
+32 1 6980000
+32 1 7000000
+32 1 7020000
+32 1 7040000
+32 1 7060000
+32 1 7080000
+32 1 7100000
+32 1 7120000
+32 1 7140000
+32 1 7160000
+32 1 7180000
+32 1 7200000
+32 1 7220000
+32 1 7240000
+32 1 7260000
+32 1 7280000
+32 1 7300000
+32 1 7320000
+32 1 7340000
+32 1 7360000
+32 1 7380000
+32 1 7400000
+32 1 7420000
+32 1 7440000
+32 1 7460000
+32 1 7480000
+32 1 7500000
+32 1 7520000
+32 1 7540000
+32 1 7560000
+32 1 7580000
+32 1 7600000
+32 1 7620000
+32 1 7640000
+32 1 7660000
+32 1 7680000
+32 1 7700000
+32 1 7720000
+32 1 7740000
+32 1 7760000
+32 1 7780000
+32 1 7800000
+32 1 7820000
+32 1 7840000
+32 1 7860000
+32 1 7880000
+32 1 7900000
+32 1 7920000
+32 1 7940000
+32 1 7960000
+32 1 7980000
+32 1 8000000
+32 1 8020000
+32 1 8040000
+32 1 8060000
+32 1 8080000
+32 1 8100000
+32 1 8120000
+32 1 8140000
+32 1 8160000
+32 1 8180000
+32 1 8200000
+32 1 8220000
+32 1 8240000
+32 1 8260000
+32 1 8280000
+32 1 8300000
+32 1 8320000
+32 1 8340000
+32 1 8360000
+32 1 8380000
+32 1 8400000
+32 1 8420000
+32 1 8440000
+32 1 8460000
+32 1 8480000
+32 1 8500000
+32 1 8520000
+32 1 8540000
+32 1 8560000
+32 1 8580000
+32 1 8600000
+32 1 8620000
+32 1 8640000
+32 1 8660000
+32 1 8680000
+32 1 8700000
+32 1 8720000
+32 1 8740000
+32 1 8760000
+32 1 8780000
+32 1 8800000
+32 1 8820000
+32 1 8840000
+32 1 8860000
+32 1 8880000
+32 1 8900000
+32 1 8920000
+32 1 8940000
+32 1 8960000
+32 1 8980000
+32 1 9000000
+32 1 9020000
+32 1 9040000
+32 1 9060000
+32 1 9080000
+32 1 9100000
+32 1 9120000
+32 1 9140000
+32 1 9160000
+32 1 9180000
+32 1 9200000
+32 1 9220000
+32 1 9240000
+32 1 9260000
+32 1 9280000
+32 1 9300000
+32 1 9320000
+32 1 9340000
+32 1 9360000
+32 1 9380000
+32 1 9400000
+32 1 9420000
+32 1 9440000
+32 1 9460000
+32 1 9480000
+32 1 9500000
+32 1 9520000
+32 1 9540000
+32 1 9560000
+32 1 9580000
+32 1 9600000
+32 1 9620000
+32 1 9640000
+32 1 9660000
+32 1 9680000
+32 1 9700000
+32 1 9720000
+32 1 9740000
+32 1 9760000
+32 1 9780000
+32 1 9800000
+32 1 9820000
+32 1 9840000
+32 1 9860000
+32 1 9880000
+32 1 9900000
+32 1 9920000
+32 1 9940000
+32 1 9960000
+32 1 9980000
+32 1 10000000
+32 1 10020000
+32 1 10040000
+32 1 10060000
+32 1 10080000
+32 1 10100000
+32 1 10120000
+32 1 10140000
+32 1 10160000
+32 1 10180000
+32 1 10200000
+32 1 10220000
+32 1 10240000
+32 1 10260000
+32 1 10280000
+32 1 10300000
+32 1 10320000
+32 1 10340000
+32 1 10360000
+32 1 10380000
+32 1 10400000
+32 1 10420000
+32 1 10440000
+32 1 10460000
+32 1 10480000
+32 1 10500000
+32 1 10520000
+32 1 10540000
+32 1 10560000
+32 1 10580000
+32 1 10600000
+32 1 10620000
+32 1 10640000
+32 1 10660000
+32 1 10680000
+32 1 10700000
+32 1 10720000
+32 1 10740000
+32 1 10760000
+32 1 10780000
+32 1 10800000
+32 1 10820000
+32 1 10840000
+32 1 10860000
+32 1 10880000
+32 1 10900000
+32 1 10920000
+32 1 10940000
+32 1 10960000
+32 1 10980000
+32 1 11000000
+32 1 11020000
+32 1 11040000
+32 1 11060000
+32 1 11080000
+32 1 11100000
+32 1 11120000
+32 1 11140000
+32 1 11160000
+32 1 11180000
+32 1 11200000
+32 1 11220000
+32 1 11240000
+32 1 11260000
+32 1 11280000
+32 1 11300000
+32 1 11320000
+32 1 11340000
+32 1 11360000
+32 1 11380000
+32 1 11400000
+32 1 11420000
+32 1 11440000
+32 1 11460000
+32 1 11480000
+32 1 11500000
+32 1 11520000
+32 1 11540000
+32 1 11560000
+32 1 11580000
+32 1 11600000
+32 1 11620000
+32 1 11640000
+32 1 11660000
+32 1 11680000
+32 1 11700000
+32 1 11720000
+32 1 11740000
+32 1 11760000
+32 1 11780000
+32 1 11800000
+32 1 11820000
+32 1 11840000
+32 1 11860000
+32 1 11880000
+32 1 11900000
+32 1 11920000
+32 1 11940000
+32 1 11960000
+32 1 11980000
+32 1 12000000
+32 1 12020000
+32 1 12040000
+32 1 12060000
+32 1 12080000
+32 1 12100000
+32 1 12120000
+32 1 12140000
+32 1 12160000
+32 1 12180000
+32 1 12200000
+32 1 12220000
+32 1 12240000
+32 1 12260000
+32 1 12280000
+32 1 12300000
+32 1 12320000
+32 1 12340000
+32 1 12360000
+32 1 12380000
+32 1 12400000
+32 1 12420000
+32 1 12440000
+32 1 12460000
+32 1 12480000
+32 1 12500000
+32 1 12520000
+32 1 12540000
+32 1 12560000
+32 1 12580000
+32 1 12600000
+32 1 12620000
+32 1 12640000
+32 1 12660000
+32 1 12680000
+32 1 12700000
+32 1 12720000
+32 1 12740000
+32 1 12760000
+32 1 12780000
+32 1 12800000
+32 1 12820000
+32 1 12840000
+32 1 12860000
+32 1 12880000
+32 1 12900000
+32 1 12920000
+32 1 12940000
+32 1 12960000
+32 1 12980000
+32 1 13000000
+32 1 13020000
+32 1 13040000
+32 1 13060000
+32 1 13080000
+32 1 13100000
+32 1 13120000
+32 1 13140000
+32 1 13160000
+32 1 13180000
+32 1 13200000
+32 1 13220000
+32 1 13240000
+32 1 13260000
+32 1 13280000
+32 1 13300000
+32 1 13320000
+32 1 13340000
+32 1 13360000
+32 1 13380000
+32 1 13400000
+32 1 13420000
+32 1 13440000
+32 1 13460000
+32 1 13480000
+32 1 13500000
+32 1 13520000
+32 1 13540000
+32 1 13560000
+32 1 13580000
+32 1 13600000
+32 1 13620000
+32 1 13640000
+32 1 13660000
+32 1 13680000
+32 1 13700000
+32 1 13720000
+32 1 13740000
+32 1 13760000
+32 1 13780000
+32 1 13800000
+32 1 13820000
+32 1 13840000
+32 1 13860000
+32 1 13880000
+32 1 13900000
+32 1 13920000
+32 1 13940000
+32 1 13960000
+32 1 13980000
+32 1 14000000
+32 1 14020000
+32 1 14040000
+32 1 14060000
+32 1 14080000
+32 1 14100000
+32 1 14120000
+32 1 14140000
+32 1 14160000
+32 1 14180000
+32 1 14200000
+32 1 14220000
+32 1 14240000
+32 1 14260000
+32 1 14280000
+32 1 14300000
+32 1 14320000
+32 1 14340000
+32 1 14360000
+32 1 14380000
+32 1 14400000
+32 1 14420000
+32 1 14440000
+32 1 14460000
+32 1 14480000
+32 1 14500000
+32 1 14520000
+32 1 14540000
+32 1 14560000
+32 1 14580000
+32 1 14600000
+32 1 14620000
+32 1 14640000
+32 1 14660000
+32 1 14680000
+32 1 14700000
+32 1 14720000
+32 1 14740000
+32 1 14760000
+32 1 14780000
+32 1 14800000
+32 1 14820000
+32 1 14840000
+32 1 14860000
+32 1 14880000
+32 1 14900000
+32 1 14920000
+32 1 14940000
+32 1 14960000
+32 1 14980000
+32 1 15000000
+32 1 15020000
+32 1 15040000
+32 1 15060000
+32 1 15080000
+32 1 15100000
+32 1 15120000
+32 1 15140000
+32 1 15160000
+32 1 15180000
+32 1 15200000
+32 1 15220000
+32 1 15240000
+32 1 15260000
+32 1 15280000
+32 1 15300000
+32 1 15320000
+32 1 15340000
+32 1 15360000
+32 1 15380000
+32 1 15400000
+32 1 15420000
+32 1 15440000
+32 1 15460000
+32 1 15480000
+32 1 15500000
+32 1 15520000
+32 1 15540000
+32 1 15560000
+32 1 15580000
+32 1 15600000
+32 1 15620000
+32 1 15640000
+32 1 15660000
+32 1 15680000
+32 1 15700000
+32 1 15720000
+32 1 15740000
+32 1 15760000
+32 1 15780000
+32 1 15800000
+32 1 15820000
+32 1 15840000
+32 1 15860000
+32 1 15880000
+32 1 15900000
+32 1 15920000
+32 1 15940000
+32 1 15960000
+32 1 15980000
+32 1 16000000
+32 1 16020000
+32 1 16040000
+32 1 16060000
+32 1 16080000
+32 1 16100000
+32 1 16120000
+32 1 16140000
+32 1 16160000
+32 1 16180000
+32 1 16200000
+32 1 16220000
+32 1 16240000
+32 1 16260000
+32 1 16280000
+32 1 16300000
+32 1 16320000
+32 1 16340000
+32 1 16360000
+32 1 16380000
+32 1 16400000
+32 1 16420000
+32 1 16440000
+32 1 16460000
+32 1 16480000
+32 1 16500000
+32 1 16520000
+32 1 16540000
+32 1 16560000
+32 1 16580000
+32 1 16600000
+32 1 16620000
+32 1 16640000
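Note: each row of the .info resource files added above appears to follow the pattern "<frame size in bytes> <flags> <timestamp in microseconds>"; the field meanings are inferred from the data, not stated anywhere in this patch. A minimal parsing sketch under that assumption:

    // Minimal sketch: read a Codec2 VTS ".info" file, assuming each line is
    // "<frameSize> <flags> <timestampUs>". Field meanings are inferred, not
    // documented in this patch.
    #include <cstdint>
    #include <fstream>
    #include <string>
    #include <vector>

    struct FrameInfo {
        int32_t size;        // encoded frame size in bytes (assumed)
        uint32_t flags;      // per-frame flags (assumed)
        int64_t timestampUs; // presentation timestamp in microseconds (assumed)
    };

    std::vector<FrameInfo> parseInfoFile(const std::string &path) {
        std::vector<FrameInfo> frames;
        std::ifstream in(path);
        FrameInfo f;
        while (in >> f.size >> f.flags >> f.timestampUs) {
            frames.push_back(f);
        }
        return frames;
    }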
diff --git a/media/codec2/hidl/1.0/vts/video/VtsHidlC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/video/VtsHidlC2V1_0TargetVideoDecTest.cpp
index 9a42d72..8cbb7a7 100644
--- a/media/codec2/hidl/1.0/vts/video/VtsHidlC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/video/VtsHidlC2V1_0TargetVideoDecTest.cpp
@@ -421,17 +421,21 @@
ASSERT_EQ(mDisableTest, false);
}
+class Codec2VideoDecDecodeTest : public Codec2VideoDecHidlTest,
+ public ::testing::WithParamInterface<int32_t> {
+};
+
// Bitstream Test
-TEST_F(Codec2VideoDecHidlTest, DecodeTest) {
+TEST_P(Codec2VideoDecDecodeTest, DecodeTest) {
description("Decodes input file");
if (mDisableTest) return;
+ uint32_t streamIndex = GetParam();
char mURL[512], info[512];
std::ifstream eleStream, eleInfo;
-
strcpy(mURL, gEnv->getRes().c_str());
strcpy(info, gEnv->getRes().c_str());
- GetURLForComponent(mCompName, mURL, info);
+ GetURLForComponent(mCompName, mURL, info, streamIndex);
eleInfo.open(info);
ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
@@ -452,6 +456,9 @@
eleInfo.close();
ASSERT_EQ(mComponent->start(), C2_OK);
+ // Reset total no of frames received
+ mFramesReceived = 0;
+ mTimestampUs = 0;
ALOGV("mURL : %s", mURL);
eleStream.open(mURL, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
@@ -476,8 +483,11 @@
}
if (mTimestampDevTest) EXPECT_EQ(mTimestampUslist.empty(), true);
+ ASSERT_EQ(mComponent->stop(), C2_OK);
}
+INSTANTIATE_TEST_CASE_P(StreamIndexes, Codec2VideoDecDecodeTest,
+ ::testing::Values(0, 1));
// Adaptive Test
TEST_F(Codec2VideoDecHidlTest, AdaptiveDecodeTest) {
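The DecodeTest change above is the standard gtest value-parameterization pattern: derive from a parameterized fixture, read GetParam() inside the test body, and instantiate the suite once per stream index. A stripped-down sketch of the same pattern, with hypothetical names, independent of the Codec2 fixtures:

    // Illustrative value-parameterized gtest, mirroring the DecodeTest change.
    // Class and test names are hypothetical; only the gtest usage is the point.
    #include <gtest/gtest.h>

    class StreamIndexTest : public ::testing::TestWithParam<int32_t> {};

    TEST_P(StreamIndexTest, RunsOncePerIndex) {
        int32_t streamIndex = GetParam();  // 0 or 1, supplied by INSTANTIATE below
        EXPECT_TRUE(streamIndex == 0 || streamIndex == 1);
    }

    // Same macro the patch uses to run the body once for each listed value.
    INSTANTIATE_TEST_CASE_P(StreamIndexes, StreamIndexTest, ::testing::Values(0, 1));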
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index f36027e..5d0ccd2 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -517,6 +517,13 @@
caps->addProfileLevel(VP9Profile2, VP9Level5);
caps->addProfileLevel(VP9Profile2HDR, VP9Level5);
}
+ } else if (mediaType == MIMETYPE_VIDEO_AV1 && !encoder) {
+ caps->addProfileLevel(AV1Profile0, AV1Level2);
+ caps->addProfileLevel(AV1Profile0, AV1Level21);
+ caps->addProfileLevel(AV1Profile1, AV1Level22);
+ caps->addProfileLevel(AV1Profile1, AV1Level3);
+ caps->addProfileLevel(AV1Profile2, AV1Level31);
+ caps->addProfileLevel(AV1Profile2, AV1Level32);
} else if (mediaType == MIMETYPE_VIDEO_HEVC && !encoder) {
caps->addProfileLevel(HEVCProfileMain, HEVCMainTierLevel51);
caps->addProfileLevel(HEVCProfileMainStill, HEVCMainTierLevel51);
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 97e17e8..b1b33e1 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -326,6 +326,41 @@
{ C2Config::PROFILE_VP9_3, VP9Profile3 },
};
+ALookup<C2Config::level_t, int32_t> sAv1Levels = {
+ { C2Config::LEVEL_AV1_2, AV1Level2 },
+ { C2Config::LEVEL_AV1_2_1, AV1Level21 },
+ { C2Config::LEVEL_AV1_2_2, AV1Level22 },
+ { C2Config::LEVEL_AV1_2_3, AV1Level23 },
+ { C2Config::LEVEL_AV1_3, AV1Level3 },
+ { C2Config::LEVEL_AV1_3_1, AV1Level31 },
+ { C2Config::LEVEL_AV1_3_2, AV1Level32 },
+ { C2Config::LEVEL_AV1_3_3, AV1Level33 },
+ { C2Config::LEVEL_AV1_4, AV1Level4 },
+ { C2Config::LEVEL_AV1_4_1, AV1Level41 },
+ { C2Config::LEVEL_AV1_4_2, AV1Level42 },
+ { C2Config::LEVEL_AV1_4_3, AV1Level43 },
+ { C2Config::LEVEL_AV1_5, AV1Level5 },
+ { C2Config::LEVEL_AV1_5_1, AV1Level51 },
+ { C2Config::LEVEL_AV1_5_2, AV1Level52 },
+ { C2Config::LEVEL_AV1_5_3, AV1Level53 },
+ { C2Config::LEVEL_AV1_6, AV1Level6 },
+ { C2Config::LEVEL_AV1_6_1, AV1Level61 },
+ { C2Config::LEVEL_AV1_6_2, AV1Level62 },
+ { C2Config::LEVEL_AV1_6_3, AV1Level63 },
+ { C2Config::LEVEL_AV1_7, AV1Level7 },
+ { C2Config::LEVEL_AV1_7_1, AV1Level71 },
+ { C2Config::LEVEL_AV1_7_2, AV1Level72 },
+ { C2Config::LEVEL_AV1_7_3, AV1Level73 },
+};
+
+
+ALookup<C2Config::profile_t, int32_t> sAv1Profiles = {
+ { C2Config::PROFILE_AV1_0, AV1Profile0 },
+ { C2Config::PROFILE_AV1_1, AV1Profile1 },
+ { C2Config::PROFILE_AV1_2, AV1Profile2 },
+};
+
+
/**
* A helper that passes through vendor extension profile and level values.
*/
diff --git a/media/codec2/vndk/C2Config.cpp b/media/codec2/vndk/C2Config.cpp
index da12903..782bec5 100644
--- a/media/codec2/vndk/C2Config.cpp
+++ b/media/codec2/vndk/C2Config.cpp
@@ -139,6 +139,9 @@
{ "vp9-1", C2Config::PROFILE_VP9_1 },
{ "vp9-2", C2Config::PROFILE_VP9_2 },
{ "vp9-3", C2Config::PROFILE_VP9_3 },
+ { "av1-0", C2Config::PROFILE_AV1_0 },
+ { "av1-1", C2Config::PROFILE_AV1_1 },
+ { "av1-2", C2Config::PROFILE_AV1_2 },
}))
DEFINE_C2_ENUM_VALUE_CUSTOM_HELPER(C2Config::level_t, ({
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index 2d4e19e..a5dd203 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -821,6 +821,7 @@
emplace("c2.android.vp9.decoder", "libcodec2_soft_vp9dec.so");
emplace("c2.android.vp8.encoder", "libcodec2_soft_vp8enc.so");
emplace("c2.android.vp9.encoder", "libcodec2_soft_vp9enc.so");
+ emplace("c2.android.av1.decoder", "libcodec2_soft_av1dec.so");
emplace("c2.android.raw.decoder", "libcodec2_soft_rawdec.so");
emplace("c2.android.flac.decoder", "libcodec2_soft_flacdec.so");
emplace("c2.android.flac.encoder", "libcodec2_soft_flacenc.so");
diff --git a/media/extractors/mkv/MatroskaExtractor.cpp b/media/extractors/mkv/MatroskaExtractor.cpp
index 42a9c42..9f197b0 100644
--- a/media/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/extractors/mkv/MatroskaExtractor.cpp
@@ -1463,6 +1463,8 @@
AMediaFormat_setBuffer(meta,
AMEDIAFORMAT_KEY_CSD_0, codecPrivate, codecPrivateSize);
}
+ } else if (!strcmp("V_AV1", codecID)) {
+ AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
} else {
ALOGW("%s is not supported.", codecID);
continue;
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 2909a50..524db4e 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -80,7 +80,8 @@
Vector<SidxEntry> &sidx,
const Trex *trex,
off64_t firstMoofOffset,
- const sp<ItemTable> &itemTable);
+ const sp<ItemTable> &itemTable,
+ int32_t elstShiftStartTicks);
virtual status_t init();
virtual media_status_t start();
@@ -109,7 +110,7 @@
off64_t mFirstMoofOffset;
off64_t mCurrentMoofOffset;
off64_t mNextMoofOffset;
- uint32_t mCurrentTime;
+ uint32_t mCurrentTime; // in media timescale ticks
int32_t mLastParsedTrackId;
int32_t mTrackId;
@@ -143,6 +144,10 @@
bool mIsHeif;
sp<ItemTable> mItemTable;
+ // Start offset from composition time to presentation time.
+ // Support shift only for video tracks through mElstShiftStartTicks for now.
+ int32_t mElstShiftStartTicks;
+
size_t parseNALSize(const uint8_t *data) const;
status_t parseChunk(off64_t *offset);
status_t parseTrackFragmentHeader(off64_t offset, off64_t size);
@@ -338,6 +343,8 @@
case FOURCC('a', 'l', 'a', 'c'):
return MEDIA_MIMETYPE_AUDIO_ALAC;
+ case FOURCC('a', 'v', '0', '1'):
+ return MEDIA_MIMETYPE_VIDEO_AV1;
default:
ALOGW("Unknown fourcc: %c%c%c%c",
(fourcc >> 24) & 0xff,
@@ -459,11 +466,12 @@
[=] {
int64_t duration;
int32_t samplerate;
+ // Only for audio track.
if (track->has_elst && mHeaderTimescale != 0 &&
AMediaFormat_getInt64(track->meta, AMEDIAFORMAT_KEY_DURATION, &duration) &&
AMediaFormat_getInt32(track->meta, AMEDIAFORMAT_KEY_SAMPLE_RATE, &samplerate)) {
- // elst has to be processed only the first time this function is called
+ // Elst has to be processed only the first time this function is called.
track->has_elst = false;
if (track->elst_segment_duration > INT64_MAX) {
@@ -479,67 +487,72 @@
halfscale, mHeaderTimescale, track->timescale);
if ((uint32_t)samplerate != track->timescale){
- ALOGV("samplerate:%" PRId32 ", track->timescale and samplerate are different!", samplerate);
+ ALOGV("samplerate:%" PRId32 ", track->timescale and samplerate are different!",
+ samplerate);
}
-
- int64_t delay;
- // delay = ((media_time * samplerate) + halfscale) / track->timescale;
- if (__builtin_mul_overflow(media_time, samplerate, &delay) ||
- __builtin_add_overflow(delay, halfscale, &delay) ||
- (delay /= track->timescale, false) ||
- delay > INT32_MAX ||
- delay < INT32_MIN) {
- ALOGW("ignoring edit list with bogus values");
- return;
+ // Both delay and paddingsamples have to be set in order for either to be
+ // effective in the lower layers.
+ int64_t delay = 0;
+ if (media_time > 0) { // Gapless playback
+ // delay = ((media_time * samplerate) + halfscale) / track->timescale;
+ if (__builtin_mul_overflow(media_time, samplerate, &delay) ||
+ __builtin_add_overflow(delay, halfscale, &delay) ||
+ (delay /= track->timescale, false) ||
+ delay > INT32_MAX ||
+ delay < INT32_MIN) {
+ ALOGW("ignoring edit list with bogus values");
+ return;
+ }
}
ALOGV("delay = %" PRId64, delay);
AMediaFormat_setInt32(track->meta, AMEDIAFORMAT_KEY_ENCODER_DELAY, delay);
- int64_t scaled_duration;
- // scaled_duration = duration * mHeaderTimescale;
- if (__builtin_mul_overflow(duration, mHeaderTimescale, &scaled_duration)) {
- return;
- }
- ALOGV("scaled_duration = %" PRId64, scaled_duration);
-
- int64_t segment_end;
- int64_t padding;
- int64_t segment_duration_e6;
- int64_t media_time_scaled_e6;
- int64_t media_time_scaled;
- // padding = scaled_duration - ((segment_duration * 1000000) +
- // ((media_time * mHeaderTimeScale * 1000000)/track->timescale) )
- // segment_duration is based on timescale in movie header box(mdhd)
- // media_time is based on timescale track header/media timescale
- if (__builtin_mul_overflow(segment_duration, 1000000, &segment_duration_e6) ||
- __builtin_mul_overflow(media_time, mHeaderTimescale, &media_time_scaled) ||
- __builtin_mul_overflow(media_time_scaled, 1000000, &media_time_scaled_e6)) {
- return;
- }
- media_time_scaled_e6 /= track->timescale;
- if(__builtin_add_overflow(segment_duration_e6, media_time_scaled_e6, &segment_end) ||
- __builtin_sub_overflow(scaled_duration, segment_end, &padding)) {
- return;
- }
- ALOGV("segment_end = %" PRId64 ", padding = %" PRId64, segment_end, padding);
int64_t paddingsamples = 0;
- if (padding < 0) {
+ if (segment_duration > 0) {
+ int64_t scaled_duration;
+ // scaled_duration = duration * mHeaderTimescale;
+ if (__builtin_mul_overflow(duration, mHeaderTimescale, &scaled_duration)) {
+ return;
+ }
+ ALOGV("scaled_duration = %" PRId64, scaled_duration);
+
+ int64_t segment_end;
+ int64_t padding;
+ int64_t segment_duration_e6;
+ int64_t media_time_scaled_e6;
+ int64_t media_time_scaled;
+ // padding = scaled_duration - ((segment_duration * 1000000) +
+ // ((media_time * mHeaderTimescale * 1000000)/track->timescale) )
+ // segment_duration is based on timescale in movie header box(mdhd)
+ // media_time is based on timescale track header/media timescale
+ if (__builtin_mul_overflow(segment_duration, 1000000, &segment_duration_e6) ||
+ __builtin_mul_overflow(media_time, mHeaderTimescale, &media_time_scaled) ||
+ __builtin_mul_overflow(media_time_scaled, 1000000, &media_time_scaled_e6)) {
+ return;
+ }
+ media_time_scaled_e6 /= track->timescale;
+ if (__builtin_add_overflow(segment_duration_e6, media_time_scaled_e6, &segment_end)
+ || __builtin_sub_overflow(scaled_duration, segment_end, &padding)) {
+ return;
+ }
+ ALOGV("segment_end = %" PRId64 ", padding = %" PRId64, segment_end, padding);
// track duration from media header (which is what AMEDIAFORMAT_KEY_DURATION is)
// might be slightly shorter than the segment duration, which would make the
// padding negative. Clamp to zero.
- padding = 0;
- } else {
- int64_t halfscale_e6;
- int64_t timescale_e6;
- // paddingsamples = ((padding * samplerate) + (halfscale * 1000000))
- // / (mHeaderTimescale * 1000000);
- if (__builtin_mul_overflow(padding, samplerate, &paddingsamples) ||
- __builtin_mul_overflow(halfscale, 1000000, &halfscale_e6) ||
- __builtin_mul_overflow(mHeaderTimescale, 1000000, &timescale_e6) ||
- __builtin_add_overflow(paddingsamples, halfscale_e6, &paddingsamples) ||
- (paddingsamples /= timescale_e6, false) ||
- paddingsamples > INT32_MAX) {
- return;
+ if (padding > 0) {
+ int64_t halfscale_mht = mHeaderTimescale / 2;
+ int64_t halfscale_e6;
+ int64_t timescale_e6;
+ // paddingsamples = ((padding * samplerate) + (halfscale_mht * 1000000))
+ // / (mHeaderTimescale * 1000000);
+ if (__builtin_mul_overflow(padding, samplerate, &paddingsamples) ||
+ __builtin_mul_overflow(halfscale_mht, 1000000, &halfscale_e6) ||
+ __builtin_mul_overflow(mHeaderTimescale, 1000000, &timescale_e6) ||
+ __builtin_add_overflow(paddingsamples, halfscale_e6, &paddingsamples) ||
+ (paddingsamples /= timescale_e6, false) ||
+ paddingsamples > INT32_MAX) {
+ return;
+ }
}
}
ALOGV("paddingsamples = %" PRId64, paddingsamples);
@@ -668,6 +681,7 @@
track->includes_expensive_metadata = false;
track->skipTrack = false;
track->timescale = 1000000;
+ track->elstShiftStartTicks = 0;
}
}
@@ -965,6 +979,7 @@
AMEDIAFORMAT_KEY_MIME, "application/octet-stream");
track->has_elst = false;
track->subsample_encryption = false;
+ track->elstShiftStartTicks = 0;
}
off64_t stop_offset = *offset + chunk_size;
@@ -1092,6 +1107,7 @@
if (entry_count != 1) {
// we only support a single entry at the moment, for gapless playback
+ // or start offset
ALOGW("ignoring edit list with %d entries", entry_count);
} else {
off64_t entriesoffset = data_offset + 8;
@@ -1740,6 +1756,7 @@
case FOURCC('a', 'v', 'c', '1'):
case FOURCC('h', 'v', 'c', '1'):
case FOURCC('h', 'e', 'v', '1'):
+ case FOURCC('a', 'v', '0', '1'):
{
uint8_t buffer[78];
if (chunk_data_size < (ssize_t)sizeof(buffer)) {
@@ -3929,9 +3946,15 @@
}
}
+ if (track->has_elst and !strncasecmp("video/", mime, 6) and track->elst_media_time > 0) {
+ track->elstShiftStartTicks = track->elst_media_time;
+ ALOGV("video track->elstShiftStartTicks :%" PRId64, track->elst_media_time);
+ }
+
MPEG4Source *source = new MPEG4Source(
track->meta, mDataSource, track->timescale, track->sampleTable,
- mSidxEntries, trex, mMoofOffset, itemTable);
+ mSidxEntries, trex, mMoofOffset, itemTable,
+ track->elstShiftStartTicks);
if (source->init() != OK) {
delete source;
return NULL;
@@ -4332,7 +4355,8 @@
Vector<SidxEntry> &sidx,
const Trex *trex,
off64_t firstMoofOffset,
- const sp<ItemTable> &itemTable)
+ const sp<ItemTable> &itemTable,
+ int32_t elstShiftStartTicks)
: mFormat(format),
mDataSource(dataSource),
mTimescale(timeScale),
@@ -4360,7 +4384,8 @@
mBuffer(NULL),
mSrcBuffer(NULL),
mIsHeif(itemTable != NULL),
- mItemTable(itemTable) {
+ mItemTable(itemTable),
+ mElstShiftStartTicks(elstShiftStartTicks) {
memset(&mTrackFragmentHeaderInfo, 0, sizeof(mTrackFragmentHeaderInfo));
@@ -4445,11 +4470,31 @@
}
status_t MPEG4Source::init() {
+ status_t err = OK;
+ const char *mime;
+ CHECK(AMediaFormat_getString(mFormat, AMEDIAFORMAT_KEY_MIME, &mime));
if (mFirstMoofOffset != 0) {
off64_t offset = mFirstMoofOffset;
- return parseChunk(&offset);
+ err = parseChunk(&offset);
+ if (err == OK && !strncasecmp("video/", mime, 6)
+ && !mCurrentSamples.isEmpty()) {
+ // Start offset should be less than or equal to the composition time of the first sample.
+ // ISO : sample_composition_time_offset, version 0 (unsigned) for major brands.
+ mElstShiftStartTicks = std::min(mElstShiftStartTicks,
+ (*mCurrentSamples.begin()).compositionOffset);
+ }
+ return err;
}
- return OK;
+
+ if (!strncasecmp("video/", mime, 6)) {
+ uint32_t firstSampleCTS = 0;
+ err = mSampleTable->getMetaDataForSample(0, NULL, NULL, &firstSampleCTS);
+ // Start offset should be less than or equal to the composition time of the first sample.
+ // The composition timestamp of the first sample cannot be negative.
+ mElstShiftStartTicks = std::min(mElstShiftStartTicks, (int32_t)firstSampleCTS);
+ }
+
+ return err;
}
MPEG4Source::~MPEG4Source() {
@@ -4990,7 +5035,7 @@
status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) {
- ALOGV("MPEG4Extractor::parseTrackFragmentRun");
+ ALOGV("MPEG4Source::parseTrackFragmentRun");
if (size < 8) {
return -EINVAL;
}
@@ -5132,10 +5177,10 @@
}
ALOGV("adding sample %d at offset 0x%08" PRIx64 ", size %u, duration %u, "
- " flags 0x%08x", i + 1,
+ " flags 0x%08x ctsOffset %" PRIu32, i + 1,
dataOffset, sampleSize, sampleDuration,
(flags & kFirstSampleFlagsPresent) && i == 0
- ? firstSampleFlags : sampleFlags);
+ ? firstSampleFlags : sampleFlags, sampleCtsOffset);
tmp.offset = dataOffset;
tmp.size = sampleSize;
tmp.duration = sampleDuration;
@@ -5227,6 +5272,7 @@
int64_t seekTimeUs;
ReadOptions::SeekMode mode;
if (options && options->getSeekTo(&seekTimeUs, &mode)) {
+
if (mIsHeif) {
CHECK(mSampleTable == NULL);
CHECK(mItemTable != NULL);
@@ -5264,6 +5310,9 @@
CHECK(!"Should not be here.");
break;
}
+ if (mode != ReadOptions::SEEK_FRAME_INDEX) {
+ seekTimeUs += ((int64_t)mElstShiftStartTicks * 1000000) / mTimescale;
+ }
uint32_t sampleIndex;
status_t err = mSampleTable->findSampleAtTime(
@@ -5305,6 +5354,7 @@
if (mode == ReadOptions::SEEK_CLOSEST
|| mode == ReadOptions::SEEK_FRAME_INDEX) {
+ sampleTime -= mElstShiftStartTicks;
targetSampleTimeUs = (sampleTime * 1000000ll) / mTimescale;
}
@@ -5343,6 +5393,10 @@
if (!mIsHeif) {
err = mSampleTable->getMetaDataForSample(
mCurrentSampleIndex, &offset, &size, &cts, &isSyncSample, &stts);
+ if (err == OK) {
+ cts -= mElstShiftStartTicks;
+ }
+
} else {
err = mItemTable->getImageOffsetAndSize(
options && options->getSeekTo(&seekTimeUs, &mode) ?
@@ -5623,6 +5677,10 @@
ReadOptions::SeekMode mode;
if (options && options->getSeekTo(&seekTimeUs, &mode)) {
+ seekTimeUs += ((int64_t)mElstShiftStartTicks * 1000000) / mTimescale;
+ ALOGV("shifted seekTimeUs :%" PRId64 ", mElstShiftStartTicks:%" PRId32, seekTimeUs,
+ mElstShiftStartTicks);
+
int numSidxEntries = mSegments.size();
if (numSidxEntries != 0) {
int64_t totalTime = 0;
@@ -5709,6 +5767,8 @@
offset = smpl->offset;
size = smpl->size;
cts = mCurrentTime + smpl->compositionOffset;
+ cts -= mElstShiftStartTicks;
+
mCurrentTime += smpl->duration;
isSyncSample = (mCurrentSampleIndex == 0);
@@ -5960,6 +6020,7 @@
FOURCC('a', 'v', 'c', '1'),
FOURCC('h', 'v', 'c', '1'),
FOURCC('h', 'e', 'v', '1'),
+ FOURCC('a', 'v', '0', '1'),
FOURCC('3', 'g', 'p', '4'),
FOURCC('m', 'p', '4', '1'),
FOURCC('m', 'p', '4', '2'),
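To summarize the restructured edit-list math above: the encoder delay is computed only when media_time > 0 and the trailing padding only when segment_duration > 0, with every step guarded by __builtin_*_overflow. The plain-arithmetic sketch below restates the same two formulas without the overflow guards; variable roles mirror the patch, and halfscale is assumed to be the usual rounding term of half the track timescale.

    // Plain-arithmetic sketch of the gapless delay/padding formulas from the
    // hunk above; the real code wraps each step in overflow-checked builtins.
    #include <cstdint>

    struct GaplessInfo {
        int64_t delaySamples;    // leading samples to trim (encoder delay)
        int64_t paddingSamples;  // trailing samples to trim (encoder padding)
    };

    GaplessInfo computeGapless(int64_t mediaTime,        // elst media_time, track timescale ticks
                               int64_t segmentDuration,  // elst segment_duration, movie timescale ticks
                               int64_t durationUs,       // track duration (AMEDIAFORMAT_KEY_DURATION)
                               int64_t sampleRate,
                               int64_t trackTimescale,
                               int64_t headerTimescale) {
        GaplessInfo out = {0, 0};
        int64_t halfscale = trackTimescale / 2;  // rounding term (assumed)
        if (mediaTime > 0) {
            // delay = ((media_time * samplerate) + halfscale) / track->timescale
            out.delaySamples = (mediaTime * sampleRate + halfscale) / trackTimescale;
        }
        if (segmentDuration > 0) {
            int64_t scaledDuration = durationUs * headerTimescale;
            // segment_end = segment_duration*1e6 + media_time*headerTimescale*1e6/trackTimescale
            int64_t segmentEnd = segmentDuration * 1000000
                    + (mediaTime * headerTimescale * 1000000) / trackTimescale;
            int64_t padding = scaledDuration - segmentEnd;
            if (padding > 0) {
                int64_t halfscaleMht = headerTimescale / 2;
                out.paddingSamples = (padding * sampleRate + halfscaleMht * 1000000)
                        / (headerTimescale * 1000000);
            }
        }
        return out;
    }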
diff --git a/media/extractors/mp4/MPEG4Extractor.h b/media/extractors/mp4/MPEG4Extractor.h
index 79d5ff6dd..fadfb50 100644
--- a/media/extractors/mp4/MPEG4Extractor.h
+++ b/media/extractors/mp4/MPEG4Extractor.h
@@ -85,6 +85,7 @@
bool has_elst;
int64_t elst_media_time;
uint64_t elst_segment_duration;
+ int32_t elstShiftStartTicks;
bool subsample_encryption;
};
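The elstShiftStartTicks plumbing above makes video presentation start at zero despite an edit-list offset: seek requests are shifted forward into media time before the sample lookup, and reported composition timestamps are shifted back by the same number of ticks. A compact sketch of those two conversions (types simplified, example timescale of 90000 Hz):

    // The two timestamp conversions introduced by elstShiftStartTicks.
    #include <cstdint>

    // Seek path: move the requested position forward into media time, as done
    // before findSampleAtTime in the patch.
    int64_t toMediaSeekTimeUs(int64_t seekTimeUs, int32_t elstShiftStartTicks,
                              uint32_t timescale) {
        return seekTimeUs + ((int64_t)elstShiftStartTicks * 1000000) / timescale;
    }

    // Output path: move the sample's composition time back so playback starts
    // at zero, as done after getMetaDataForSample in the patch.
    int64_t toPresentationCts(int64_t cts, int32_t elstShiftStartTicks) {
        return cts - elstShiftStartTicks;
    }

    // Example: a 3003-tick shift at a 90000 Hz timescale adds 33366 us to a
    // seek request and subtracts 3003 ticks from every reported cts.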
diff --git a/media/libaudioclient/AudioPolicy.cpp b/media/libaudioclient/AudioPolicy.cpp
index d1f7525..9601d6d 100644
--- a/media/libaudioclient/AudioPolicy.cpp
+++ b/media/libaudioclient/AudioPolicy.cpp
@@ -22,6 +22,22 @@
namespace android {
//
+// AudioDeviceTypeAddr implementation
+//
+status_t AudioDeviceTypeAddr::readFromParcel(Parcel *parcel) {
+ mType = (audio_devices_t) parcel->readInt32();
+ mAddress = parcel->readString8();
+ return NO_ERROR;
+}
+
+status_t AudioDeviceTypeAddr::writeToParcel(Parcel *parcel) const {
+ parcel->writeInt32((int32_t) mType);
+ parcel->writeString8(mAddress);
+ return NO_ERROR;
+}
+
+
+//
// AudioMixMatchCriterion implementation
//
AudioMixMatchCriterion::AudioMixMatchCriterion(audio_usage_t usage,
@@ -40,11 +56,22 @@
status_t AudioMixMatchCriterion::readFromParcel(Parcel *parcel)
{
mRule = parcel->readInt32();
- if (mRule == RULE_MATCH_ATTRIBUTE_USAGE ||
- mRule == RULE_EXCLUDE_ATTRIBUTE_USAGE) {
- mValue.mUsage = (audio_usage_t)parcel->readInt32();
- } else {
- mValue.mSource = (audio_source_t)parcel->readInt32();
+ switch (mRule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ case RULE_EXCLUDE_ATTRIBUTE_USAGE:
+ mValue.mUsage = (audio_usage_t) parcel->readInt32();
+ break;
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ case RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET:
+ mValue.mSource = (audio_source_t) parcel->readInt32();
+ break;
+ case RULE_MATCH_UID:
+ case RULE_EXCLUDE_UID:
+ mValue.mUid = (uid_t) parcel->readInt32();
+ break;
+ default:
+ ALOGE("Trying to build AudioMixMatchCriterion from unknown rule %d", mRule);
+ return BAD_VALUE;
}
return NO_ERROR;
}
@@ -116,4 +143,11 @@
return NO_ERROR;
}
+void AudioMix::excludeUid(uid_t uid) const {
+ AudioMixMatchCriterion crit;
+ crit.mRule = RULE_EXCLUDE_UID;
+ crit.mValue.mUid = uid;
+ mCriteria.add(crit);
+}
+
} // namespace android
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index dc7531c..baeae8b 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -1236,6 +1236,19 @@
return aps->registerPolicyMixes(mixes, registration);
}
+status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->setUidDeviceAffinities(uid, devices);
+}
+
+status_t AudioSystem::removeUidDeviceAffinities(uid_t uid) {
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->removeUidDeviceAffinities(uid);
+}
+
status_t AudioSystem::startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId)
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index 0ce8b16..272415c 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -90,6 +90,8 @@
SET_ASSISTANT_UID,
SET_A11Y_SERVICES_UIDS,
IS_HAPTIC_PLAYBACK_SUPPORTED,
+ SET_UID_DEVICE_AFFINITY,
+ REMOVE_UID_DEVICE_AFFINITY,
};
#define MAX_ITEMS_PER_LIST 1024
@@ -990,6 +992,50 @@
return reply.readBool();
}
+ virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+
+ data.writeInt32((int32_t) uid);
+ size_t size = devices.size();
+ size_t sizePosition = data.dataPosition();
+ data.writeInt32((int32_t) size);
+ size_t finalSize = size;
+ for (size_t i = 0; i < size; i++) {
+ size_t position = data.dataPosition();
+ if (devices[i].writeToParcel(&data) != NO_ERROR) {
+ data.setDataPosition(position);
+ finalSize--;
+ }
+ }
+ if (size != finalSize) {
+ size_t position = data.dataPosition();
+ data.setDataPosition(sizePosition);
+ data.writeInt32(finalSize);
+ data.setDataPosition(position);
+ }
+
+ status_t status = remote()->transact(SET_UID_DEVICE_AFFINITY, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
+
+ virtual status_t removeUidDeviceAffinities(uid_t uid)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+
+ data.writeInt32((int32_t) uid);
+
+ status_t status = remote()->transact(REMOVE_UID_DEVICE_AFFINITY, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
};
IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
@@ -1048,7 +1094,9 @@
case GET_SURROUND_FORMATS:
case SET_SURROUND_FORMAT_ENABLED:
case SET_ASSISTANT_UID:
- case SET_A11Y_SERVICES_UIDS: {
+ case SET_A11Y_SERVICES_UIDS:
+ case SET_UID_DEVICE_AFFINITY:
+ case REMOVE_UID_DEVICE_AFFINITY: {
if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
__func__, code, IPCThreadState::self()->getCallingPid(),
@@ -1811,6 +1859,30 @@
CHECK_INTERFACE(IAudioPolicyService, data, reply);
bool isSupported = isHapticPlaybackSupported();
reply->writeBool(isSupported);
+ return NO_ERROR;
+ }
+
+ case SET_UID_DEVICE_AFFINITY: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ const uid_t uid = (uid_t) data.readInt32();
+ Vector<AudioDeviceTypeAddr> devices;
+ size_t size = (size_t)data.readInt32();
+ for (size_t i = 0; i < size; i++) {
+ AudioDeviceTypeAddr device;
+ if (device.readFromParcel((Parcel*)&data) == NO_ERROR) {
+ devices.add(device);
+ }
+ }
+ status_t status = setUidDeviceAffinities(uid, devices);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+
+ case REMOVE_UID_DEVICE_AFFINITY: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ const uid_t uid = (uid_t) data.readInt32();
+ status_t status = removeUidDeviceAffinities(uid);
+ reply->writeInt32(status);
return NO_ERROR;
}
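The proxy-side setUidDeviceAffinities above uses a common Parcel idiom for variable-length vectors: remember where the element count was written, write each element, rewind past any element that fails to serialize, and patch the count afterwards if it changed. A generic sketch of that idiom (the element writer is a placeholder):

    // Generic sketch of the "write count, then patch it" Parcel pattern used by
    // setUidDeviceAffinities above. writeItem is a placeholder callable that
    // returns NO_ERROR on success.
    #include <binder/Parcel.h>
    #include <vector>

    template <typename T, typename WriteFn>
    void writeFilteredVector(android::Parcel *data, const std::vector<T> &items,
                             WriteFn writeItem) {
        size_t sizePosition = data->dataPosition();
        data->writeInt32((int32_t)items.size());   // provisional element count
        size_t finalSize = items.size();
        for (const T &item : items) {
            size_t position = data->dataPosition();
            if (writeItem(data, item) != android::NO_ERROR) {
                data->setDataPosition(position);   // drop the half-written element
                finalSize--;
            }
        }
        if (finalSize != items.size()) {
            size_t position = data->dataPosition();
            data->setDataPosition(sizePosition);
            data->writeInt32((int32_t)finalSize);  // patch in the real count
            data->setDataPosition(position);
        }
    }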
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index 8da0069..96e1235 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -56,6 +56,19 @@
#define MAX_MIXES_PER_POLICY 10
#define MAX_CRITERIA_PER_MIX 20
+class AudioDeviceTypeAddr {
+public:
+ AudioDeviceTypeAddr() {}
+ AudioDeviceTypeAddr(audio_devices_t type, String8 address) :
+ mType(type), mAddress(address) {}
+
+ status_t readFromParcel(Parcel *parcel);
+ status_t writeToParcel(Parcel *parcel) const;
+
+ audio_devices_t mType;
+ String8 mAddress;
+};
+
class AudioMixMatchCriterion {
public:
AudioMixMatchCriterion() {}
@@ -87,7 +100,9 @@
status_t readFromParcel(Parcel *parcel);
status_t writeToParcel(Parcel *parcel) const;
- Vector<AudioMixMatchCriterion> mCriteria;
+ void excludeUid(uid_t uid) const;
+
+ mutable Vector<AudioMixMatchCriterion> mCriteria;
uint32_t mMixType;
audio_config_t mFormat;
uint32_t mRouteFlags;
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index b0da5b8..781e9df 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -325,6 +325,10 @@
static status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
+ static status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
+
+ static status_t removeUidDeviceAffinities(uid_t uid);
+
static status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId);
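A hypothetical call site for the two affinity entry points declared above: pin all audio from one uid to a single output device and later remove the restriction. The device type constant and address below are illustrative only, not taken from this patch.

    // Hypothetical usage of the new uid device-affinity API.
    #include <media/AudioPolicy.h>
    #include <media/AudioSystem.h>

    using namespace android;

    status_t pinUidToA2dp(uid_t uid) {
        Vector<AudioDeviceTypeAddr> devices;
        devices.add(AudioDeviceTypeAddr(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
                                        String8("00:11:22:33:44:55")));
        return AudioSystem::setUidDeviceAffinities(uid, devices);
    }

    status_t unpinUid(uid_t uid) {
        return AudioSystem::removeUidDeviceAffinities(uid);
    }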
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index 61f3b27..fb4fe93 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -167,6 +167,11 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration) = 0;
+ virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+ = 0;
+
+ virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
+
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId) = 0;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 8537608..7a9e843 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -277,18 +277,18 @@
HidlUtils::audioConfigFromHal(*config, &hidlConfig);
Result retval = Result::NOT_INITIALIZED;
#if MAJOR_VERSION == 2
- auto sourceMetadata = AudioSource(source);
+ auto sinkMetadata = AudioSource(source);
#elif MAJOR_VERSION >= 4
// TODO: correctly propagate the tracks sources and volume
// for now, only send the main source at 1dbfs
- SinkMetadata sourceMetadata = {{{AudioSource(source), 1}}};
+ SinkMetadata sinkMetadata = {{{ .source = AudioSource(source), .gain = 1 }}};
#endif
Return<void> ret = mDevice->openInputStream(
handle,
hidlDevice,
hidlConfig,
EnumBitfield<AudioInputFlag>(flags),
- sourceMetadata,
+ sinkMetadata,
[&](Result r, const sp<IStreamIn>& result, const AudioConfig& suggestedConfig) {
retval = r;
if (retval == Result::OK) {
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 2567b3b..86711de 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -302,14 +302,19 @@
if (audio_channel_mask_get_representation(channelMask)
== AUDIO_CHANNEL_REPRESENTATION_POSITION
&& DownmixerBufferProvider::isMultichannelCapable()) {
- mDownmixerBufferProvider.reset(new DownmixerBufferProvider(channelMask,
- mMixerChannelMask,
- AUDIO_FORMAT_PCM_16_BIT /* TODO: use mMixerInFormat, now only PCM 16 */,
- sampleRate, sessionId, kCopyBufferFrameCount));
- if (static_cast<DownmixerBufferProvider *>(mDownmixerBufferProvider.get())->isValid()) {
- mDownmixRequiresFormat = AUDIO_FORMAT_PCM_16_BIT; // PCM 16 bit required for downmix
- reconfigureBufferProviders();
- return NO_ERROR;
+
+ // Check if we have a float or int16 downmixer, in that order.
+ for (const audio_format_t format : { AUDIO_FORMAT_PCM_FLOAT, AUDIO_FORMAT_PCM_16_BIT }) {
+ mDownmixerBufferProvider.reset(new DownmixerBufferProvider(
+ channelMask, mMixerChannelMask,
+ format,
+ sampleRate, sessionId, kCopyBufferFrameCount));
+ if (static_cast<DownmixerBufferProvider *>(mDownmixerBufferProvider.get())
+ ->isValid()) {
+ mDownmixRequiresFormat = format;
+ reconfigureBufferProviders();
+ return NO_ERROR;
+ }
}
// mDownmixerBufferProvider reset below.
}
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index 227f2a1..9c82b1d 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -13,7 +13,7 @@
relative_install_path: "soundfx",
cflags: [
- //"-DBUILD_FLOAT",
+ "-DBUILD_FLOAT",
"-fvisibility=hidden",
"-Wall",
"-Werror",
diff --git a/media/libeffects/downmix/EffectDownmix.c b/media/libeffects/downmix/EffectDownmix.c
index b4a1d77..99ac4f5 100644
--- a/media/libeffects/downmix/EffectDownmix.c
+++ b/media/libeffects/downmix/EffectDownmix.c
@@ -31,10 +31,12 @@
// Do not submit with DOWNMIX_ALWAYS_USE_GENERIC_DOWNMIXER defined, strictly for testing
//#define DOWNMIX_ALWAYS_USE_GENERIC_DOWNMIXER 0
-#define MINUS_3_DB_IN_Q19_12 2896 // -3dB = 0.707 * 2^12 = 2896
-
#ifdef BUILD_FLOAT
#define MINUS_3_DB_IN_FLOAT 0.70710678f // -3dB = 0.70710678f
+const audio_format_t gTargetFormat = AUDIO_FORMAT_PCM_FLOAT;
+#else
+#define MINUS_3_DB_IN_Q19_12 2896 // -3dB = 0.707 * 2^12 = 2896
+const audio_format_t gTargetFormat = AUDIO_FORMAT_PCM_16_BIT;
#endif
// subset of possible audio_channel_mask_t values, and AUDIO_CHANNEL_OUT_* renamed to CHANNEL_MASK_*
@@ -703,7 +705,7 @@
memset(&pDwmModule->context, 0, sizeof(downmix_object_t));
pDwmModule->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
- pDwmModule->config.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+ pDwmModule->config.inputCfg.format = gTargetFormat;
pDwmModule->config.inputCfg.channels = AUDIO_CHANNEL_OUT_7POINT1;
pDwmModule->config.inputCfg.bufferProvider.getBuffer = NULL;
pDwmModule->config.inputCfg.bufferProvider.releaseBuffer = NULL;
@@ -715,7 +717,7 @@
// set a default value for the access mode, but should be overwritten by caller
pDwmModule->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
- pDwmModule->config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+ pDwmModule->config.outputCfg.format = gTargetFormat;
pDwmModule->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
pDwmModule->config.outputCfg.bufferProvider.getBuffer = NULL;
pDwmModule->config.outputCfg.bufferProvider.releaseBuffer = NULL;
@@ -762,8 +764,8 @@
// Check configuration compatibility with build options, and effect capabilities
if (pConfig->inputCfg.samplingRate != pConfig->outputCfg.samplingRate
|| pConfig->outputCfg.channels != DOWNMIX_OUTPUT_CHANNELS
- || pConfig->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT
- || pConfig->outputCfg.format != AUDIO_FORMAT_PCM_16_BIT) {
+ || pConfig->inputCfg.format != gTargetFormat
+ || pConfig->outputCfg.format != gTargetFormat) {
ALOGE("Downmix_Configure error: invalid config");
return -EINVAL;
}
@@ -1185,8 +1187,8 @@
if (accumulate) {
while (numFrames) {
// centerPlusLfeContrib = FC(-3dB) + LFE(-3dB)
- centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_Q19_12)
- + (pSrc[3] * MINUS_3_DB_IN_Q19_12);
+ centerPlusLfeContrib = (pSrc[2] * MINUS_3_DB_IN_FLOAT)
+ + (pSrc[3] * MINUS_3_DB_IN_FLOAT);
// FL + centerPlusLfeContrib + SL + RL
lt = pSrc[0] + centerPlusLfeContrib + pSrc[6] + pSrc[4];
// FR + centerPlusLfeContrib + SR + RR
@@ -1427,4 +1429,4 @@
}
return true;
}
-#endif
\ No newline at end of file
+#endif
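For the float build enabled above, the 7.1-to-stereo fold-down follows the pattern visible in the hunk: center and LFE are attenuated by -3 dB and added to both outputs, then the side and back channels are folded into their respective sides. A self-contained sketch of the non-accumulating case, with the channel order inferred from the source indices used above:

    // Sketch of the float 7.1 -> stereo fold-down matching the hunk above.
    // Channel order (FL FR FC LFE RL RR SL SR) is inferred from the indices
    // used in the patch; this is the non-accumulating variant.
    #include <cstddef>

    static constexpr float kMinus3dB = 0.70710678f;

    void downmix71ToStereoFloat(const float *src, float *dst, size_t numFrames) {
        while (numFrames--) {
            float centerPlusLfe = kMinus3dB * (src[2] + src[3]);  // FC + LFE at -3 dB
            dst[0] = src[0] + centerPlusLfe + src[6] + src[4];    // FL + SL + RL
            dst[1] = src[1] + centerPlusLfe + src[7] + src[5];    // FR + SR + RR
            src += 8;
            dst += 2;
        }
    }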
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index b5860de..0c6f8de 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -1544,7 +1544,7 @@
int VirtualizerIsConfigurationSupported(audio_channel_mask_t channelMask,
audio_devices_t deviceType) {
uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
- if (channelCount < 1 || channelCount > LVM_MAX_CHANNELS) {
+ if (channelCount < 1 || channelCount > FCC_2) { // TODO: update to 8 channels when supported.
return -EINVAL;
}
return VirtualizerIsDeviceSupported(deviceType);
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 6002e95..7d759e0 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -216,8 +216,7 @@
"libutils",
"libbinder",
"libsonivox",
- "libicuuc",
- "libicui18n",
+ "libandroidicu",
"libexpat",
"libcamera_client",
"libstagefright_foundation",
@@ -232,8 +231,7 @@
export_shared_lib_headers: [
"libaudioclient",
"libbinder",
- "libicuuc",
- "libicui18n",
+ "libandroidicu",
"libsonivox",
"libmedia_omx",
],
diff --git a/media/libmedia/NdkWrapper.cpp b/media/libmedia/NdkWrapper.cpp
index eed96e7..6dbc9b8 100644
--- a/media/libmedia/NdkWrapper.cpp
+++ b/media/libmedia/NdkWrapper.cpp
@@ -57,6 +57,10 @@
AMEDIAFORMAT_KEY_COLOR_STANDARD,
AMEDIAFORMAT_KEY_COLOR_TRANSFER,
AMEDIAFORMAT_KEY_COMPLEXITY,
+ AMEDIAFORMAT_KEY_CRYPTO_DEFAULT_IV_SIZE,
+ AMEDIAFORMAT_KEY_CRYPTO_ENCRYPTED_BYTE_BLOCK,
+ AMEDIAFORMAT_KEY_CRYPTO_MODE,
+ AMEDIAFORMAT_KEY_CRYPTO_SKIP_BYTE_BLOCK,
AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL,
AMEDIAFORMAT_KEY_GRID_COLUMNS,
AMEDIAFORMAT_KEY_GRID_ROWS,
@@ -102,6 +106,8 @@
};
static const char *AMediaFormatKeyGroupBuffer[] = {
+ AMEDIAFORMAT_KEY_CRYPTO_IV,
+ AMEDIAFORMAT_KEY_CRYPTO_KEY,
AMEDIAFORMAT_KEY_HDR_STATIC_INFO,
AMEDIAFORMAT_KEY_SEI,
AMEDIAFORMAT_KEY_MPEG_USER_DATA,
@@ -1243,7 +1249,11 @@
if (mAMediaExtractor == NULL) {
return NULL;
}
- return new AMediaCodecCryptoInfoWrapper(AMediaExtractor_getSampleCryptoInfo(mAMediaExtractor));
+ AMediaCodecCryptoInfo *cryptoInfo = AMediaExtractor_getSampleCryptoInfo(mAMediaExtractor);
+ if (cryptoInfo == NULL) {
+ return NULL;
+ }
+ return new AMediaCodecCryptoInfoWrapper(cryptoInfo);
}
AMediaDataSourceWrapper::AMediaDataSourceWrapper(const sp<DataSource> &dataSource)
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index caa0186..33be559 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -115,9 +115,6 @@
* The state machine of the media_recorder.
*/
enum media_recorder_states {
- // Error state.
- MEDIA_RECORDER_ERROR = 0,
-
// Recorder was just created.
MEDIA_RECORDER_IDLE = 1 << 0,
@@ -132,6 +129,9 @@
// Recording is in progress.
MEDIA_RECORDER_RECORDING = 1 << 4,
+
+ // Error state.
+ MEDIA_RECORDER_ERROR = 1 << 5,
};
// The "msg" code passed to the listener in notify.
diff --git a/media/libmediaextractor/Android.bp b/media/libmediaextractor/Android.bp
index 1aa1e13..4758cd6 100644
--- a/media/libmediaextractor/Android.bp
+++ b/media/libmediaextractor/Android.bp
@@ -27,7 +27,6 @@
"libbinder",
"libstagefright_foundation",
"libutils",
- "libcutils",
"liblog",
],
diff --git a/media/libmediametrics/Android.bp b/media/libmediametrics/Android.bp
index 8f8c478..e188e54 100644
--- a/media/libmediametrics/Android.bp
+++ b/media/libmediametrics/Android.bp
@@ -10,11 +10,9 @@
],
shared_libs: [
- "libbase",
"libbinder",
"libcutils",
"liblog",
- "libstagefright_foundation",
"libutils",
],
diff --git a/media/libmediaplayer2/Android.bp b/media/libmediaplayer2/Android.bp
index 6b43375..38f42dc 100644
--- a/media/libmediaplayer2/Android.bp
+++ b/media/libmediaplayer2/Android.bp
@@ -16,6 +16,7 @@
"libandroid_runtime",
"libaudioclient",
"libbinder",
+ "libbinder_ndk",
"libcutils",
"libgui",
"liblog",
@@ -91,18 +92,15 @@
"JMedia2HTTPConnection.cpp",
],
+ header_libs: [
+ "libbinder_headers",
+ "libnativehelper_header_only",
+ ],
+
shared_libs: [
- "android.hidl.token@1.0-utils",
"liblog",
- "libcutils",
"libutils",
- "libbinder",
- "libstagefright_foundation",
- "libmediaextractor",
"libdl",
- "libaudioutils",
- "libaudioclient",
- "libnativehelper",
],
include_dirs: [
diff --git a/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Interface.h b/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Interface.h
index 5e98589..0c8d016 100644
--- a/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Interface.h
+++ b/media/libmediaplayer2/include/mediaplayer2/MediaPlayer2Interface.h
@@ -30,7 +30,6 @@
#include <media/AudioSystem.h>
#include <media/AudioTimestamp.h>
#include <media/BufferingSettings.h>
-#include <media/Metadata.h>
#include <media/stagefright/foundation/AHandler.h>
#include <mediaplayer2/MediaPlayer2Types.h>
@@ -224,18 +223,6 @@
// @return OK if the call was successful.
virtual status_t invoke(const PlayerMessage &request, PlayerMessage *reply) = 0;
- // The Client in the MetadataPlayerService calls this method on
- // the native player to retrieve all or a subset of metadata.
- //
- // @param ids SortedList of metadata ID to be fetch. If empty, all
- // the known metadata should be returned.
- // @param[inout] records Parcel where the player appends its metadata.
- // @return OK if the call was successful.
- virtual status_t getMetadata(const media::Metadata::Filter& /* ids */,
- Parcel* /* records */) {
- return INVALID_OPERATION;
- };
-
void setListener(const sp<MediaPlayer2InterfaceListener> &listener) {
Mutex::Autolock autoLock(mListenerLock);
mListener = listener;
diff --git a/media/libmediaplayer2/include/mediaplayer2/mediaplayer2.h b/media/libmediaplayer2/include/mediaplayer2/mediaplayer2.h
index a945ffd..78865c4 100644
--- a/media/libmediaplayer2/include/mediaplayer2/mediaplayer2.h
+++ b/media/libmediaplayer2/include/mediaplayer2/mediaplayer2.h
@@ -20,7 +20,6 @@
#include <media/AVSyncSettings.h>
#include <media/AudioResamplerPublic.h>
#include <media/BufferingSettings.h>
-#include <media/Metadata.h>
#include <media/mediaplayer_common.h>
#include <mediaplayer2/MediaPlayer2Interface.h>
#include <mediaplayer2/MediaPlayer2Types.h>
@@ -55,7 +54,7 @@
public:
~MediaPlayer2();
- static sp<MediaPlayer2> Create(int32_t sessionId);
+ static sp<MediaPlayer2> Create(int32_t sessionId, jobject context);
static status_t DumpAll(int fd, const Vector<String16>& args);
void disconnect();
@@ -118,7 +117,7 @@
status_t dump(int fd, const Vector<String16>& args);
private:
- MediaPlayer2(int32_t sessionId);
+ MediaPlayer2(int32_t sessionId, jobject context);
bool init();
// Disconnect from the currently connected ANativeWindow.
@@ -154,6 +153,7 @@
int mVideoHeight;
int32_t mAudioSessionId;
sp<JObjectHolder> mAudioAttributes;
+ sp<JObjectHolder> mContext;
float mSendLevel;
sp<ANativeWindowWrapper> mConnectedWindow;
};
diff --git a/media/libmediaplayer2/mediaplayer2.cpp b/media/libmediaplayer2/mediaplayer2.cpp
index f432059..f75380c 100644
--- a/media/libmediaplayer2/mediaplayer2.cpp
+++ b/media/libmediaplayer2/mediaplayer2.cpp
@@ -18,14 +18,11 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaPlayer2Native"
-#include <binder/IServiceManager.h>
-#include <binder/IPCThreadState.h>
-
+#include <android/binder_ibinder.h>
#include <media/AudioSystem.h>
#include <media/DataSourceDesc.h>
#include <media/MediaAnalyticsItem.h>
#include <media/MemoryLeakTrackUtil.h>
-#include <media/Metadata.h>
#include <media/NdkWrapper.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooperRoster.h>
@@ -103,114 +100,110 @@
String8 result;
SortedVector< sp<MediaPlayer2> > players; //to serialise the mutex unlock & client destruction.
- if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
- snprintf(buffer, SIZE, "Permission Denial: can't dump MediaPlayer2\n");
- result.append(buffer);
+ {
+ Mutex::Autolock lock(sRecordLock);
+ ensureInit_l();
+ for (int i = 0, n = sPlayers->size(); i < n; ++i) {
+ sp<MediaPlayer2> p = (*sPlayers)[i].promote();
+ if (p != 0) {
+ p->dump(fd, args);
+ }
+ players.add(p);
+ }
+ }
+
+ result.append(" Files opened and/or mapped:\n");
+ snprintf(buffer, SIZE, "/proc/%d/maps", getpid());
+ FILE *f = fopen(buffer, "r");
+ if (f) {
+ while (!feof(f)) {
+ fgets(buffer, SIZE, f);
+ if (strstr(buffer, " /storage/") ||
+ strstr(buffer, " /system/sounds/") ||
+ strstr(buffer, " /data/") ||
+ strstr(buffer, " /system/media/")) {
+ result.append(" ");
+ result.append(buffer);
+ }
+ }
+ fclose(f);
} else {
- {
- Mutex::Autolock lock(sRecordLock);
- ensureInit_l();
- for (int i = 0, n = sPlayers->size(); i < n; ++i) {
- sp<MediaPlayer2> p = (*sPlayers)[i].promote();
- if (p != 0) {
- p->dump(fd, args);
- }
- players.add(p);
- }
- }
+ result.append("couldn't open ");
+ result.append(buffer);
+ result.append("\n");
+ }
- result.append(" Files opened and/or mapped:\n");
- snprintf(buffer, SIZE, "/proc/%d/maps", getpid());
- FILE *f = fopen(buffer, "r");
- if (f) {
- while (!feof(f)) {
- fgets(buffer, SIZE, f);
- if (strstr(buffer, " /storage/") ||
- strstr(buffer, " /system/sounds/") ||
- strstr(buffer, " /data/") ||
- strstr(buffer, " /system/media/")) {
- result.append(" ");
- result.append(buffer);
- }
- }
- fclose(f);
- } else {
- result.append("couldn't open ");
- result.append(buffer);
- result.append("\n");
- }
-
- snprintf(buffer, SIZE, "/proc/%d/fd", getpid());
- DIR *d = opendir(buffer);
- if (d) {
- struct dirent *ent;
- while((ent = readdir(d)) != NULL) {
- if (strcmp(ent->d_name,".") && strcmp(ent->d_name,"..")) {
- snprintf(buffer, SIZE, "/proc/%d/fd/%s", getpid(), ent->d_name);
- struct stat s;
- if (lstat(buffer, &s) == 0) {
- if ((s.st_mode & S_IFMT) == S_IFLNK) {
- char linkto[256];
- int len = readlink(buffer, linkto, sizeof(linkto));
- if(len > 0) {
- if(len > 255) {
- linkto[252] = '.';
- linkto[253] = '.';
- linkto[254] = '.';
- linkto[255] = 0;
- } else {
- linkto[len] = 0;
- }
- if (strstr(linkto, "/storage/") == linkto ||
- strstr(linkto, "/system/sounds/") == linkto ||
- strstr(linkto, "/data/") == linkto ||
- strstr(linkto, "/system/media/") == linkto) {
- result.append(" ");
- result.append(buffer);
- result.append(" -> ");
- result.append(linkto);
- result.append("\n");
- }
+ snprintf(buffer, SIZE, "/proc/%d/fd", getpid());
+ DIR *d = opendir(buffer);
+ if (d) {
+ struct dirent *ent;
+ while((ent = readdir(d)) != NULL) {
+ if (strcmp(ent->d_name,".") && strcmp(ent->d_name,"..")) {
+ snprintf(buffer, SIZE, "/proc/%d/fd/%s", getpid(), ent->d_name);
+ struct stat s;
+ if (lstat(buffer, &s) == 0) {
+ if ((s.st_mode & S_IFMT) == S_IFLNK) {
+ char linkto[256];
+ int len = readlink(buffer, linkto, sizeof(linkto));
+ if(len > 0) {
+ if(len > 255) {
+ linkto[252] = '.';
+ linkto[253] = '.';
+ linkto[254] = '.';
+ linkto[255] = 0;
+ } else {
+ linkto[len] = 0;
}
- } else {
- result.append(" unexpected type for ");
- result.append(buffer);
- result.append("\n");
+ if (strstr(linkto, "/storage/") == linkto ||
+ strstr(linkto, "/system/sounds/") == linkto ||
+ strstr(linkto, "/data/") == linkto ||
+ strstr(linkto, "/system/media/") == linkto) {
+ result.append(" ");
+ result.append(buffer);
+ result.append(" -> ");
+ result.append(linkto);
+ result.append("\n");
+ }
}
+ } else {
+ result.append(" unexpected type for ");
+ result.append(buffer);
+ result.append("\n");
}
}
}
- closedir(d);
- } else {
- result.append("couldn't open ");
- result.append(buffer);
- result.append("\n");
}
+ closedir(d);
+ } else {
+ result.append("couldn't open ");
+ result.append(buffer);
+ result.append("\n");
+ }
- gLooperRoster.dump(fd, args);
+ gLooperRoster.dump(fd, args);
- bool dumpMem = false;
- bool unreachableMemory = false;
- for (size_t i = 0; i < args.size(); i++) {
- if (args[i] == String16("-m")) {
- dumpMem = true;
- } else if (args[i] == String16("--unreachable")) {
- unreachableMemory = true;
- }
- }
- if (dumpMem) {
- result.append("\nDumping memory:\n");
- std::string s = dumpMemoryAddresses(100 /* limit */);
- result.append(s.c_str(), s.size());
- }
- if (unreachableMemory) {
- result.append("\nDumping unreachable memory:\n");
- // TODO - should limit be an argument parameter?
- // TODO: enable GetUnreachableMemoryString if it's part of stable API
- //std::string s = GetUnreachableMemoryString(true /* contents */, 10000 /* limit */);
- //result.append(s.c_str(), s.size());
+ bool dumpMem = false;
+ bool unreachableMemory = false;
+ for (size_t i = 0; i < args.size(); i++) {
+ if (args[i] == String16("-m")) {
+ dumpMem = true;
+ } else if (args[i] == String16("--unreachable")) {
+ unreachableMemory = true;
}
}
+ if (dumpMem) {
+ result.append("\nDumping memory:\n");
+ std::string s = dumpMemoryAddresses(100 /* limit */);
+ result.append(s.c_str(), s.size());
+ }
+ if (unreachableMemory) {
+ result.append("\nDumping unreachable memory:\n");
+ // TODO - should limit be an argument parameter?
+ // TODO: enable GetUnreachableMemoryString if it's part of stable API
+ //std::string s = GetUnreachableMemoryString(true /* contents */, 10000 /* limit */);
+ //result.append(s.c_str(), s.size());
+ }
+
write(fd, result.string(), result.size());
return NO_ERROR;
}
@@ -218,8 +211,8 @@
} // anonymous namespace
//static
-sp<MediaPlayer2> MediaPlayer2::Create(int32_t sessionId) {
- sp<MediaPlayer2> player = new MediaPlayer2(sessionId);
+sp<MediaPlayer2> MediaPlayer2::Create(int32_t sessionId, jobject context) {
+ sp<MediaPlayer2> player = new MediaPlayer2(sessionId, context);
if (!player->init()) {
return NULL;
@@ -236,13 +229,14 @@
return dumpPlayers(fd, args);
}
-MediaPlayer2::MediaPlayer2(int32_t sessionId) {
+MediaPlayer2::MediaPlayer2(int32_t sessionId, jobject context) {
ALOGV("constructor");
mSrcId = 0;
mLockThreadId = 0;
mListener = NULL;
mStreamType = AUDIO_STREAM_MUSIC;
mAudioAttributes = NULL;
+ mContext = new JObjectHolder(context);
mCurrentPosition = -1;
mCurrentSeekMode = MediaPlayer2SeekMode::SEEK_PREVIOUS_SYNC;
mSeekPosition = -1;
@@ -253,9 +247,8 @@
mVideoWidth = mVideoHeight = 0;
mSendLevel = 0;
- // TODO: get pid and uid from JAVA
- mPid = IPCThreadState::self()->getCallingPid();
- mUid = IPCThreadState::self()->getCallingUid();
+ mPid = AIBinder_getCallingPid();
+ mUid = AIBinder_getCallingUid();
mAudioOutput = new MediaPlayer2AudioOutput(sessionId, mUid, mPid, NULL /*attributes*/);
}
@@ -334,15 +327,15 @@
sp<MediaPlayer2Interface> oldPlayer;
- Mutex::Autolock _l(mLock);
{
+ Mutex::Autolock _l(mLock);
if (!((mCurrentState & MEDIA_PLAYER2_IDLE)
|| mCurrentState == MEDIA_PLAYER2_STATE_ERROR)) {
ALOGE("setDataSource called in wrong state %d", mCurrentState);
return INVALID_OPERATION;
}
- sp<MediaPlayer2Interface> player = new NuPlayer2Driver(mPid, mUid);
+ sp<MediaPlayer2Interface> player = new NuPlayer2Driver(mPid, mUid, mContext);
status_t err = player->initCheck();
if (err != NO_ERROR) {
ALOGE("Failed to create player object, initCheck failed(%d)", err);
diff --git a/media/libmediaplayer2/nuplayer2/GenericSource2.cpp b/media/libmediaplayer2/nuplayer2/GenericSource2.cpp
index f01361b..9552580 100644
--- a/media/libmediaplayer2/nuplayer2/GenericSource2.cpp
+++ b/media/libmediaplayer2/nuplayer2/GenericSource2.cpp
@@ -1286,6 +1286,11 @@
mVideoTimeUs = timeUs;
}
+ sp<AMediaCodecCryptoInfoWrapper> cryptInfo = extractor->getSampleCryptoInfo();
+ if (cryptInfo != NULL) {
+ meta->setObject("cryptInfo", cryptInfo);
+ }
+
queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
if (numBuffers == 0 && actualTimeUs != nullptr) {
diff --git a/media/libmediaplayer2/nuplayer2/JWakeLock.cpp b/media/libmediaplayer2/nuplayer2/JWakeLock.cpp
index c9a1071..983d77e 100644
--- a/media/libmediaplayer2/nuplayer2/JWakeLock.cpp
+++ b/media/libmediaplayer2/nuplayer2/JWakeLock.cpp
@@ -20,55 +20,50 @@
#include "JWakeLock.h"
-#include <binder/IPCThreadState.h>
-#include <binder/IServiceManager.h>
#include <media/stagefright/foundation/ADebug.h>
-#include <powermanager/PowerManager.h>
-
namespace android {
-//TODO: use JAVA PowerManager, instead of binder
-JWakeLock::JWakeLock() :
- mPowerManager(NULL),
- mWakeLockToken(NULL),
+JWakeLock::JWakeLock(const sp<JObjectHolder> &context) :
mWakeLockCount(0),
- mDeathRecipient(new PMDeathRecipient(this)) {}
+ mWakeLock(NULL),
+ mContext(context) {}
JWakeLock::~JWakeLock() {
- if (mPowerManager != NULL) {
- sp<IBinder> binder = IInterface::asBinder(mPowerManager);
- binder->unlinkToDeath(mDeathRecipient);
- }
- clearPowerManager();
+ clearJavaWakeLock();
}
bool JWakeLock::acquire() {
if (mWakeLockCount == 0) {
- CHECK(mWakeLockToken == NULL);
- if (mPowerManager == NULL) {
- // use checkService() to avoid blocking if power service is not up yet
- sp<IBinder> binder =
- defaultServiceManager()->checkService(String16("power"));
- if (binder == NULL) {
- ALOGW("could not get the power manager service");
- } else {
- mPowerManager = interface_cast<IPowerManager>(binder);
- binder->linkToDeath(mDeathRecipient);
- }
+ if (mWakeLock == NULL) {
+ JNIEnv *env = JavaVMHelper::getJNIEnv();
+ jclass jContextCls = env->FindClass("android/content/Context");
+ jclass jPowerManagerCls = env->FindClass("android/os/PowerManager");
+
+ jmethodID jGetSystemService = env->GetMethodID(jContextCls,
+ "getSystemService", "(Ljava/lang/String;)Ljava/lang/Object;");
+ jobject javaPowerManagerObj = env->CallObjectMethod(mContext->getJObject(),
+ jGetSystemService, env->NewStringUTF("power"));
+
+ jfieldID jPARTIAL_WAKE_LOCK = env->GetStaticFieldID(jPowerManagerCls,
+ "PARTIAL_WAKE_LOCK", "I");
+ jint PARTIAL_WAKE_LOCK = env->GetStaticIntField(jPowerManagerCls, jPARTIAL_WAKE_LOCK);
+
+ jmethodID jNewWakeLock = env->GetMethodID(jPowerManagerCls,
+ "newWakeLock", "(ILjava/lang/String;)Landroid/os/PowerManager$WakeLock;");
+ jobject javaWakeLock = env->CallObjectMethod(javaPowerManagerObj,
+ jNewWakeLock, PARTIAL_WAKE_LOCK, env->NewStringUTF("JWakeLock"));
+ mWakeLock = new JObjectHolder(javaWakeLock);
+ env->DeleteLocalRef(javaPowerManagerObj);
+ env->DeleteLocalRef(javaWakeLock);
}
- if (mPowerManager != NULL) {
- sp<IBinder> binder = new BBinder();
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- status_t status = mPowerManager->acquireWakeLock(
- POWERMANAGER_PARTIAL_WAKE_LOCK,
- binder, String16("JWakeLock"), String16("media"));
- IPCThreadState::self()->restoreCallingIdentity(token);
- if (status == NO_ERROR) {
- mWakeLockToken = binder;
- mWakeLockCount++;
- return true;
- }
+ if (mWakeLock != NULL) {
+ JNIEnv *env = JavaVMHelper::getJNIEnv();
+ jclass wakeLockCls = env->FindClass("android/os/PowerManager$WakeLock");
+ jmethodID jAcquire = env->GetMethodID(wakeLockCls, "acquire", "()V");
+ env->CallVoidMethod(mWakeLock->getJObject(), jAcquire);
+ mWakeLockCount++;
+ return true;
}
} else {
mWakeLockCount++;
@@ -86,25 +81,17 @@
mWakeLockCount = 1;
}
if (--mWakeLockCount == 0) {
- CHECK(mWakeLockToken != NULL);
- if (mPowerManager != NULL) {
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mPowerManager->releaseWakeLock(mWakeLockToken, 0 /* flags */);
- IPCThreadState::self()->restoreCallingIdentity(token);
+ if (mWakeLock != NULL) {
+ JNIEnv *env = JavaVMHelper::getJNIEnv();
+ jclass wakeLockCls = env->FindClass("android/os/PowerManager$WakeLock");
+ jmethodID jRelease = env->GetMethodID(wakeLockCls, "release", "()V");
+ env->CallVoidMethod(mWakeLock->getJObject(), jRelease);
}
- mWakeLockToken.clear();
}
}
-void JWakeLock::clearPowerManager() {
+void JWakeLock::clearJavaWakeLock() {
release(true);
- mPowerManager.clear();
-}
-
-void JWakeLock::PMDeathRecipient::binderDied(const wp<IBinder>& who __unused) {
- if (mWakeLock != NULL) {
- mWakeLock->clearPowerManager();
- }
}
} // namespace android
diff --git a/media/libmediaplayer2/nuplayer2/JWakeLock.h b/media/libmediaplayer2/nuplayer2/JWakeLock.h
index eace87e..36c542e 100644
--- a/media/libmediaplayer2/nuplayer2/JWakeLock.h
+++ b/media/libmediaplayer2/nuplayer2/JWakeLock.h
@@ -18,7 +18,7 @@
#define J_WAKELOCK_H_
#include <media/stagefright/foundation/ABase.h>
-#include <powermanager/IPowerManager.h>
+#include <mediaplayer2/JObjectHolder.h>
#include <utils/RefBase.h>
namespace android {
@@ -26,7 +26,7 @@
class JWakeLock : public RefBase {
public:
- JWakeLock();
+ JWakeLock(const sp<JObjectHolder> &context);
// NOTE: acquire and release are not thread safe
@@ -37,28 +37,11 @@
virtual ~JWakeLock();
private:
- sp<IPowerManager> mPowerManager;
- sp<IBinder> mWakeLockToken;
- uint32_t mWakeLockCount;
+ uint32_t mWakeLockCount;
+ sp<JObjectHolder> mWakeLock;
+ const sp<JObjectHolder> mContext;
- class PMDeathRecipient : public IBinder::DeathRecipient {
- public:
- explicit PMDeathRecipient(JWakeLock *wakeLock) : mWakeLock(wakeLock) {}
- virtual ~PMDeathRecipient() {}
-
- // IBinder::DeathRecipient
- virtual void binderDied(const wp<IBinder> &who);
-
- private:
- PMDeathRecipient(const PMDeathRecipient&);
- PMDeathRecipient& operator= (const PMDeathRecipient&);
-
- JWakeLock *mWakeLock;
- };
-
- const sp<PMDeathRecipient> mDeathRecipient;
-
- void clearPowerManager();
+ void clearJavaWakeLock();
DISALLOW_EVIL_CONSTRUCTORS(JWakeLock);
};
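A minimal usage sketch (not from the patch), assuming a JNI environment is already attached: the wake lock is now backed by an android.os.PowerManager$WakeLock obtained through the Java Context, so the constructor takes a JObjectHolder-wrapped Context instead of reaching the power service over binder.

    #include <mediaplayer2/JObjectHolder.h>
    #include "JWakeLock.h"

    void holdWhileRendering(jobject javaContext) {
        android::sp<android::JObjectHolder> holder =
                new android::JObjectHolder(javaContext);
        android::sp<android::JWakeLock> wakeLock = new android::JWakeLock(holder);
        if (wakeLock->acquire()) {          // JNI call to PowerManager.WakeLock.acquire()
            // ... work that must keep the CPU awake; per the header comment,
            // acquire()/release() are reference counted but not thread safe ...
            wakeLock->release();
        }
    }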
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp
index 080d923..5da6e24 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2.cpp
@@ -209,7 +209,8 @@
////////////////////////////////////////////////////////////////////////////////
-NuPlayer2::NuPlayer2(pid_t pid, uid_t uid, const sp<MediaClock> &mediaClock)
+NuPlayer2::NuPlayer2(
+ pid_t pid, uid_t uid, const sp<MediaClock> &mediaClock, const sp<JObjectHolder> &context)
: mPID(pid),
mUID(uid),
mMediaClock(mediaClock),
@@ -240,7 +241,8 @@
mVideoDecoderError(false),
mPaused(false),
mPausedByClient(true),
- mPausedForBuffering(false) {
+ mPausedForBuffering(false),
+ mContext(context) {
CHECK(mediaClock != NULL);
clearFlushComplete();
}
@@ -1738,7 +1740,7 @@
sp<AMessage> notify = new AMessage(kWhatRendererNotify, this);
++mRendererGeneration;
notify->setInt32("generation", mRendererGeneration);
- mRenderer = new Renderer(mAudioSink, mMediaClock, notify, flags);
+ mRenderer = new Renderer(mAudioSink, mMediaClock, notify, mContext, flags);
mRendererLooper = new ALooper;
mRendererLooper->setName("NuPlayer2Renderer");
mRendererLooper->start(false, true, ANDROID_PRIORITY_AUDIO);
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2.h b/media/libmediaplayer2/nuplayer2/NuPlayer2.h
index fdc128f..798c725 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2.h
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2.h
@@ -22,6 +22,7 @@
#include <media/stagefright/foundation/AHandler.h>
#include <mediaplayer2/MediaPlayer2Interface.h>
+#include <mediaplayer2/JObjectHolder.h>
#include "mediaplayer2.pb.h"
@@ -42,7 +43,8 @@
struct NuPlayer2Driver;
struct NuPlayer2 : public AHandler {
- explicit NuPlayer2(pid_t pid, uid_t uid, const sp<MediaClock> &mediaClock);
+ explicit NuPlayer2(pid_t pid, uid_t uid,
+ const sp<MediaClock> &mediaClock, const sp<JObjectHolder> &context);
void setDriver(const wp<NuPlayer2Driver> &driver);
@@ -272,6 +274,9 @@
// Pause state as requested by source (internally) due to buffering
bool mPausedForBuffering;
+ // Passed from JAVA
+ const sp<JObjectHolder> mContext;
+
inline const sp<DecoderBase> &getDecoder(bool audio) {
return audio ? mAudioDecoder : mVideoDecoder;
}
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp
index 49e3e3b..a5bd62d 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Decoder.cpp
@@ -1108,6 +1108,11 @@
} // buffer->data()
} // needsCopy
+ sp<RefBase> cryptInfoObj;
+ if (buffer->meta()->findObject("cryptInfo", &cryptInfoObj)) {
+ cryptInfo = static_cast<AMediaCodecCryptoInfoWrapper *>(cryptInfoObj.get());
+ }
+
status_t err;
if (cryptInfo != NULL) {
err = mCodec->queueSecureInputBuffer(
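Sketch of the handshake introduced by the two hunks above: GenericSource2 attaches the per-sample crypto info to the buffer's meta AMessage, and the decoder retrieves it before queueing the secure input buffer. Variable names are taken from the surrounding patch context.

    // Producer side (GenericSource2): attach the crypto info as a RefBase object.
    sp<AMediaCodecCryptoInfoWrapper> cryptInfo = extractor->getSampleCryptoInfo();
    if (cryptInfo != NULL) {
        buffer->meta()->setObject("cryptInfo", cryptInfo);
    }

    // Consumer side (NuPlayer2Decoder): pull it back out before queueing.
    sp<RefBase> obj;
    if (buffer->meta()->findObject("cryptInfo", &obj)) {
        sp<AMediaCodecCryptoInfoWrapper> info =
                static_cast<AMediaCodecCryptoInfoWrapper*>(obj.get());
        // info is then passed to queueSecureInputBuffer().
    }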
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
index 2dab2dd..56e9471 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
@@ -108,7 +108,7 @@
static const char *kPlayerRebufferingAtExit = "android.media.mediaplayer.rebufferExit";
-NuPlayer2Driver::NuPlayer2Driver(pid_t pid, uid_t uid)
+NuPlayer2Driver::NuPlayer2Driver(pid_t pid, uid_t uid, const sp<JObjectHolder> &context)
: mState(STATE_IDLE),
mAsyncResult(UNKNOWN_ERROR),
mSrcId(0),
@@ -123,7 +123,7 @@
mLooper(new ALooper),
mNuPlayer2Looper(new ALooper),
mMediaClock(new MediaClock),
- mPlayer(new NuPlayer2(pid, uid, mMediaClock)),
+ mPlayer(new NuPlayer2(pid, uid, mMediaClock, context)),
mPlayerFlags(0),
mAnalyticsItem(NULL),
mClientUid(uid),
@@ -662,33 +662,6 @@
return INVALID_OPERATION;
}
-status_t NuPlayer2Driver::getMetadata(
- const media::Metadata::Filter& /* ids */, Parcel *records) {
- Mutex::Autolock autoLock(mLock);
-
- using media::Metadata;
-
- Metadata meta(records);
-
- meta.appendBool(
- Metadata::kPauseAvailable,
- mPlayerFlags & NuPlayer2::Source::FLAG_CAN_PAUSE);
-
- meta.appendBool(
- Metadata::kSeekBackwardAvailable,
- mPlayerFlags & NuPlayer2::Source::FLAG_CAN_SEEK_BACKWARD);
-
- meta.appendBool(
- Metadata::kSeekForwardAvailable,
- mPlayerFlags & NuPlayer2::Source::FLAG_CAN_SEEK_FORWARD);
-
- meta.appendBool(
- Metadata::kSeekAvailable,
- mPlayerFlags & NuPlayer2::Source::FLAG_CAN_SEEK);
-
- return OK;
-}
-
void NuPlayer2Driver::notifyResetComplete(int64_t /* srcId */) {
ALOGD("notifyResetComplete(%p)", this);
Mutex::Autolock autoLock(mLock);
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h
index bb30c76..0ec3a4b 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.h
@@ -18,6 +18,7 @@
#include <media/MediaAnalyticsItem.h>
#include <media/stagefright/foundation/ABase.h>
+#include <mediaplayer2/JObjectHolder.h>
namespace android {
@@ -26,7 +27,7 @@
struct NuPlayer2;
struct NuPlayer2Driver : public MediaPlayer2Interface {
- explicit NuPlayer2Driver(pid_t pid, uid_t uid);
+ explicit NuPlayer2Driver(pid_t pid, uid_t uid, const sp<JObjectHolder> &context);
virtual status_t initCheck() override;
@@ -61,9 +62,6 @@
virtual status_t setParameter(int key, const Parcel &request) override;
virtual status_t getParameter(int key, Parcel *reply) override;
- virtual status_t getMetadata(
- const media::Metadata::Filter& ids, Parcel *records) override;
-
virtual status_t dump(int fd, const Vector<String16> &args) const override;
virtual void onMessageReceived(const sp<AMessage> &msg) override;
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
index e3c9b4b..3be7e36 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.cpp
@@ -26,6 +26,8 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
#include <media/stagefright/VideoFrameScheduler2.h>
@@ -86,10 +88,25 @@
// static
const int64_t NuPlayer2::Renderer::kMinPositionUpdateDelayUs = 100000LL;
+static audio_format_t constexpr audioFormatFromEncoding(int32_t pcmEncoding) {
+ switch (pcmEncoding) {
+ case kAudioEncodingPcmFloat:
+ return AUDIO_FORMAT_PCM_FLOAT;
+ case kAudioEncodingPcm16bit:
+ return AUDIO_FORMAT_PCM_16_BIT;
+ case kAudioEncodingPcm8bit:
+ return AUDIO_FORMAT_PCM_8_BIT; // TODO: do we want to support this?
+ default:
+ ALOGE("%s: Invalid encoding: %d", __func__, pcmEncoding);
+ return AUDIO_FORMAT_INVALID;
+ }
+}
+
NuPlayer2::Renderer::Renderer(
const sp<MediaPlayer2Interface::AudioSink> &sink,
const sp<MediaClock> &mediaClock,
const sp<AMessage> ¬ify,
+ const sp<JObjectHolder> &context,
uint32_t flags)
: mAudioSink(sink),
mUseVirtualAudioSink(false),
@@ -131,7 +148,7 @@
mTotalBuffersQueued(0),
mLastAudioBufferDrained(0),
mUseAudioCallback(false),
- mWakeLock(new JWakeLock()) {
+ mWakeLock(new JWakeLock(context)) {
CHECK(mediaClock != NULL);
mMediaClock->setPlaybackRate(mPlaybackSettings.mSpeed);
}
@@ -1269,10 +1286,10 @@
mAnchorTimeMediaUs = mediaTimeUs;
}
}
- mNextVideoTimeMediaUs = mediaTimeUs + 100000;
+ mNextVideoTimeMediaUs = mediaTimeUs;
if (!mHasAudio) {
// smooth out videos >= 10fps
- mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+ mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);
}
if (!mVideoSampleReceived || mediaTimeUs < mAudioFirstAnchorTimeMediaUs) {
@@ -1406,9 +1423,15 @@
mHasAudio = false;
if (mNextVideoTimeMediaUs >= 0) {
int64_t mediaUs = 0;
- mMediaClock->getMediaTime(ALooper::GetNowUs(), &mediaUs);
- if (mNextVideoTimeMediaUs > mediaUs) {
- mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+ int64_t nowUs = ALooper::GetNowUs();
+ status_t result = mMediaClock->getMediaTime(nowUs, &mediaUs);
+ if (result == OK) {
+ if (mNextVideoTimeMediaUs > mediaUs) {
+ mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+ }
+ } else {
+ mMediaClock->updateAnchor(
+ mNextVideoTimeMediaUs, nowUs, mNextVideoTimeMediaUs + 100000);
}
}
}
@@ -1864,8 +1887,13 @@
int32_t sampleRate;
CHECK(format->findInt32("sample-rate", &sampleRate));
+ // read pcm encoding from MediaCodec output format, if available
+ int32_t pcmEncoding;
+ audio_format_t audioFormat =
+ format->findInt32(KEY_PCM_ENCODING, &pcmEncoding) ?
+ audioFormatFromEncoding(pcmEncoding) : AUDIO_FORMAT_PCM_16_BIT;
+
if (offloadingAudio()) {
- audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
AString mime;
CHECK(format->findString("mime", &mime));
status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
@@ -1966,7 +1994,7 @@
const PcmInfo info = {
(audio_channel_mask_t)channelMask,
(audio_output_flags_t)pcmFlags,
- AUDIO_FORMAT_PCM_16_BIT, // TODO: change to audioFormat
+ audioFormat,
numChannels,
sampleRate
};
@@ -1998,7 +2026,7 @@
sampleRate,
numChannels,
(audio_channel_mask_t)channelMask,
- AUDIO_FORMAT_PCM_16_BIT,
+ audioFormat,
mUseAudioCallback ? &NuPlayer2::Renderer::AudioSinkCallback : NULL,
mUseAudioCallback ? this : NULL,
(audio_output_flags_t)pcmFlags,
@@ -2054,4 +2082,3 @@
}
} // namespace android
-
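Illustration of the new sink-format selection (a sketch, not renderer code verbatim): KEY_PCM_ENCODING ("pcm-encoding") mirrors the android.media.AudioFormat encoding constants, so a float decoder output now reaches the AudioSink instead of being forced to 16-bit PCM; audioFormatFromEncoding() is the helper added in the hunk above.

    static audio_format_t selectSinkFormat(const android::sp<android::AMessage>& format) {
        int32_t pcmEncoding;
        if (format->findInt32(KEY_PCM_ENCODING, &pcmEncoding)) {
            return audioFormatFromEncoding(pcmEncoding);  // e.g. float -> AUDIO_FORMAT_PCM_FLOAT
        }
        return AUDIO_FORMAT_PCM_16_BIT;                   // legacy default
    }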
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h
index 484d9b7..d065dee 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Renderer.h
@@ -20,6 +20,7 @@
#include <media/AudioResamplerPublic.h>
#include <media/AVSyncSettings.h>
+#include <mediaplayer2/JObjectHolder.h>
#include "NuPlayer2.h"
@@ -38,6 +39,7 @@
Renderer(const sp<MediaPlayer2Interface::AudioSink> &sink,
const sp<MediaClock> &mediaClock,
const sp<AMessage> ¬ify,
+ const sp<JObjectHolder> &context,
uint32_t flags = 0);
static size_t AudioSinkCallback(
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index e2aa8f8..1e85804 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -171,6 +171,7 @@
if (extractor == NULL) {
ALOGE("initFromDataSource, cannot create extractor!");
+ mLock.lock();
return UNKNOWN_ERROR;
}
@@ -179,6 +180,7 @@
size_t numtracks = extractor->countTracks();
if (numtracks == 0) {
ALOGE("initFromDataSource, source has no track!");
+ mLock.lock();
return UNKNOWN_ERROR;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index c8f6738..c990b2a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -1299,10 +1299,10 @@
mAnchorTimeMediaUs = mediaTimeUs;
}
}
- mNextVideoTimeMediaUs = mediaTimeUs + 100000;
+ mNextVideoTimeMediaUs = mediaTimeUs;
if (!mHasAudio) {
// smooth out videos >= 10fps
- mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+ mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);
}
if (!mVideoSampleReceived || mediaTimeUs < mAudioFirstAnchorTimeMediaUs) {
@@ -1436,9 +1436,15 @@
mHasAudio = false;
if (mNextVideoTimeMediaUs >= 0) {
int64_t mediaUs = 0;
- mMediaClock->getMediaTime(ALooper::GetNowUs(), &mediaUs);
- if (mNextVideoTimeMediaUs > mediaUs) {
- mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+ int64_t nowUs = ALooper::GetNowUs();
+ status_t result = mMediaClock->getMediaTime(nowUs, &mediaUs);
+ if (result == OK) {
+ if (mNextVideoTimeMediaUs > mediaUs) {
+ mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
+ }
+ } else {
+ mMediaClock->updateAnchor(
+ mNextVideoTimeMediaUs, nowUs, mNextVideoTimeMediaUs + 100000);
}
}
}
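Sketch of the fallback added in both renderers: when audio disappears before MediaClock has been anchored, getMediaTime() fails, so instead of silently skipping the update the renderer seeds the clock from the last queued video timestamp. The snippet below restates that path with the reasoning as comments.

    int64_t nowUs = ALooper::GetNowUs();
    int64_t mediaUs = 0;
    if (mMediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
        // No anchor yet: anchor media time at the next video frame and allow
        // rendering roughly 100 ms ahead of it, mirroring the audio-present case.
        mMediaClock->updateAnchor(mNextVideoTimeMediaUs, nowUs,
                                  mNextVideoTimeMediaUs + 100000);
    }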
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 6ad0417..2ea5286 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -2183,7 +2183,8 @@
err = setupG711Codec(encoder, sampleRate, numChannels);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
- int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1;
+ // numChannels needs to be set to properly communicate PCM values.
+ int32_t numChannels = 2, sampleRate = 44100, compressionLevel = -1;
if (encoder &&
(!msg->findInt32("channel-count", &numChannels)
|| !msg->findInt32("sample-rate", &sampleRate))) {
@@ -2209,7 +2210,7 @@
}
}
err = setupFlacCodec(
- encoder, numChannels, sampleRate, compressionLevel);
+ encoder, numChannels, sampleRate, compressionLevel, pcmEncoding);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
int32_t numChannels, sampleRate;
@@ -2320,12 +2321,16 @@
(void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
if (mConverter[kPortIndexInput] != NULL) {
+ ALOGD("%s: encoder %s input format pcm encoding converter from %d to %d",
+ __func__, mComponentName.c_str(), pcmEncoding, codecPcmEncoding);
mInputFormat->setInt32("pcm-encoding", pcmEncoding);
}
} else {
(void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
if (mConverter[kPortIndexOutput] != NULL) {
+ ALOGD("%s: decoder %s output format pcm encoding converter from %d to %d",
+ __func__, mComponentName.c_str(), codecPcmEncoding, pcmEncoding);
mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
}
}
@@ -3029,8 +3034,8 @@
}
status_t ACodec::setupFlacCodec(
- bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {
-
+ bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel,
+ AudioEncoding encoding) {
if (encoder) {
OMX_AUDIO_PARAM_FLACTYPE def;
InitOMXParams(&def);
@@ -3053,7 +3058,8 @@
return setupRawAudioFormat(
encoder ? kPortIndexInput : kPortIndexOutput,
sampleRate,
- numChannels);
+ numChannels,
+ encoding);
}
status_t ACodec::setupRawAudioFormat(
@@ -3111,6 +3117,7 @@
pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;
if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
+ ALOGE("%s: incorrect numChannels: %d", __func__, numChannels);
return OMX_ErrorNone;
}
@@ -4421,8 +4428,8 @@
h264type.nBFrames = mLatency == 0 ? 1 : std::min(1U, mLatency - 1);
// disable B-frames until MPEG4Writer can guarantee finalizing files with B-frames
- h264type.nRefFrames = 1;
- h264type.nBFrames = 0;
+ // h264type.nRefFrames = 1;
+ // h264type.nBFrames = 0;
h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
h264type.nAllowedPictureTypes =
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 3388ed9..249f2a4 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -146,7 +146,6 @@
"libdl",
"libdrmframework",
"libgui",
- "libion",
"liblog",
"libmedia",
"libmedia_omx",
@@ -154,7 +153,6 @@
"libmediaextractor",
"libmediametrics",
"libmediautils",
- "libnetd_client",
"libui",
"libutils",
"libmedia_helper",
@@ -162,7 +160,6 @@
"libstagefright_foundation",
"libstagefright_omx_utils",
"libstagefright_opus_common",
- "libstagefright_xmlparser",
"libRScpp",
"libhidlallocatorutils",
"libhidlbase",
@@ -171,8 +168,6 @@
"android.hidl.allocator@1.0",
"android.hardware.cas.native@1.0",
"android.hardware.media.omx@1.0",
- "android.hardware.graphics.allocator@2.0",
- "android.hardware.graphics.mapper@2.0",
],
static_libs: [
@@ -190,6 +185,7 @@
],
header_libs:[
+ "libstagefright_xmlparser_headers",
"media_ndk_headers",
],
diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp
index a6f0a0b..199b57b 100644
--- a/media/libstagefright/AudioPlayer.cpp
+++ b/media/libstagefright/AudioPlayer.cpp
@@ -118,6 +118,13 @@
}
sp<MetaData> format = mSource->getFormat();
+
+ if (format == NULL) {
+ ALOGE("No metadata b/118620871");
+ android_errorWriteLog(0x534e4554, "118620871");
+ return BAD_VALUE;
+ }
+
const char *mime;
bool success = format->findCString(kKeyMIMEType, &mime);
CHECK(success);
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 3ad82d9..2a819ad 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -19,6 +19,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"
+#include <media/hardware/HardwareAPI.h>
#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
@@ -172,8 +173,16 @@
ALOGV("signalBufferReturned");
Mutex::Autolock autoLock(mQuickStopLock);
if (mQuickStop && (buffer == mLastReadBufferCopy)) {
+ if (metaDataStoredInVideoBuffers() == kMetadataBufferTypeNativeHandleSource) {
+ native_handle_t* handle = (
+ (VideoNativeHandleMetadata*)(mLastReadBufferCopy->data()))->pHandle;
+ native_handle_close(handle);
+ native_handle_delete(handle);
+ }
buffer->setObserver(NULL);
buffer->release();
+ mLastReadBufferCopy = NULL;
+ mForceRead = true;
} else {
return CameraSource::signalBufferReturned(buffer);
}
@@ -182,7 +191,8 @@
void createMediaBufferCopy(
const MediaBufferBase& sourceBuffer,
int64_t frameTime,
- MediaBufferBase **newBuffer) {
+ MediaBufferBase **newBuffer,
+ int32_t videoBufferMode) {
ALOGV("createMediaBufferCopy");
size_t sourceSize = sourceBuffer.size();
@@ -192,13 +202,20 @@
memcpy((*newBuffer)->data(), sourcePointer, sourceSize);
(*newBuffer)->meta_data().setInt64(kKeyTime, frameTime);
+
+ if (videoBufferMode == kMetadataBufferTypeNativeHandleSource) {
+ ((VideoNativeHandleMetadata*)((*newBuffer)->data()))->pHandle =
+ native_handle_clone(
+ ((VideoNativeHandleMetadata*)(sourceBuffer.data()))->pHandle);
+ }
}
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBufferBase& sourceBuffer) {
ALOGV("fillLastReadBufferCopy");
int64_t frameTime;
CHECK(sourceBuffer.meta_data().findInt64(kKeyTime, &frameTime));
- createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
+ createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy,
+ metaDataStoredInVideoBuffers());
mLastReadBufferCopy->add_ref();
mLastReadBufferCopy->setObserver(this);
}
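Why the clone/close pair above is needed (sketch): a VideoNativeHandleMetadata buffer only carries a pointer to a native_handle_t, so a byte-for-byte copy of the buffer would alias the original handle. The copy therefore receives its own clone, and that clone must be both closed (its fds) and deleted (the struct) when the copy is released.

    #include <cutils/native_handle.h>

    native_handle_t* cloneForCopy(const native_handle_t* src) {
        return native_handle_clone(src);   // duplicates the file descriptors
    }

    void releaseClonedHandle(native_handle_t* h) {
        native_handle_close(h);            // closes the duplicated fds
        native_handle_delete(h);           // frees the handle struct itself
    }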
diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp
index 03e0d12..d118e8c 100644
--- a/media/libstagefright/HTTPBase.cpp
+++ b/media/libstagefright/HTTPBase.cpp
@@ -26,8 +26,6 @@
#include <cutils/properties.h>
#include <cutils/qtaguid.h>
-#include <NetdClient.h>
-
namespace android {
HTTPBase::HTTPBase()
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a48466a..b45eb03 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -137,6 +137,8 @@
private:
enum {
+ // TODO: need to increase this considering the bug
+ // about camera app not sending video frames continuously?
kMaxCttsOffsetTimeUs = 1000000LL, // 1 second
kSampleArraySize = 1000,
};
@@ -317,6 +319,7 @@
ListTableEntries<uint32_t, 1> *mStssTableEntries;
ListTableEntries<uint32_t, 2> *mSttsTableEntries;
ListTableEntries<uint32_t, 2> *mCttsTableEntries;
+ ListTableEntries<uint32_t, 3> *mElstTableEntries; // 3 columns: segDuration, mediaTime, mediaRate
int64_t mMinCttsOffsetTimeUs;
int64_t mMinCttsOffsetTicks;
@@ -416,6 +419,8 @@
// Duration is time scale based
void addOneSttsTableEntry(size_t sampleCount, int32_t timescaledDur);
void addOneCttsTableEntry(size_t sampleCount, int32_t timescaledDur);
+ void addOneElstTableEntry(uint32_t segmentDuration, int32_t mediaTime,
+ int16_t mediaRate, int16_t mediaRateFraction);
bool isTrackMalFormed() const;
void sendTrackSummary(bool hasMultipleTracks);
@@ -448,6 +453,7 @@
void writeVideoFourCCBox();
void writeMetadataFourCCBox();
void writeStblBox(bool use32BitOffset);
+ void writeEdtsBox();
Track(const Track &);
Track &operator=(const Track &);
@@ -483,6 +489,7 @@
mStartTimestampUs = -1ll;
mStartTimeOffsetMs = -1;
+ mStartTimeOffsetBFramesUs = 0;
mPaused = false;
mStarted = false;
mWriterThreadStarted = false;
@@ -1272,6 +1279,10 @@
// Adjust the global start time.
mStartTimestampUs += minCttsOffsetTimeUs - kMaxCttsOffsetTimeUs;
+ // Add mStartTimeOffsetBFramesUs(-ve or zero) to the duration of first entry in STTS.
+ mStartTimeOffsetBFramesUs = minCttsOffsetTimeUs - kMaxCttsOffsetTimeUs;
+ ALOGV("mStartTimeOffsetBFramesUs :%" PRId32, mStartTimeOffsetBFramesUs);
+
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
if (!(*it)->isHeic()) {
@@ -1747,6 +1758,11 @@
return mStartTimestampUs;
}
+int32_t MPEG4Writer::getStartTimeOffsetBFramesUs() {
+ Mutex::Autolock autoLock(mLock);
+ return mStartTimeOffsetBFramesUs;
+}
+
size_t MPEG4Writer::numTracks() {
Mutex::Autolock autolock(mLock);
return mTracks.size();
@@ -1776,6 +1792,7 @@
mStssTableEntries(new ListTableEntries<uint32_t, 1>(1000)),
mSttsTableEntries(new ListTableEntries<uint32_t, 2>(1000)),
mCttsTableEntries(new ListTableEntries<uint32_t, 2>(1000)),
+ mElstTableEntries(new ListTableEntries<uint32_t, 3>(3)), // Reserve 3 rows, a row has 3 items
mMinCttsOffsetTimeUs(0),
mMinCttsOffsetTicks(0),
mMaxCttsOffsetTicks(0),
@@ -1842,46 +1859,48 @@
// Clear all the internal states except the CSD data.
void MPEG4Writer::Track::resetInternal() {
- mDone = false;
- mPaused = false;
- mResumed = false;
- mStarted = false;
- mGotStartKeyFrame = false;
- mIsMalformed = false;
- mTrackDurationUs = 0;
- mEstimatedTrackSizeBytes = 0;
- mSamplesHaveSameSize = 0;
- if (mStszTableEntries != NULL) {
- delete mStszTableEntries;
- mStszTableEntries = new ListTableEntries<uint32_t, 1>(1000);
- }
-
- if (mStcoTableEntries != NULL) {
- delete mStcoTableEntries;
- mStcoTableEntries = new ListTableEntries<uint32_t, 1>(1000);
- }
- if (mCo64TableEntries != NULL) {
- delete mCo64TableEntries;
- mCo64TableEntries = new ListTableEntries<off64_t, 1>(1000);
- }
-
- if (mStscTableEntries != NULL) {
- delete mStscTableEntries;
- mStscTableEntries = new ListTableEntries<uint32_t, 3>(1000);
- }
- if (mStssTableEntries != NULL) {
- delete mStssTableEntries;
- mStssTableEntries = new ListTableEntries<uint32_t, 1>(1000);
- }
- if (mSttsTableEntries != NULL) {
- delete mSttsTableEntries;
- mSttsTableEntries = new ListTableEntries<uint32_t, 2>(1000);
- }
- if (mCttsTableEntries != NULL) {
- delete mCttsTableEntries;
- mCttsTableEntries = new ListTableEntries<uint32_t, 2>(1000);
- }
- mReachedEOS = false;
+ mDone = false;
+ mPaused = false;
+ mResumed = false;
+ mStarted = false;
+ mGotStartKeyFrame = false;
+ mIsMalformed = false;
+ mTrackDurationUs = 0;
+ mEstimatedTrackSizeBytes = 0;
+ mSamplesHaveSameSize = 0;
+ if (mStszTableEntries != NULL) {
+ delete mStszTableEntries;
+ mStszTableEntries = new ListTableEntries<uint32_t, 1>(1000);
+ }
+ if (mStcoTableEntries != NULL) {
+ delete mStcoTableEntries;
+ mStcoTableEntries = new ListTableEntries<uint32_t, 1>(1000);
+ }
+ if (mCo64TableEntries != NULL) {
+ delete mCo64TableEntries;
+ mCo64TableEntries = new ListTableEntries<off64_t, 1>(1000);
+ }
+ if (mStscTableEntries != NULL) {
+ delete mStscTableEntries;
+ mStscTableEntries = new ListTableEntries<uint32_t, 3>(1000);
+ }
+ if (mStssTableEntries != NULL) {
+ delete mStssTableEntries;
+ mStssTableEntries = new ListTableEntries<uint32_t, 1>(1000);
+ }
+ if (mSttsTableEntries != NULL) {
+ delete mSttsTableEntries;
+ mSttsTableEntries = new ListTableEntries<uint32_t, 2>(1000);
+ }
+ if (mCttsTableEntries != NULL) {
+ delete mCttsTableEntries;
+ mCttsTableEntries = new ListTableEntries<uint32_t, 2>(1000);
+ }
+ if (mElstTableEntries != NULL) {
+ delete mElstTableEntries;
+ mElstTableEntries = new ListTableEntries<uint32_t, 3>(3);
+ }
+ mReachedEOS = false;
}
void MPEG4Writer::Track::updateTrackSizeEstimate() {
@@ -1900,6 +1919,7 @@
mStssTableEntries->count() * 4 + // stss box size
mSttsTableEntries->count() * 8 + // stts box size
mCttsTableEntries->count() * 8 + // ctts box size
+ mElstTableEntries->count() * 12 + // elst box size
stcoBoxSizeBytes + // stco box size
stszBoxSizeBytes; // stsz box size
}
@@ -1936,6 +1956,16 @@
mCttsTableEntries->add(htonl(duration));
}
+void MPEG4Writer::Track::addOneElstTableEntry(
+ uint32_t segmentDuration, int32_t mediaTime, int16_t mediaRate, int16_t mediaRateFraction) {
+ ALOGV("segmentDuration:%u, mediaTime:%d", segmentDuration, mediaTime);
+ ALOGV("mediaRate :%" PRId16 ", mediaRateFraction :%" PRId16 ", Ored %u", mediaRate,
+ mediaRateFraction, ((((uint32_t)mediaRate) << 16) | ((uint32_t)mediaRateFraction)));
+ mElstTableEntries->add(htonl(segmentDuration));
+ mElstTableEntries->add(htonl(mediaTime));
+ mElstTableEntries->add(htonl((((uint32_t)mediaRate) << 16) | (uint32_t)mediaRateFraction));
+}
+
status_t MPEG4Writer::setNextFd(int fd) {
ALOGV("addNextFd");
Mutex::Autolock l(mLock);
@@ -2173,6 +2203,7 @@
delete mSttsTableEntries;
delete mStssTableEntries;
delete mCttsTableEntries;
+ delete mElstTableEntries;
mStszTableEntries = NULL;
mStcoTableEntries = NULL;
@@ -2181,6 +2212,7 @@
mSttsTableEntries = NULL;
mStssTableEntries = NULL;
mCttsTableEntries = NULL;
+ mElstTableEntries = NULL;
if (mCodecSpecificData != NULL) {
free(mCodecSpecificData);
@@ -3612,6 +3644,7 @@
uint32_t now = getMpeg4Time();
mOwner->beginBox("trak");
writeTkhdBox(now);
+ writeEdtsBox();
mOwner->beginBox("mdia");
writeMdhdBox(now);
writeHdlrBox();
@@ -3982,6 +4015,33 @@
mOwner->endBox();
}
+void MPEG4Writer::Track::writeEdtsBox() {
+ ALOGV("%s : getStartTimeOffsetTimeUs of track:%" PRId64 " us", getTrackType(),
+ getStartTimeOffsetTimeUs());
+
+ // Prepone video playback.
+ if (mMinCttsOffsetTicks != mMaxCttsOffsetTicks) {
+ int32_t mvhdTimeScale = mOwner->getTimeScale();
+ uint32_t tkhdDuration = (mTrackDurationUs * mvhdTimeScale + 5E5) / 1E6;
+ int64_t mediaTime = ((kMaxCttsOffsetTimeUs - getMinCttsOffsetTimeUs())
+ * mTimeScale + 5E5) / 1E6;
+ if (tkhdDuration > 0 && mediaTime > 0) {
+ addOneElstTableEntry(tkhdDuration, mediaTime, 1, 0);
+ }
+ }
+
+ if (mElstTableEntries->count() == 0) {
+ return;
+ }
+
+ mOwner->beginBox("edts");
+ mOwner->beginBox("elst");
+ mOwner->writeInt32(0); // version=0, flags=0
+ mElstTableEntries->write(mOwner);
+ mOwner->endBox(); // elst;
+ mOwner->endBox(); // edts
+}
+
void MPEG4Writer::Track::writeMdhdBox(uint32_t now) {
int64_t trakDurationUs = getDurationUs();
int64_t mdhdDuration = (trakDurationUs * mTimeScale + 5E5) / 1E6;
@@ -4118,7 +4178,9 @@
uint32_t duration;
CHECK(mSttsTableEntries->get(duration, 1));
duration = htonl(duration); // Back to host byte order
- mSttsTableEntries->set(htonl(duration + getStartTimeOffsetScaledTime()), 1);
+ int32_t startTimeOffsetScaled = (((getStartTimeOffsetTimeUs() +
+ mOwner->getStartTimeOffsetBFramesUs()) * mTimeScale) + 500000LL) / 1000000LL;
+ mSttsTableEntries->set(htonl((int32_t)duration + startTimeOffsetScaled), 1);
}
mSttsTableEntries->write(mOwner);
mOwner->endBox(); // stts
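Worked example with illustrative numbers: an elst entry stores the segment duration in movie timescale, the media time in track timescale, and a 16.16 fixed-point media rate packed as (rate << 16) | fraction, exactly as addOneElstTableEntry() does before byte-swapping.

    #include <cstdint>
    #include <cstdio>

    int main() {
        uint32_t segmentDuration = 90000;               // e.g. 1.5 s at mvhd timescale 60000
        int32_t  mediaTime       = 3003;                // CTTS offset in track timescale ticks
        int16_t  mediaRate = 1, mediaRateFraction = 0;  // normal 1.0x playback
        uint32_t rateWord =
                (((uint32_t)mediaRate) << 16) | (uint16_t)mediaRateFraction;
        printf("elst entry: %u %d 0x%08x\n", segmentDuration, mediaTime, rateWord);
        return 0;                                       // rateWord == 0x00010000
    }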
diff --git a/media/libstagefright/MediaTrack.cpp b/media/libstagefright/MediaTrack.cpp
index 1c1be30..f158491 100644
--- a/media/libstagefright/MediaTrack.cpp
+++ b/media/libstagefright/MediaTrack.cpp
@@ -120,7 +120,7 @@
if (format->mFormat->findInt64("timeUs", &val64)) {
meta.setInt64(kKeyTime, val64);
}
- if (format->mFormat->findInt64("duration", &val64)) {
+ if (format->mFormat->findInt64("durationUs", &val64)) {
meta.setInt64(kKeyDuration, val64);
}
if (format->mFormat->findInt64("target-time", &val64)) {
diff --git a/media/libstagefright/bqhelper/Android.bp b/media/libstagefright/bqhelper/Android.bp
index 81777f1..218fe15 100644
--- a/media/libstagefright/bqhelper/Android.bp
+++ b/media/libstagefright/bqhelper/Android.bp
@@ -25,7 +25,6 @@
],
shared_libs: [
- "libbase",
"libbinder",
"libcutils",
"libgui",
@@ -38,8 +37,6 @@
"libutils",
"android.hardware.graphics.bufferqueue@1.0",
-
- "libnativewindow", // TODO(b/62923479): use header library
],
export_shared_lib_headers: [
diff --git a/media/libstagefright/codecs/flac/dec/Android.bp b/media/libstagefright/codecs/flac/dec/Android.bp
index 1674cb2..3d4a44f 100644
--- a/media/libstagefright/codecs/flac/dec/Android.bp
+++ b/media/libstagefright/codecs/flac/dec/Android.bp
@@ -29,7 +29,6 @@
},
shared_libs: [
- "libcutils",
"liblog",
"libstagefright_flacdec",
"libstagefright_omx",
diff --git a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
index 4db0060..842a7ce 100644
--- a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
+++ b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
@@ -89,12 +89,12 @@
def.eDir = OMX_DirOutput;
def.nBufferCountMin = kNumOutputBuffers;
def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = 4096 * FLACDecoder::kMaxChannels;
+ def.nBufferSize = kNumSamplesPerFrame * FLACDecoder::kMaxChannels * sizeof(float);
def.bEnabled = OMX_TRUE;
def.bPopulated = OMX_FALSE;
def.eDomain = OMX_PortDomainAudio;
def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 2;
+ def.nBufferAlignment = sizeof(float);
def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
def.format.audio.pNativeRender = NULL;
@@ -173,7 +173,7 @@
flacParams->nChannels = mStreamInfo.channels;
flacParams->nSampleRate = mStreamInfo.sample_rate;
} else {
- flacParams->nChannels = 1;
+ flacParams->nChannels = 2;
flacParams->nSampleRate = 44100;
}
@@ -195,10 +195,10 @@
return OMX_ErrorBadPortIndex;
}
- pcmParams->eNumData = OMX_NumericalDataSigned;
+ pcmParams->eNumData = mNumericalData;
pcmParams->eEndian = OMX_EndianBig;
pcmParams->bInterleaved = OMX_TRUE;
- pcmParams->nBitPerSample = 16;
+ pcmParams->nBitPerSample = mBitsPerSample;
pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
@@ -211,7 +211,7 @@
pcmParams->nChannels = mStreamInfo.channels;
pcmParams->nSamplingRate = mStreamInfo.sample_rate;
} else {
- pcmParams->nChannels = 1;
+ pcmParams->nChannels = 2;
pcmParams->nSamplingRate = 44100;
}
@@ -281,6 +281,19 @@
return OMX_ErrorBadPortIndex;
}
+ if (pcmParams->eNumData == OMX_NumericalDataFloat && pcmParams->nBitPerSample == 32) {
+ mNumericalData = OMX_NumericalDataFloat;
+ mBitsPerSample = 32;
+ } else if (pcmParams->eNumData == OMX_NumericalDataSigned
+ && pcmParams->nBitPerSample == 16) {
+ mNumericalData = OMX_NumericalDataSigned;
+ mBitsPerSample = 16;
+ } else {
+ ALOGE("Invalid eNumData %d, nBitsPerSample %d",
+ pcmParams->eNumData, pcmParams->nBitPerSample);
+ return OMX_ErrorUndefined;
+ }
+
return OMX_ErrorNone;
}
@@ -301,11 +314,13 @@
List<BufferInfo *> &inQueue = getPortQueue(0);
List<BufferInfo *> &outQueue = getPortQueue(1);
+ const bool outputFloat = mNumericalData == OMX_NumericalDataFloat;
+
ALOGV("onQueueFilled %d/%d:", inQueue.empty(), outQueue.empty());
while ((!inQueue.empty() || mSawInputEOS) && !outQueue.empty() && !mFinishedDecoder) {
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
- int16_t *outBuffer = reinterpret_cast<int16_t *>(outHeader->pBuffer + outHeader->nOffset);
+ void *outBuffer = reinterpret_cast<void *>(outHeader->pBuffer + outHeader->nOffset);
size_t outBufferSize = outHeader->nAllocLen - outHeader->nOffset;
int64_t timeStamp = 0;
@@ -374,7 +389,7 @@
}
status_t decoderErr = mFLACDecoder->decodeOneFrame(
- inBuffer, inBufferLength, outBuffer, &outBufferSize);
+ inBuffer, inBufferLength, outBuffer, &outBufferSize, outputFloat);
if (decoderErr != OK) {
ALOGE("onQueueFilled: FLACDecoder decodeOneFrame returns error %d", decoderErr);
mSignalledError = true;
@@ -393,7 +408,9 @@
continue;
}
} else if (mSawInputEOS) {
- status_t decoderErr = mFLACDecoder->decodeOneFrame(NULL, 0, outBuffer, &outBufferSize);
+ status_t decoderErr = mFLACDecoder->decodeOneFrame(
+ nullptr /* inBuffer */, 0 /* inBufferLen */,
+ outBuffer, &outBufferSize, outputFloat);
mFinishedDecoder = true;
if (decoderErr != OK) {
ALOGE("onQueueFilled: FLACDecoder finish returns error %d", decoderErr);
@@ -456,7 +473,8 @@
mOutputPortSettingsChange = AWAITING_ENABLED;
PortInfo *info = editPortInfo(1 /* portIndex */);
if (!info->mDef.bEnabled) {
- info->mDef.nBufferSize = mStreamInfo.max_blocksize * mStreamInfo.channels * 2;
+ info->mDef.nBufferSize =
+ mStreamInfo.max_blocksize * mStreamInfo.channels * sizeof(float);
}
break;
}
diff --git a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
index b63f7ad..ba02074 100644
--- a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
+++ b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
@@ -45,10 +45,14 @@
virtual void onReset() override;
private:
+ static constexpr unsigned int kNumSamplesPerFrame = 2048; // adjusted based on stream.
+
enum {
kNumInputBuffers = 4,
kNumOutputBuffers = 4,
};
+ OMX_NUMERICALDATATYPE mNumericalData = OMX_NumericalDataSigned;
+ OMX_U32 mBitsPerSample = 16;
FLACDecoder *mFLACDecoder;
FLAC__StreamMetadata_StreamInfo mStreamInfo;
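Buffer-size arithmetic behind the new port definition (illustrative; kMaxChannels is assumed to be 8 as in FLACDecoder): the default output buffer must now hold a full frame of float samples rather than 16-bit ones.

    #include <cstddef>
    #include <cstdio>

    int main() {
        constexpr unsigned kNumSamplesPerFrame = 2048;   // default, adjusted per stream
        constexpr unsigned kMaxChannels = 8;             // FLACDecoder::kMaxChannels (assumed)
        constexpr size_t bufferSize = kNumSamplesPerFrame * kMaxChannels * sizeof(float);
        printf("default output buffer: %zu bytes\n", bufferSize);  // 65536
        return 0;
    }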
diff --git a/media/libstagefright/codecs/flac/enc/Android.bp b/media/libstagefright/codecs/flac/enc/Android.bp
index 9b696da..b32ab08 100644
--- a/media/libstagefright/codecs/flac/enc/Android.bp
+++ b/media/libstagefright/codecs/flac/enc/Android.bp
@@ -28,7 +28,10 @@
],
header_libs: ["libbase_headers"],
- static_libs: ["libFLAC"],
+ static_libs: [
+ "libaudioutils",
+ "libFLAC",
+ ],
name: "libstagefright_soft_flacenc",
vendor_available: true,
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
index 955f211..3add006 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
@@ -20,7 +20,7 @@
#include <utils/Log.h>
#include "SoftFlacEncoder.h"
-
+#include <audio_utils/primitives.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
@@ -75,7 +75,9 @@
}
if (!mSignalledError) { // no use allocating input buffer if we had an error above
- mInputBufferPcm32 = (FLAC__int32*) malloc(sizeof(FLAC__int32) * 2 * kMaxNumSamplesPerFrame);
+ // A pcm16 input buffer of the same byte size holds twice as many samples as a pcmFloat buffer.
+ mInputBufferPcm32 = (FLAC__int32*) malloc(
+ sizeof(FLAC__int32) * kNumSamplesPerFrame * kMaxChannels * 2);
if (mInputBufferPcm32 == NULL) {
ALOGE("SoftFlacEncoder::SoftFlacEncoder(name=%s) error allocating internal input buffer", name);
mSignalledError = true;
@@ -115,14 +117,14 @@
// configure input port of the encoder
def.nPortIndex = 0;
def.eDir = OMX_DirInput;
- def.nBufferCountMin = kNumBuffers;// TODO verify that 1 is enough
+ def.nBufferCountMin = kNumBuffers;
def.nBufferCountActual = def.nBufferCountMin;
def.nBufferSize = kMaxInputBufferSize;
def.bEnabled = OMX_TRUE;
def.bPopulated = OMX_FALSE;
def.eDomain = OMX_PortDomainAudio;
def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 2;
+ def.nBufferAlignment = sizeof(float);
def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
def.format.audio.pNativeRender = NULL;
@@ -134,7 +136,7 @@
// configure output port of the encoder
def.nPortIndex = 1;
def.eDir = OMX_DirOutput;
- def.nBufferCountMin = kNumBuffers;// TODO verify that 1 is enough
+ def.nBufferCountMin = kNumBuffers;
def.nBufferCountActual = def.nBufferCountMin;
def.nBufferSize = kMaxOutputBufferSize;
def.bEnabled = OMX_TRUE;
@@ -193,10 +195,10 @@
return OMX_ErrorUndefined;
}
- pcmParams->eNumData = OMX_NumericalDataSigned;
+ pcmParams->eNumData = mNumericalData;
pcmParams->eEndian = OMX_EndianBig;
pcmParams->bInterleaved = OMX_TRUE;
- pcmParams->nBitPerSample = 16;
+ pcmParams->nBitPerSample = mBitsPerSample;
pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
@@ -270,12 +272,26 @@
return OMX_ErrorUndefined;
}
- if (pcmParams->nChannels < 1 || pcmParams->nChannels > 2) {
+ if (pcmParams->nChannels < 1 || pcmParams->nChannels > kMaxChannels) {
return OMX_ErrorUndefined;
}
mNumChannels = pcmParams->nChannels;
mSampleRate = pcmParams->nSamplingRate;
+
+ if (pcmParams->eNumData == OMX_NumericalDataFloat && pcmParams->nBitPerSample == 32) {
+ mNumericalData = OMX_NumericalDataFloat;
+ mBitsPerSample = 32;
+ } else if (pcmParams->eNumData == OMX_NumericalDataSigned
+ && pcmParams->nBitPerSample == 16) {
+ mNumericalData = OMX_NumericalDataSigned;
+ mBitsPerSample = 16;
+ } else {
+ ALOGE("%s: invalid eNumData %d, nBitsPerSample %d",
+ __func__, pcmParams->eNumData, pcmParams->nBitPerSample);
+ return OMX_ErrorUndefined;
+ }
+
ALOGV("will encode %d channels at %dHz", mNumChannels, mSampleRate);
return configureEncoder();
@@ -356,6 +372,10 @@
List<BufferInfo *> &inQueue = getPortQueue(0);
List<BufferInfo *> &outQueue = getPortQueue(1);
+ const bool inputFloat = mNumericalData == OMX_NumericalDataFloat;
+ const size_t sampleSize = inputFloat ? sizeof(float) : sizeof(int16_t);
+ const size_t frameSize = sampleSize * mNumChannels;
+
FLAC__bool ok = true;
while ((!inQueue.empty() || mSawInputEOS) && !outQueue.empty() && !mSentOutputEOS) {
@@ -381,13 +401,21 @@
mEncoderReturnedNbBytes = 0;
mCurrentInputTimeStamp = inHeader->nTimeStamp;
- const unsigned nbInputFrames = inHeader->nFilledLen / (2 * mNumChannels);
- const unsigned nbInputSamples = inHeader->nFilledLen / 2;
- const OMX_S16 * const pcm16 = reinterpret_cast<OMX_S16 *>(inHeader->pBuffer);
+ const unsigned nbInputFrames = inHeader->nFilledLen / frameSize;
+ const unsigned nbInputSamples = inHeader->nFilledLen / sampleSize;
- CHECK_LE(nbInputSamples, 2 * kMaxNumSamplesPerFrame);
- for (unsigned i=0 ; i < nbInputSamples ; i++) {
- mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
+ if (inputFloat) {
+ CHECK_LE(nbInputSamples, kNumSamplesPerFrame * kMaxChannels);
+ const float * const pcmFloat = reinterpret_cast<float *>(inHeader->pBuffer);
+ memcpy_to_q8_23_from_float_with_clamp(
+ mInputBufferPcm32, pcmFloat, nbInputSamples);
+ } else {
+ // note nbInputSamples may be 2x as large for pcm16 data.
+ CHECK_LE(nbInputSamples, kNumSamplesPerFrame * kMaxChannels * 2);
+ const int16_t * const pcm16 = reinterpret_cast<int16_t *>(inHeader->pBuffer);
+ for (unsigned i = 0; i < nbInputSamples; ++i) {
+ mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
+ }
}
ALOGV(" about to encode %u samples per channel", nbInputFrames);
ok = FLAC__stream_encoder_process_interleaved(
@@ -526,10 +554,12 @@
return OMX_ErrorInvalidState;
}
+ const bool inputFloat = mNumericalData == OMX_NumericalDataFloat;
+ const int codecBitsPerSample = inputFloat ? 24 : 16;
FLAC__bool ok = true;
ok = ok && FLAC__stream_encoder_set_channels(mFlacStreamEncoder, mNumChannels);
ok = ok && FLAC__stream_encoder_set_sample_rate(mFlacStreamEncoder, mSampleRate);
- ok = ok && FLAC__stream_encoder_set_bits_per_sample(mFlacStreamEncoder, 16);
+ ok = ok && FLAC__stream_encoder_set_bits_per_sample(mFlacStreamEncoder, codecBitsPerSample);
ok = ok && FLAC__stream_encoder_set_compression_level(mFlacStreamEncoder,
(unsigned)mCompressionLevel);
ok = ok && FLAC__stream_encoder_set_verify(mFlacStreamEncoder, false);
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
index 64a6b1e..722fc13 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
@@ -45,8 +45,10 @@
private:
const unsigned int kNumBuffers = 2;
- const unsigned int kMaxNumSamplesPerFrame = 1152;
- const unsigned int kMaxInputBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t) * 2;
+ static constexpr unsigned int kMaxChannels = 2;
+ static constexpr unsigned int kNumSamplesPerFrame = 1152;
+ static constexpr unsigned int kMaxInputBufferSize =
+ kNumSamplesPerFrame * kMaxChannels * sizeof(float);
const unsigned int kMaxOutputBufferSize = 65536; //TODO check if this can be reduced
bool mSignalledError;
@@ -54,6 +56,8 @@
OMX_U32 mNumChannels;
OMX_U32 mSampleRate;
OMX_U32 mCompressionLevel;
+ OMX_NUMERICALDATATYPE mNumericalData = OMX_NumericalDataSigned;
+ OMX_U32 mBitsPerSample = 16;
// should the data received by the callback be written to the output port
bool mEncoderWriteData;
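Sketch of the float input path added above: float samples are clamped and converted to Q8.23 fixed point (24 significant bits) with the libaudioutils helper, which is why the FLAC stream encoder is configured with 24 bits per sample when the input is float.

    #include <audio_utils/primitives.h>
    #include <cstdint>

    void floatToFlacInput(int32_t* dst /* FLAC__int32 */, const float* src, size_t nSamples) {
        // Clamps to [-1.0, 1.0] and scales into Q8.23, matching the 24 bits/sample
        // passed to FLAC__stream_encoder_set_bits_per_sample() for float input.
        memcpy_to_q8_23_from_float_with_clamp(dst, src, nSamples);
    }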
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/rate_control.cpp b/media/libstagefright/codecs/m4v_h263/enc/src/rate_control.cpp
index 53149c1..ecc3217 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/src/rate_control.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/src/rate_control.cpp
@@ -377,15 +377,15 @@
/* In/out : Nr, B, Rr */
/* Return : Void */
/* Modified : */
+/* Input argument "video" is guaranteed non-null by caller */
/* ======================================================================== */
-
PV_STATUS RC_UpdateBuffer(VideoEncData *video, Int currLayer, Int num_skip)
{
rateControl *rc = video->rc[currLayer];
MultiPass *pMP = video->pMP[currLayer];
- if (video == NULL || rc == NULL || pMP == NULL)
+ if (rc == NULL || pMP == NULL)
return PV_FAIL;
rc->VBV_fullness -= (Int)(rc->bitrate / rc->framerate * num_skip); //rc[currLayer]->Rp;
@@ -524,6 +524,7 @@
/* In/out : rc->T */
/* Return : Void */
/* Modified : */
+/* Input argument "input" is guaranteed non-null by caller */
/* ================================================================================ */
void targetBitCalculation(void *input)
@@ -537,7 +538,7 @@
Int diff_counter_BTsrc, diff_counter_BTdst, prev_counter_diff, curr_counter_diff, bound;
/* BT = Bit Transfer, for pMP->counter_BTsrc, pMP->counter_BTdst */
- if (video == NULL || currVol == NULL || pMP == NULL || rc == NULL)
+ if (currVol == NULL || pMP == NULL || rc == NULL)
return;
/* some stuff about frame dropping remained here to be done because pMP cannot be inserted into updateRateControl()*/
@@ -693,6 +694,7 @@
/* In/out : rc->T and rc->Qc */
/* Return : Void */
/* Modified : */
+/* Input argument "input" is guaranteed non-null by caller */
/* ================================================================================ */
/* Mad based variable bit allocation + QP calculation with a new quadratic method */
@@ -708,7 +710,7 @@
float curr_mad, prev_mad, curr_RD, prev_RD, average_mad, aver_QP;
- if (video == NULL || currVol == NULL || pMP == NULL || rc == NULL)
+ if (currVol == NULL || pMP == NULL || rc == NULL)
return;
/* Mad based variable bit allocation */
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
index 942f850..c6dc326 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -430,6 +430,15 @@
return;
}
+ if (size < sizeof(int64_t)) {
+ // The 2nd and 3rd input buffer are expected to contain
+ // an int64_t (see below), so make sure we get at least
+ // that much. The first input buffer must contain 19 bytes,
+ // but that is checked already.
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
if (mInputBufferCount == 0) {
delete mHeader;
mHeader = new OpusHeader();
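Sketch of what the new size check protects: the second and third codec-config buffers each carry a single int64_t (codec delay and seek pre-roll, in nanoseconds), so anything shorter than 8 bytes must be rejected before the value is read.

    #include <cstdint>
    #include <cstring>

    bool readConfigInt64(const uint8_t* data, size_t size, int64_t* out) {
        if (size < sizeof(int64_t)) {
            return false;            // mirrors the OMX_ErrorUndefined path above
        }
        memcpy(out, data, sizeof(int64_t));
        return true;
    }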
diff --git a/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp b/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp
index f352fba..1d792fd 100644
--- a/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp
+++ b/media/libstagefright/codecs/xaacdec/SoftXAAC.cpp
@@ -110,15 +110,22 @@
{
initPorts();
- CHECK_EQ(initDecoder(), (status_t)OK);
+ mMemoryVec.clear();
+ mDrcMemoryVec.clear();
+
+ CHECK_EQ(initDecoder(), IA_NO_ERROR);
}
SoftXAAC::~SoftXAAC() {
- int errCode = deInitXAACDecoder();
- if (0 != errCode) {
- ALOGE("deInitXAACDecoder() failed %d", errCode);
+ IA_ERRORCODE err_code = deInitXAACDecoder();
+ if (IA_NO_ERROR != err_code) {
+ ALOGE("deInitXAACDecoder() failed %d", err_code);
}
+ err_code = deInitMPEGDDDrc();
+ if (IA_NO_ERROR != err_code) {
+ ALOGE("deInitMPEGDDDrc() failed %d", err_code);
+ }
mIsCodecInitialized = false;
mIsCodecConfigFlushRequired = false;
}
@@ -164,36 +171,16 @@
addPort(def);
}
-status_t SoftXAAC::initDecoder() {
- status_t status = UNKNOWN_ERROR;
-
+IA_ERRORCODE SoftXAAC::initDecoder() {
int ui_drc_val;
IA_ERRORCODE err_code = IA_NO_ERROR;
int loop = 0;
err_code = initXAACDecoder();
if (err_code != IA_NO_ERROR) {
- if (NULL == mXheaacCodecHandle) {
- ALOGE("AAC decoder handle is null");
- }
- if (NULL == mMpegDDrcHandle) {
- ALOGE("MPEG-D DRC decoder handle is null");
- }
- for (loop = 1; loop < mMallocCount; loop++) {
- if (mMemoryArray[loop] == NULL) {
- ALOGE(" memory allocation error %d\n", loop);
- break;
- }
- }
- ALOGE("initXAACDecoder Failed");
-
- for (loop = 0; loop < mMallocCount; loop++) {
- if (mMemoryArray[loop]) free(mMemoryArray[loop]);
- }
- mMallocCount = 0;
- return status;
- } else {
- status = OK;
+ ALOGE("initXAACDecoder failed with error %d", err_code);
+ deInitXAACDecoder();
+ return err_code;
}
mEndOfInput = false;
@@ -274,7 +261,7 @@
RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE");
#endif
- return status;
+ return IA_NO_ERROR;
}
OMX_ERRORTYPE SoftXAAC::internalGetParameter(OMX_INDEXTYPE index, OMX_PTR params) {
@@ -547,9 +534,6 @@
/* sample currently */
if (mIsCodecInitialized) {
numOutBytes = mOutputFrameLength * (mPcmWdSz / 8) * mNumChannels;
- if ((mPcmWdSz / 8) != 2) {
- ALOGE("XAAC assumes 2 bytes per sample! mPcmWdSz %d", mPcmWdSz);
- }
}
while ((!inQueue.empty() || mEndOfInput) && !outQueue.empty()) {
@@ -569,8 +553,8 @@
inBufferLength = inHeader->nFilledLen;
/* GA header configuration sent to Decoder! */
- int err_code = configXAACDecoder(inBuffer, inBufferLength);
- if (0 != err_code) {
+ IA_ERRORCODE err_code = configXAACDecoder(inBuffer, inBufferLength);
+ if (IA_NO_ERROR != err_code) {
ALOGW("configXAACDecoder err_code = %d", err_code);
mSignalledError = true;
notify(OMX_EventError, OMX_ErrorUndefined, err_code, NULL);
@@ -682,8 +666,8 @@
/* which should initialize the codec. Once this state is reached, call the */
/* decodeXAACStream API with same frame to decode! */
if (!mIsCodecInitialized) {
- int err_code = configXAACDecoder(inBuffer, inBufferLength);
- if (0 != err_code) {
+ IA_ERRORCODE err_code = configXAACDecoder(inBuffer, inBufferLength);
+ if (IA_NO_ERROR != err_code) {
ALOGW("configXAACDecoder Failed 2 err_code = %d", err_code);
mSignalledError = true;
notify(OMX_EventError, OMX_ErrorUndefined, err_code, NULL);
@@ -845,7 +829,7 @@
}
}
-int SoftXAAC::configflushDecode() {
+IA_ERRORCODE SoftXAAC::configflushDecode() {
IA_ERRORCODE err_code;
UWORD32 ui_init_done;
uint32_t inBufferLength = 8203;
@@ -871,16 +855,13 @@
"Found Codec with below config---\nsampFreq %d\nnumChannels %d\npcmWdSz "
"%d\nchannelMask %d\noutputFrameLength %d",
mSampFreq, mNumChannels, mPcmWdSz, mChannelMask, mOutputFrameLength);
- if (mNumChannels > MAX_CHANNEL_COUNT) {
- ALOGE(" No of channels are more than max channels\n");
- mIsCodecInitialized = false;
- } else
- mIsCodecInitialized = true;
+
+ mIsCodecInitialized = true;
}
- return err_code;
+ return IA_NO_ERROR;
}
-int SoftXAAC::drainDecoder() {
- return 0;
+IA_ERRORCODE SoftXAAC::drainDecoder() {
+ return IA_NO_ERROR;
}
void SoftXAAC::onReset() {
@@ -921,7 +902,7 @@
}
}
-int SoftXAAC::initXAACDecoder() {
+IA_ERRORCODE SoftXAAC::initXAACDecoder() {
LOOPIDX i;
/* Error code */
@@ -939,11 +920,11 @@
UWORD32 ui_proc_mem_tabs_size;
/* API size */
UWORD32 pui_api_size;
+ pVOID pv_alloc_ptr;
mInputBufferSize = 0;
mInputBuffer = 0;
mOutputBuffer = 0;
- mMallocCount = 0;
/* Process struct initing end */
/* ******************************************************************/
@@ -954,20 +935,13 @@
err_code = ixheaacd_dec_api(NULL, IA_API_CMD_GET_API_SIZE, 0, &pui_api_size);
RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_API_SIZE");
- if (mMallocCount == MAX_MEM_ALLOCS) {
- ALOGE("mMemoryArray is full");
- return IA_FATAL_ERROR;
- }
-
/* Allocate memory for API */
- mMemoryArray[mMallocCount] = memalign(4, pui_api_size);
- if (mMemoryArray[mMallocCount] == NULL) {
+ mXheaacCodecHandle = memalign(4, pui_api_size);
+ if (mXheaacCodecHandle == NULL) {
ALOGE("malloc for pui_api_size + 4 >> %d Failed", pui_api_size + 4);
return IA_FATAL_ERROR;
}
- /* Set API object with the memory allocated */
- mXheaacCodecHandle = (pVOID)((WORD8*)mMemoryArray[mMallocCount]);
- mMallocCount++;
+ mMemoryVec.push(mXheaacCodecHandle);
/* Set the config params to default values */
err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INIT,
@@ -979,23 +953,16 @@
RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_API_SIZE");
- if (mMallocCount == MAX_MEM_ALLOCS) {
- ALOGE("mMemoryArray is full");
- return IA_FATAL_ERROR;
- }
-
/* Allocate memory for API */
- mMemoryArray[mMallocCount] = memalign(4, pui_api_size);
+ mMpegDDrcHandle = memalign(4, pui_api_size);
- if (mMemoryArray[mMallocCount] == NULL) {
+ if (mMpegDDrcHandle == NULL) {
ALOGE("malloc for drc api structure Failed");
return IA_FATAL_ERROR;
}
- memset(mMemoryArray[mMallocCount], 0, pui_api_size);
+ mMemoryVec.push(mMpegDDrcHandle);
- /* Set API object with the memory allocated */
- mMpegDDrcHandle = (pVOID)((WORD8*)mMemoryArray[mMallocCount]);
- mMallocCount++;
+ memset(mMpegDDrcHandle, 0, pui_api_size);
/* Set the config params to default values */
err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT,
@@ -1021,23 +988,17 @@
&ui_proc_mem_tabs_size);
RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEMTABS_SIZE");
- if (mMallocCount == MAX_MEM_ALLOCS) {
- ALOGE("mMemoryArray is full");
- return IA_FATAL_ERROR;
- }
-
- mMemoryArray[mMallocCount] = memalign(4, ui_proc_mem_tabs_size);
- if (mMemoryArray[mMallocCount] == NULL) {
+ pv_alloc_ptr = memalign(4, ui_proc_mem_tabs_size);
+ if (pv_alloc_ptr == NULL) {
ALOGE("Malloc for size (ui_proc_mem_tabs_size + 4) = %d failed!",
ui_proc_mem_tabs_size + 4);
return IA_FATAL_ERROR;
}
- mMallocCount++;
- /* Set pointer for process memory tables */
- err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_MEMTABS_PTR, 0,
- (pVOID)((WORD8*)mMemoryArray[mMallocCount - 1]));
- RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEMTABS_PTR");
+ mMemoryVec.push(pv_alloc_ptr);
+ /* Set pointer for process memory tables */
+ err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_MEMTABS_PTR, 0, pv_alloc_ptr);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEMTABS_PTR");
/* initialize the API, post config, fill memory tables */
err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INIT,
@@ -1066,17 +1027,12 @@
err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_MEM_INFO_TYPE, i, &ui_type);
RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_TYPE");
- if (mMallocCount == MAX_MEM_ALLOCS) {
- ALOGE("mMemoryArray is full");
- return IA_FATAL_ERROR;
- }
- mMemoryArray[mMallocCount] = memalign(ui_alignment, ui_size);
- if (mMemoryArray[mMallocCount] == NULL) {
+ pv_alloc_ptr = memalign(ui_alignment, ui_size);
+ if (pv_alloc_ptr == NULL) {
ALOGE("Malloc for size (ui_size + ui_alignment) = %d failed!", ui_size + ui_alignment);
return IA_FATAL_ERROR;
}
- pv_alloc_ptr = (pVOID)((WORD8*)mMemoryArray[mMallocCount]);
- mMallocCount++;
+ mMemoryVec.push(pv_alloc_ptr);
/* Set the buffer pointer */
err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_MEM_PTR, i, pv_alloc_ptr);
@@ -1095,7 +1051,7 @@
return IA_NO_ERROR;
}
-int SoftXAAC::configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength) {
+IA_ERRORCODE SoftXAAC::configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength) {
UWORD32 ui_init_done;
int32_t i_bytes_consumed;
@@ -1154,13 +1110,73 @@
return IA_NO_ERROR;
}
-int SoftXAAC::configMPEGDDrc() {
+IA_ERRORCODE SoftXAAC::initMPEGDDDrc() {
+ IA_ERRORCODE err_code = IA_NO_ERROR;
+ int i;
+
+ for (i = 0; i < (WORD32)2; i++) {
+ WORD32 ui_size, ui_alignment, ui_type;
+ pVOID pv_alloc_ptr;
+
+ /* Get memory size */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_SIZE, i, &ui_size);
+
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_SIZE");
+
+ /* Get memory alignment */
+ err_code =
+ ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_ALIGNMENT, i, &ui_alignment);
+
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_ALIGNMENT");
+
+ /* Get memory type */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_TYPE, i, &ui_type);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_TYPE");
+
+ pv_alloc_ptr = memalign(4, ui_size);
+ if (pv_alloc_ptr == NULL) {
+ ALOGE(" Cannot create requested memory %d", ui_size);
+ return IA_FATAL_ERROR;
+ }
+ mDrcMemoryVec.push(pv_alloc_ptr);
+
+ /* Set the buffer pointer */
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, i, pv_alloc_ptr);
+
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
+ }
+
+ WORD32 ui_size;
+ ui_size = 8192 * 2;
+
+ mDrcInBuf = (int8_t*)memalign(4, ui_size);
+ if (mDrcInBuf == NULL) {
+ ALOGE(" Cannot create requested memory %d", ui_size);
+ return IA_FATAL_ERROR;
+ }
+ mDrcMemoryVec.push(mDrcInBuf);
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, 2, mDrcInBuf);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
+
+ mDrcOutBuf = (int8_t*)memalign(4, ui_size);
+ if (mDrcOutBuf == NULL) {
+ ALOGE(" Cannot create requested memory %d", ui_size);
+ return IA_FATAL_ERROR;
+ }
+ mDrcMemoryVec.push(mDrcOutBuf);
+
+ err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, 3, mDrcOutBuf);
+ RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
+
+ return IA_NO_ERROR;
+}
+IA_ERRORCODE SoftXAAC::configMPEGDDrc() {
IA_ERRORCODE err_code = IA_NO_ERROR;
int i_effect_type;
int i_loud_norm;
int i_target_loudness;
unsigned int i_sbr_mode;
- int n_mems;
int i;
#ifdef ENABLE_MPEG_D_DRC
@@ -1217,78 +1233,16 @@
RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS");
- for (i = 0; i < (WORD32)2; i++) {
- WORD32 ui_size, ui_alignment, ui_type;
- pVOID pv_alloc_ptr;
+ /* Free any memory that is allocated for MPEG D Drc so far */
+ deInitMPEGDDDrc();
- /* Get memory size */
- err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_SIZE, i, &ui_size);
-
- RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_SIZE");
-
- /* Get memory alignment */
- err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_ALIGNMENT, i,
- &ui_alignment);
-
- RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_ALIGNMENT");
-
- /* Get memory type */
- err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_TYPE, i, &ui_type);
- RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_TYPE");
- if (mMallocCount == MAX_MEM_ALLOCS) {
- ALOGE("mMemoryArray is full");
- return IA_FATAL_ERROR;
- }
-
- mMemoryArray[mMallocCount] = memalign(4, ui_size);
- if (mMemoryArray[mMallocCount] == NULL) {
- ALOGE(" Cannot create requested memory %d", ui_size);
- return IA_FATAL_ERROR;
- }
- pv_alloc_ptr = (pVOID)((WORD8*)mMemoryArray[mMallocCount]);
- mMallocCount++;
-
- /* Set the buffer pointer */
- err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, i, pv_alloc_ptr);
-
- RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
+ err_code = initMPEGDDDrc();
+ if (err_code != IA_NO_ERROR) {
+ ALOGE("initMPEGDDDrc failed with error %d", err_code);
+ deInitMPEGDDDrc();
+ return err_code;
}
- {
- WORD32 ui_size;
- ui_size = 8192 * 2;
- if (mMallocCount == MAX_MEM_ALLOCS) {
- ALOGE("mMemoryArray is full");
- return IA_FATAL_ERROR;
- }
- mMemoryArray[mMallocCount] = memalign(4, ui_size);
- if (mMemoryArray[mMallocCount] == NULL) {
- ALOGE(" Cannot create requested memory %d", ui_size);
- return IA_FATAL_ERROR;
- }
-
- mDrcInBuf = (int8_t*)mMemoryArray[mMallocCount];
- mMallocCount++;
- err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, 2,
- /*mOutputBuffer*/ mDrcInBuf);
- RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
-
- if (mMallocCount == MAX_MEM_ALLOCS) {
- ALOGE("mMemoryArray is full");
- return IA_FATAL_ERROR;
- }
- mMemoryArray[mMallocCount] = memalign(4, ui_size);
- if (mMemoryArray[mMallocCount] == NULL) {
- ALOGE(" Cannot create requested memory %d", ui_size);
- return IA_FATAL_ERROR;
- }
-
- mDrcOutBuf = (int8_t*)mMemoryArray[mMallocCount];
- mMallocCount++;
- err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, 3,
- /*mOutputBuffer*/ mDrcOutBuf);
- RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR");
- }
/* DRC buffers
buf[0] - contains extension element pay load loudness related
buf[1] - contains extension element pay load*/
@@ -1423,10 +1377,10 @@
}
#endif
- return err_code;
+ return IA_NO_ERROR;
}
-int SoftXAAC::decodeXAACStream(uint8_t* inBuffer, uint32_t inBufferLength, int32_t* bytesConsumed,
- int32_t* outBytes) {
+IA_ERRORCODE SoftXAAC::decodeXAACStream(uint8_t* inBuffer, uint32_t inBufferLength,
+ int32_t* bytesConsumed, int32_t* outBytes) {
if (mInputBufferSize < inBufferLength) {
ALOGE("Cannot config AAC, input buffer size %d < inBufferLength %d", mInputBufferSize,
inBufferLength);
@@ -1516,24 +1470,33 @@
memcpy(mOutputBuffer, mDrcOutBuf, *outBytes);
}
#endif
- return err_code;
+ return IA_NO_ERROR;
}
-int SoftXAAC::deInitXAACDecoder() {
+IA_ERRORCODE SoftXAAC::deInitXAACDecoder() {
ALOGI("deInitXAACDecoder");
/* Tell that the input is over in this buffer */
IA_ERRORCODE err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_INPUT_OVER, 0, NULL);
- RETURN_IF_FATAL(err_code, "IA_API_CMD_INPUT_OVER");
- for (int i = 0; i < mMallocCount; i++) {
- if (mMemoryArray[i]) free(mMemoryArray[i]);
+ /* Irrespective of error returned in IA_API_CMD_INPUT_OVER, free allocated memory */
+ for (void* buf : mMemoryVec) {
+ free(buf);
}
- mMallocCount = 0;
-
+ mMemoryVec.clear();
return err_code;
}
+IA_ERRORCODE SoftXAAC::deInitMPEGDDDrc() {
+ ALOGI("deInitMPEGDDDrc");
+
+ for (void* buf : mDrcMemoryVec) {
+ free(buf);
+ }
+ mDrcMemoryVec.clear();
+ return IA_NO_ERROR;
+}
+
IA_ERRORCODE SoftXAAC::getXAACStreamInfo() {
IA_ERRORCODE err_code = IA_NO_ERROR;
@@ -1546,11 +1509,19 @@
err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
IA_ENHAACPLUS_DEC_CONFIG_PARAM_NUM_CHANNELS, &mNumChannels);
RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_NUM_CHANNELS");
+ if (mNumChannels > MAX_CHANNEL_COUNT) {
+ ALOGE(" No of channels are more than max channels\n");
+ return IA_FATAL_ERROR;
+ }
/* PCM word size */
err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
IA_ENHAACPLUS_DEC_CONFIG_PARAM_PCM_WDSZ, &mPcmWdSz);
RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_PCM_WDSZ");
+ if ((mPcmWdSz / 8) != 2) {
+ ALOGE("Invalid Number of bytes per sample: %d, Expected is 2", mPcmWdSz);
+ return IA_FATAL_ERROR;
+ }
/* channel mask to tell the arrangement of channels in bit stream */
err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM,
diff --git a/media/libstagefright/codecs/xaacdec/SoftXAAC.h b/media/libstagefright/codecs/xaacdec/SoftXAAC.h
index 6176082..a62a797 100644
--- a/media/libstagefright/codecs/xaacdec/SoftXAAC.h
+++ b/media/libstagefright/codecs/xaacdec/SoftXAAC.h
@@ -33,8 +33,6 @@
#include "impd_apicmd_standards.h"
#include "impd_drc_config_params.h"
-#define MAX_MEM_ALLOCS 100
-
extern "C" IA_ERRORCODE ixheaacd_dec_api(pVOID p_ia_module_obj, WORD32 i_cmd, WORD32 i_idx,
pVOID pv_value);
extern "C" IA_ERRORCODE ia_drc_dec_api(pVOID p_ia_module_obj, WORD32 i_cmd, WORD32 i_idx,
@@ -80,18 +78,19 @@
enum { NONE, AWAITING_DISABLED, AWAITING_ENABLED } mOutputPortSettingsChange;
void initPorts();
- status_t initDecoder();
+ IA_ERRORCODE initDecoder();
bool isConfigured() const;
- int drainDecoder();
- int initXAACDecoder();
- int deInitXAACDecoder();
+ IA_ERRORCODE drainDecoder();
+ IA_ERRORCODE initXAACDecoder();
+ IA_ERRORCODE deInitXAACDecoder();
+ IA_ERRORCODE initMPEGDDDrc();
+ IA_ERRORCODE deInitMPEGDDDrc();
+ IA_ERRORCODE configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength);
+ IA_ERRORCODE configMPEGDDrc();
+ IA_ERRORCODE decodeXAACStream(uint8_t* inBuffer, uint32_t inBufferLength,
+ int32_t* bytesConsumed, int32_t* outBytes);
- int configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength);
- int configMPEGDDrc();
- int decodeXAACStream(uint8_t* inBuffer, uint32_t inBufferLength, int32_t* bytesConsumed,
- int32_t* outBytes);
-
- int configflushDecode();
+ IA_ERRORCODE configflushDecode();
IA_ERRORCODE getXAACStreamInfo();
IA_ERRORCODE setXAACDRCInfo(int32_t drcCut, int32_t drcBoost, int32_t drcRefLevel,
int32_t drcHeavyCompression
@@ -120,9 +119,8 @@
int8_t* mDrcOutBuf;
int32_t mMpegDDRCPresent;
int32_t mDRCFlag;
-
- void* mMemoryArray[MAX_MEM_ALLOCS];
- int32_t mMallocCount;
+ Vector<void*> mMemoryVec;
+ Vector<void*> mDrcMemoryVec;
DISALLOW_EVIL_CONSTRUCTORS(SoftXAAC);
};
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 86bd9d6..d136d9e 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -27,6 +27,7 @@
#include "libyuv/convert_from.h"
#include "libyuv/convert_argb.h"
+#include "libyuv/planar_functions.h"
#include "libyuv/video_common.h"
#include <functional>
#include <sys/time.h>
@@ -91,10 +92,17 @@
case OMX_COLOR_FormatCbYCrY:
case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
- case OMX_COLOR_FormatYUV420SemiPlanar:
case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
return mDstFormat == OMX_COLOR_Format16bitRGB565;
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+#ifdef USE_LIBYUV
+ return mDstFormat == OMX_COLOR_Format16bitRGB565
+ || mDstFormat == OMX_COLOR_Format32BitRGBA8888;
+#else
+ return mDstFormat == OMX_COLOR_Format16bitRGB565;
+#endif
+
default:
return false;
}
@@ -236,7 +244,11 @@
break;
case OMX_COLOR_FormatYUV420SemiPlanar:
+#ifdef USE_LIBYUV
+ err = convertYUV420SemiPlanarUseLibYUV(src, dst);
+#else
err = convertYUV420SemiPlanar(src, dst);
+#endif
break;
case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
@@ -365,6 +377,36 @@
return OK;
}
+status_t ColorConverter::convertYUV420SemiPlanarUseLibYUV(
+ const BitmapParams &src, const BitmapParams &dst) {
+ uint8_t *dst_ptr = (uint8_t *)dst.mBits
+ + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
+
+ const uint8_t *src_y =
+ (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
+
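+ // NV12: the interleaved UV plane starts right after the full-height Y plane;
+ // the vertical crop is halved because chroma is subsampled vertically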
+ const uint8_t *src_u =
+ (const uint8_t *)src.mBits + src.mStride * src.mHeight
+ + (src.mCropTop / 2) * src.mStride + src.mCropLeft;
+
+ switch (mDstFormat) {
+ case OMX_COLOR_Format16bitRGB565:
+ libyuv::NV12ToRGB565(src_y, src.mStride, src_u, src.mStride, (uint8 *)dst_ptr,
+ dst.mStride, src.cropWidth(), src.cropHeight());
+ break;
+
+ case OMX_COLOR_Format32BitRGBA8888:
+ libyuv::NV12ToARGB(src_y, src.mStride, src_u, src.mStride, (uint8 *)dst_ptr,
+ dst.mStride, src.cropWidth(), src.cropHeight());
+ break;
+
+ default:
+ return ERROR_UNSUPPORTED;
+ }
+
+ return OK;
+}
+
std::function<void (void *, void *, void *, size_t,
signed *, signed *, signed *, signed *)>
getReadFromSrc(OMX_COLOR_FORMATTYPE srcFormat) {
@@ -852,7 +894,7 @@
const uint8_t *src_u =
(const uint8_t *)src.mBits + src.mHeight * src.mStride +
- src.mCropTop * src.mStride / 2 + src.mCropLeft;
+ (src.mCropTop / 2) * src.mStride + src.mCropLeft;
for (size_t y = 0; y < src.cropHeight(); ++y) {
for (size_t x = 0; x < src.cropWidth(); x += 2) {
diff --git a/media/libstagefright/flac/dec/Android.bp b/media/libstagefright/flac/dec/Android.bp
index 6bfab16..307c9b0 100644
--- a/media/libstagefright/flac/dec/Android.bp
+++ b/media/libstagefright/flac/dec/Android.bp
@@ -27,17 +27,21 @@
},
static: {
- whole_static_libs: ["libFLAC"],
+ whole_static_libs: [
+ "libFLAC",
+ "libaudioutils",
+ ],
},
shared: {
- static_libs: ["libFLAC"],
+ static_libs: [
+ "libFLAC",
+ "libaudioutils",
+ ],
},
shared_libs: [
"liblog",
- "libstagefright_foundation",
- "libutils",
],
header_libs: ["libmedia_headers"],
}
diff --git a/media/libstagefright/flac/dec/FLACDecoder.cpp b/media/libstagefright/flac/dec/FLACDecoder.cpp
index dfdc41c..cef0bc6 100644
--- a/media/libstagefright/flac/dec/FLACDecoder.cpp
+++ b/media/libstagefright/flac/dec/FLACDecoder.cpp
@@ -20,6 +20,7 @@
#include "FLACDecoder.h"
+#include <audio_utils/primitives.h> // float_from_i32
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MediaDefs.h>
@@ -117,104 +118,43 @@
mErrorStatus = status;
}
-// Copy samples from FLAC native 32-bit non-interleaved to 16-bit interleaved.
+// Copy samples from FLAC native 32-bit non-interleaved to 16-bit signed
+// or 32-bit float interleaved.
+// TODO: Consider moving to audio_utils. See similar code at FLACExtractor.cpp
// These are candidates for optimization if needed.
-static void copyMono8(
- int16_t *dst,
- const int * src[FLACDecoder::kMaxChannels],
+static void copyTo16Signed(
+ short *dst,
+ const int *const *src,
unsigned nSamples,
- unsigned /* nChannels */) {
- for (unsigned i = 0; i < nSamples; ++i) {
- *dst++ = src[0][i] << 8;
- }
-}
-
-static void copyStereo8(
- int16_t *dst,
- const int * src[FLACDecoder::kMaxChannels],
- unsigned nSamples,
- unsigned /* nChannels */) {
- for (unsigned i = 0; i < nSamples; ++i) {
- *dst++ = src[0][i] << 8;
- *dst++ = src[1][i] << 8;
- }
-}
-
-static void copyMultiCh8(
- int16_t *dst,
- const int * src[FLACDecoder::kMaxChannels],
- unsigned nSamples,
- unsigned nChannels) {
- for (unsigned i = 0; i < nSamples; ++i) {
- for (unsigned c = 0; c < nChannels; ++c) {
- *dst++ = src[c][i] << 8;
+ unsigned nChannels,
+ unsigned bitsPerSample) {
+ const int leftShift = 16 - (int)bitsPerSample; // cast to int to prevent unsigned overflow.
+ if (leftShift >= 0) {
+ for (unsigned i = 0; i < nSamples; ++i) {
+ for (unsigned c = 0; c < nChannels; ++c) {
+ *dst++ = src[c][i] << leftShift;
+ }
+ }
+ } else {
+ const int rightShift = -leftShift;
+ for (unsigned i = 0; i < nSamples; ++i) {
+ for (unsigned c = 0; c < nChannels; ++c) {
+ *dst++ = src[c][i] >> rightShift;
+ }
}
}
}
-static void copyMono16(
- int16_t *dst,
- const int * src[FLACDecoder::kMaxChannels],
+static void copyToFloat(
+ float *dst,
+ const int *const *src,
unsigned nSamples,
- unsigned /* nChannels */) {
- for (unsigned i = 0; i < nSamples; ++i) {
- *dst++ = src[0][i];
- }
-}
-
-static void copyStereo16(
- int16_t *dst,
- const int * src[FLACDecoder::kMaxChannels],
- unsigned nSamples,
- unsigned /* nChannels */) {
- for (unsigned i = 0; i < nSamples; ++i) {
- *dst++ = src[0][i];
- *dst++ = src[1][i];
- }
-}
-
-static void copyMultiCh16(
- int16_t *dst,
- const int * src[FLACDecoder::kMaxChannels],
- unsigned nSamples,
- unsigned nChannels) {
+ unsigned nChannels,
+ unsigned bitsPerSample) {
+ const unsigned leftShift = 32 - bitsPerSample;
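+ // shift each sample to full 32-bit scale so float_from_i32() maps it into the [-1.0, 1.0) float range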
for (unsigned i = 0; i < nSamples; ++i) {
for (unsigned c = 0; c < nChannels; ++c) {
- *dst++ = src[c][i];
- }
- }
-}
-
-// TODO: 24-bit versions should do dithering or noise-shaping, here or in AudioFlinger
-static void copyMono24(
- int16_t *dst,
- const int * src[FLACDecoder::kMaxChannels],
- unsigned nSamples,
- unsigned /* nChannels */) {
- for (unsigned i = 0; i < nSamples; ++i) {
- *dst++ = src[0][i] >> 8;
- }
-}
-
-static void copyStereo24(
- int16_t *dst,
- const int * src[FLACDecoder::kMaxChannels],
- unsigned nSamples,
- unsigned /* nChannels */) {
- for (unsigned i = 0; i < nSamples; ++i) {
- *dst++ = src[0][i] >> 8;
- *dst++ = src[1][i] >> 8;
- }
-}
-
-static void copyMultiCh24(
- int16_t *dst,
- const int * src[FLACDecoder::kMaxChannels],
- unsigned nSamples,
- unsigned nChannels) {
- for (unsigned i = 0; i < nSamples; ++i) {
- for (unsigned c = 0; c < nChannels; ++c) {
- *dst++ = src[c][i] >> 8;
+ *dst++ = float_from_i32(src[c][i] << leftShift);
}
}
}
@@ -238,8 +178,7 @@
mStreamInfoValid(false),
mWriteRequested(false),
mWriteCompleted(false),
- mErrorStatus((FLAC__StreamDecoderErrorStatus) -1),
- mCopy(nullptr) {
+ mErrorStatus((FLAC__StreamDecoderErrorStatus) -1) {
ALOGV("ctor:");
memset(&mStreamInfo, 0, sizeof(mStreamInfo));
memset(&mWriteHeader, 0, sizeof(mWriteHeader));
@@ -379,6 +318,7 @@
case 8:
case 16:
case 24:
+ case 32: // generally rare, but is supported in the framework
break;
default:
@@ -387,31 +327,6 @@
return ERROR_MALFORMED;
}
- // configure the appropriate copy function, defaulting to trespass
- static const struct {
- unsigned mChannels;
- unsigned mBitsPerSample;
- void (*mCopy)(int16_t *dst, const int * src[kMaxChannels],
- unsigned nSamples, unsigned nChannels);
- } table[] = {
- { 1, 8, copyMono8 },
- { 2, 8, copyStereo8 },
- { 8, 8, copyMultiCh8 },
- { 1, 16, copyMono16 },
- { 2, 16, copyStereo16 },
- { 8, 16, copyMultiCh16 },
- { 1, 24, copyMono24 },
- { 2, 24, copyStereo24 },
- { 8, 24, copyMultiCh24 },
- };
- for (const auto &entry : table) {
- if (entry.mChannels >= getChannels() &&
- entry.mBitsPerSample == getBitsPerSample()) {
- mCopy = entry.mCopy;
- break;
- }
- }
-
// Now we have all metadata blocks.
mBufferPos = 0;
mBufferDataSize = 0;
@@ -420,7 +335,7 @@
}
status_t FLACDecoder::decodeOneFrame(const uint8_t *inBuffer, size_t inBufferLen,
- int16_t *outBuffer, size_t *outBufferLen) {
+ void *outBuffer, size_t *outBufferLen, bool outputFloat) {
ALOGV("decodeOneFrame: input size(%zu)", inBufferLen);
if (!mStreamInfoValid) {
@@ -469,21 +384,33 @@
return ERROR_MALFORMED;
}
- size_t bufferSize = blocksize * getChannels() * sizeof(int16_t);
+ const unsigned channels = getChannels();
+ const size_t sampleSize = outputFloat ? sizeof(float) : sizeof(int16_t);
+ const size_t frameSize = channels * sampleSize;
+ size_t bufferSize = blocksize * frameSize;
if (bufferSize > *outBufferLen) {
ALOGW("decodeOneFrame: output buffer holds only partial frame %zu:%zu",
*outBufferLen, bufferSize);
- blocksize = *outBufferLen / (getChannels() * sizeof(int16_t));
- bufferSize = blocksize * getChannels() * sizeof(int16_t);
+ blocksize = *outBufferLen / frameSize;
+ bufferSize = blocksize * frameSize;
}
- if (mCopy == nullptr) {
- ALOGE("decodeOneFrame: format is not supported: channels(%d), BitsPerSample(%d)",
- getChannels(), getBitsPerSample());
- return ERROR_UNSUPPORTED;
- }
// copy PCM from FLAC write buffer to output buffer, with interleaving
- (*mCopy)(outBuffer, mWriteBuffer, blocksize, getChannels());
+
+ const unsigned bitsPerSample = getBitsPerSample();
+ if (outputFloat) {
+ copyToFloat(reinterpret_cast<float*>(outBuffer),
+ mWriteBuffer,
+ blocksize,
+ channels,
+ bitsPerSample);
+ } else {
+ copyTo16Signed(reinterpret_cast<short*>(outBuffer),
+ mWriteBuffer,
+ blocksize,
+ channels,
+ bitsPerSample);
+ }
*outBufferLen = bufferSize;
return OK;
}
diff --git a/media/libstagefright/flac/dec/FLACDecoder.h b/media/libstagefright/flac/dec/FLACDecoder.h
index af419a2..694fccb 100644
--- a/media/libstagefright/flac/dec/FLACDecoder.h
+++ b/media/libstagefright/flac/dec/FLACDecoder.h
@@ -41,7 +41,7 @@
status_t parseMetadata(const uint8_t *inBuffer, size_t inBufferLen);
status_t decodeOneFrame(const uint8_t *inBuffer, size_t inBufferLen,
- int16_t *outBuffer, size_t *outBufferLen);
+ void *outBuffer, size_t *outBufferLen, bool outputFloat = false);
void flush();
virtual ~FLACDecoder();
@@ -89,8 +89,6 @@
// most recent error reported by libFLAC decoder
FLAC__StreamDecoderErrorStatus mErrorStatus;
- void (*mCopy)(int16_t *dst, const int *src[kMaxChannels], unsigned nSamples, unsigned nChannels);
-
status_t init();
// FLAC stream decoder callbacks as C++ instance methods
diff --git a/media/libstagefright/foundation/MediaDefs.cpp b/media/libstagefright/foundation/MediaDefs.cpp
index aba44bb..9d1ec1f 100644
--- a/media/libstagefright/foundation/MediaDefs.cpp
+++ b/media/libstagefright/foundation/MediaDefs.cpp
@@ -23,6 +23,7 @@
const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
+const char *MEDIA_MIMETYPE_VIDEO_AV1 = "video/av01";
const char *MEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
const char *MEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
const char *MEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
index 8edddcc..e68852d 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -25,6 +25,7 @@
extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
+extern const char *MEDIA_MIMETYPE_VIDEO_AV1;
extern const char *MEDIA_MIMETYPE_VIDEO_AVC;
extern const char *MEDIA_MIMETYPE_VIDEO_HEVC;
extern const char *MEDIA_MIMETYPE_VIDEO_MPEG4;
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 9b2853e..9d46d2d 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -488,7 +488,8 @@
status_t setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels);
status_t setupFlacCodec(
- bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel);
+ bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel,
+ AudioEncoding encoding);
status_t setupRawAudioFormat(
OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels,
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index 6d4c1bf..75b0d8e 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -90,6 +90,9 @@
status_t convertYUV420PlanarUseLibYUV(
const BitmapParams &src, const BitmapParams &dst);
+ status_t convertYUV420SemiPlanarUseLibYUV(
+ const BitmapParams &src, const BitmapParams &dst);
+
status_t convertYUV420Planar16(
const BitmapParams &src, const BitmapParams &dst);
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index f18940d..1abef8c 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -110,6 +110,7 @@
uint32_t mInterleaveDurationUs;
int32_t mTimeScale;
int64_t mStartTimestampUs;
+ int32_t mStartTimeOffsetBFramesUs; // Start time offset when B Frames are present
int mLatitudex10000;
int mLongitudex10000;
bool mAreGeoTagsAvailable;
@@ -129,6 +130,7 @@
void setStartTimestampUs(int64_t timeUs);
int64_t getStartTimestampUs(); // Not const
+ int32_t getStartTimeOffsetBFramesUs();
status_t startTracks(MetaData *params);
size_t numTracks();
int64_t estimateMoovBoxSize(int32_t bitRate);
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 704bfdd..984c23d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -153,6 +153,35 @@
constexpr int32_t VP9Level61 = 0x1000;
constexpr int32_t VP9Level62 = 0x2000;
+constexpr int32_t AV1Profile0 = 0x01;
+constexpr int32_t AV1Profile1 = 0x02;
+constexpr int32_t AV1Profile2 = 0x04;
+
+constexpr int32_t AV1Level2 = 0x1;
+constexpr int32_t AV1Level21 = 0x2;
+constexpr int32_t AV1Level22 = 0x4;
+constexpr int32_t AV1Level23 = 0x8;
+constexpr int32_t AV1Level3 = 0x10;
+constexpr int32_t AV1Level31 = 0x20;
+constexpr int32_t AV1Level32 = 0x40;
+constexpr int32_t AV1Level33 = 0x80;
+constexpr int32_t AV1Level4 = 0x100;
+constexpr int32_t AV1Level41 = 0x200;
+constexpr int32_t AV1Level42 = 0x400;
+constexpr int32_t AV1Level43 = 0x800;
+constexpr int32_t AV1Level5 = 0x1000;
+constexpr int32_t AV1Level51 = 0x2000;
+constexpr int32_t AV1Level52 = 0x4000;
+constexpr int32_t AV1Level53 = 0x8000;
+constexpr int32_t AV1Level6 = 0x10000;
+constexpr int32_t AV1Level61 = 0x20000;
+constexpr int32_t AV1Level62 = 0x40000;
+constexpr int32_t AV1Level63 = 0x80000;
+constexpr int32_t AV1Level7 = 0x100000;
+constexpr int32_t AV1Level71 = 0x200000;
+constexpr int32_t AV1Level72 = 0x400000;
+constexpr int32_t AV1Level73 = 0x800000;
+
constexpr int32_t HEVCProfileMain = 0x01;
constexpr int32_t HEVCProfileMain10 = 0x02;
constexpr int32_t HEVCProfileMainStill = 0x04;
@@ -273,6 +302,7 @@
// from MediaFormat.java
constexpr char MIMETYPE_VIDEO_VP8[] = "video/x-vnd.on2.vp8";
constexpr char MIMETYPE_VIDEO_VP9[] = "video/x-vnd.on2.vp9";
+constexpr char MIMETYPE_VIDEO_AV1[] = "video/av01";
constexpr char MIMETYPE_VIDEO_AVC[] = "video/avc";
constexpr char MIMETYPE_VIDEO_HEVC[] = "video/hevc";
constexpr char MIMETYPE_VIDEO_MPEG4[] = "video/mp4v-es";
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index e9baa1a..345f85d 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -1610,9 +1610,9 @@
detailedError = _detailedError;
});
- if (!returnVoid.isOk()) {
- ALOGE("[stream %d] descramble failed, trans=%s",
- mElementaryPID, returnVoid.description().c_str());
+ if (!returnVoid.isOk() || status != Status::OK) {
+ ALOGE("[stream %d] descramble failed, trans=%s, status=%d",
+ mElementaryPID, returnVoid.description().c_str(), status);
return UNKNOWN_ERROR;
}
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 8a76de3..362b7f5 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -53,7 +53,6 @@
"libhidlbase",
"libhidlmemory",
"libhidltransport",
- "libnativewindow", // TODO(b/62923479): use header library
"libvndksupport",
"android.hardware.media.omx@1.0",
"android.hardware.graphics.bufferqueue@1.0",
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index b187035..1b8493a 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -150,6 +150,8 @@
"video_decoder.vp8", "video_encoder.vp8" },
{ MEDIA_MIMETYPE_VIDEO_VP9,
"video_decoder.vp9", "video_encoder.vp9" },
+ { MEDIA_MIMETYPE_VIDEO_AV1,
+ "video_decoder.av1", "video_encoder.av1" },
{ MEDIA_MIMETYPE_AUDIO_RAW,
"audio_decoder.raw", "audio_encoder.raw" },
{ MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index b55dbb0..bebfb3b 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -1,3 +1,9 @@
+cc_library_headers {
+ name: "libstagefright_xmlparser_headers",
+ export_include_dirs: ["include"],
+ vendor_available: true,
+}
+
cc_library_shared {
name: "libstagefright_xmlparser",
vendor_available: true,
diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk
index f7597db..1fbb85e 100644
--- a/media/mediaserver/Android.mk
+++ b/media/mediaserver/Android.mk
@@ -20,7 +20,7 @@
libmediaplayerservice \
libutils \
libbinder \
- libicuuc \
+ libandroidicu \
android.hardware.media.omx@1.0 \
LOCAL_STATIC_LIBRARIES := \
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 6976950..73bd2ca 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -88,7 +88,6 @@
"libandroid",
"libandroid_runtime",
"libbinder",
- "libhwbinder",
"libhidlbase",
"libgui",
"libui",
@@ -141,10 +140,6 @@
],
shared_libs: [
- "libstagefright_foundation",
- "liblog",
- "libutils",
- "libcutils",
],
sanitize: {
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index b86ab42..1a0c3b1 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -357,8 +357,10 @@
it != mAcquiredImages.end(); it++) {
AImage* image = *it;
Mutex::Autolock _l(image->mLock);
- releaseImageLocked(image, /*releaseFenceFd*/-1);
+ // Do not alter mAcquiredImages while we are iterating on it
+ releaseImageLocked(image, /*releaseFenceFd*/-1, /*clearCache*/false);
}
+ mAcquiredImages.clear();
// Delete Buffer Items
for (auto it = mBuffers.begin();
@@ -497,7 +499,7 @@
}
void
-AImageReader::releaseImageLocked(AImage* image, int releaseFenceFd) {
+AImageReader::releaseImageLocked(AImage* image, int releaseFenceFd, bool clearCache) {
BufferItem* buffer = image->mBuffer;
if (buffer == nullptr) {
// This should not happen, but is not fatal
@@ -521,6 +523,10 @@
image->mLockedBuffer = nullptr;
image->mIsClosed = true;
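+ // When clearCache is false the caller (e.g. ~AImageReader) iterates over and clears
+ // mAcquiredImages itself, so do not modify the list here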
+ if (!clearCache) {
+ return;
+ }
+
bool found = false;
// cleanup acquired image list
for (auto it = mAcquiredImages.begin();
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 78152d2..e328cb1 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -88,7 +88,7 @@
media_status_t acquireImageLocked(/*out*/AImage** image, /*out*/int* fenceFd);
// Called by AImage/~AImageReader to close image. Caller is responsible to grab AImage::mLock
- void releaseImageLocked(AImage* image, int releaseFenceFd);
+ void releaseImageLocked(AImage* image, int releaseFenceFd, bool clearCache = true);
static int getBufferWidth(BufferItem* buffer);
static int getBufferHeight(BufferItem* buffer);
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index 92d3aef..fcb706d 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -298,7 +298,7 @@
EXPORT const char* AMEDIAFORMAT_KEY_CRYPTO_IV = "crypto-iv";
EXPORT const char* AMEDIAFORMAT_KEY_CRYPTO_KEY = "crypto-key";
EXPORT const char* AMEDIAFORMAT_KEY_CRYPTO_MODE = "crypto-mode";
-EXPORT const char* AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES = "crypto-encrypted-sizes";
+EXPORT const char* AMEDIAFORMAT_KEY_CRYPTO_PLAIN_SIZES = "crypto-plain-sizes";
EXPORT const char* AMEDIAFORMAT_KEY_CRYPTO_SKIP_BYTE_BLOCK = "crypto-skip-byte-block";
EXPORT const char* AMEDIAFORMAT_KEY_CSD = "csd";
EXPORT const char* AMEDIAFORMAT_KEY_CSD_0 = "csd-0";
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index b05e022..a11602b 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -27,7 +27,6 @@
],
shared_libs: [
"libbinder",
- "libcutils",
"liblog",
"libutils",
"libmemunreachable",
diff --git a/packages/MediaComponents/apex/java/android/media/session/ISessionController.aidl b/packages/MediaComponents/apex/java/android/media/session/ISessionController.aidl
index 74897f7..433b12f 100644
--- a/packages/MediaComponents/apex/java/android/media/session/ISessionController.aidl
+++ b/packages/MediaComponents/apex/java/android/media/session/ISessionController.aidl
@@ -48,9 +48,9 @@
PendingIntent getLaunchPendingIntent();
long getFlags();
ParcelableVolumeInfo getVolumeAttributes();
- void adjustVolume(String packageName, ISessionControllerCallback caller,
+ void adjustVolume(String packageName, String opPackageName, ISessionControllerCallback caller,
boolean asSystemService, int direction, int flags);
- void setVolumeTo(String packageName, ISessionControllerCallback caller,
+ void setVolumeTo(String packageName, String opPackageName, ISessionControllerCallback caller,
int value, int flags);
// These commands are for the TransportControls
diff --git a/packages/MediaComponents/apex/java/android/media/session/ISessionManager.aidl b/packages/MediaComponents/apex/java/android/media/session/ISessionManager.aidl
index 3578c16..d6c226f 100644
--- a/packages/MediaComponents/apex/java/android/media/session/ISessionManager.aidl
+++ b/packages/MediaComponents/apex/java/android/media/session/ISessionManager.aidl
@@ -17,7 +17,6 @@
import android.content.ComponentName;
import android.media.IRemoteVolumeController;
-import android.media.ISessionTokensListener;
import android.media.session.IActiveSessionsListener;
import android.media.session.ICallback;
import android.media.session.IOnMediaKeyListener;
@@ -36,9 +35,10 @@
List<IBinder> getSessions(in ComponentName compName, int userId);
void dispatchMediaKeyEvent(String packageName, boolean asSystemService, in KeyEvent keyEvent,
boolean needWakeLock);
- void dispatchVolumeKeyEvent(String packageName, boolean asSystemService, in KeyEvent keyEvent,
- int stream, boolean musicOnly);
- void dispatchAdjustVolume(String packageName, int suggestedStream, int delta, int flags);
+ void dispatchVolumeKeyEvent(String packageName, String opPackageName, boolean asSystemService,
+ in KeyEvent keyEvent, int stream, boolean musicOnly);
+ void dispatchAdjustVolume(String packageName, String opPackageName, int suggestedStream,
+ int delta, int flags);
void addSessionsListener(in IActiveSessionsListener listener, in ComponentName compName,
int userId);
void removeSessionsListener(in IActiveSessionsListener listener);
@@ -55,12 +55,4 @@
// MediaSession2
boolean isTrusted(String controllerPackageName, int controllerPid, int controllerUid);
- boolean createSession2(in Bundle sessionToken);
- void destroySession2(in Bundle sessionToken);
- List<Bundle> getSessionTokens(boolean activeSessionOnly, boolean sessionServiceOnly,
- String packageName);
-
- void addSessionTokensListener(in ISessionTokensListener listener, int userId,
- String packageName);
- void removeSessionTokensListener(in ISessionTokensListener listener, String packageName);
}
diff --git a/packages/MediaComponents/apex/java/android/media/session/MediaController.java b/packages/MediaComponents/apex/java/android/media/session/MediaController.java
index 8c3a013..65682a8 100644
--- a/packages/MediaComponents/apex/java/android/media/session/MediaController.java
+++ b/packages/MediaComponents/apex/java/android/media/session/MediaController.java
@@ -153,9 +153,7 @@
return false;
}
try {
- //TODO(b/119748678): Resolve mContext.getOpPackageName() through this file.
- // Temporarilly it's replaced with "mContext.getOpPackageName()" for compiling.
- return mSessionBinder.sendMediaButton("mContext.getOpPackageName()", mCbStub,
+ return mSessionBinder.sendMediaButton(mContext.getPackageName(), mCbStub,
asSystemService, keyEvent);
} catch (RemoteException e) {
// System is dead. =(
@@ -188,8 +186,9 @@
break;
}
try {
- mSessionBinder.adjustVolume("mContext.getOpPackageName()", mCbStub, true,
- direction, AudioManager.FLAG_SHOW_UI);
+ mSessionBinder.adjustVolume(mContext.getPackageName(),
+ mContext.getOpPackageName(), mCbStub, true, direction,
+ AudioManager.FLAG_SHOW_UI);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling adjustVolumeBy", e);
}
@@ -199,8 +198,8 @@
final int flags = AudioManager.FLAG_PLAY_SOUND | AudioManager.FLAG_VIBRATE
| AudioManager.FLAG_FROM_KEY;
try {
- mSessionBinder.adjustVolume("mContext.getOpPackageName()", mCbStub, true, 0,
- flags);
+ mSessionBinder.adjustVolume(mContext.getPackageName(),
+ mContext.getOpPackageName(), mCbStub, true, 0, flags);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling adjustVolumeBy", e);
}
@@ -369,7 +368,8 @@
*/
public void setVolumeTo(int value, int flags) {
try {
- mSessionBinder.setVolumeTo("mContext.getOpPackageName()", mCbStub, value, flags);
+ mSessionBinder.setVolumeTo(mContext.getPackageName(), mContext.getOpPackageName(),
+ mCbStub, value, flags);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling setVolumeTo.", e);
}
@@ -390,8 +390,8 @@
*/
public void adjustVolume(int direction, int flags) {
try {
- mSessionBinder.adjustVolume("mContext.getOpPackageName()", mCbStub, false, direction,
- flags);
+ mSessionBinder.adjustVolume(mContext.getPackageName(), mContext.getOpPackageName(),
+ mCbStub, false, direction, flags);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling adjustVolumeBy.", e);
}
@@ -457,7 +457,7 @@
throw new IllegalArgumentException("command cannot be null or empty");
}
try {
- mSessionBinder.sendCommand("mContext.getOpPackageName()", mCbStub, command, args, cb);
+ mSessionBinder.sendCommand(mContext.getPackageName(), mCbStub, command, args, cb);
} catch (RemoteException e) {
Log.d(TAG, "Dead object in sendCommand.", e);
}
@@ -523,7 +523,7 @@
if (!mCbRegistered) {
try {
- mSessionBinder.registerCallbackListener("mContext.getOpPackageName()", mCbStub);
+ mSessionBinder.registerCallbackListener(mContext.getPackageName(), mCbStub);
mCbRegistered = true;
} catch (RemoteException e) {
Log.e(TAG, "Dead object in registerCallback", e);
@@ -670,7 +670,7 @@
*/
public void prepare() {
try {
- mSessionBinder.prepare("mContext.getOpPackageName()", mCbStub);
+ mSessionBinder.prepare(mContext.getPackageName(), mCbStub);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling prepare.", e);
}
@@ -694,7 +694,7 @@
"You must specify a non-empty String for prepareFromMediaId.");
}
try {
- mSessionBinder.prepareFromMediaId("mContext.getOpPackageName()", mCbStub, mediaId,
+ mSessionBinder.prepareFromMediaId(mContext.getPackageName(), mCbStub, mediaId,
extras);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling prepare(" + mediaId + ").", e);
@@ -721,7 +721,7 @@
query = "";
}
try {
- mSessionBinder.prepareFromSearch("mContext.getOpPackageName()", mCbStub, query,
+ mSessionBinder.prepareFromSearch(mContext.getPackageName(), mCbStub, query,
extras);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling prepare(" + query + ").", e);
@@ -746,7 +746,7 @@
"You must specify a non-empty Uri for prepareFromUri.");
}
try {
- mSessionBinder.prepareFromUri("mContext.getOpPackageName()", mCbStub, uri, extras);
+ mSessionBinder.prepareFromUri(mContext.getPackageName(), mCbStub, uri, extras);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling prepare(" + uri + ").", e);
}
@@ -757,7 +757,7 @@
*/
public void play() {
try {
- mSessionBinder.play("mContext.getOpPackageName()", mCbStub);
+ mSessionBinder.play(mContext.getPackageName(), mCbStub);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling play.", e);
}
@@ -776,7 +776,7 @@
"You must specify a non-empty String for playFromMediaId.");
}
try {
- mSessionBinder.playFromMediaId("mContext.getOpPackageName()", mCbStub, mediaId,
+ mSessionBinder.playFromMediaId(mContext.getPackageName(), mCbStub, mediaId,
extras);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling play(" + mediaId + ").", e);
@@ -799,7 +799,7 @@
query = "";
}
try {
- mSessionBinder.playFromSearch("mContext.getOpPackageName()", mCbStub, query, extras);
+ mSessionBinder.playFromSearch(mContext.getPackageName(), mCbStub, query, extras);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling play(" + query + ").", e);
}
@@ -818,7 +818,7 @@
"You must specify a non-empty Uri for playFromUri.");
}
try {
- mSessionBinder.playFromUri("mContext.getOpPackageName()", mCbStub, uri, extras);
+ mSessionBinder.playFromUri(mContext.getPackageName(), mCbStub, uri, extras);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling play(" + uri + ").", e);
}
@@ -830,7 +830,7 @@
*/
public void skipToQueueItem(long id) {
try {
- mSessionBinder.skipToQueueItem("mContext.getOpPackageName()", mCbStub, id);
+ mSessionBinder.skipToQueueItem(mContext.getPackageName(), mCbStub, id);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling skipToItem(" + id + ").", e);
}
@@ -842,7 +842,7 @@
*/
public void pause() {
try {
- mSessionBinder.pause("mContext.getOpPackageName()", mCbStub);
+ mSessionBinder.pause(mContext.getPackageName(), mCbStub);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling pause.", e);
}
@@ -854,7 +854,7 @@
*/
public void stop() {
try {
- mSessionBinder.stop("mContext.getOpPackageName()", mCbStub);
+ mSessionBinder.stop(mContext.getPackageName(), mCbStub);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling stop.", e);
}
@@ -867,7 +867,7 @@
*/
public void seekTo(long pos) {
try {
- mSessionBinder.seekTo("mContext.getOpPackageName()", mCbStub, pos);
+ mSessionBinder.seekTo(mContext.getPackageName(), mCbStub, pos);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling seekTo.", e);
}
@@ -879,7 +879,7 @@
*/
public void fastForward() {
try {
- mSessionBinder.fastForward("mContext.getOpPackageName()", mCbStub);
+ mSessionBinder.fastForward(mContext.getPackageName(), mCbStub);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling fastForward.", e);
}
@@ -890,7 +890,7 @@
*/
public void skipToNext() {
try {
- mSessionBinder.next("mContext.getOpPackageName()", mCbStub);
+ mSessionBinder.next(mContext.getPackageName(), mCbStub);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling next.", e);
}
@@ -902,7 +902,7 @@
*/
public void rewind() {
try {
- mSessionBinder.rewind("mContext.getOpPackageName()", mCbStub);
+ mSessionBinder.rewind(mContext.getPackageName(), mCbStub);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling rewind.", e);
}
@@ -913,7 +913,7 @@
*/
public void skipToPrevious() {
try {
- mSessionBinder.previous("mContext.getOpPackageName()", mCbStub);
+ mSessionBinder.previous(mContext.getPackageName(), mCbStub);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling previous.", e);
}
@@ -928,7 +928,7 @@
*/
public void setRating(Rating rating) {
try {
- mSessionBinder.rate("mContext.getOpPackageName()", mCbStub, rating);
+ mSessionBinder.rate(mContext.getPackageName(), mCbStub, rating);
} catch (RemoteException e) {
Log.wtf(TAG, "Error calling rate.", e);
}
@@ -963,7 +963,7 @@
throw new IllegalArgumentException("CustomAction cannot be null.");
}
try {
- mSessionBinder.sendCustomAction("mContext.getOpPackageName()", mCbStub, action, args);
+ mSessionBinder.sendCustomAction(mContext.getPackageName(), mCbStub, action, args);
} catch (RemoteException e) {
Log.d(TAG, "Dead object in sendCustomAction.", e);
}
@@ -1142,8 +1142,7 @@
private boolean mRegistered = false;
public MessageHandler(Looper looper, MediaController.Callback cb) {
- //TODO:(b/119539849) Uncomment below line and resolve the error.
- // super(looper, null, true);
+ super(looper);
mCallback = cb;
}
@@ -1182,6 +1181,7 @@
public void post(int what, Object obj, Bundle data) {
Message msg = obtainMessage(what, obj);
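+ // mark the message asynchronous so it is not held back by looper sync barriers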
+ msg.setAsynchronous(true);
msg.setData(data);
msg.sendToTarget();
}
diff --git a/packages/MediaComponents/apex/java/android/media/session/MediaSession.java b/packages/MediaComponents/apex/java/android/media/session/MediaSession.java
index 943843d..73e16a6 100644
--- a/packages/MediaComponents/apex/java/android/media/session/MediaSession.java
+++ b/packages/MediaComponents/apex/java/android/media/session/MediaSession.java
@@ -1458,8 +1458,7 @@
private RemoteUserInfo mCurrentControllerInfo;
public CallbackMessageHandler(Looper looper, MediaSession.Callback callback) {
- //TODO:(b/119539849) Uncomment below line and resolve the error.
- //super(looper, null, true);
+ super(looper);
mCallback = callback;
mCallback.mHandler = this;
}
@@ -1467,6 +1466,7 @@
public void post(RemoteUserInfo caller, int what, Object obj, Bundle data, long delayMs) {
Pair<RemoteUserInfo, Object> objWithCaller = Pair.create(caller, obj);
Message msg = obtainMessage(what, objWithCaller);
+ msg.setAsynchronous(true);
msg.setData(data);
if (delayMs > 0) {
sendMessageDelayed(msg, delayMs);
diff --git a/services/audiopolicy/Android.mk b/services/audiopolicy/Android.mk
index bfa1b5e..ebb4f3b 100644
--- a/services/audiopolicy/Android.mk
+++ b/services/audiopolicy/Android.mk
@@ -51,6 +51,7 @@
libcutils \
libutils \
liblog \
+ libaudioclient \
libsoundtrigger
ifeq ($(USE_CONFIGURABLE_AUDIO_POLICY), 1)
@@ -85,7 +86,7 @@
LOCAL_SHARED_LIBRARIES += libmedia_helper
LOCAL_SHARED_LIBRARIES += libmediametrics
-LOCAL_SHARED_LIBRARIES += libhidlbase libicuuc libxml2
+LOCAL_SHARED_LIBRARIES += libhidlbase libxml2
ifeq ($(USE_XML_AUDIO_POLICY_CONF), 1)
LOCAL_CFLAGS += -DUSE_XML_AUDIO_POLICY_CONF
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index ad12a90..1c2b9d7 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -210,6 +210,10 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes) = 0;
virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes) = 0;
+ virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices)
+ = 0;
+ virtual status_t removeUidDeviceAffinities(uid_t uid) = 0;
+
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId,
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 96c00ea..955e87b 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -80,6 +80,10 @@
status_t getInputMixForAttr(audio_attributes_t attr, AudioMix **policyMix);
+ status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
+ status_t removeUidDeviceAffinities(uid_t uid);
+ status_t getDevicesForUid(uid_t uid, Vector<AudioDeviceTypeAddr>& devices) const;
+
void dump(String8 *dst) const;
};
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 3cf8014..776d98f 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -340,6 +340,87 @@
return NO_ERROR;
}
+status_t AudioPolicyMixCollection::setUidDeviceAffinities(uid_t uid,
+ const Vector<AudioDeviceTypeAddr>& devices) {
+ // remove existing rules for this uid
+ removeUidDeviceAffinities(uid);
+
+ // for each player mix: add a rule to match or exclude the uid based on the device
+ for (size_t i = 0; i < size(); i++) {
+ const AudioMix *mix = valueAt(i)->getMix();
+ if (mix->mMixType != MIX_TYPE_PLAYERS) {
+ continue;
+ }
+ // check if this mix goes to a device in the list of devices
+ bool deviceMatch = false;
+ for (size_t j = 0; j < devices.size(); j++) {
+ if (devices[j].mType == mix->mDeviceType
+ && devices[j].mAddress == mix->mDeviceAddress) {
+ deviceMatch = true;
+ break;
+ }
+ }
+ if (!deviceMatch) {
+ // this mix doesn't go to one of the listed devices for the given uid,
+ // modify its rules to exclude the uid
+ mix->excludeUid(uid);
+ }
+ }
+
+ return NO_ERROR;
+}
+
+status_t AudioPolicyMixCollection::removeUidDeviceAffinities(uid_t uid) {
+ // for each player mix: remove existing rules that match or exclude this uid
+ for (size_t i = 0; i < size(); i++) {
+ bool foundUidRule = false;
+ AudioMix *mix = valueAt(i)->getMix();
+ if (mix->mMixType != MIX_TYPE_PLAYERS) {
+ continue;
+ }
+ std::vector<size_t> criteriaToRemove;
+ for (size_t j = 0; j < mix->mCriteria.size(); j++) {
+ const uint32_t rule = mix->mCriteria[j].mRule;
+ // is this rule affecting the uid?
+ if (rule == RULE_EXCLUDE_UID
+ && uid == mix->mCriteria[j].mValue.mUid) {
+ foundUidRule = true;
+ criteriaToRemove.push_back(j);
+ }
+ }
+ if (foundUidRule) {
+ // iterate with a signed index: a size_t counter would wrap past zero and never terminate
+ for (ssize_t j = criteriaToRemove.size() - 1; j >= 0; j--) {
+ mix->mCriteria.removeAt(criteriaToRemove[j]);
+ }
+ }
+ }
+ return NO_ERROR;
+}
+
+status_t AudioPolicyMixCollection::getDevicesForUid(uid_t uid,
+ Vector<AudioDeviceTypeAddr>& devices) const {
+ // for each player mix: find rules that don't exclude this uid, and add the device to the list
+ for (size_t i = 0; i < size(); i++) {
+ bool ruleAllowsUid = true;
+ AudioMix *mix = valueAt(i)->getMix();
+ if (mix->mMixType != MIX_TYPE_PLAYERS) {
+ continue;
+ }
+ for (size_t j = 0; j < mix->mCriteria.size(); j++) {
+ const uint32_t rule = mix->mCriteria[j].mRule;
+ if (rule == RULE_EXCLUDE_UID
+ && uid == mix->mCriteria[j].mValue.mUid) {
+ ruleAllowsUid = false;
+ break;
+ }
+ }
+ if (ruleAllowsUid) {
+ devices.add(AudioDeviceTypeAddr(mix->mDeviceType, mix->mDeviceAddress));
+ }
+ }
+ return NO_ERROR;
+}
+
void AudioPolicyMixCollection::dump(String8 *dst) const
{
dst->append("\nAudio Policy Mix:\n");
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index aa205f0..5544821 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2674,6 +2674,59 @@
}
}
+status_t AudioPolicyManager::setUidDeviceAffinities(uid_t uid,
+ const Vector<AudioDeviceTypeAddr>& devices) {
+ ALOGV("%s() uid=%d num devices %zu", __FUNCTION__, uid, devices.size());
+ // uid/device affinity is only for output devices
+ for (size_t i = 0; i < devices.size(); i++) {
+ if (!audio_is_output_device(devices[i].mType)) {
+ ALOGE("setUidDeviceAffinities() device=%08x is NOT an output device",
+ devices[i].mType);
+ return BAD_VALUE;
+ }
+ }
+ status_t res = mPolicyMixes.setUidDeviceAffinities(uid, devices);
+ if (res == NO_ERROR) {
+ // reevaluate outputs for all given devices
+ for (size_t i = 0; i < devices.size(); i++) {
+ sp<DeviceDescriptor> devDesc = mHwModules.getDeviceDescriptor(
+ devices[i].mType, devices[i].mAddress, String8());
+ SortedVector<audio_io_handle_t> outputs;
+ if (checkOutputsForDevice(devDesc, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ outputs,
+ devDesc->address()) != NO_ERROR) {
+ ALOGE("setUidDeviceAffinities() error in checkOutputsForDevice for device=%08x"
+ " addr=%s", devices[i].mType, devices[i].mAddress.string());
+ return INVALID_OPERATION;
+ }
+ }
+ }
+ return res;
+}
+
+status_t AudioPolicyManager::removeUidDeviceAffinities(uid_t uid) {
+ ALOGV("%s() uid=%d", __FUNCTION__, uid);
+ Vector<AudioDeviceTypeAddr> devices;
+ status_t res = mPolicyMixes.getDevicesForUid(uid, devices);
+ if (res == NO_ERROR) {
+ // reevaluate outputs for all found devices
+ for (size_t i = 0; i < devices.size(); i++) {
+ sp<DeviceDescriptor> devDesc = mHwModules.getDeviceDescriptor(
+ devices[i].mType, devices[i].mAddress, String8());
+ SortedVector<audio_io_handle_t> outputs;
+ if (checkOutputsForDevice(devDesc, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ outputs,
+ devDesc->address()) != NO_ERROR) {
+ ALOGE("%s() error in checkOutputsForDevice for device=%08x addr=%s",
+ __FUNCTION__, devices[i].mType, devices[i].mAddress.string());
+ return INVALID_OPERATION;
+ }
+ }
+ }
+
+ return res;
+}
+
void AudioPolicyManager::dump(String8 *dst) const
{
dst->appendFormat("\nAudioPolicyManager Dump: %p\n", this);
@@ -4089,9 +4142,9 @@
for (size_t i = 0; i < mAvailableInputDevices.size(); i++) {
if (mAvailableInputDevices[i]->address().isEmpty()) {
if (mAvailableInputDevices[i]->type() == AUDIO_DEVICE_IN_BUILTIN_MIC) {
- mAvailableInputDevices[i]->address() = String8(AUDIO_BOTTOM_MICROPHONE_ADDRESS);
+ mAvailableInputDevices[i]->setAddress(String8(AUDIO_BOTTOM_MICROPHONE_ADDRESS));
} else if (mAvailableInputDevices[i]->type() == AUDIO_DEVICE_IN_BACK_MIC) {
- mAvailableInputDevices[i]->address() = String8(AUDIO_BACK_MICROPHONE_ADDRESS);
+ mAvailableInputDevices[i]->setAddress(String8(AUDIO_BACK_MICROPHONE_ADDRESS));
}
}
}
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 35dd87c..9eb1dcf 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -218,6 +218,9 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes);
virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes);
+ virtual status_t setUidDeviceAffinities(uid_t uid,
+ const Vector<AudioDeviceTypeAddr>& devices);
+ virtual status_t removeUidDeviceAffinities(uid_t uid);
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 439764b..80503fd 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -1037,6 +1037,31 @@
}
}
+status_t AudioPolicyService::setUidDeviceAffinities(uid_t uid,
+ const Vector<AudioDeviceTypeAddr>& devices) {
+ Mutex::Autolock _l(mLock);
+ if (!modifyAudioRoutingAllowed()) {
+ return PERMISSION_DENIED;
+ }
+ if (mAudioPolicyManager == NULL) {
+ return NO_INIT;
+ }
+ AutoCallerClear acc;
+ return mAudioPolicyManager->setUidDeviceAffinities(uid, devices);
+}
+
+status_t AudioPolicyService::removeUidDeviceAffinities(uid_t uid) {
+ Mutex::Autolock _l(mLock);
+ if (!modifyAudioRoutingAllowed()) {
+ return PERMISSION_DENIED;
+ }
+ if (mAudioPolicyManager == NULL) {
+ return NO_INIT;
+ }
+ AutoCallerClear acc;
+ return mAudioPolicyManager->removeUidDeviceAffinities(uid);
+}
+
status_t AudioPolicyService::startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId)
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index c44d816..959e757 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -199,6 +199,10 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
+ virtual status_t setUidDeviceAffinities(uid_t uid, const Vector<AudioDeviceTypeAddr>& devices);
+
+ virtual status_t removeUidDeviceAffinities(uid_t uid);
+
virtual status_t startAudioSource(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId);
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
index e5e5024..f063506 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
@@ -139,18 +139,18 @@
}
CameraMetadataNative &result = resultWrapper->mResult;
auto resultExtras = resultWrapper->mResultExtras;
- auto &physicalCaptureResultInfos = resultWrapper->mPhysicalCaptureResultInfos;
HCaptureResultExtras hResultExtras =
hardware::cameraservice::utils::conversion::convertToHidl(resultExtras);
- hidl_vec<HPhysicalCaptureResultInfo> hPhysicalCaptureResultInfos =
- hardware::cameraservice::utils::conversion::convertToHidl(
- physicalCaptureResultInfos, converter->mCaptureResultMetadataQueue);
// Convert Metadata into HCameraMetadata;
FmqSizeOrMetadata hResult;
const camera_metadata_t *rawMetadata = result.getAndLock();
converter->convertResultMetadataToHidl(rawMetadata, &hResult);
result.unlock(rawMetadata);
+ auto &physicalCaptureResultInfos = resultWrapper->mPhysicalCaptureResultInfos;
+ hidl_vec<HPhysicalCaptureResultInfo> hPhysicalCaptureResultInfos =
+ hardware::cameraservice::utils::conversion::convertToHidl(
+ physicalCaptureResultInfos, converter->mCaptureResultMetadataQueue);
auto ret = converter->mBase->onResultReceived(hResult, hResultExtras,
hPhysicalCaptureResultInfos);
if (!ret.isOk()) {
diff --git a/services/camera/libcameraservice/hidl/Convert.cpp b/services/camera/libcameraservice/hidl/Convert.cpp
index 582ce34..a87812b 100644
--- a/services/camera/libcameraservice/hidl/Convert.cpp
+++ b/services/camera/libcameraservice/hidl/Convert.cpp
@@ -89,8 +89,9 @@
for (auto &handle : windowHandles) {
iGBPs.push_back(new H2BGraphicBufferProducer(AImageReader_getHGBPFromHandle(handle)));
}
+ String16 physicalCameraId16(hOutputConfiguration.physicalCameraId.c_str());
hardware::camera2::params::OutputConfiguration outputConfiguration(
- iGBPs, convertFromHidl(hOutputConfiguration.rotation),
+ iGBPs, convertFromHidl(hOutputConfiguration.rotation), physicalCameraId16,
hOutputConfiguration.windowGroupId, OutputConfiguration::SURFACE_TYPE_UNKNOWN, 0, 0,
(windowHandles.size() > 1));
return outputConfiguration;
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 653317b..f119472 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -39,6 +39,7 @@
"libcodec2_soft_opusdec",
"libcodec2_soft_vp8dec",
"libcodec2_soft_vp9dec",
+ "libcodec2_soft_av1dec",
"libcodec2_soft_vp8enc",
"libcodec2_soft_vp9enc",
"libcodec2_soft_rawdec",
diff --git a/services/mediaextractor/Android.mk b/services/mediaextractor/Android.mk
index e31eadc..6101c8a 100644
--- a/services/mediaextractor/Android.mk
+++ b/services/mediaextractor/Android.mk
@@ -40,7 +40,7 @@
LOCAL_SRC_FILES := main_extractorservice.cpp
LOCAL_SHARED_LIBRARIES := libmedia libmediaextractorservice libbinder libutils \
- liblog libbase libicuuc libavservices_minijail
+ liblog libbase libandroidicu libavservices_minijail
LOCAL_STATIC_LIBRARIES := libicuandroid_utils
LOCAL_MODULE:= mediaextractor
LOCAL_INIT_RC := mediaextractor.rc
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-x86_64.policy b/services/mediaextractor/seccomp_policy/mediaextractor-x86_64.policy
index 6d9ed6f..35ac458 100644
--- a/services/mediaextractor/seccomp_policy/mediaextractor-x86_64.policy
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-x86_64.policy
@@ -21,6 +21,7 @@
getuid: 1
setpriority: 1
sigaltstack: 1
+fstat: 1
fstatfs: 1
newfstatat: 1
restart_syscall: 1
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index ca96f62..bee5d25 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -9,10 +9,8 @@
shared_libs: [
"libaudioutils",
"libbinder",
- "libcutils",
"liblog",
"libmediautils",
- "libnbaio",
"libnblog",
"libutils",
],