Merge changes from topic "aidlCameraHal"
* changes:
cameraserver: Modified rules for different kinds of system cameras.
Implement AidlCamera3Device implementation.
Add AidlProviderInfo to handle aidl hal providers.
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 0982bba..a16e747 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -85,6 +85,10 @@
return mStreamUseCase;
}
+int OutputConfiguration::getTimestampBase() const {
+ return mTimestampBase;
+}
+
OutputConfiguration::OutputConfiguration() :
mRotation(INVALID_ROTATION),
mSurfaceSetID(INVALID_SET_ID),
@@ -95,7 +99,8 @@
mIsShared(false),
mIsMultiResolution(false),
mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
- mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+ mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+ mTimestampBase(TIMESTAMP_BASE_DEFAULT) {
}
OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -188,6 +193,12 @@
return err;
}
+ int timestampBase = TIMESTAMP_BASE_DEFAULT;
+ if ((err = parcel->readInt32(&timestampBase)) != OK) {
+ ALOGE("%s: Failed to read timestamp base from parcel", __FUNCTION__);
+ return err;
+ }
+
mRotation = rotation;
mSurfaceSetID = setID;
mSurfaceType = surfaceType;
@@ -197,6 +208,7 @@
mIsShared = isShared != 0;
mIsMultiResolution = isMultiResolution != 0;
mStreamUseCase = streamUseCase;
+ mTimestampBase = timestampBase;
for (auto& surface : surfaceShims) {
ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
surface.graphicBufferProducer.get(),
@@ -208,9 +220,9 @@
mDynamicRangeProfile = dynamicProfile;
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
- " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %d", __FUNCTION__,
- mRotation, mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(),
- mIsMultiResolution, mStreamUseCase);
+ " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %d, timestampBase = %d",
+ __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
+ String8(mPhysicalCameraId).string(), mIsMultiResolution, mStreamUseCase, timestampBase);
return err;
}
@@ -227,6 +239,7 @@
mIsMultiResolution = false;
mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ mTimestampBase = TIMESTAMP_BASE_DEFAULT;
}
OutputConfiguration::OutputConfiguration(
@@ -237,7 +250,8 @@
mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
- mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) { }
+ mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+ mTimestampBase(TIMESTAMP_BASE_DEFAULT) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
@@ -290,6 +304,9 @@
err = parcel->writeInt32(mStreamUseCase);
if (err != OK) return err;
+ err = parcel->writeInt32(mTimestampBase);
+ if (err != OK) return err;
+
return OK;
}
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index f1cb6bd..9a91cc0 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -38,6 +38,14 @@
SURFACE_TYPE_SURFACE_VIEW = 0,
SURFACE_TYPE_SURFACE_TEXTURE = 1
};
+ enum TimestampBaseByte {
+ TIMESTAMP_BASE_DEFAULT = 0,
+ TIMESTAMP_BASE_SENSOR = 1,
+ TIMESTAMP_BASE_MONOTONIC = 2,
+ TIMESTAMP_BASE_REALTIME = 3,
+ TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4
+ };
+
const std::vector<sp<IGraphicBufferProducer>>& getGraphicBufferProducers() const;
int getRotation() const;
int getSurfaceSetID() const;
@@ -50,6 +58,7 @@
String16 getPhysicalCameraId() const;
bool isMultiResolution() const;
int getStreamUseCase() const;
+ int getTimestampBase() const;
// set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
const std::vector<int32_t>& getSensorPixelModesUsed() const;
@@ -93,7 +102,8 @@
mIsMultiResolution == other.mIsMultiResolution &&
sensorPixelModesUsedEqual(other) &&
mDynamicRangeProfile == other.mDynamicRangeProfile &&
- mStreamUseCase == other.mStreamUseCase );
+ mStreamUseCase == other.mStreamUseCase &&
+ mTimestampBase == other.mTimestampBase);
}
bool operator != (const OutputConfiguration& other) const {
return !(*this == other);
@@ -136,6 +146,9 @@
if (mStreamUseCase != other.mStreamUseCase) {
return mStreamUseCase < other.mStreamUseCase;
}
+ if (mTimestampBase != other.mTimestampBase) {
+ return mTimestampBase < other.mTimestampBase;
+ }
return gbpsLessThan(other);
}
@@ -162,6 +175,7 @@
std::vector<int32_t> mSensorPixelModesUsed;
int mDynamicRangeProfile;
int mStreamUseCase;
+ int mTimestampBase;
};
} // namespace params
} // namespace camera2
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 5cc6759..4911429 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -72,6 +72,8 @@
ACAMERA_DISTORTION_CORRECTION,
ACAMERA_HEIC,
ACAMERA_HEIC_INFO,
+ ACAMERA_AUTOMOTIVE,
+ ACAMERA_AUTOMOTIVE_LENS,
ACAMERA_SECTION_COUNT,
ACAMERA_VENDOR = 0x8000
@@ -115,6 +117,8 @@
<< 16,
ACAMERA_HEIC_START = ACAMERA_HEIC << 16,
ACAMERA_HEIC_INFO_START = ACAMERA_HEIC_INFO << 16,
+ ACAMERA_AUTOMOTIVE_START = ACAMERA_AUTOMOTIVE << 16,
+ ACAMERA_AUTOMOTIVE_LENS_START = ACAMERA_AUTOMOTIVE_LENS << 16,
ACAMERA_VENDOR_START = ACAMERA_VENDOR << 16
} acamera_metadata_section_start_t;
@@ -2694,6 +2698,9 @@
* with PRIMARY_CAMERA.</p>
* <p>When ACAMERA_LENS_POSE_REFERENCE is UNDEFINED, this position cannot be accurately
* represented by the camera device, and will be represented as <code>(0, 0, 0)</code>.</p>
+ * <p>When ACAMERA_LENS_POSE_REFERENCE is AUTOMOTIVE, then this position is relative to the
+ * origin of the automotive sensor coordinate system, which is at the center of the rear
+ * axle.</p>
*
* @see ACAMERA_LENS_DISTORTION
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
@@ -7199,6 +7206,87 @@
ACAMERA_HEIC_START + 5,
ACAMERA_HEIC_END,
+ /**
+ * <p>Location of the cameras on the automotive devices.</p>
+ *
+ * <p>Type: byte (acamera_metadata_enum_android_automotive_location_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This enum defines the locations of the cameras relative to the vehicle body frame on
+ * <a href="https://source.android.com/devices/sensors/sensor-types#auto_axes">the automotive sensor coordinate system</a>.
+ * If the system has FEATURE_AUTOMOTIVE, the camera will have this entry in its static
+ * metadata.</p>
+ * <ul>
+ * <li>INTERIOR is the inside of the vehicle body frame (or the passenger cabin).</li>
+ * <li>EXTERIOR is the outside of the vehicle body frame.</li>
+ * <li>EXTRA is the extra vehicle such as a trailer.</li>
+ * </ul>
+ * <p>Each side of the vehicle body frame on this coordinate system is defined as below:</p>
+ * <ul>
+ * <li>FRONT is where the Y-axis increases toward.</li>
+ * <li>REAR is where the Y-axis decreases toward.</li>
+ * <li>LEFT is where the X-axis decreases toward.</li>
+ * <li>RIGHT is where the X-axis increases toward.</li>
+ * </ul>
+ * <p>If the camera has either EXTERIOR_OTHER or EXTRA_OTHER, its static metadata will list
+ * the following entries, so that applications can determine the camera's exact location:</p>
+ * <ul>
+ * <li>ACAMERA_LENS_POSE_REFERENCE</li>
+ * <li>ACAMERA_LENS_POSE_ROTATION</li>
+ * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
+ * </ul>
+ *
+ * @see ACAMERA_LENS_POSE_REFERENCE
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION = // byte (acamera_metadata_enum_android_automotive_location_t)
+ ACAMERA_AUTOMOTIVE_START,
+ ACAMERA_AUTOMOTIVE_END,
+
+ /**
+ * <p>The direction of the camera faces relative to the vehicle body frame and the
+ * passenger seats.</p>
+ *
+ * <p>Type: byte[n] (acamera_metadata_enum_android_automotive_lens_facing_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This enum defines the lens facing characteristic of the cameras on the automotive
+ * devices with locations ACAMERA_AUTOMOTIVE_LOCATION defines. If the system has
+ * FEATURE_AUTOMOTIVE, the camera will have this entry in its static metadata.</p>
+ * <p>When ACAMERA_AUTOMOTIVE_LOCATION is INTERIOR, this has one or more INTERIOR_*
+ * values or a single EXTERIOR_* value. When this has more than one INTERIOR_*,
+ * the first value must be the one for the seat closest to the optical axis. If this
+ * contains INTERIOR_OTHER, all other values will be ineffective.</p>
+ * <p>When ACAMERA_AUTOMOTIVE_LOCATION is EXTERIOR_* or EXTRA, this has a single
+ * EXTERIOR_* value.</p>
+ * <p>If a camera has INTERIOR_OTHER or EXTERIOR_OTHER, or more than one camera is at the
+ * same location and facing the same direction, their static metadata will list the
+ * following entries, so that applications can determine their lenses' exact facing
+ * directions:</p>
+ * <ul>
+ * <li>ACAMERA_LENS_POSE_REFERENCE</li>
+ * <li>ACAMERA_LENS_POSE_ROTATION</li>
+ * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
+ * </ul>
+ *
+ * @see ACAMERA_AUTOMOTIVE_LOCATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING = // byte[n] (acamera_metadata_enum_android_automotive_lens_facing_t)
+ ACAMERA_AUTOMOTIVE_LENS_START,
+ ACAMERA_AUTOMOTIVE_LENS_END,
+
} acamera_metadata_tag_t;
/**
@@ -8591,6 +8679,14 @@
*/
ACAMERA_LENS_POSE_REFERENCE_UNDEFINED = 2,
+ /**
+ * <p>The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the origin of the
+ * automotive sensor coordinate system, which is at the center of the rear axle.</p>
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_LENS_POSE_REFERENCE_AUTOMOTIVE = 3,
+
} acamera_metadata_enum_android_lens_pose_reference_t;
@@ -10329,6 +10425,167 @@
+// ACAMERA_AUTOMOTIVE_LOCATION
+typedef enum acamera_metadata_enum_acamera_automotive_location {
+ /**
+ * <p>The camera device exists inside of the vehicle cabin.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_INTERIOR = 0,
+
+ /**
+ * <p>The camera exists outside of the vehicle body frame but not exactly on one of the
+ * exterior locations this enum defines. The applications should determine the exact
+ * location from ACAMERA_LENS_POSE_TRANSLATION.</p>
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_OTHER = 1,
+
+ /**
+ * <p>The camera device exists outside of the vehicle body frame and on its front side.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_FRONT = 2,
+
+ /**
+ * <p>The camera device exists outside of the vehicle body frame and on its rear side.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_REAR = 3,
+
+ /**
+ * <p>The camera device exists outside and on left side of the vehicle body frame.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_LEFT = 4,
+
+ /**
+ * <p>The camera device exists outside and on right side of the vehicle body frame.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT = 5,
+
+ /**
+ * <p>The camera device exists on an extra vehicle, such as the trailer, but not exactly
+ * on one of front, rear, left, or right side. Applications should determine the exact
+ * location from ACAMERA_LENS_POSE_TRANSLATION.</p>
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_OTHER = 6,
+
+ /**
+ * <p>The camera device exists outside of the extra vehicle's body frame and on its front
+ * side.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_FRONT = 7,
+
+ /**
+ * <p>The camera device exists outside of the extra vehicle's body frame and on its rear
+ * side.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_REAR = 8,
+
+ /**
+ * <p>The camera device exists outside and on left side of the extra vehicle body.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_LEFT = 9,
+
+ /**
+ * <p>The camera device exists outside and on right side of the extra vehicle body.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_RIGHT = 10,
+
+} acamera_metadata_enum_android_automotive_location_t;
+
+
+// ACAMERA_AUTOMOTIVE_LENS_FACING
+typedef enum acamera_metadata_enum_acamera_automotive_lens_facing {
+ /**
+ * <p>The camera device faces the outside of the vehicle body frame but not exactly
+ * one of the exterior sides defined by this enum. Applications should determine
+ * the exact facing direction from ACAMERA_LENS_POSE_ROTATION and
+ * ACAMERA_LENS_POSE_TRANSLATION.</p>
+ *
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_OTHER = 0,
+
+ /**
+ * <p>The camera device faces the front of the vehicle body frame.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_FRONT = 1,
+
+ /**
+ * <p>The camera device faces the rear of the vehicle body frame.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_REAR = 2,
+
+ /**
+ * <p>The camera device faces the left side of the vehicle body frame.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_LEFT = 3,
+
+ /**
+ * <p>The camera device faces the right side of the vehicle body frame.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_RIGHT = 4,
+
+ /**
+ * <p>The camera device faces the inside of the vehicle body frame but not exactly
+ * one of seats described by this enum. Applications should determine the exact
+ * facing direction from ACAMERA_LENS_POSE_ROTATION and ACAMERA_LENS_POSE_TRANSLATION.</p>
+ *
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_OTHER = 5,
+
+ /**
+ * <p>The camera device faces the left side seat of the first row.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_LEFT = 6,
+
+ /**
+ * <p>The camera device faces the center seat of the first row.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_CENTER = 7,
+
+ /**
+ * <p>The camera device faces the right seat of the first row.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_RIGHT = 8,
+
+ /**
+ * <p>The camera device faces the left side seat of the second row.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_LEFT = 9,
+
+ /**
+ * <p>The camera device faces the center seat of the second row.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_CENTER = 10,
+
+ /**
+ * <p>The camera device faces the right side seat of the second row.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_RIGHT = 11,
+
+ /**
+ * <p>The camera device faces the left side seat of the third row.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_LEFT = 12,
+
+ /**
+ * <p>The camera device faces the center seat of the third row.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_CENTER = 13,
+
+ /**
+ * <p>The camera device faces the right seat of the third row.</p>
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_RIGHT = 14,
+
+} acamera_metadata_enum_android_automotive_lens_facing_t;
+
+
__END_DECLS
diff --git a/drm/libmediadrm/CryptoHalAidl.cpp b/drm/libmediadrm/CryptoHalAidl.cpp
index 03782ef..5989ca2 100644
--- a/drm/libmediadrm/CryptoHalAidl.cpp
+++ b/drm/libmediadrm/CryptoHalAidl.cpp
@@ -41,7 +41,7 @@
using ::aidl::android::hardware::common::Ashmem;
using ::android::sp;
-using ::android::DrmUtils::toStatusTAidl;
+using ::android::DrmUtils::statusAidlToStatusT;
using ::android::hardware::hidl_array;
using ::android::hardware::hidl_handle;
using ::android::hardware::hidl_memory;
@@ -281,7 +281,7 @@
}
auto err = mPlugin->setMediaDrmSession(toStdVec(sessionId));
- return err.isOk() ? toStatusTAidl(err.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(err);
}
ssize_t CryptoHalAidl::decrypt(const uint8_t keyId[16], const uint8_t iv[16],
@@ -350,13 +350,12 @@
std::vector<uint8_t> keyIdAidl(toStdVec(keyId, 16));
std::vector<uint8_t> ivAidl(toStdVec(iv, 16));
DecryptResult result;
- err = mPlugin->decrypt(secure, keyIdAidl, ivAidl, aMode, aPattern, stdSubSamples,
+ ::ndk::ScopedAStatus statusAidl = mPlugin->decrypt(secure,
+ keyIdAidl, ivAidl, aMode, aPattern, stdSubSamples,
hidlSharedBufferToAidlSharedBuffer(hSource), offset,
- hidlDestinationBufferToAidlDestinationBuffer(hDestination), &result)
- .isOk()
- ? OK
- : DEAD_OBJECT;
+ hidlDestinationBufferToAidlDestinationBuffer(hDestination), &result);
+ err = statusAidlToStatusT(statusAidl);
*errorDetailMsg = toString8(result.detailedError);
if (err != OK) {
ALOGE("Failed on decrypt, error message:%s, bytes written:%d", result.detailedError.c_str(),
@@ -416,6 +415,6 @@
Mutex::Autolock autoLock(mLock);
// Need to convert logmessage
- return DrmUtils::GetLogMessagesAidl<ICryptoPluginAidl>(mPlugin, logs);
+ return DrmUtils::GetLogMessagesAidl<ICryptoPluginAidl>(mPlugin, logs);
}
} // namespace android
\ No newline at end of file
diff --git a/drm/libmediadrm/DrmHalAidl.cpp b/drm/libmediadrm/DrmHalAidl.cpp
index 9f640e0..1dba6e1 100644
--- a/drm/libmediadrm/DrmHalAidl.cpp
+++ b/drm/libmediadrm/DrmHalAidl.cpp
@@ -28,7 +28,7 @@
#include <mediadrm/DrmSessionManager.h>
#include <mediadrm/DrmUtils.h>
-using ::android::DrmUtils::toStatusTAidl;
+using ::android::DrmUtils::statusAidlToStatusT;
using ::aidl::android::hardware::drm::DrmMetricNamedValue;
using ::aidl::android::hardware::drm::DrmMetricValue;
@@ -511,7 +511,7 @@
::ndk::ScopedAStatus status = mPlugin->openSession(aSecurityLevel, &aSessionId);
if (status.isOk()) sessionId = toVector(aSessionId);
- err = status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ err = statusAidlToStatusT(status);
if (err == ERROR_DRM_RESOURCE_BUSY && retry) {
mLock.unlock();
@@ -545,6 +545,7 @@
std::vector<uint8_t> sessionIdAidl = toStdVec(sessionId);
::ndk::ScopedAStatus status = mPlugin->closeSession(sessionIdAidl);
+ status_t response = statusAidlToStatusT(status);
if (status.isOk()) {
DrmSessionManager::Instance()->removeSession(sessionId);
for (auto i = mOpenSessions.begin(); i != mOpenSessions.end(); i++) {
@@ -554,14 +555,11 @@
}
}
- status_t response = toStatusTAidl(status.getServiceSpecificError());
mMetrics.SetSessionEnd(sessionId);
- mMetrics.mCloseSessionCounter.Increment(response);
- return response;
}
- mMetrics.mCloseSessionCounter.Increment(DEAD_OBJECT);
- return DEAD_OBJECT;
+ mMetrics.mCloseSessionCounter.Increment(response);
+ return response;
}
status_t DrmHalAidl::getKeyRequest(Vector<uint8_t> const& sessionId,
@@ -603,7 +601,7 @@
*keyRequestType = toKeyRequestType(keyRequest.requestType);
}
- err = status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ err = statusAidlToStatusT(status);
keyRequestTimer.SetAttribute(err);
return err;
}
@@ -626,7 +624,7 @@
mPlugin->provideKeyResponse(sessionIdAidl, responseAidl, &keySetIdsAidl);
if (status.isOk()) keySetId = toVector(keySetIdsAidl.keySetId);
- err = status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ err = statusAidlToStatusT(status);
keyResponseTimer.SetAttribute(err);
return err;
}
@@ -636,7 +634,7 @@
INIT_CHECK();
::ndk::ScopedAStatus status = mPlugin->removeKeys(toStdVec(keySetId));
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::restoreKeys(Vector<uint8_t> const& sessionId,
@@ -649,7 +647,7 @@
KeySetId keySetIdsAidl;
keySetIdsAidl.keySetId = toStdVec(keySetId);
::ndk::ScopedAStatus status = mPlugin->restoreKeys(toStdVec(sessionId), keySetIdsAidl);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::queryKeyStatus(Vector<uint8_t> const& sessionId,
@@ -664,7 +662,7 @@
infoMap = toKeyedVector(infoMapAidl);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getProvisionRequest(String8 const& certType, String8 const& certAuthority,
@@ -681,7 +679,7 @@
request = toVector(requestAidl.request);
defaultUrl = toString8(requestAidl.defaultUrl);
- err = status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ err = statusAidlToStatusT(status);
mMetrics.mGetProvisionRequestCounter.Increment(err);
return err;
}
@@ -698,7 +696,7 @@
certificate = toVector(result.certificate);
wrappedKey = toVector(result.wrappedKey);
- err = status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ err = statusAidlToStatusT(status);
mMetrics.mProvideProvisionResponseCounter.Increment(err);
return err;
}
@@ -712,7 +710,7 @@
secureStops = toSecureStops(result);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getSecureStopIds(List<Vector<uint8_t>>& secureStopIds) {
@@ -724,7 +722,7 @@
secureStopIds = toSecureStopIds(result);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getSecureStop(Vector<uint8_t> const& ssid, Vector<uint8_t>& secureStop) {
@@ -739,7 +737,7 @@
secureStop = toVector(result.opaqueData);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::releaseSecureStops(Vector<uint8_t> const& ssRelease) {
@@ -750,7 +748,7 @@
ssId.opaqueData = toStdVec(ssRelease);
::ndk::ScopedAStatus status = mPlugin->releaseSecureStops(ssId);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::removeSecureStop(Vector<uint8_t> const& ssid) {
@@ -761,7 +759,7 @@
SecureStopId ssidAidl;
ssidAidl.secureStopId = toStdVec(ssid);
::ndk::ScopedAStatus status = mPlugin->removeSecureStop(ssidAidl);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::removeAllSecureStops() {
@@ -769,7 +767,7 @@
INIT_CHECK();
::ndk::ScopedAStatus status = mPlugin->releaseAllSecureStops();
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getHdcpLevels(DrmPlugin::HdcpLevel* connected,
@@ -790,7 +788,7 @@
*connected = toHdcpLevel(lvlsAidl.connectedLevel);
*max = toHdcpLevel(lvlsAidl.maxLevel);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getNumberOfSessions(uint32_t* open, uint32_t* max) const {
@@ -810,7 +808,7 @@
*open = result.currentSessions;
*max = result.maxSessions;
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getSecurityLevel(Vector<uint8_t> const& sessionId,
@@ -829,7 +827,7 @@
*level = toSecurityLevel(result);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getOfflineLicenseKeySetIds(List<Vector<uint8_t>>& keySetIds) const {
@@ -841,7 +839,7 @@
keySetIds = toKeySetIds(result);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::removeOfflineLicense(Vector<uint8_t> const& keySetId) {
@@ -851,7 +849,7 @@
KeySetId keySetIdAidl;
keySetIdAidl.keySetId = toStdVec(keySetId);
::ndk::ScopedAStatus status = mPlugin->removeOfflineLicense(keySetIdAidl);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getOfflineLicenseState(Vector<uint8_t> const& keySetId,
@@ -869,7 +867,7 @@
*licenseState = toOfflineLicenseState(result);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getPropertyString(String8 const& name, String8& value) const {
@@ -887,7 +885,7 @@
value = toString8(result);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getPropertyByteArray(String8 const& name, Vector<uint8_t>& value) const {
@@ -907,7 +905,7 @@
::ndk::ScopedAStatus status = mPlugin->getPropertyByteArray(toStdString(name), &result);
value = toVector(result);
- err = status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ err = statusAidlToStatusT(status);
if (name == kPropertyDeviceUniqueId) {
mMetrics.mGetDeviceUniqueIdCounter.Increment(err);
}
@@ -919,7 +917,7 @@
INIT_CHECK();
::ndk::ScopedAStatus status = mPlugin->setPropertyString(toStdString(name), toStdString(value));
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::setPropertyByteArray(String8 const& name, Vector<uint8_t> const& value) const {
@@ -927,7 +925,7 @@
INIT_CHECK();
::ndk::ScopedAStatus status = mPlugin->setPropertyByteArray(toStdString(name), toStdVec(value));
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getMetrics(const sp<IDrmMetricsConsumer>& consumer) {
@@ -963,7 +961,7 @@
consumer->consumeHidlMetrics(vendor, pluginMetrics);
}
- err = status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ err = statusAidlToStatusT(status);
return err;
}
@@ -977,7 +975,7 @@
::ndk::ScopedAStatus status =
mPlugin->setCipherAlgorithm(toStdVec(sessionId), toStdString(algorithm));
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::setMacAlgorithm(Vector<uint8_t> const& sessionId, String8 const& algorithm) {
@@ -988,7 +986,7 @@
::ndk::ScopedAStatus status =
mPlugin->setMacAlgorithm(toStdVec(sessionId), toStdString(algorithm));
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::encrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
@@ -1005,7 +1003,7 @@
output = toVector(result);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::decrypt(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
@@ -1022,7 +1020,7 @@
output = toVector(result);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::sign(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
@@ -1038,7 +1036,7 @@
signature = toVector(result);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::verify(Vector<uint8_t> const& sessionId, Vector<uint8_t> const& keyId,
@@ -1052,7 +1050,7 @@
::ndk::ScopedAStatus status = mPlugin->verify(toStdVec(sessionId), toStdVec(keyId),
toStdVec(message), toStdVec(signature), &match);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::signRSA(Vector<uint8_t> const& sessionId, String8 const& algorithm,
@@ -1070,7 +1068,7 @@
signature = toVector(result);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::requiresSecureDecoder(const char* mime, bool* required) const {
@@ -1095,12 +1093,13 @@
auto aLevel = toAidlSecurityLevel(securityLevel);
std::string mimeAidl(mime);
::ndk::ScopedAStatus status = mPlugin->requiresSecureDecoder(mimeAidl, aLevel, required);
+
+ status_t err = statusAidlToStatusT(status);
if (!status.isOk()) {
DrmUtils::LOG2BE("requiresSecureDecoder txn failed: %d", status.getServiceSpecificError());
- return DEAD_OBJECT;
}
- return OK;
+ return err;
}
status_t DrmHalAidl::setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId) {
@@ -1108,7 +1107,7 @@
INIT_CHECK();
std::string playbackIdAidl(playbackId);
::ndk::ScopedAStatus status = mPlugin->setPlaybackId(toStdVec(sessionId), playbackIdAidl);
- return status.isOk() ? toStatusTAidl(status.getServiceSpecificError()) : DEAD_OBJECT;
+ return statusAidlToStatusT(status);
}
status_t DrmHalAidl::getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const {
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index a5dd4d7..c83b52b 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -330,13 +330,14 @@
std::vector<sp<IDrmFactory>> DrmHalHidl::makeDrmFactories() {
static std::vector<sp<IDrmFactory>> factories(DrmUtils::MakeDrmFactories());
if (factories.size() == 0) {
- // must be in passthrough mode, load the default passthrough service
+ DrmUtils::LOG2BI("No hidl drm factories found");
+ // could be in passthrough mode, load the default passthrough service
auto passthrough = IDrmFactory::getService();
if (passthrough != NULL) {
DrmUtils::LOG2BI("makeDrmFactories: using default passthrough drm instance");
factories.push_back(passthrough);
} else {
- DrmUtils::LOG2BE("Failed to find any drm factories");
+ DrmUtils::LOG2BE("Failed to find passthrough drm factories");
}
}
return factories;
diff --git a/drm/libmediadrm/interface/mediadrm/DrmUtils.h b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
index 5679cfd..0368e43 100644
--- a/drm/libmediadrm/interface/mediadrm/DrmUtils.h
+++ b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
@@ -186,8 +186,10 @@
return toStatusT_1_4(err);
}
-inline status_t toStatusTAidl(int32_t serviceError) {
- auto status = static_cast<StatusAidl>(serviceError);
+inline status_t statusAidlToStatusT(::ndk::ScopedAStatus &statusAidl) {
+ if (statusAidl.isOk()) return OK;
+ if (statusAidl.getExceptionCode() != EX_SERVICE_SPECIFIC) return DEAD_OBJECT;
+ auto status = static_cast<StatusAidl>(statusAidl.getServiceSpecificError());
switch (status) {
case StatusAidl::OK:
return OK;
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index 5478bcd..76b4667 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -175,7 +175,7 @@
UNUSED(in_optionalParameters);
KeyRequestType keyRequestType = KeyRequestType::UNKNOWN;
- std::string defaultUrl("");
+ std::string defaultUrl("https://default.url");
_aidl_return->request = {};
_aidl_return->requestType = keyRequestType;
diff --git a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
index 1167bb0..0233ee1 100644
--- a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
+++ b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
@@ -202,7 +202,7 @@
int32_t framesPerBurst = AAudioStream_getFramesPerBurst(mAaudioStream);
uint8_t numberOfBursts = fdp.ConsumeIntegral<uint8_t>();
- int32_t maxInputFrames = numberOfBursts * framesPerBurst;
+ int32_t maxFrames = numberOfBursts * framesPerBurst;
int32_t requestedBufferSize =
fdp.ConsumeIntegral<uint16_t>() * framesPerBurst;
AAudioStream_setBufferSizeInFrames(mAaudioStream, requestedBufferSize);
@@ -218,26 +218,24 @@
int32_t count = fdp.ConsumeIntegral<int32_t>();
direction = AAudioStream_getDirection(mAaudioStream);
- framesPerDataCallback = AAudioStream_getFramesPerDataCallback(mAaudioStream);
if (actualFormat == AAUDIO_FORMAT_PCM_I16) {
- std::vector<int16_t> inputShortData(maxInputFrames * actualChannelCount,
- 0x0);
- if (direction == AAUDIO_DIRECTION_INPUT) {
- AAudioStream_read(mAaudioStream, inputShortData.data(),
- framesPerDataCallback, count * kNanosPerMillisecond);
+ std::vector<int16_t> inputShortData(maxFrames * actualChannelCount, 0x0);
+ if (direction == AAUDIO_DIRECTION_INPUT) {
+ AAudioStream_read(mAaudioStream, inputShortData.data(), maxFrames,
+ count * kNanosPerMillisecond);
} else if (direction == AAUDIO_DIRECTION_OUTPUT) {
- AAudioStream_write(mAaudioStream, inputShortData.data(),
- framesPerDataCallback, count * kNanosPerMillisecond);
+ AAudioStream_write(mAaudioStream, inputShortData.data(), maxFrames,
+ count * kNanosPerMillisecond);
}
} else if (actualFormat == AAUDIO_FORMAT_PCM_FLOAT) {
- std::vector<float> inputFloatData(maxInputFrames * actualChannelCount, 0x0);
- if (direction == AAUDIO_DIRECTION_INPUT) {
- AAudioStream_read(mAaudioStream, inputFloatData.data(),
- framesPerDataCallback, count * kNanosPerMillisecond);
+ std::vector<float> inputFloatData(maxFrames * actualChannelCount, 0x0);
+ if (direction == AAUDIO_DIRECTION_INPUT) {
+ AAudioStream_read(mAaudioStream, inputFloatData.data(), maxFrames,
+ count * kNanosPerMillisecond);
} else if (direction == AAUDIO_DIRECTION_OUTPUT) {
- AAudioStream_write(mAaudioStream, inputFloatData.data(),
- framesPerDataCallback, count * kNanosPerMillisecond);
+ AAudioStream_write(mAaudioStream, inputFloatData.data(), maxFrames,
+ count * kNanosPerMillisecond);
}
}
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index 2be3d65..a100aa9 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -321,9 +321,8 @@
mFramesPerDataCallback);
ALOGI("usage = %6d, contentType = %d, inputPreset = %d, allowedCapturePolicy = %d",
getUsage(), getContentType(), getInputPreset(), getAllowedCapturePolicy());
- ALOGI("privacy sensitive = %s", isPrivacySensitive() ? "true" : "false");
- ALOGI("opPackageName = %s", !getOpPackageName().has_value() ?
- "(null)" : getOpPackageName().value().c_str());
- ALOGI("attributionTag = %s", !getAttributionTag().has_value() ?
- "(null)" : getAttributionTag().value().c_str());
+ ALOGI("privacy sensitive = %s, opPackageName = %s, attributionTag = %s",
+ isPrivacySensitive() ? "true" : "false",
+ !getOpPackageName().has_value() ? "(null)" : getOpPackageName().value().c_str(),
+ !getAttributionTag().has_value() ? "(null)" : getAttributionTag().value().c_str());
}
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index eee7f7e..7b273ec 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -61,8 +61,7 @@
status_t AudioEffect::set(const effect_uuid_t *type,
const effect_uuid_t *uuid,
int32_t priority,
- effect_callback_t cbf,
- void* user,
+ const wp<IAudioEffectCallback>& callback,
audio_session_t sessionId,
audio_io_handle_t io,
const AudioDeviceTypeAddr& device,
@@ -73,7 +72,7 @@
sp<IMemory> cblk;
int enabled;
- ALOGV("set %p mUserData: %p uuid: %p timeLow %08x", this, user, type, type ? type->timeLow : 0);
+ ALOGV("set %p uuid: %p timeLow %08x", this, type, type ? type->timeLow : 0);
if (mIEffect != 0) {
ALOGW("Effect already in use");
@@ -96,9 +95,8 @@
}
mProbe = probe;
mPriority = priority;
- mCbf = cbf;
- mUserData = user;
mSessionId = sessionId;
+ mCallback = callback;
memset(&mDescriptor, 0, sizeof(effect_descriptor_t));
mDescriptor.type = *(type != NULL ? type : EFFECT_UUID_NULL);
@@ -187,11 +185,60 @@
return mStatus;
}
+namespace {
+class LegacyCallbackWrapper : public AudioEffect::IAudioEffectCallback {
+ public:
+ LegacyCallbackWrapper(AudioEffect::legacy_callback_t callback, void* user):
+ mCallback(callback), mUser(user) {}
+ private:
+ void onControlStatusChanged(bool isGranted) override {
+ mCallback(AudioEffect::EVENT_CONTROL_STATUS_CHANGED, mUser, &isGranted);
+ }
+
+ void onEnableStatusChanged(bool isEnabled) override {
+ mCallback(AudioEffect::EVENT_ENABLE_STATUS_CHANGED, mUser, &isEnabled);
+ }
+
+ void onParameterChanged(std::vector<uint8_t> param) override {
+ mCallback(AudioEffect::EVENT_PARAMETER_CHANGED, mUser, param.data());
+ }
+
+ void onError(status_t errorCode) override {
+ mCallback(AudioEffect::EVENT_ERROR, mUser, &errorCode);
+ }
+
+ void onFramesProcessed(int32_t framesProcessed) override {
+ mCallback(AudioEffect::EVENT_FRAMES_PROCESSED, mUser, &framesProcessed);
+ }
+
+ const AudioEffect::legacy_callback_t mCallback;
+ void* const mUser;
+};
+} // namespace
+
+status_t AudioEffect::set(const effect_uuid_t *type,
+ const effect_uuid_t *uuid,
+ int32_t priority,
+ legacy_callback_t cbf,
+ void* user,
+ audio_session_t sessionId,
+ audio_io_handle_t io,
+ const AudioDeviceTypeAddr& device,
+ bool probe,
+ bool notifyFramesProcessed)
+{
+ if (cbf != nullptr) {
+ mLegacyWrapper = sp<LegacyCallbackWrapper>::make(cbf, user);
+ } else if (user != nullptr) {
+ LOG_ALWAYS_FATAL("%s: User provided without callback", __func__);
+ }
+ return set(type, uuid, priority, mLegacyWrapper, sessionId, io, device, probe,
+ notifyFramesProcessed);
+}
status_t AudioEffect::set(const char *typeStr,
const char *uuidStr,
int32_t priority,
- effect_callback_t cbf,
- void* user,
+ const wp<IAudioEffectCallback>& callback,
audio_session_t sessionId,
audio_io_handle_t io,
const AudioDeviceTypeAddr& device,
@@ -213,11 +260,29 @@
pUuid = &uuid;
}
- return set(pType, pUuid, priority, cbf, user, sessionId, io,
+ return set(pType, pUuid, priority, callback, sessionId, io,
device, probe, notifyFramesProcessed);
}
-
+status_t AudioEffect::set(const char *typeStr,
+ const char *uuidStr,
+ int32_t priority,
+ legacy_callback_t cbf,
+ void* user,
+ audio_session_t sessionId,
+ audio_io_handle_t io,
+ const AudioDeviceTypeAddr& device,
+ bool probe,
+ bool notifyFramesProcessed)
+{
+ if (cbf != nullptr) {
+ mLegacyWrapper = sp<LegacyCallbackWrapper>::make(cbf, user);
+ } else if (user != nullptr) {
+ LOG_ALWAYS_FATAL("%s: User provided without callback", __func__);
+ }
+ return set(typeStr, uuidStr, priority, mLegacyWrapper, sessionId, io, device, probe,
+ notifyFramesProcessed);
+}
AudioEffect::~AudioEffect()
{
ALOGV("Destructor %p", this);
@@ -471,9 +536,9 @@
{
ALOGW("IEffect died");
mStatus = DEAD_OBJECT;
- if (mCbf != NULL) {
- status_t status = DEAD_OBJECT;
- mCbf(EVENT_ERROR, mUserData, &status);
+ auto cb = mCallback.promote();
+ if (cb != nullptr) {
+ cb->onError(mStatus);
}
mIEffect.clear();
}
@@ -482,8 +547,8 @@
void AudioEffect::controlStatusChanged(bool controlGranted)
{
- ALOGV("controlStatusChanged %p control %d callback %p mUserData %p", this, controlGranted, mCbf,
- mUserData);
+ auto cb = mCallback.promote();
+ ALOGV("controlStatusChanged %p control %d callback %p", this, controlGranted, cb.get());
if (controlGranted) {
if (mStatus == ALREADY_EXISTS) {
mStatus = NO_ERROR;
@@ -493,18 +558,19 @@
mStatus = ALREADY_EXISTS;
}
}
- if (mCbf != NULL) {
- mCbf(EVENT_CONTROL_STATUS_CHANGED, mUserData, &controlGranted);
+ if (cb != nullptr) {
+ cb->onControlStatusChanged(controlGranted);
}
}
void AudioEffect::enableStatusChanged(bool enabled)
{
- ALOGV("enableStatusChanged %p enabled %d mCbf %p", this, enabled, mCbf);
+ auto cb = mCallback.promote();
+ ALOGV("enableStatusChanged %p enabled %d mCallback %p", this, enabled, cb.get());
if (mStatus == ALREADY_EXISTS) {
mEnabled = enabled;
- if (mCbf != NULL) {
- mCbf(EVENT_ENABLE_STATUS_CHANGED, mUserData, &enabled);
+ if (cb != nullptr) {
+ cb->onEnableStatusChanged(enabled);
}
}
}
@@ -516,19 +582,20 @@
if (cmdData.empty() || replyData.empty()) {
return;
}
-
- if (mCbf != NULL && cmdCode == EFFECT_CMD_SET_PARAM) {
+ auto cb = mCallback.promote();
+ if (cb != nullptr && cmdCode == EFFECT_CMD_SET_PARAM) {
std::vector<uint8_t> cmdDataCopy(cmdData);
effect_param_t* cmd = reinterpret_cast<effect_param_t *>(cmdDataCopy.data());
cmd->status = *reinterpret_cast<const int32_t *>(replyData.data());
- mCbf(EVENT_PARAMETER_CHANGED, mUserData, cmd);
+ cb->onParameterChanged(std::move(cmdDataCopy));
}
}
void AudioEffect::framesProcessed(int32_t frames)
{
- if (mCbf != NULL) {
- mCbf(EVENT_FRAMES_PROCESSED, mUserData, &frames);
+ auto cb = mCallback.promote();
+ if (cb != nullptr) {
+ cb->onFramesProcessed(frames);
}
}
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index de14e1c..4bc61f5 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -537,6 +537,8 @@
float maxRequiredSpeed,
audio_port_handle_t selectedDeviceId)
{
+ LOG_ALWAYS_FATAL_IF(mInitialized, "%s: should not be called twice", __func__);
+ mInitialized = true;
status_t status;
uint32_t channelCount;
pid_t callingPid;
@@ -1591,12 +1593,12 @@
status_t AudioTrack::setMarkerPosition(uint32_t marker)
{
+ AutoMutex lock(mLock);
// The only purpose of setting marker position is to get a callback
- if (!mCallback.promote() || isOffloadedOrDirect()) {
+ if (!mCallback.promote() || isOffloadedOrDirect_l()) {
return INVALID_OPERATION;
}
- AutoMutex lock(mLock);
mMarkerPosition = marker;
mMarkerReached = false;
@@ -1624,12 +1626,12 @@
status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod)
{
+ AutoMutex lock(mLock);
// The only purpose of setting position update period is to get a callback
- if (!mCallback.promote() || isOffloadedOrDirect()) {
+ if (!mCallback.promote() || isOffloadedOrDirect_l()) {
return INVALID_OPERATION;
}
- AutoMutex lock(mLock);
mNewPosition = updateAndGetPosition_l() + updatePeriod;
mUpdatePeriod = updatePeriod;
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index ee262f3..56884a3 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -277,7 +277,7 @@
static status_t removeStreamDefaultEffect(audio_unique_id_t id);
/*
- * Events used by callback function (effect_callback_t).
+ * Events used by callback function (legacy_callback_t).
*/
enum event_type {
EVENT_CONTROL_STATUS_CHANGED = 0,
@@ -287,6 +287,47 @@
EVENT_FRAMES_PROCESSED = 4,
};
+ class IAudioEffectCallback : public virtual RefBase {
+ friend AudioEffect;
+ protected:
+ /* The event is received when an application loses or
+ * gains the control of the effect engine. Loss of control happens
+ * if another application requests the use of the engine by creating an AudioEffect for
+ * the same effect type but with a higher priority. Control is returned when the
+ * application having the control deletes its AudioEffect object.
+ * @param isGranted: True if control has been granted. False if stolen.
+ */
+ virtual void onControlStatusChanged([[maybe_unused]] bool isGranted) {}
+
+ /* The event is received by all applications not having the
+ * control of the effect engine when the effect is enabled or disabled.
+ * @param isEnabled: True if enabled. False if disabled.
+ */
+ virtual void onEnableStatusChanged([[maybe_unused]] bool isEnabled) {}
+
+ /* The event is received by all applications not having the
+ * control of the effect engine when an effect parameter is changed.
+ * @param param: A vector containing the raw bytes of a effect_param_type containing
+ * raw data representing a param type, value pair.
+ */
+ // TODO pass an AIDL parcel instead of effect_param_type
+ virtual void onParameterChanged([[maybe_unused]] std::vector<uint8_t> param) {}
+
+ /* The event is received when the binder connection to the mediaserver
+ * is no longer valid. Typically the server has been killed.
+ * @param errorCode: A code representing the type of error.
+ */
+ virtual void onError([[maybe_unused]] status_t errorCode) {}
+
+
+ /* The event is received when the audio server has processed a block of
+ * data.
+ * @param framesProcessed: The number of frames the audio server has
+ * processed.
+ */
+ virtual void onFramesProcessed([[maybe_unused]] int32_t framesProcessed) {}
+ };
+
/* Callback function notifying client application of a change in effect engine state or
* configuration.
* An effect engine can be shared by several applications but only one has the control
@@ -315,7 +356,7 @@
* - EVENT_ERROR: status_t indicating the error (DEAD_OBJECT when media server dies).
*/
- typedef void (*effect_callback_t)(int32_t event, void* user, void *info);
+ typedef void (*legacy_callback_t)(int32_t event, void* user, void *info);
/* Constructor.
@@ -360,7 +401,7 @@
* priority: requested priority for effect control: the priority level corresponds to the
* value of priority parameter: negative values indicate lower priorities, positive values
* higher priorities, 0 being the normal priority.
- * cbf: optional callback function (see effect_callback_t)
+ * cbf: optional callback function (see legacy_callback_t)
* user: pointer to context for use by the callback receiver.
* sessionId: audio session this effect is associated to.
* If equal to AUDIO_SESSION_OUTPUT_MIX, the effect will be global to
@@ -383,10 +424,20 @@
* - NO_INIT: audio flinger or audio hardware not initialized
*/
status_t set(const effect_uuid_t *type,
- const effect_uuid_t *uuid = NULL,
+ const effect_uuid_t *uuid = nullptr,
int32_t priority = 0,
- effect_callback_t cbf = NULL,
- void* user = NULL,
+ const wp<IAudioEffectCallback>& callback = nullptr,
+ audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
+ audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
+ const AudioDeviceTypeAddr& device = {},
+ bool probe = false,
+ bool notifyFramesProcessed = false);
+
+ status_t set(const effect_uuid_t *type,
+ const effect_uuid_t *uuid,
+ int32_t priority,
+ legacy_callback_t cbf,
+ void* user,
audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
const AudioDeviceTypeAddr& device = {},
@@ -396,10 +447,21 @@
* Same as above but with type and uuid specified by character strings.
*/
status_t set(const char *typeStr,
- const char *uuidStr = NULL,
+ const char *uuidStr = nullptr,
int32_t priority = 0,
- effect_callback_t cbf = NULL,
- void* user = NULL,
+ const wp<IAudioEffectCallback>& callback = nullptr,
+ audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
+ audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
+ const AudioDeviceTypeAddr& device = {},
+ bool probe = false,
+ bool notifyFramesProcessed = false);
+
+
+ status_t set(const char *typeStr,
+ const char *uuidStr,
+ int32_t priority,
+ legacy_callback_t cbf,
+ void* user,
audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
const AudioDeviceTypeAddr& device = {},
@@ -535,18 +597,19 @@
protected:
android::content::AttributionSourceState mClientAttributionSource; // source for app op checks.
- bool mEnabled = false; // enable state
- audio_session_t mSessionId = AUDIO_SESSION_OUTPUT_MIX; // audio session ID
- int32_t mPriority = 0; // priority for effect control
- status_t mStatus = NO_INIT; // effect status
- bool mProbe = false; // effect created in probe mode: all commands
- // are no ops because mIEffect is NULL
- effect_callback_t mCbf = nullptr; // callback function for status, control and
- // parameter changes notifications
- void* mUserData = nullptr;// client context for callback function
- effect_descriptor_t mDescriptor = {}; // effect descriptor
- int32_t mId = -1; // system wide unique effect engine instance ID
- Mutex mLock; // Mutex for mEnabled access
+ bool mEnabled = false; // enable state
+ audio_session_t mSessionId = AUDIO_SESSION_OUTPUT_MIX; // audio session ID
+ int32_t mPriority = 0; // priority for effect control
+ status_t mStatus = NO_INIT; // effect status
+ bool mProbe = false; // effect created in probe mode: all commands
+ // are no ops because mIEffect is nullptr
+
+ wp<IAudioEffectCallback> mCallback = nullptr; // callback interface for status, control and
+ // parameter changes notifications
+ sp<IAudioEffectCallback> mLegacyWrapper = nullptr;
+ effect_descriptor_t mDescriptor = {}; // effect descriptor
+ int32_t mId = -1; // system wide unique effect engine instance ID
+ Mutex mLock; // Mutex for mEnabled access
// IEffectClient
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 16e10b5..153d4b5 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1348,6 +1348,7 @@
sp<IAudioTrackCallback> mLegacyCallbackWrapper; // wrapper for legacy callback interface
// for notification APIs
+ bool mInitialized = false; // Set after track is initialized
// next 2 fields are const after constructor or set()
uint32_t mNotificationFramesReq; // requested number of frames between each
// notification callback,
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index ed7e50b..dd435fe 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -10,12 +10,8 @@
filegroup {
name: "audio_core_hal_client_sources",
srcs: [
- "DeviceHalLocal.cpp",
- "DevicesFactoryHalHybrid.cpp",
- "DevicesFactoryHalLocal.cpp",
"DeviceHalHidl.cpp",
"DevicesFactoryHalHidl.cpp",
- "StreamHalLocal.cpp",
"StreamHalHidl.cpp",
],
}
@@ -170,10 +166,10 @@
],
shared_libs: [
"android.hardware.audio.common@7.0",
- "android.hardware.audio.common@7.0-util",
+ "android.hardware.audio.common@7.1-util",
"android.hardware.audio@7.0",
"android.hardware.audio@7.1",
- "android.hardware.audio@7.0-util",
+ "android.hardware.audio@7.1-util",
],
cflags: [
"-DMAJOR_VERSION=7",
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 9438191..16863e4 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -512,6 +512,18 @@
return processReturn("setConnectedState", mDevice->setConnectedState(hidlAddress, connected));
}
+error::Result<audio_hw_sync_t> DeviceHalHidl::getHwAvSync() {
+ if (mDevice == 0) return NO_INIT;
+ audio_hw_sync_t value;
+ Result result;
+ Return<void> ret = mDevice->getHwAvSync([&value, &result](Result r, audio_hw_sync_t v) {
+ value = v;
+ result = r;
+ });
+ RETURN_IF_ERROR(processReturn("getHwAvSync", ret, result));
+ return value;
+}
+
status_t DeviceHalHidl::dump(int fd, const Vector<String16>& args) {
if (mDevice == 0) return NO_INIT;
native_handle_t* hidlHandle = native_handle_create(1, 0);
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index cd9535e..8a97a55 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -134,6 +134,8 @@
status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
+ error::Result<audio_hw_sync_t> getHwAvSync() override;
+
status_t dump(int fd, const Vector<String16>& args) override;
private:
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
deleted file mode 100644
index e473e41..0000000
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "DeviceHalLocal"
-//#define LOG_NDEBUG 0
-
-#include <media/AudioParameter.h>
-#include <utils/Log.h>
-
-#include "DeviceHalLocal.h"
-#include "StreamHalLocal.h"
-
-namespace android {
-
-DeviceHalLocal::DeviceHalLocal(audio_hw_device_t *dev)
- : mDev(dev) {
-}
-
-DeviceHalLocal::~DeviceHalLocal() {
- int status = audio_hw_device_close(mDev);
- ALOGW_IF(status, "Error closing audio hw device %p: %s", mDev, strerror(-status));
- mDev = 0;
-}
-
-status_t DeviceHalLocal::getSupportedDevices(uint32_t *devices) {
- if (mDev->get_supported_devices == NULL) return INVALID_OPERATION;
- *devices = mDev->get_supported_devices(mDev);
- return OK;
-}
-
-status_t DeviceHalLocal::initCheck() {
- return mDev->init_check(mDev);
-}
-
-status_t DeviceHalLocal::setVoiceVolume(float volume) {
- return mDev->set_voice_volume(mDev, volume);
-}
-
-status_t DeviceHalLocal::setMasterVolume(float volume) {
- if (mDev->set_master_volume == NULL) return INVALID_OPERATION;
- return mDev->set_master_volume(mDev, volume);
-}
-
-status_t DeviceHalLocal::getMasterVolume(float *volume) {
- if (mDev->get_master_volume == NULL) return INVALID_OPERATION;
- return mDev->get_master_volume(mDev, volume);
-}
-
-status_t DeviceHalLocal::setMode(audio_mode_t mode) {
- return mDev->set_mode(mDev, mode);
-}
-
-status_t DeviceHalLocal::setMicMute(bool state) {
- return mDev->set_mic_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::getMicMute(bool *state) {
- return mDev->get_mic_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::setMasterMute(bool state) {
- if (mDev->set_master_mute == NULL) return INVALID_OPERATION;
- return mDev->set_master_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::getMasterMute(bool *state) {
- if (mDev->get_master_mute == NULL) return INVALID_OPERATION;
- return mDev->get_master_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::setParameters(const String8& kvPairs) {
- return mDev->set_parameters(mDev, kvPairs.string());
-}
-
-status_t DeviceHalLocal::getParameters(const String8& keys, String8 *values) {
- char *halValues = mDev->get_parameters(mDev, keys.string());
- if (halValues != NULL) {
- values->setTo(halValues);
- free(halValues);
- } else {
- values->clear();
- }
- return OK;
-}
-
-status_t DeviceHalLocal::getInputBufferSize(
- const struct audio_config *config, size_t *size) {
- *size = mDev->get_input_buffer_size(mDev, config);
- return OK;
-}
-
-status_t DeviceHalLocal::openOutputStream(
- audio_io_handle_t handle,
- audio_devices_t deviceType,
- audio_output_flags_t flags,
- struct audio_config *config,
- const char *address,
- sp<StreamOutHalInterface> *outStream) {
- audio_stream_out_t *halStream;
- ALOGV("open_output_stream handle: %d devices: %x flags: %#x"
- "srate: %d format %#x channels %x address %s",
- handle, deviceType, flags,
- config->sample_rate, config->format, config->channel_mask,
- address);
- int openResut = mDev->open_output_stream(
- mDev, handle, deviceType, flags, config, &halStream, address);
- if (openResut == OK) {
- *outStream = new StreamOutHalLocal(halStream, this);
- }
- ALOGV("open_output_stream status %d stream %p", openResut, halStream);
- return openResut;
-}
-
-status_t DeviceHalLocal::openInputStream(
- audio_io_handle_t handle,
- audio_devices_t devices,
- struct audio_config *config,
- audio_input_flags_t flags,
- const char *address,
- audio_source_t source,
- audio_devices_t /*outputDevice*/,
- const char */*outputDeviceAddress*/,
- sp<StreamInHalInterface> *inStream) {
- audio_stream_in_t *halStream;
- ALOGV("open_input_stream handle: %d devices: %x flags: %#x "
- "srate: %d format %#x channels %x address %s source %d",
- handle, devices, flags,
- config->sample_rate, config->format, config->channel_mask,
- address, source);
- int openResult = mDev->open_input_stream(
- mDev, handle, devices, config, &halStream, flags, address, source);
- if (openResult == OK) {
- *inStream = new StreamInHalLocal(halStream, this);
- }
- ALOGV("open_input_stream status %d stream %p", openResult, inStream);
- return openResult;
-}
-
-status_t DeviceHalLocal::supportsAudioPatches(bool *supportsPatches) {
- *supportsPatches = version() >= AUDIO_DEVICE_API_VERSION_3_0;
- return OK;
-}
-
-status_t DeviceHalLocal::createAudioPatch(
- unsigned int num_sources,
- const struct audio_port_config *sources,
- unsigned int num_sinks,
- const struct audio_port_config *sinks,
- audio_patch_handle_t *patch) {
- if (version() >= AUDIO_DEVICE_API_VERSION_3_0) {
- return mDev->create_audio_patch(
- mDev, num_sources, sources, num_sinks, sinks, patch);
- } else {
- return INVALID_OPERATION;
- }
-}
-
-status_t DeviceHalLocal::releaseAudioPatch(audio_patch_handle_t patch) {
- if (version() >= AUDIO_DEVICE_API_VERSION_3_0) {
- return mDev->release_audio_patch(mDev, patch);
- } else {
- return INVALID_OPERATION;
- }
-}
-
-status_t DeviceHalLocal::getAudioPort(struct audio_port *port) {
- return mDev->get_audio_port(mDev, port);
-}
-
-status_t DeviceHalLocal::getAudioPort(struct audio_port_v7 *port) {
-#if MAJOR_VERSION >= 7
- if (version() >= AUDIO_DEVICE_API_VERSION_3_2) {
- // get_audio_port_v7 is mandatory if legacy HAL support this API version.
- return mDev->get_audio_port_v7(mDev, port);
- }
-#endif
- struct audio_port audioPort = {};
- audio_populate_audio_port(port, &audioPort);
- status_t status = getAudioPort(&audioPort);
- if (status == NO_ERROR) {
- audio_populate_audio_port_v7(&audioPort, port);
- }
- return status;
-}
-
-status_t DeviceHalLocal::setAudioPortConfig(const struct audio_port_config *config) {
- if (version() >= AUDIO_DEVICE_API_VERSION_3_0)
- return mDev->set_audio_port_config(mDev, config);
- else
- return INVALID_OPERATION;
-}
-
-#if MAJOR_VERSION == 2
-status_t DeviceHalLocal::getMicrophones(
- std::vector<media::MicrophoneInfo> *microphones __unused) {
- return INVALID_OPERATION;
-}
-#elif MAJOR_VERSION >= 4
-status_t DeviceHalLocal::getMicrophones(std::vector<media::MicrophoneInfo> *microphones) {
- if (mDev->get_microphones == NULL) return INVALID_OPERATION;
- size_t actual_mics = AUDIO_MICROPHONE_MAX_COUNT;
- audio_microphone_characteristic_t mic_array[AUDIO_MICROPHONE_MAX_COUNT];
- status_t status = mDev->get_microphones(mDev, &mic_array[0], &actual_mics);
- for (size_t i = 0; i < actual_mics; i++) {
- media::MicrophoneInfo microphoneInfo = media::MicrophoneInfo(mic_array[i]);
- microphones->push_back(microphoneInfo);
- }
- return status;
-}
-#endif
-
-// Local HAL implementation does not support effects
-status_t DeviceHalLocal::addDeviceEffect(
- audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
- return INVALID_OPERATION;
-}
-
-status_t DeviceHalLocal::removeDeviceEffect(
- audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
- return INVALID_OPERATION;
-}
-
-status_t DeviceHalLocal::setConnectedState(const struct audio_port_v7 *port, bool connected) {
- AudioParameter param(String8(port->ext.device.address));
- const String8 key(connected ?
- AudioParameter::keyDeviceConnect : AudioParameter::keyDeviceDisconnect);
- param.addInt(key, port->ext.device.type);
- return setParameters(param.toString());
-}
-
-status_t DeviceHalLocal::dump(int fd, const Vector<String16>& /* args */) {
- return mDev->dump(mDev, fd);
-}
-
-void DeviceHalLocal::closeOutputStream(struct audio_stream_out *stream_out) {
- mDev->close_output_stream(mDev, stream_out);
-}
-
-void DeviceHalLocal::closeInputStream(struct audio_stream_in *stream_in) {
- mDev->close_input_stream(mDev, stream_in);
-}
-
-} // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalLocal.h b/media/libaudiohal/impl/DeviceHalLocal.h
deleted file mode 100644
index 3e586cf..0000000
--- a/media/libaudiohal/impl/DeviceHalLocal.h
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
-#define ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
-
-#include <hardware/audio.h>
-#include <media/audiohal/DeviceHalInterface.h>
-
-namespace android {
-
-class DeviceHalLocal : public DeviceHalInterface
-{
- public:
- // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
- virtual status_t getSupportedDevices(uint32_t *devices);
-
- // Check to see if the audio hardware interface has been initialized.
- virtual status_t initCheck();
-
- // Set the audio volume of a voice call. Range is between 0.0 and 1.0.
- virtual status_t setVoiceVolume(float volume);
-
- // Set the audio volume for all audio activities other than voice call.
- virtual status_t setMasterVolume(float volume);
-
- // Get the current master volume value for the HAL.
- virtual status_t getMasterVolume(float *volume);
-
- // Called when the audio mode changes.
- virtual status_t setMode(audio_mode_t mode);
-
- // Muting control.
- virtual status_t setMicMute(bool state);
- virtual status_t getMicMute(bool *state);
- virtual status_t setMasterMute(bool state);
- virtual status_t getMasterMute(bool *state);
-
- // Set global audio parameters.
- virtual status_t setParameters(const String8& kvPairs);
-
- // Get global audio parameters.
- virtual status_t getParameters(const String8& keys, String8 *values);
-
- // Returns audio input buffer size according to parameters passed.
- virtual status_t getInputBufferSize(const struct audio_config *config,
- size_t *size);
-
- // Creates and opens the audio hardware output stream. The stream is closed
- // by releasing all references to the returned object.
- virtual status_t openOutputStream(
- audio_io_handle_t handle,
- audio_devices_t devices,
- audio_output_flags_t flags,
- struct audio_config *config,
- const char *address,
- sp<StreamOutHalInterface> *outStream);
-
- // Creates and opens the audio hardware input stream. The stream is closed
- // by releasing all references to the returned object.
- virtual status_t openInputStream(
- audio_io_handle_t handle,
- audio_devices_t devices,
- struct audio_config *config,
- audio_input_flags_t flags,
- const char *address,
- audio_source_t source,
- audio_devices_t outputDevice,
- const char *outputDeviceAddress,
- sp<StreamInHalInterface> *inStream);
-
- // Returns whether createAudioPatch and releaseAudioPatch operations are supported.
- virtual status_t supportsAudioPatches(bool *supportsPatches);
-
- // Creates an audio patch between several source and sink ports.
- virtual status_t createAudioPatch(
- unsigned int num_sources,
- const struct audio_port_config *sources,
- unsigned int num_sinks,
- const struct audio_port_config *sinks,
- audio_patch_handle_t *patch);
-
- // Releases an audio patch.
- virtual status_t releaseAudioPatch(audio_patch_handle_t patch);
-
- // Fills the list of supported attributes for a given audio port.
- virtual status_t getAudioPort(struct audio_port *port);
-
- // Fills the list of supported attributes for a given audio port.
- virtual status_t getAudioPort(struct audio_port_v7 *port);
-
- // Set audio port configuration.
- virtual status_t setAudioPortConfig(const struct audio_port_config *config);
-
- // List microphones
- virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
-
- status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
- status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
-
- status_t getMmapPolicyInfos(
- media::audio::common::AudioMMapPolicyType policyType __unused,
- std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos __unused) override {
- // This function will only be available on AIDL HAL.
- return INVALID_OPERATION;
- }
-
- int32_t getAAudioMixerBurstCount() override {
- // This function will only be available on AIDL HAL.
- return INVALID_OPERATION;
- }
-
- int32_t getAAudioHardwareBurstMinUsec() override {
- // This function will only be available on AIDL HAL.
- return INVALID_OPERATION;
- }
-
- status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
-
- status_t dump(int fd, const Vector<String16>& args) override;
-
- void closeOutputStream(struct audio_stream_out *stream_out);
- void closeInputStream(struct audio_stream_in *stream_in);
-
- uint32_t version() const { return mDev->common.version; }
-
- private:
- audio_hw_device_t *mDev;
-
- friend class DevicesFactoryHalLocal;
-
- // Can not be constructed directly by clients.
- explicit DeviceHalLocal(audio_hw_device_t *dev);
-
- // The destructor automatically closes the device.
- virtual ~DeviceHalLocal();
-};
-
-} // namespace android
-
-#endif // ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index f475729..8f3c907 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -227,4 +227,10 @@
return mDeviceFactories;
}
+// Main entry-point to the shared library.
+extern "C" __attribute__((visibility("default"))) void* createIDevicesFactory() {
+ auto service = hardware::audio::CPP_VERSION::IDevicesFactory::getService();
+ return service ? new DevicesFactoryHalHidl(service) : nullptr;
+}
+
} // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp b/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
deleted file mode 100644
index d684c27..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "DevicesFactoryHalHybrid"
-//#define LOG_NDEBUG 0
-
-#include "DevicesFactoryHalHidl.h"
-#include "DevicesFactoryHalHybrid.h"
-#include "DevicesFactoryHalLocal.h"
-
-namespace android {
-
-DevicesFactoryHalHybrid::DevicesFactoryHalHybrid(sp<IDevicesFactory> hidlFactory)
- : mLocalFactory(new DevicesFactoryHalLocal()),
- mHidlFactory(new DevicesFactoryHalHidl(hidlFactory)) {
-}
-
-status_t DevicesFactoryHalHybrid::openDevice(const char *name, sp<DeviceHalInterface> *device) {
- if (mHidlFactory != 0 && strcmp(AUDIO_HARDWARE_MODULE_ID_A2DP, name) != 0 &&
- strcmp(AUDIO_HARDWARE_MODULE_ID_HEARING_AID, name) != 0) {
- return mHidlFactory->openDevice(name, device);
- }
- return mLocalFactory->openDevice(name, device);
-}
-
-status_t DevicesFactoryHalHybrid::getHalPids(std::vector<pid_t> *pids) {
- if (mHidlFactory != 0) {
- return mHidlFactory->getHalPids(pids);
- }
- return INVALID_OPERATION;
-}
-
-status_t DevicesFactoryHalHybrid::setCallbackOnce(sp<DevicesFactoryHalCallback> callback) {
- if (mHidlFactory) {
- return mHidlFactory->setCallbackOnce(callback);
- }
- return INVALID_OPERATION;
-}
-
-extern "C" __attribute__((visibility("default"))) void* createIDevicesFactory() {
- auto service = hardware::audio::CPP_VERSION::IDevicesFactory::getService();
- return service ? new DevicesFactoryHalHybrid(service) : nullptr;
-}
-
-} // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHybrid.h b/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
deleted file mode 100644
index 221584c..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
-#define ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
-
-#include PATH(android/hardware/audio/FILE_VERSION/IDevicesFactory.h)
-#include <media/audiohal/DevicesFactoryHalInterface.h>
-#include <utils/Errors.h>
-#include <utils/RefBase.h>
-
-using ::android::hardware::audio::CPP_VERSION::IDevicesFactory;
-
-namespace android {
-
-class DevicesFactoryHalHybrid : public DevicesFactoryHalInterface
-{
- public:
- DevicesFactoryHalHybrid(sp<IDevicesFactory> hidlFactory);
-
- // Opens a device with the specified name. To close the device, it is
- // necessary to release references to the returned object.
- virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
-
- status_t getHalPids(std::vector<pid_t> *pids) override;
-
- status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
-
- float getHalVersion() const override {
- return MAJOR_VERSION + (float)MINOR_VERSION / 10;
- }
-
- private:
- sp<DevicesFactoryHalInterface> mLocalFactory;
- sp<DevicesFactoryHalInterface> mHidlFactory;
-};
-
-} // namespace android
-
-#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp b/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
deleted file mode 100644
index 13a9acd..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "DevicesFactoryHalLocal"
-//#define LOG_NDEBUG 0
-
-#include <string.h>
-
-#include <hardware/audio.h>
-#include <utils/Log.h>
-
-#include "DeviceHalLocal.h"
-#include "DevicesFactoryHalLocal.h"
-
-namespace android {
-
-static status_t load_audio_interface(const char *if_name, audio_hw_device_t **dev)
-{
- const hw_module_t *mod;
- int rc;
-
- rc = hw_get_module_by_class(AUDIO_HARDWARE_MODULE_ID, if_name, &mod);
- if (rc) {
- ALOGE("%s couldn't load audio hw module %s.%s (%s)", __func__,
- AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
- goto out;
- }
- rc = audio_hw_device_open(mod, dev);
- if (rc) {
- ALOGE("%s couldn't open audio hw device in %s.%s (%s)", __func__,
- AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
- goto out;
- }
- if ((*dev)->common.version < AUDIO_DEVICE_API_VERSION_MIN) {
- ALOGE("%s wrong audio hw device version %04x", __func__, (*dev)->common.version);
- rc = BAD_VALUE;
- audio_hw_device_close(*dev);
- goto out;
- }
- return OK;
-
-out:
- *dev = NULL;
- return rc;
-}
-
-status_t DevicesFactoryHalLocal::openDevice(const char *name, sp<DeviceHalInterface> *device) {
- audio_hw_device_t *dev;
- status_t rc = load_audio_interface(name, &dev);
- if (rc == OK) {
- *device = new DeviceHalLocal(dev);
- }
- return rc;
-}
-
-} // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalLocal.h b/media/libaudiohal/impl/DevicesFactoryHalLocal.h
deleted file mode 100644
index a0da125..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalLocal.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
-#define ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
-
-#include <media/audiohal/DevicesFactoryHalInterface.h>
-#include <utils/Errors.h>
-#include <utils/RefBase.h>
-
-#include "DeviceHalLocal.h"
-
-namespace android {
-
-class DevicesFactoryHalLocal : public DevicesFactoryHalInterface
-{
- public:
- // Opens a device with the specified name. To close the device, it is
- // necessary to release references to the returned object.
- virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
-
- status_t getHalPids(std::vector<pid_t> *pids __unused) override {
- return INVALID_OPERATION;
- }
-
- status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback __unused) override {
- return INVALID_OPERATION;
- }
-
- float getHalVersion() const override {
- return MAJOR_VERSION + (float)MINOR_VERSION / 10;
- }
-
- private:
- friend class DevicesFactoryHalHybrid;
-
- // Can not be constructed directly by clients.
- DevicesFactoryHalLocal() {}
-
- virtual ~DevicesFactoryHalLocal() {}
-};
-
-} // namespace android
-
-#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 6916ca1..263ee67 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -271,6 +271,32 @@
return err == 0;
}
+status_t StreamHalHidl::legacyCreateAudioPatch(const struct audio_port_config& port,
+ std::optional<audio_source_t> source,
+ audio_devices_t type) {
+ LOG_ALWAYS_FATAL_IF(port.type != AUDIO_PORT_TYPE_DEVICE, "port type must be device");
+ char* address;
+ if (strcmp(port.ext.device.address, "") != 0) {
+ // FIXME: we only support address on first sink with HAL version < 3.0
+ address = audio_device_address_to_parameter(port.ext.device.type, port.ext.device.address);
+ } else {
+ address = (char*)calloc(1, 1);
+ }
+ AudioParameter param = AudioParameter(String8(address));
+ free(address);
+ param.addInt(String8(AudioParameter::keyRouting), (int)type);
+ if (source.has_value()) {
+ param.addInt(String8(AudioParameter::keyInputSource), (int)source.value());
+ }
+ return setParameters(param.toString());
+}
+
+status_t StreamHalHidl::legacyReleaseAudioPatch() {
+ AudioParameter param;
+ param.addInt(String8(AudioParameter::keyRouting), 0);
+ return setParameters(param.toString());
+}
+
namespace {
/* Notes on callback ownership.
@@ -905,6 +931,11 @@
callback->onRecommendedLatencyModeChanged(modes);
}
+status_t StreamOutHalHidl::exit() {
+ // FIXME this is using hard-coded strings but in the future, this functionality will be
+ // converted to use audio HAL extensions required to support tunneling
+ return setParameters(String8("exiting=1"));
+}
StreamInHalHidl::StreamInHalHidl(
const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& stream)
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 44bf60a..4e80e88 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -89,6 +89,12 @@
// (must match the priority of the audioflinger's thread that calls 'read' / 'write')
virtual status_t setHalThreadPriority(int priority);
+ status_t legacyCreateAudioPatch(const struct audio_port_config& port,
+ std::optional<audio_source_t> source,
+ audio_devices_t type) override;
+
+ status_t legacyReleaseAudioPatch() override;
+
protected:
// Subclasses can not be constructed directly by clients.
explicit StreamHalHidl(IStream *stream);
@@ -197,6 +203,8 @@
void onRecommendedLatencyModeChanged(const std::vector<audio_latency_mode_t>& modes);
+ status_t exit() override;
+
private:
friend class DeviceHalHidl;
typedef MessageQueue<WriteCommand, hardware::kSynchronizedReadWrite> CommandMQ;
diff --git a/media/libaudiohal/impl/StreamHalLocal.cpp b/media/libaudiohal/impl/StreamHalLocal.cpp
deleted file mode 100644
index 477f510..0000000
--- a/media/libaudiohal/impl/StreamHalLocal.cpp
+++ /dev/null
@@ -1,520 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "StreamHalLocal"
-//#define LOG_NDEBUG 0
-
-#include <audio_utils/Metadata.h>
-#include <hardware/audio.h>
-#include <media/AudioParameter.h>
-#include <utils/Log.h>
-
-#include "DeviceHalLocal.h"
-#include "ParameterUtils.h"
-#include "StreamHalLocal.h"
-
-namespace android {
-
-StreamHalLocal::StreamHalLocal(audio_stream_t *stream, sp<DeviceHalLocal> device)
- : mDevice(device),
- mStream(stream) {
- // Instrument audio signal power logging.
- // Note: This assumes channel mask, format, and sample rate do not change after creation.
- if (mStream != nullptr /* && mStreamPowerLog.isUserDebugOrEngBuild() */) {
- mStreamPowerLog.init(mStream->get_sample_rate(mStream),
- mStream->get_channels(mStream),
- mStream->get_format(mStream));
- }
-}
-
-StreamHalLocal::~StreamHalLocal() {
- mStream = 0;
- mDevice.clear();
-}
-
-status_t StreamHalLocal::getBufferSize(size_t *size) {
- *size = mStream->get_buffer_size(mStream);
- return OK;
-}
-
-status_t StreamHalLocal::getAudioProperties(audio_config_base_t *configBase) {
- configBase->sample_rate = mStream->get_sample_rate(mStream);
- configBase->channel_mask = mStream->get_channels(mStream);
- configBase->format = mStream->get_format(mStream);
- return OK;
-}
-
-status_t StreamHalLocal::setParameters(const String8& kvPairs) {
- return mStream->set_parameters(mStream, kvPairs.string());
-}
-
-status_t StreamHalLocal::getParameters(const String8& keys, String8 *values) {
- char *halValues = mStream->get_parameters(mStream, keys.string());
- if (halValues != NULL) {
- values->setTo(halValues);
- free(halValues);
- } else {
- values->clear();
- }
- return OK;
-}
-
-status_t StreamHalLocal::addEffect(sp<EffectHalInterface>) {
- LOG_ALWAYS_FATAL("Local streams can not have effects");
- return INVALID_OPERATION;
-}
-
-status_t StreamHalLocal::removeEffect(sp<EffectHalInterface>) {
- LOG_ALWAYS_FATAL("Local streams can not have effects");
- return INVALID_OPERATION;
-}
-
-status_t StreamHalLocal::standby() {
- return mStream->standby(mStream);
-}
-
-status_t StreamHalLocal::dump(int fd, const Vector<String16>& args) {
- (void) args;
- status_t status = mStream->dump(mStream, fd);
- mStreamPowerLog.dump(fd);
- return status;
-}
-
-status_t StreamHalLocal::setHalThreadPriority(int) {
- // Don't need to do anything as local hal is executed by audioflinger directly
- // on the same thread.
- return OK;
-}
-
-StreamOutHalLocal::StreamOutHalLocal(audio_stream_out_t *stream, sp<DeviceHalLocal> device)
- : StreamHalLocal(&stream->common, device), mStream(stream) {
-}
-
-StreamOutHalLocal::~StreamOutHalLocal() {
- mCallback.clear();
- mDevice->closeOutputStream(mStream);
- mStream = 0;
-}
-
-status_t StreamOutHalLocal::getFrameSize(size_t *size) {
- *size = audio_stream_out_frame_size(mStream);
- return OK;
-}
-
-status_t StreamOutHalLocal::getLatency(uint32_t *latency) {
- *latency = mStream->get_latency(mStream);
- return OK;
-}
-
-status_t StreamOutHalLocal::setVolume(float left, float right) {
- if (mStream->set_volume == NULL) return INVALID_OPERATION;
- return mStream->set_volume(mStream, left, right);
-}
-
-status_t StreamOutHalLocal::selectPresentation(int presentationId, int programId) {
- AudioParameter param;
- param.addInt(String8(AudioParameter::keyPresentationId), presentationId);
- param.addInt(String8(AudioParameter::keyProgramId), programId);
- return setParameters(param.toString());
-}
-
-status_t StreamOutHalLocal::write(const void *buffer, size_t bytes, size_t *written) {
- ssize_t writeResult = mStream->write(mStream, buffer, bytes);
- if (writeResult > 0) {
- *written = writeResult;
- mStreamPowerLog.log(buffer, *written);
- return OK;
- } else {
- *written = 0;
- return writeResult;
- }
-}
-
-status_t StreamOutHalLocal::getRenderPosition(uint32_t *dspFrames) {
- return mStream->get_render_position(mStream, dspFrames);
-}
-
-status_t StreamOutHalLocal::getNextWriteTimestamp(int64_t *timestamp) {
- if (mStream->get_next_write_timestamp == NULL) return INVALID_OPERATION;
- return mStream->get_next_write_timestamp(mStream, timestamp);
-}
-
-status_t StreamOutHalLocal::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
- if (mStream->set_callback == NULL) return INVALID_OPERATION;
- status_t result = mStream->set_callback(mStream, StreamOutHalLocal::asyncCallback, this);
- if (result == OK) {
- mCallback = callback;
- }
- return result;
-}
-
-// static
-int StreamOutHalLocal::asyncCallback(stream_callback_event_t event, void*, void *cookie) {
- // We act as if we gave a wp<StreamOutHalLocal> to HAL. This way we should handle
- // correctly the case when the callback is invoked while StreamOutHalLocal's destructor is
- // already running, because the destructor is invoked after the refcount has been atomically
- // decremented.
- wp<StreamOutHalLocal> weakSelf(static_cast<StreamOutHalLocal*>(cookie));
- sp<StreamOutHalLocal> self = weakSelf.promote();
- if (self == 0) return 0;
- sp<StreamOutHalInterfaceCallback> callback = self->mCallback.promote();
- if (callback == 0) return 0;
- ALOGV("asyncCallback() event %d", event);
- switch (event) {
- case STREAM_CBK_EVENT_WRITE_READY:
- callback->onWriteReady();
- break;
- case STREAM_CBK_EVENT_DRAIN_READY:
- callback->onDrainReady();
- break;
- case STREAM_CBK_EVENT_ERROR:
- callback->onError();
- break;
- default:
- ALOGW("asyncCallback() unknown event %d", event);
- break;
- }
- return 0;
-}
-
-status_t StreamOutHalLocal::supportsPauseAndResume(bool *supportsPause, bool *supportsResume) {
- *supportsPause = mStream->pause != NULL;
- *supportsResume = mStream->resume != NULL;
- return OK;
-}
-
-status_t StreamOutHalLocal::pause() {
- if (mStream->pause == NULL) return INVALID_OPERATION;
- return mStream->pause(mStream);
-}
-
-status_t StreamOutHalLocal::resume() {
- if (mStream->resume == NULL) return INVALID_OPERATION;
- return mStream->resume(mStream);
-}
-
-status_t StreamOutHalLocal::supportsDrain(bool *supportsDrain) {
- *supportsDrain = mStream->drain != NULL;
- return OK;
-}
-
-status_t StreamOutHalLocal::drain(bool earlyNotify) {
- if (mStream->drain == NULL) return INVALID_OPERATION;
- return mStream->drain(mStream, earlyNotify ? AUDIO_DRAIN_EARLY_NOTIFY : AUDIO_DRAIN_ALL);
-}
-
-status_t StreamOutHalLocal::flush() {
- if (mStream->flush == NULL) return INVALID_OPERATION;
- return mStream->flush(mStream);
-}
-
-status_t StreamOutHalLocal::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
- if (mStream->get_presentation_position == NULL) return INVALID_OPERATION;
- return mStream->get_presentation_position(mStream, frames, timestamp);
-}
-
-void StreamOutHalLocal::doUpdateSourceMetadata(const SourceMetadata& sourceMetadata) {
- std::vector<playback_track_metadata> halTracks;
- halTracks.reserve(sourceMetadata.tracks.size());
- for (auto& metadata : sourceMetadata.tracks) {
- playback_track_metadata halTrackMetadata;
- playback_track_metadata_from_v7(&halTrackMetadata, &metadata);
- halTracks.push_back(halTrackMetadata);
- }
- const source_metadata_t halMetadata = {
- .track_count = halTracks.size(),
- .tracks = halTracks.data(),
- };
- mStream->update_source_metadata(mStream, &halMetadata);
-}
-
-#if MAJOR_VERSION >= 7
-void StreamOutHalLocal::doUpdateSourceMetadataV7(const SourceMetadata& sourceMetadata) {
- const source_metadata_v7_t metadata {
- .track_count = sourceMetadata.tracks.size(),
- // const cast is fine as it is in a const structure
- .tracks = const_cast<playback_track_metadata_v7*>(sourceMetadata.tracks.data()),
- };
- mStream->update_source_metadata_v7(mStream, &metadata);
-}
-#endif
-
-status_t StreamOutHalLocal::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
-#if MAJOR_VERSION < 7
- if (mStream->update_source_metadata == nullptr) {
- return INVALID_OPERATION;
- }
- doUpdateSourceMetadata(sourceMetadata);
-#else
- if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2) {
- if (mStream->update_source_metadata == nullptr) {
- return INVALID_OPERATION;
- }
- doUpdateSourceMetadata(sourceMetadata);
- } else {
- if (mStream->update_source_metadata_v7 == nullptr) {
- return INVALID_OPERATION;
- }
- doUpdateSourceMetadataV7(sourceMetadata);
- }
-#endif
- return OK;
-}
-
-
-status_t StreamOutHalLocal::start() {
- if (mStream->start == NULL) return INVALID_OPERATION;
- return mStream->start(mStream);
-}
-
-status_t StreamOutHalLocal::stop() {
- if (mStream->stop == NULL) return INVALID_OPERATION;
- return mStream->stop(mStream);
-}
-
-status_t StreamOutHalLocal::createMmapBuffer(int32_t minSizeFrames,
- struct audio_mmap_buffer_info *info) {
- if (mStream->create_mmap_buffer == NULL) return INVALID_OPERATION;
- return mStream->create_mmap_buffer(mStream, minSizeFrames, info);
-}
-
-status_t StreamOutHalLocal::getMmapPosition(struct audio_mmap_position *position) {
- if (mStream->get_mmap_position == NULL) return INVALID_OPERATION;
- return mStream->get_mmap_position(mStream, position);
-}
-
-status_t StreamOutHalLocal::getDualMonoMode(audio_dual_mono_mode_t* mode) {
- if (mStream->get_dual_mono_mode == nullptr) return INVALID_OPERATION;
- return mStream->get_dual_mono_mode(mStream, mode);
-}
-
-status_t StreamOutHalLocal::setDualMonoMode(audio_dual_mono_mode_t mode) {
- if (mStream->set_dual_mono_mode == nullptr) return INVALID_OPERATION;
- return mStream->set_dual_mono_mode(mStream, mode);
-}
-
-status_t StreamOutHalLocal::getAudioDescriptionMixLevel(float* leveldB) {
- if (mStream->get_audio_description_mix_level == nullptr) return INVALID_OPERATION;
- return mStream->get_audio_description_mix_level(mStream, leveldB);
-}
-
-status_t StreamOutHalLocal::setAudioDescriptionMixLevel(float leveldB) {
- if (mStream->set_audio_description_mix_level == nullptr) return INVALID_OPERATION;
- return mStream->set_audio_description_mix_level(mStream, leveldB);
-}
-
-status_t StreamOutHalLocal::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
- if (mStream->get_playback_rate_parameters == nullptr) return INVALID_OPERATION;
- return mStream->get_playback_rate_parameters(mStream, playbackRate);
-}
-
-status_t StreamOutHalLocal::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
- if (mStream->set_playback_rate_parameters == nullptr) return INVALID_OPERATION;
- return mStream->set_playback_rate_parameters(mStream, &playbackRate);
-}
-
-status_t StreamOutHalLocal::setEventCallback(
- const sp<StreamOutHalInterfaceEventCallback>& callback) {
- if (mStream->set_event_callback == nullptr) {
- return INVALID_OPERATION;
- }
- stream_event_callback_t asyncCallback =
- callback == nullptr ? nullptr : StreamOutHalLocal::asyncEventCallback;
- status_t result = mStream->set_event_callback(mStream, asyncCallback, this);
- if (result == OK) {
- mEventCallback = callback;
- }
- return result;
-}
-
-// static
-int StreamOutHalLocal::asyncEventCallback(
- stream_event_callback_type_t event, void *param, void *cookie) {
- // We act as if we gave a wp<StreamOutHalLocal> to HAL. This way we should handle
- // correctly the case when the callback is invoked while StreamOutHalLocal's destructor is
- // already running, because the destructor is invoked after the refcount has been atomically
- // decremented.
- wp<StreamOutHalLocal> weakSelf(static_cast<StreamOutHalLocal*>(cookie));
- sp<StreamOutHalLocal> self = weakSelf.promote();
- if (self == nullptr) return 0;
- sp<StreamOutHalInterfaceEventCallback> callback = self->mEventCallback.promote();
- if (callback.get() == nullptr) return 0;
- switch (event) {
- case STREAM_EVENT_CBK_TYPE_CODEC_FORMAT_CHANGED:
- // void* param is the byte string buffer from byte_string_from_audio_metadata().
- // As the byte string buffer may have embedded zeroes, we cannot use strlen()
- callback->onCodecFormatChanged(std::basic_string<uint8_t>(
- (const uint8_t*)param,
- audio_utils::metadata::dataByteStringLen((const uint8_t*)param)));
- break;
- default:
- ALOGW("%s unknown event %d", __func__, event);
- break;
- }
- return 0;
-}
-
-StreamInHalLocal::StreamInHalLocal(audio_stream_in_t *stream, sp<DeviceHalLocal> device)
- : StreamHalLocal(&stream->common, device), mStream(stream) {
-}
-
-StreamInHalLocal::~StreamInHalLocal() {
- mDevice->closeInputStream(mStream);
- mStream = 0;
-}
-
-status_t StreamInHalLocal::getFrameSize(size_t *size) {
- *size = audio_stream_in_frame_size(mStream);
- return OK;
-}
-
-status_t StreamInHalLocal::setGain(float gain) {
- return mStream->set_gain(mStream, gain);
-}
-
-status_t StreamInHalLocal::read(void *buffer, size_t bytes, size_t *read) {
- ssize_t readResult = mStream->read(mStream, buffer, bytes);
- if (readResult > 0) {
- *read = readResult;
- mStreamPowerLog.log( buffer, *read);
- return OK;
- } else {
- *read = 0;
- return readResult;
- }
-}
-
-status_t StreamInHalLocal::getInputFramesLost(uint32_t *framesLost) {
- *framesLost = mStream->get_input_frames_lost(mStream);
- return OK;
-}
-
-status_t StreamInHalLocal::getCapturePosition(int64_t *frames, int64_t *time) {
- if (mStream->get_capture_position == NULL) return INVALID_OPERATION;
- return mStream->get_capture_position(mStream, frames, time);
-}
-
-void StreamInHalLocal::doUpdateSinkMetadata(const SinkMetadata& sinkMetadata) {
- std::vector<record_track_metadata> halTracks;
- halTracks.reserve(sinkMetadata.tracks.size());
- for (auto& metadata : sinkMetadata.tracks) {
- record_track_metadata halTrackMetadata;
- record_track_metadata_from_v7(&halTrackMetadata, &metadata);
- halTracks.push_back(halTrackMetadata);
- }
- const sink_metadata_t halMetadata = {
- .track_count = halTracks.size(),
- .tracks = halTracks.data(),
- };
- mStream->update_sink_metadata(mStream, &halMetadata);
-}
-
-#if MAJOR_VERSION >= 7
-void StreamInHalLocal::doUpdateSinkMetadataV7(const SinkMetadata& sinkMetadata) {
- const sink_metadata_v7_t halMetadata {
- .track_count = sinkMetadata.tracks.size(),
- // const cast is fine as it is in a const structure
- .tracks = const_cast<record_track_metadata_v7*>(sinkMetadata.tracks.data()),
- };
- mStream->update_sink_metadata_v7(mStream, &halMetadata);
-}
-#endif
-
-status_t StreamInHalLocal::updateSinkMetadata(const SinkMetadata& sinkMetadata) {
-#if MAJOR_VERSION < 7
- if (mStream->update_sink_metadata == nullptr) {
- return INVALID_OPERATION; // not supported by the HAL
- }
- doUpdateSinkMetadata(sinkMetadata);
-#else
- if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2) {
- if (mStream->update_sink_metadata == nullptr) {
- return INVALID_OPERATION; // not supported by the HAL
- }
- doUpdateSinkMetadata(sinkMetadata);
- } else {
- if (mStream->update_sink_metadata_v7 == nullptr) {
- return INVALID_OPERATION; // not supported by the HAL
- }
- doUpdateSinkMetadataV7(sinkMetadata);
- }
-#endif
- return OK;
-}
-
-status_t StreamInHalLocal::start() {
- if (mStream->start == NULL) return INVALID_OPERATION;
- return mStream->start(mStream);
-}
-
-status_t StreamInHalLocal::stop() {
- if (mStream->stop == NULL) return INVALID_OPERATION;
- return mStream->stop(mStream);
-}
-
-status_t StreamInHalLocal::createMmapBuffer(int32_t minSizeFrames,
- struct audio_mmap_buffer_info *info) {
- if (mStream->create_mmap_buffer == NULL) return INVALID_OPERATION;
- return mStream->create_mmap_buffer(mStream, minSizeFrames, info);
-}
-
-status_t StreamInHalLocal::getMmapPosition(struct audio_mmap_position *position) {
- if (mStream->get_mmap_position == NULL) return INVALID_OPERATION;
- return mStream->get_mmap_position(mStream, position);
-}
-
-#if MAJOR_VERSION == 2
-status_t StreamInHalLocal::getActiveMicrophones(
- std::vector<media::MicrophoneInfo> *microphones __unused) {
- return INVALID_OPERATION;
-}
-#elif MAJOR_VERSION >= 4
-status_t StreamInHalLocal::getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones) {
- if (mStream->get_active_microphones == NULL) return INVALID_OPERATION;
- size_t actual_mics = AUDIO_MICROPHONE_MAX_COUNT;
- audio_microphone_characteristic_t mic_array[AUDIO_MICROPHONE_MAX_COUNT];
- status_t status = mStream->get_active_microphones(mStream, &mic_array[0], &actual_mics);
- for (size_t i = 0; i < actual_mics; i++) {
- media::MicrophoneInfo microphoneInfo = media::MicrophoneInfo(mic_array[i]);
- microphones->push_back(microphoneInfo);
- }
- return status;
-}
-#endif
-
-#if MAJOR_VERSION < 5
-status_t StreamInHalLocal::setPreferredMicrophoneDirection(
- audio_microphone_direction_t direction __unused) {
- return INVALID_OPERATION;
-}
-
-status_t StreamInHalLocal::setPreferredMicrophoneFieldDimension(float zoom __unused) {
- return INVALID_OPERATION;
-}
-#else
-status_t StreamInHalLocal::setPreferredMicrophoneDirection(audio_microphone_direction_t direction) {
- if (mStream->set_microphone_direction == NULL) return INVALID_OPERATION;
- return mStream->set_microphone_direction(mStream, direction);
-}
-
-status_t StreamInHalLocal::setPreferredMicrophoneFieldDimension(float zoom) {
- if (mStream->set_microphone_field_dimension == NULL) return INVALID_OPERATION;
- return mStream->set_microphone_field_dimension(mStream, zoom);
-
-}
-#endif
-
-} // namespace android
diff --git a/media/libaudiohal/impl/StreamHalLocal.h b/media/libaudiohal/impl/StreamHalLocal.h
deleted file mode 100644
index 770137f..0000000
--- a/media/libaudiohal/impl/StreamHalLocal.h
+++ /dev/null
@@ -1,262 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
-#define ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
-
-#include <media/audiohal/StreamHalInterface.h>
-#include "StreamPowerLog.h"
-
-namespace android {
-
-class DeviceHalLocal;
-
-class StreamHalLocal : public virtual StreamHalInterface
-{
- public:
- // Return size of input/output buffer in bytes for this stream - eg. 4800.
- virtual status_t getBufferSize(size_t *size);
-
- // Return the base configuration of the stream:
- // - channel mask;
- // - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
- // - sampling rate in Hz - eg. 44100.
- virtual status_t getAudioProperties(audio_config_base_t *configBase);
-
- // Set audio stream parameters.
- virtual status_t setParameters(const String8& kvPairs);
-
- // Get audio stream parameters.
- virtual status_t getParameters(const String8& keys, String8 *values);
-
- // Add or remove the effect on the stream.
- virtual status_t addEffect(sp<EffectHalInterface> effect);
- virtual status_t removeEffect(sp<EffectHalInterface> effect);
-
- // Put the audio hardware input/output into standby mode.
- virtual status_t standby();
-
- virtual status_t dump(int fd, const Vector<String16>& args) override;
-
- // Start a stream operating in mmap mode.
- virtual status_t start() = 0;
-
- // Stop a stream operating in mmap mode.
- virtual status_t stop() = 0;
-
- // Retrieve information on the data buffer in mmap mode.
- virtual status_t createMmapBuffer(int32_t minSizeFrames,
- struct audio_mmap_buffer_info *info) = 0;
-
- // Get current read/write position in the mmap buffer
- virtual status_t getMmapPosition(struct audio_mmap_position *position) = 0;
-
- // Set the priority of the thread that interacts with the HAL
- // (must match the priority of the audioflinger's thread that calls 'read' / 'write')
- virtual status_t setHalThreadPriority(int priority);
-
- protected:
- // Subclasses can not be constructed directly by clients.
- StreamHalLocal(audio_stream_t *stream, sp<DeviceHalLocal> device);
-
- // The destructor automatically closes the stream.
- virtual ~StreamHalLocal();
-
- sp<DeviceHalLocal> mDevice;
-
- // mStreamPowerLog is used for audio signal power logging.
- StreamPowerLog mStreamPowerLog;
-
- private:
- audio_stream_t *mStream;
-};
-
-class StreamOutHalLocal : public StreamOutHalInterface, public StreamHalLocal {
- public:
- // Return the frame size (number of bytes per sample) of a stream.
- virtual status_t getFrameSize(size_t *size);
-
- // Return the audio hardware driver estimated latency in milliseconds.
- virtual status_t getLatency(uint32_t *latency);
-
- // Use this method in situations where audio mixing is done in the hardware.
- virtual status_t setVolume(float left, float right);
-
- // Selects the audio presentation (if available).
- virtual status_t selectPresentation(int presentationId, int programId);
-
- // Write audio buffer to driver.
- virtual status_t write(const void *buffer, size_t bytes, size_t *written);
-
- // Return the number of audio frames written by the audio dsp to DAC since
- // the output has exited standby.
- virtual status_t getRenderPosition(uint32_t *dspFrames);
-
- // Get the local time at which the next write to the audio driver will be presented.
- virtual status_t getNextWriteTimestamp(int64_t *timestamp);
-
- // Set the callback for notifying completion of non-blocking write and drain.
- virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback);
-
- // Returns whether pause and resume operations are supported.
- virtual status_t supportsPauseAndResume(bool *supportsPause, bool *supportsResume);
-
- // Notifies to the audio driver to resume playback following a pause.
- virtual status_t pause();
-
- // Notifies to the audio driver to resume playback following a pause.
- virtual status_t resume();
-
- // Returns whether drain operation is supported.
- virtual status_t supportsDrain(bool *supportsDrain);
-
- // Requests notification when data buffered by the driver/hardware has been played.
- virtual status_t drain(bool earlyNotify);
-
- // Notifies to the audio driver to flush the queued data.
- virtual status_t flush();
-
- // Return a recent count of the number of audio frames presented to an external observer.
- virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
-
- // Start a stream operating in mmap mode.
- virtual status_t start();
-
- // Stop a stream operating in mmap mode.
- virtual status_t stop();
-
- // Retrieve information on the data buffer in mmap mode.
- virtual status_t createMmapBuffer(int32_t minSizeFrames,
- struct audio_mmap_buffer_info *info);
-
- // Get current read/write position in the mmap buffer
- virtual status_t getMmapPosition(struct audio_mmap_position *position);
-
- // Called when the metadata of the stream's source has been changed.
- status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
-
- // Returns the Dual Mono mode presentation setting.
- status_t getDualMonoMode(audio_dual_mono_mode_t* mode) override;
-
- // Sets the Dual Mono mode presentation on the output device.
- status_t setDualMonoMode(audio_dual_mono_mode_t mode) override;
-
- // Returns the Audio Description Mix level in dB.
- status_t getAudioDescriptionMixLevel(float* leveldB) override;
-
- // Sets the Audio Description Mix level in dB.
- status_t setAudioDescriptionMixLevel(float leveldB) override;
-
- // Retrieves current playback rate parameters.
- status_t getPlaybackRateParameters(audio_playback_rate_t* playbackRate) override;
-
- // Sets the playback rate parameters that control playback behavior.
- status_t setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) override;
-
- status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
-
- status_t setLatencyMode(audio_latency_mode_t mode __unused) override {
- return INVALID_OPERATION;
- }
- status_t getRecommendedLatencyModes(
- std::vector<audio_latency_mode_t> *modes __unused) override {
- return INVALID_OPERATION;
- }
- status_t setLatencyModeCallback(
- const sp<StreamOutHalInterfaceLatencyModeCallback>& callback __unused) override {
- return INVALID_OPERATION;
- }
-
- private:
- audio_stream_out_t *mStream;
- wp<StreamOutHalInterfaceCallback> mCallback;
- wp<StreamOutHalInterfaceEventCallback> mEventCallback;
-
- friend class DeviceHalLocal;
-
- // Can not be constructed directly by clients.
- StreamOutHalLocal(audio_stream_out_t *stream, sp<DeviceHalLocal> device);
-
- virtual ~StreamOutHalLocal();
-
- static int asyncCallback(stream_callback_event_t event, void *param, void *cookie);
-
- static int asyncEventCallback(stream_event_callback_type_t event, void *param, void *cookie);
-
- void doUpdateSourceMetadataV7(const SourceMetadata& sourceMetadata);
- void doUpdateSourceMetadata(const SourceMetadata& sourceMetadata);
-};
-
-class StreamInHalLocal : public StreamInHalInterface, public StreamHalLocal {
- public:
- // Return the frame size (number of bytes per sample) of a stream.
- virtual status_t getFrameSize(size_t *size);
-
- // Set the input gain for the audio driver.
- virtual status_t setGain(float gain);
-
- // Read audio buffer in from driver.
- virtual status_t read(void *buffer, size_t bytes, size_t *read);
-
- // Return the amount of input frames lost in the audio driver.
- virtual status_t getInputFramesLost(uint32_t *framesLost);
-
- // Return a recent count of the number of audio frames received and
- // the clock time associated with that frame count.
- virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
-
- // Start a stream operating in mmap mode.
- virtual status_t start();
-
- // Stop a stream operating in mmap mode.
- virtual status_t stop();
-
- // Retrieve information on the data buffer in mmap mode.
- virtual status_t createMmapBuffer(int32_t minSizeFrames,
- struct audio_mmap_buffer_info *info);
-
- // Get current read/write position in the mmap buffer
- virtual status_t getMmapPosition(struct audio_mmap_position *position);
-
- // Get active microphones
- virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones);
-
- // Sets microphone direction (for processing)
- virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
-
- // Sets microphone zoom (for processing)
- virtual status_t setPreferredMicrophoneFieldDimension(float zoom);
-
- // Called when the metadata of the stream's sink has been changed.
- status_t updateSinkMetadata(const SinkMetadata& sinkMetadata) override;
-
- private:
- audio_stream_in_t *mStream;
-
- friend class DeviceHalLocal;
-
- // Can not be constructed directly by clients.
- StreamInHalLocal(audio_stream_in_t *stream, sp<DeviceHalLocal> device);
-
- virtual ~StreamInHalLocal();
-
- void doUpdateSinkMetadata(const SinkMetadata& sinkMetadata);
- void doUpdateSinkMetadataV7(const SinkMetadata& sinkMetadata);
-};
-
-} // namespace android
-
-#endif // ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 9ca7118..d27ad4c 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -19,6 +19,7 @@
#include <android/media/audio/common/AudioMMapPolicyInfo.h>
#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <error/Result.h>
#include <media/audiohal/EffectHalInterface.h>
#include <media/MicrophoneInfo.h>
#include <system/audio.h>
@@ -131,6 +132,8 @@
// Update the connection status of an external device.
virtual status_t setConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+ virtual error::Result<audio_hw_sync_t> getHwAvSync() = 0;
+
virtual status_t dump(int fd, const Vector<String16>& args) = 0;
protected:
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index e12fe77..1d52b7d 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -89,6 +89,12 @@
// (must match the priority of the audioflinger's thread that calls 'read' / 'write')
virtual status_t setHalThreadPriority(int priority) = 0;
+ virtual status_t legacyCreateAudioPatch(const struct audio_port_config& port,
+ std::optional<audio_source_t> source,
+ audio_devices_t type) = 0;
+
+ virtual status_t legacyReleaseAudioPatch() = 0;
+
protected:
// Subclasses can not be constructed directly by clients.
StreamHalInterface() {}
@@ -242,6 +248,11 @@
virtual status_t setLatencyModeCallback(
const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) = 0;
+ /**
+ * Signal the end of audio output, interrupting an ongoing 'write' operation.
+ */
+ virtual status_t exit() = 0;
+
protected:
virtual ~StreamOutHalInterface() {}
};
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index f50acae..f7bc177 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -178,10 +178,6 @@
"NdkMediaDataSourceCallbacks.cpp",
],
- include_dirs: [
- "frameworks/av/media/libstagefright/include",
- "frameworks/av/media/ndk/include",
- ],
export_include_dirs: [
"include",
@@ -194,6 +190,7 @@
],
header_libs: [
+ "libstagefright_headers",
"libmedia_headers",
],
@@ -224,6 +221,7 @@
"libcutils",
"android.hardware.graphics.bufferqueue@1.0",
],
+
header_libs: [
"libstagefright_foundation_headers",
],
@@ -231,9 +229,6 @@
cflags: [
"-D__ANDROID_VNDK__",
],
- include_dirs: [
- "frameworks/av/media/ndk/",
- ],
}
cc_library_static {
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 0e2de4e..354971a 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -29,6 +29,7 @@
#include <gui/Surface.h>
#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/PersistentSurface.h>
@@ -59,6 +60,7 @@
kWhatAsyncNotify,
kWhatRequestActivityNotifications,
kWhatStopActivityNotifications,
+ kWhatFrameRenderedNotify,
};
struct AMediaCodecPersistentSurface : public Surface {
@@ -98,6 +100,11 @@
mutable Mutex mAsyncCallbackLock;
AMediaCodecOnAsyncNotifyCallback mAsyncCallback;
void *mAsyncCallbackUserData;
+
+ sp<AMessage> mFrameRenderedNotify;
+ mutable Mutex mFrameRenderedCallbackLock;
+ AMediaCodecOnFrameRendered mFrameRenderedCallback;
+ void *mFrameRenderedCallbackUserData;
};
CodecHandler::CodecHandler(AMediaCodec *codec) {
@@ -158,8 +165,7 @@
}
Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
- if (mCodec->mAsyncCallbackUserData != NULL
- || mCodec->mAsyncCallback.onAsyncInputAvailable != NULL) {
+ if (mCodec->mAsyncCallback.onAsyncInputAvailable != NULL) {
mCodec->mAsyncCallback.onAsyncInputAvailable(
mCodec,
mCodec->mAsyncCallbackUserData,
@@ -205,8 +211,7 @@
(uint32_t)flags};
Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
- if (mCodec->mAsyncCallbackUserData != NULL
- || mCodec->mAsyncCallback.onAsyncOutputAvailable != NULL) {
+ if (mCodec->mAsyncCallback.onAsyncOutputAvailable != NULL) {
mCodec->mAsyncCallback.onAsyncOutputAvailable(
mCodec,
mCodec->mAsyncCallbackUserData,
@@ -234,8 +239,7 @@
AMediaFormat *aMediaFormat = AMediaFormat_fromMsg(©);
Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
- if (mCodec->mAsyncCallbackUserData != NULL
- || mCodec->mAsyncCallback.onAsyncFormatChanged != NULL) {
+ if (mCodec->mAsyncCallback.onAsyncFormatChanged != NULL) {
mCodec->mAsyncCallback.onAsyncFormatChanged(
mCodec,
mCodec->mAsyncCallbackUserData,
@@ -263,8 +267,7 @@
err, actionCode, detail.c_str());
Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
- if (mCodec->mAsyncCallbackUserData != NULL
- || mCodec->mAsyncCallback.onAsyncError != NULL) {
+ if (mCodec->mAsyncCallback.onAsyncError != NULL) {
mCodec->mAsyncCallback.onAsyncError(
mCodec,
mCodec->mAsyncCallbackUserData,
@@ -298,6 +301,43 @@
break;
}
+ case kWhatFrameRenderedNotify:
+ {
+ sp<AMessage> data;
+ if (!msg->findMessage("data", &data)) {
+ ALOGE("kWhatFrameRenderedNotify: data is expected.");
+ break;
+ }
+
+ AMessage::Type type;
+ int64_t mediaTimeUs, systemNano;
+ size_t index = 0;
+
+ // TODO. This code has dependency with MediaCodec::CreateFramesRenderedMessage.
+ for (size_t ix = 0; ix < data->countEntries(); ix++) {
+ AString name = data->getEntryNameAt(ix, &type);
+ if (name.startsWith(AStringPrintf("%zu-media-time-us", index).c_str())) {
+ AMessage::ItemData data = msg->getEntryAt(index);
+ data.find(&mediaTimeUs);
+ } else if (name.startsWith(AStringPrintf("%zu-system-nano", index).c_str())) {
+ AMessage::ItemData data = msg->getEntryAt(index);
+ data.find(&systemNano);
+
+ Mutex::Autolock _l(mCodec->mFrameRenderedCallbackLock);
+ if (mCodec->mFrameRenderedCallback != NULL) {
+ mCodec->mFrameRenderedCallback(
+ mCodec,
+ mCodec->mFrameRenderedCallbackUserData,
+ mediaTimeUs,
+ systemNano);
+ }
+
+ index++;
+ }
+ }
+ break;
+ }
+
default:
ALOGE("shouldn't be here");
break;
@@ -474,22 +514,46 @@
AMediaCodec *mData,
AMediaCodecOnAsyncNotifyCallback callback,
void *userdata) {
- if (mData->mAsyncNotify == NULL && userdata != NULL) {
- mData->mAsyncNotify = new AMessage(kWhatAsyncNotify, mData->mHandler);
- status_t err = mData->mCodec->setCallback(mData->mAsyncNotify);
- if (err != OK) {
- ALOGE("setAsyncNotifyCallback: err(%d), failed to set async callback", err);
- return translate_error(err);
- }
- }
Mutex::Autolock _l(mData->mAsyncCallbackLock);
+
+ if (mData->mAsyncNotify == NULL) {
+ mData->mAsyncNotify = new AMessage(kWhatAsyncNotify, mData->mHandler);
+ }
+
+ // always call, codec may have been reset/re-configured since last call.
+ status_t err = mData->mCodec->setCallback(mData->mAsyncNotify);
+ if (err != OK) {
+ ALOGE("setAsyncNotifyCallback: err(%d), failed to set async callback", err);
+ return translate_error(err);
+ }
+
mData->mAsyncCallback = callback;
mData->mAsyncCallbackUserData = userdata;
return AMEDIA_OK;
}
+EXPORT
+media_status_t AMediaCodec_setOnFrameRenderedCallback(
+ AMediaCodec *mData,
+ AMediaCodecOnFrameRendered callback,
+ void *userdata) {
+ Mutex::Autolock _l(mData->mFrameRenderedCallbackLock);
+ if (mData->mFrameRenderedNotify == NULL) {
+ mData->mFrameRenderedNotify = new AMessage(kWhatFrameRenderedNotify, mData->mHandler);
+ }
+ status_t err = mData->mCodec->setOnFrameRenderedNotification(mData->mFrameRenderedNotify);
+ if (err != OK) {
+ ALOGE("setOnFrameRenderedNotifyCallback: err(%d), failed to set callback", err);
+ return translate_error(err);
+ }
+
+ mData->mFrameRenderedCallback = callback;
+ mData->mFrameRenderedCallbackUserData = userdata;
+
+ return AMEDIA_OK;
+}
EXPORT
media_status_t AMediaCodec_releaseCrypto(AMediaCodec *mData) {
diff --git a/media/ndk/include/media/NdkMediaCodec.h b/media/ndk/include/media/NdkMediaCodec.h
index 519148e..5633374 100644
--- a/media/ndk/include/media/NdkMediaCodec.h
+++ b/media/ndk/include/media/NdkMediaCodec.h
@@ -39,6 +39,8 @@
#include <stdint.h>
#include <sys/cdefs.h>
+#include <android/api-level.h>
+
#include "NdkMediaCrypto.h"
#include "NdkMediaError.h"
#include "NdkMediaFormat.h"
@@ -122,6 +124,25 @@
} AMediaCodecOnAsyncNotifyCallback;
/**
+ * Called when an output frame has rendered on the output surface.
+ *
+ * \param codec The codec object that generated this notification.
+ * \param userdata The user data set at AMediaCodec_setOnFrameRenderedCallback.
+ * \param mediaTimeUs The presentation time (media time) of the frame rendered.
+ * This is usually the same as specified in
+ * AMediaCodec_queueInputBuffer, but some codecs may alter
+ * the media time by applying some time-based transformation,
+ * such as frame rate conversion. In that case, presentation
+ * time corresponds to the actual output frame rendered.
+ * \param systemNano The system time when the frame was rendered.
+ */
+typedef void (*AMediaCodecOnFrameRendered)(
+ AMediaCodec *codec,
+ void *userdata,
+ int64_t mediaTimeUs,
+ int64_t systemNano);
+
+/**
* Create codec by name. Use this if you know the exact codec you want to use.
* When configuring, you will need to specify whether to use the codec as an
* encoder or decoder.
@@ -441,6 +462,32 @@
void *userdata) __INTRODUCED_IN(28);
/**
+ * Registers a callback to be invoked when an output frame is rendered on the output surface.
+ *
+ * This method can be called in any codec state, but will only have an effect in the
+ * Executing state for codecs that render buffers to the output surface.
+ *
+ * This callback is for informational purposes only: to get precise
+ * render timing samples, and can be significantly delayed and batched. Some frames may have
+ * been rendered even if there was no callback generated.
+ *
+ * Refer to the definition of AMediaCodecOnFrameRendered on how each
+ * callback function is called and what are specified.
+ * The specified userdata is the pointer used when those callback functions are
+ * called.
+ *
+ * All callbacks are fired on one NDK internal thread.
+ * AMediaCodec_setOnFrameRenderedCallback should not be called on the callback thread.
+ * No heavy duty task should be performed on callback thread.
+ *
+ * Available since Android T.
+ */
+media_status_t AMediaCodec_setOnFrameRenderedCallback(
+ AMediaCodec*,
+ AMediaCodecOnFrameRendered callback,
+ void *userdata) __INTRODUCED_IN(__ANDROID_API_T__);
+
+/**
* Release the crypto if applicable.
*
* Available since API level 28.
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index b228945..c8faced 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -203,6 +203,7 @@
AMediaCodec_releaseOutputBuffer;
AMediaCodec_releaseOutputBufferAtTime;
AMediaCodec_setAsyncNotifyCallback; # introduced=28
+ AMediaCodec_setOnFrameRenderedCallback; # introduced=Tiramisu
AMediaCodec_setOutputSurface; # introduced=24
AMediaCodec_setParameters; # introduced=26
AMediaCodec_setInputSurface; # introduced=26
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 5b53331..479906f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2493,21 +2493,17 @@
if (dev == nullptr) {
return AUDIO_HW_SYNC_INVALID;
}
- String8 reply;
- AudioParameter param;
- if (dev->getParameters(String8(AudioParameter::keyHwAvSync), &reply) == OK) {
- param = AudioParameter(reply);
- }
- int value;
- if (param.getInt(String8(AudioParameter::keyHwAvSync), value) != NO_ERROR) {
+ error::Result<audio_hw_sync_t> result = dev->getHwAvSync();
+ if (!result.ok()) {
ALOGW("getAudioHwSyncForSession error getting sync for session %d", sessionId);
return AUDIO_HW_SYNC_INVALID;
}
+ audio_hw_sync_t value = VALUE_OR_FATAL(result);
// allow only one session for a given HW A/V sync ID.
for (size_t i = 0; i < mHwAvSyncIds.size(); i++) {
- if (mHwAvSyncIds.valueAt(i) == (audio_hw_sync_t)value) {
+ if (mHwAvSyncIds.valueAt(i) == value) {
ALOGV("getAudioHwSyncForSession removing ID %d for session %d",
value, mHwAvSyncIds.keyAt(i));
mHwAvSyncIds.removeItemsAt(i);
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index dd278f0..ae5772d 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2104,10 +2104,8 @@
void AudioFlinger::PlaybackThread::preExit()
{
ALOGV(" preExit()");
- // FIXME this is using hard-coded strings but in the future, this functionality will be
- // converted to use audio HAL extensions required to support tunneling
- status_t result = mOutput->stream->setParameters(String8("exiting=1"));
- ALOGE_IF(result != OK, "Error when setting parameters on exit: %d", result);
+ status_t result = mOutput->stream->exit();
+ ALOGE_IF(result != OK, "Error when calling exit(): %d", result);
}
void AudioFlinger::PlaybackThread::dumpTracks_l(int fd, const Vector<String16>& args __unused)
@@ -4585,19 +4583,7 @@
patch->sinks,
handle);
} else {
- char *address;
- if (strcmp(patch->sinks[0].ext.device.address, "") != 0) {
- //FIXME: we only support address on first sink with HAL version < 3.0
- address = audio_device_address_to_parameter(
- patch->sinks[0].ext.device.type,
- patch->sinks[0].ext.device.address);
- } else {
- address = (char *)calloc(1, 1);
- }
- AudioParameter param = AudioParameter(String8(address));
- free(address);
- param.addInt(String8(AudioParameter::keyRouting), (int)type);
- status = mOutput->stream->setParameters(param.toString());
+ status = mOutput->stream->legacyCreateAudioPatch(patch->sinks[0], std::nullopt, type);
*handle = AUDIO_PATCH_HANDLE_NONE;
}
const std::string patchSinksAsString = patchSinksToString(patch);
@@ -4642,9 +4628,7 @@
sp<DeviceHalInterface> hwDevice = mOutput->audioHwDev->hwDevice();
status = hwDevice->releaseAudioPatch(handle);
} else {
- AudioParameter param;
- param.addInt(String8(AudioParameter::keyRouting), 0);
- status = mOutput->stream->setParameters(param.toString());
+ status = mOutput->stream->legacyReleaseAudioPatch();
}
return status;
}
@@ -9130,21 +9114,9 @@
patch->sinks,
handle);
} else {
- char *address;
- if (strcmp(patch->sources[0].ext.device.address, "") != 0) {
- address = audio_device_address_to_parameter(
- patch->sources[0].ext.device.type,
- patch->sources[0].ext.device.address);
- } else {
- address = (char *)calloc(1, 1);
- }
- AudioParameter param = AudioParameter(String8(address));
- free(address);
- param.addInt(String8(AudioParameter::keyRouting),
- (int)patch->sources[0].ext.device.type);
- param.addInt(String8(AudioParameter::keyInputSource),
- (int)patch->sinks[0].ext.mix.usecase.source);
- status = mInput->stream->setParameters(param.toString());
+ status = mInput->stream->legacyCreateAudioPatch(patch->sources[0],
+ patch->sinks[0].ext.mix.usecase.source,
+ patch->sources[0].ext.device.type);
*handle = AUDIO_PATCH_HANDLE_NONE;
}
@@ -9176,9 +9148,7 @@
sp<DeviceHalInterface> hwDevice = mInput->audioHwDev->hwDevice();
status = hwDevice->releaseAudioPatch(handle);
} else {
- AudioParameter param;
- param.addInt(String8(AudioParameter::keyRouting), 0);
- status = mInput->stream->setParameters(param.toString());
+ status = mInput->stream->legacyReleaseAudioPatch();
}
return status;
}
@@ -9897,29 +9867,18 @@
}
if (mAudioHwDev->supportsAudioPatches()) {
- status = mHalDevice->createAudioPatch(patch->num_sources,
- patch->sources,
- patch->num_sinks,
- patch->sinks,
- handle);
+ status = mHalDevice->createAudioPatch(patch->num_sources, patch->sources, patch->num_sinks,
+ patch->sinks, handle);
} else {
- char *address;
- if (strcmp(patch->sinks[0].ext.device.address, "") != 0) {
- //FIXME: we only support address on first sink with HAL version < 3.0
- address = audio_device_address_to_parameter(
- patch->sinks[0].ext.device.type,
- patch->sinks[0].ext.device.address);
+ audio_port_config port;
+ std::optional<audio_source_t> source;
+ if (isOutput()) {
+ port = patch->sinks[0];
} else {
- address = (char *)calloc(1, 1);
+ port = patch->sources[0];
+ source = patch->sinks[0].ext.mix.usecase.source;
}
- AudioParameter param = AudioParameter(String8(address));
- free(address);
- param.addInt(String8(AudioParameter::keyRouting), (int)type);
- if (!isOutput()) {
- param.addInt(String8(AudioParameter::keyInputSource),
- (int)patch->sinks[0].ext.mix.usecase.source);
- }
- status = mHalStream->setParameters(param.toString());
+ status = mHalStream->legacyCreateAudioPatch(port, source, type);
*handle = AUDIO_PATCH_HANDLE_NONE;
}
@@ -9958,9 +9917,7 @@
if (supportsAudioPatches) {
status = mHalDevice->releaseAudioPatch(handle);
} else {
- AudioParameter param;
- param.addInt(String8(AudioParameter::keyRouting), 0);
- status = mHalStream->setParameters(param.toString());
+ status = mHalStream->legacyReleaseAudioPatch();
}
return status;
}
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index dff36e2..7c6907d 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -127,13 +127,20 @@
return result;
}();
+/**
+ * AudioSource - AUDIO_SOURCE_VOICE_COMMUNICATION and AUDIO_SOURCE_HOTWORD
+ * are excluded from kAudioSources[] in order to avoid the abort triggered
+ * for these two types of AudioSource in Engine::getDeviceForInputSource()
+ */
static const std::vector<audio_source_t> kAudioSources = [] {
std::vector<audio_source_t> result;
for (const auto enumVal : xsdc_enum_range<xsd::AudioSource>{}) {
audio_source_t audioSourceHal;
std::string audioSource = toString(enumVal);
- if (audio_source_from_string(audioSource.c_str(), &audioSourceHal)) {
- result.push_back(audioSourceHal);
+ if (enumVal != xsd::AudioSource::AUDIO_SOURCE_VOICE_COMMUNICATION &&
+ enumVal != xsd::AudioSource::AUDIO_SOURCE_HOTWORD &&
+ audio_source_from_string(audioSource.c_str(), &audioSourceHal)) {
+ result.push_back(audioSourceHal);
}
}
return result;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 7829abf..3e44230 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -864,6 +864,7 @@
bool isMultiResolution = outputConfiguration.isMultiResolution();
int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
int streamUseCase = outputConfiguration.getStreamUseCase();
+ int timestampBase = outputConfiguration.getTimestampBase();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
outputConfiguration.getSurfaceType());
@@ -908,7 +909,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase);
+ streamUseCase, timestampBase);
if (!res.isOk())
return res;
@@ -954,7 +955,8 @@
static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
- /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase);
+ /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
+ streamInfo.timestampBase);
}
if (err != OK) {
@@ -1065,7 +1067,8 @@
std::forward_as_tuple(width, height, format, dataSpace, consumerUsage,
overriddenSensorPixelModesUsed,
outputConfiguration.getDynamicRangeProfile(),
- outputConfiguration.getStreamUseCase()));
+ outputConfiguration.getStreamUseCase(),
+ outputConfiguration.getTimestampBase()));
ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
" (%d x %d) stream with format 0x%x.",
@@ -1254,7 +1257,7 @@
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
int streamUseCase = outputConfiguration.getStreamUseCase();
-
+ int timestampBase = outputConfiguration.getTimestampBase();
int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
for (size_t i = 0; i < newOutputsMap.size(); i++) {
@@ -1263,7 +1266,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase);
+ streamUseCase, timestampBase);
if (!res.isOk())
return res;
@@ -1622,6 +1625,7 @@
outputConfiguration.getSensorPixelModesUsed();
int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
int streamUseCase= outputConfiguration.getStreamUseCase();
+ int timestampBase = outputConfiguration.getTimestampBase();
for (auto& bufferProducer : bufferProducers) {
// Don't create multiple streams for the same target surface
ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -1635,7 +1639,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase);
+ streamUseCase, timestampBase);
if (!res.isOk())
return res;
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 6ddf500..baa21f0 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -184,7 +184,8 @@
bool isShared = false, bool isMultiResolution = false,
uint64_t consumerUsage = 0,
int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) = 0;
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT) = 0;
/**
* Create an output stream of the requested size, format, rotation and
@@ -203,7 +204,8 @@
bool isShared = false, bool isMultiResolution = false,
uint64_t consumerUsage = 0,
int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) = 0;
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT) = 0;
/**
* Create an input stream of width, height, and format.
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 59b0305..eb3cdf2 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -84,6 +84,7 @@
mStatusWaiters(0),
mUsePartialResult(false),
mNumPartialResults(1),
+ mDeviceTimeBaseIsRealtime(false),
mTimestampOffset(0),
mNextResultFrameNumber(0),
mNextReprocessResultFrameNumber(0),
@@ -187,11 +188,12 @@
mIsInputStreamMultiResolution = false;
// Measure the clock domain offset between camera and video/hw_composer
+ mTimestampOffset = getMonoToBoottimeOffset();
camera_metadata_entry timestampSource =
mDeviceInfo.find(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE);
if (timestampSource.count > 0 && timestampSource.data.u8[0] ==
ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
- mTimestampOffset = getMonoToBoottimeOffset();
+ mDeviceTimeBaseIsRealtime = true;
}
// Will the HAL be sending in early partial result metadata?
@@ -976,7 +978,7 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
- uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase) {
+ uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase, int timestampBase) {
ATRACE_CALL();
if (consumer == nullptr) {
@@ -990,7 +992,7 @@
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
- streamUseCase);
+ streamUseCase, timestampBase);
}
static bool isRawFormat(int format) {
@@ -1010,16 +1012,18 @@
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
- uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase) {
+ uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase, int timestampBase) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
Mutex::Autolock l(mLock);
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
- " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d",
+ " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
+ " dynamicRangeProfile %d, streamUseCase %d, timestampBase %d",
mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
- consumerUsage, isShared, physicalCameraId.string(), isMultiResolution);
+ consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
+ dynamicRangeProfile, streamUseCase, timestampBase);
status_t res;
bool wasActive = false;
@@ -1088,7 +1092,8 @@
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
- isMultiResolution, dynamicRangeProfile, streamUseCase);
+ isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
+ timestampBase);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
bool maxResolution =
sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1102,22 +1107,26 @@
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
- isMultiResolution, dynamicRangeProfile, streamUseCase);
+ isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
+ timestampBase);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
- mUseHalBufManager, dynamicRangeProfile, streamUseCase);
+ mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
+ timestampBase);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
- isMultiResolution, dynamicRangeProfile, streamUseCase);
+ isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
+ timestampBase);
} else {
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
- isMultiResolution, dynamicRangeProfile, streamUseCase);
+ isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
+ timestampBase);
}
size_t consumerCount = consumers.size();
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index ba63c20..e60e617 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -141,7 +141,8 @@
uint64_t consumerUsage = 0,
int dynamicRangeProfile =
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) override;
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT) override;
status_t createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -154,7 +155,8 @@
uint64_t consumerUsage = 0,
int dynamicRangeProfile =
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) override;
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT) override;
status_t createInputStream(
uint32_t width, uint32_t height, int format, bool isMultiResolution,
@@ -528,6 +530,7 @@
/**** End scope for mLock ****/
+ bool mDeviceTimeBaseIsRealtime;
// The offset converting from clock domain of other subsystem
// (video/hardware composer) to that of camera. Assumption is that this
// offset won't change during the life cycle of the camera device. In other
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index dc3a6f3..f737ed6 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -34,11 +34,12 @@
android_dataspace dataSpace, camera_stream_rotation_t rotation,
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- int setId, bool isMultiResolution, int dynamicRangeProfile, int streamUseCase) :
+ int setId, bool isMultiResolution, int dynamicRangeProfile, int streamUseCase,
+ bool deviceTimeBaseIsRealtime, int timestampBase) :
Camera3Stream(id, type,
width, height, maxSize, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile, streamUseCase),
+ dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase),
mTotalBufferCount(0),
mHandoutTotalBufferCount(0),
mHandoutOutputBufferCount(0),
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index f2b1536..300f207 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -39,7 +39,9 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ bool deviceTimeBaseIsRealtime = false,
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
public:
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 95d19ec..a82d19b 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -47,11 +47,12 @@
nsecs_t timestampOffset, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int dynamicRangeProfile,
- int streamUseCase) :
+ int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile, streamUseCase),
+ dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
+ timestampBase),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
@@ -77,14 +78,14 @@
nsecs_t timestampOffset, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int dynamicRangeProfile,
- int streamUseCase) :
+ int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
- setId, isMultiResolution, dynamicRangeProfile, streamUseCase),
+ setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
+ deviceTimeBaseIsRealtime, timestampBase),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
- mUseMonoTimestamp(false),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
mConsumerUsage(0),
@@ -113,11 +114,12 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int dynamicRangeProfile,
- int streamUseCase) :
+ int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile, streamUseCase),
+ dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
+ timestampBase),
mConsumer(nullptr),
mTransform(0),
mTraceFirstBuffer(true),
@@ -152,18 +154,19 @@
android_dataspace dataSpace,
camera_stream_rotation_t rotation,
const String8& physicalCameraId,
- const std::unordered_set<int32_t> &sensorPixelModesUsed,
+ const std::unordered_set<int32_t> &sensorPixelModesUsed,
uint64_t consumerUsage, nsecs_t timestampOffset,
int setId, bool isMultiResolution,
- int dynamicRangeProfile, int streamUseCase) :
+ int dynamicRangeProfile, int streamUseCase,
+ bool deviceTimeBaseIsRealtime, int timestampBase) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile, streamUseCase),
+ dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
+ timestampBase),
mTransform(0),
mTraceFirstBuffer(true),
- mUseMonoTimestamp(false),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
mConsumerUsage(consumerUsage),
@@ -365,13 +368,10 @@
dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
}
- /* Certain consumers (such as AudioSource or HardwareComposer) use
- * MONOTONIC time, causing time misalignment if camera timestamp is
- * in BOOTTIME. Do the conversion if necessary. */
nsecs_t t = mPreviewFrameScheduler != nullptr ? readoutTimestamp : timestamp;
- nsecs_t adjustedTs = mUseMonoTimestamp ? t - mTimestampOffset : t;
+ t -= mTimestampOffset;
if (mPreviewFrameScheduler != nullptr) {
- res = mPreviewFrameScheduler->queuePreviewBuffer(adjustedTs, transform,
+ res = mPreviewFrameScheduler->queuePreviewBuffer(t, transform,
anwBuffer, anwReleaseFence);
if (res != OK) {
ALOGE("%s: Stream %d: Error queuing buffer to preview buffer scheduler: %s (%d)",
@@ -380,7 +380,7 @@
}
} else {
setTransform(transform);
- res = native_window_set_buffers_timestamp(mConsumer.get(), adjustedTs);
+ res = native_window_set_buffers_timestamp(mConsumer.get(), t);
if (res != OK) {
ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
__FUNCTION__, mId, strerror(-res), res);
@@ -572,10 +572,18 @@
}
mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;
- if (allowPreviewScheduler && isConsumedByHWComposer()) {
+
+ int timestampBase = getTimestampBase();
+ bool isDefaultTimeBase = (timestampBase ==
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
+ if (allowPreviewScheduler) {
// We cannot distinguish between a SurfaceView and an ImageReader of
// preview buffer format. The PreviewFrameScheduler needs to handle both.
- if (!property_get_bool("camera.disable_preview_scheduler", false)) {
+ bool forceChoreographer = (timestampBase ==
+ OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED);
+ bool defaultToChoreographer = (isDefaultTimeBase && isConsumedByHWComposer() &&
+ !property_get_bool("camera.disable_preview_scheduler", false));
+ if (forceChoreographer || defaultToChoreographer) {
mPreviewFrameScheduler = std::make_unique<PreviewFrameScheduler>(*this, mConsumer);
mTotalBufferCount += PreviewFrameScheduler::kQueueDepthWatermark;
}
@@ -584,7 +592,27 @@
mHandoutTotalBufferCount = 0;
mFrameCount = 0;
mLastTimestamp = 0;
- mUseMonoTimestamp = (isConsumedByHWComposer() || isVideoStream());
+
+ if (isDeviceTimeBaseRealtime()) {
+ if (isDefaultTimeBase && !isConsumedByHWComposer() && !isVideoStream()) {
+ // Default time base, but not hardware composer or video encoder
+ mTimestampOffset = 0;
+ } else if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME ||
+ timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR) {
+ mTimestampOffset = 0;
+ }
+ // If timestampBase is CHOREOGRAPHER SYNCED or MONOTONIC, leave
+ // timestamp offset as bootTime - monotonicTime.
+ } else {
+ if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME) {
+ // Reverse offset for monotonicTime -> bootTime
+ mTimestampOffset = -mTimestampOffset;
+ } else {
+ // If timestampBase is DEFAULT, MONOTONIC, SENSOR, or
+ // CHOREOGRAPHER_SYNCED, timestamp offset is 0.
+ mTimestampOffset = 0;
+ }
+ }
res = native_window_set_buffer_count(mConsumer.get(),
mTotalBufferCount);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 7d2d32e..c7910b7 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -91,7 +91,9 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ bool deviceTimeBaseIsRealtime = false,
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
/**
* Set up a stream for formats that have a variable buffer size for the same
* dimensions, such as compressed JPEG.
@@ -105,7 +107,9 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ bool deviceTimeBaseIsRealtime = false,
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
/**
* Set up a stream with deferred consumer for formats that have 2 dimensions, such as
* RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -118,7 +122,9 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ bool deviceTimeBaseIsRealtime = false,
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
virtual ~Camera3OutputStream();
@@ -247,7 +253,9 @@
uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ bool deviceTimeBaseIsRealtime = false,
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
/**
* Note that we release the lock briefly in this function
@@ -289,9 +297,6 @@
// Name of Surface consumer
String8 mConsumerName;
- // Whether consumer assumes MONOTONIC timestamp
- bool mUseMonoTimestamp;
-
/**
* GraphicBuffer manager this stream is registered to. Used to replace the buffer
* allocation/deallocation role of BufferQueue.
@@ -310,7 +315,11 @@
bool mUseBufferManager;
/**
- * Timestamp offset for video and hardware composer consumed streams
+ * Offset used to override camera HAL produced timestamps
+ *
+ * The offset is first initialized to bootTime - monotonicTime in
+ * constructor, and may later be updated based on the client's timestampBase
+ * setting.
*/
nsecs_t mTimestampOffset;
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index fbd1e56..047897e 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -34,11 +34,11 @@
nsecs_t timestampOffset, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool useHalBufManager, int dynamicProfile,
- int streamUseCase) :
+ int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
consumerUsage, timestampOffset, setId, /*isMultiResolution*/false,
- dynamicProfile, streamUseCase),
+ dynamicProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase),
mUseHalBufManager(useHalBufManager) {
size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
if (surfaces.size() > consumerCount) {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 223d52b..dc22c42 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -42,7 +42,9 @@
int setId = CAMERA3_STREAM_SET_ID_INVALID,
bool useHalBufManager = false,
int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ bool deviceTimeBaseIsRealtime = false,
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
virtual ~Camera3SharedOutputStream();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 6b093b3..3f0299b 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -55,7 +55,7 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int dynamicRangeProfile,
- int streamUseCase) :
+ int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
camera_stream(),
mId(id),
mSetId(setId),
@@ -80,7 +80,9 @@
mOriginalDataSpace(dataSpace),
mPhysicalCameraId(physicalCameraId),
mLastTimestamp(0),
- mIsMultiResolution(isMultiResolution) {
+ mIsMultiResolution(isMultiResolution),
+ mDeviceTimeBaseIsRealtime(deviceTimeBaseIsRealtime),
+ mTimestampBase(timestampBase) {
camera_stream::stream_type = type;
camera_stream::width = width;
@@ -181,6 +183,14 @@
return camera_stream::use_case;
}
+int Camera3Stream::getTimestampBase() const {
+ return mTimestampBase;
+}
+
+bool Camera3Stream::isDeviceTimeBaseRealtime() const {
+ return mDeviceTimeBaseIsRealtime;
+}
+
void Camera3Stream::setOfflineProcessingSupport(bool support) {
mSupportOfflineProcessing = support;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index ada570b..8232ce0 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -179,6 +179,8 @@
int getMaxHalBuffers() const;
const String8& physicalCameraId() const;
int getStreamUseCase() const;
+ int getTimestampBase() const;
+ bool isDeviceTimeBaseRealtime() const;
void setOfflineProcessingSupport(bool) override;
bool getOfflineProcessingSupport() const override;
@@ -507,7 +509,7 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int dynamicRangeProfile,
- int streamUseCase);
+ int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
@@ -630,6 +632,9 @@
bool mIsMultiResolution = false;
bool mSupportOfflineProcessing = false;
+
+ bool mDeviceTimeBaseIsRealtime;
+ int mTimestampBase;
}; // class Camera3Stream
}; // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 7b1597b..8962fac 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -19,6 +19,7 @@
#include <utils/RefBase.h>
+#include <camera/camera2/OutputConfiguration.h>
#include <camera/CameraMetadata.h>
#include "Camera3StreamBufferListener.h"
#include "Camera3StreamBufferFreedListener.h"
@@ -111,18 +112,20 @@
std::unordered_set<int32_t> sensorPixelModesUsed;
int dynamicRangeProfile;
int streamUseCase;
+ int timestampBase;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
consumerUsage(0),
dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
- streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
+ streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+ timestampBase(OutputConfiguration::TIMESTAMP_BASE_DEFAULT) {}
OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
- int _dynamicRangeProfile, int _streamUseCase) :
+ int _dynamicRangeProfile, int _streamUseCase, int _timestampBase) :
width(_width), height(_height), format(_format),
dataSpace(_dataSpace), consumerUsage(_consumerUsage),
sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
- streamUseCase(_streamUseCase) {}
+ streamUseCase(_streamUseCase), timestampBase(_timestampBase) {}
};
/**
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 84dbf6e..43591c2 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -336,7 +336,7 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int dynamicRangeProfile,
- int streamUseCase) {
+ int streamUseCase, int timestampBase) {
// bufferProducer must be non-null
if (gbp == nullptr) {
String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -456,6 +456,13 @@
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
+ if (timestampBase < OutputConfiguration::TIMESTAMP_BASE_DEFAULT ||
+ timestampBase > OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED) {
+ String8 msg = String8::format("Camera %s: invalid timestamp base %d",
+ logicalCameraId.string(), timestampBase);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
if (!isStreamInfoValid) {
streamInfo.width = width;
@@ -466,6 +473,7 @@
streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
streamInfo.dynamicRangeProfile = dynamicRangeProfile;
streamInfo.streamUseCase = streamUseCase;
+ streamInfo.timestampBase = timestampBase;
return binder::Status::ok();
}
if (width != streamInfo.width) {
@@ -702,6 +710,7 @@
}
int streamUseCase = it.getStreamUseCase();
+ int timestampBase = it.getTimestampBase();
if (deferredConsumer) {
streamInfo.width = it.getWidth();
streamInfo.height = it.getHeight();
@@ -736,7 +745,7 @@
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase);
+ streamUseCase, timestampBase);
if (!res.isOk())
return res;
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 39ba079..e326ce7 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -99,7 +99,7 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int dynamicRangeProfile,
- int streamUseCase);
+ int streamUseCase, int timestampBase);
void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
camera3::camera_stream_rotation_t rotation, String8 physicalId, int32_t groupId,
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index a936ac8..ade54d0 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -175,6 +175,21 @@
"log_session_id",
};
+static constexpr const char * const AudioRecordStatusFields[] {
+ "mediametrics_audiorecordstatus_reported",
+ "status",
+ "debug_message",
+ "status_subcode",
+ "uid",
+ "event",
+ "input_flags",
+ "source",
+ "encoding",
+ "channel_mask",
+ "buffer_frame_count",
+ "sample_rate",
+};
+
static constexpr const char * const AudioTrackStatusFields[] {
"mediametrics_audiotrackstatus_reported",
"status",
@@ -535,10 +550,92 @@
// the types of errors and the frequency of errors.
mHeatMap.add(prefixKey, suffixKey, eventStr, statusString, uid, message, subCode);
- // Certain keys/event pairs are sent to statsd.
+ // Certain keys/event pairs are sent to statsd. If we get a match (true) we return early.
+ if (reportAudioRecordStatus(item, key, eventStr, statusString, uid, message, subCode)) return;
+ if (reportAudioTrackStatus(item, key, eventStr, statusString, uid, message, subCode)) return;
+}
+
+bool AudioAnalytics::reportAudioRecordStatus(
+ const std::shared_ptr<const mediametrics::Item>& item,
+ const std::string& key, const std::string& eventStr,
+ const std::string& statusString, uid_t uid, const std::string& message,
+ int32_t subCode) const
+{
// Note that the prefixes often end with a '.' so we use startsWith.
- if (startsWith(key, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK)
- && eventStr == AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE) {
+ if (!startsWith(key, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD)) return false;
+ if (eventStr == AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE) {
+ const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
+
+ // currently we only send create status events.
+ const int32_t event = android::util::
+ MEDIAMETRICS_AUDIO_RECORD_STATUS_REPORTED__EVENT__AUDIO_RECORD_EVENT_CREATE;
+
+ // The following fields should all be present in a create event.
+ std::string flagsStr;
+ ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_ORIGINALFLAGS, &flagsStr),
+ "%s: %s missing %s field", __func__,
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_ORIGINALFLAGS);
+ const auto flags = types::lookup<types::INPUT_FLAG, int32_t>(flagsStr);
+
+ // AMEDIAMETRICS_PROP_SESSIONID omitted from atom
+
+ std::string sourceStr;
+ ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_SOURCE, &sourceStr),
+ "%s: %s missing %s field",
+ __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_SOURCE);
+ const int32_t source = types::lookup<types::SOURCE_TYPE, int32_t>(sourceStr);
+
+ // AMEDIAMETRICS_PROP_SELECTEDDEVICEID omitted from atom
+
+ std::string encodingStr;
+ ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_ENCODING, &encodingStr),
+ "%s: %s missing %s field",
+ __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_ENCODING);
+ const auto encoding = types::lookup<types::ENCODING, int32_t>(encodingStr);
+
+ int32_t channelMask = 0;
+ ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_CHANNELMASK, &channelMask),
+ "%s: %s missing %s field",
+ __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_CHANNELMASK);
+ int32_t frameCount = 0;
+ ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_FRAMECOUNT, &frameCount),
+ "%s: %s missing %s field",
+ __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_FRAMECOUNT);
+ int32_t sampleRate = 0;
+ ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_SAMPLERATE, &sampleRate),
+ "%s: %s missing %s field",
+ __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_SAMPLERATE);
+
+ const auto [ result, str ] = sendToStatsd(AudioRecordStatusFields,
+ CONDITION(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED)
+ , atom_status
+ , message.c_str()
+ , subCode
+ , uid
+ , event
+ , flags
+ , source
+ , encoding
+ , (int64_t)channelMask
+ , frameCount
+ , sampleRate
+ );
+ ALOGV("%s: statsd %s", __func__, str.c_str());
+ mStatsdLog->log(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED, str);
+ return true;
+ }
+ return false;
+}
+
+bool AudioAnalytics::reportAudioTrackStatus(
+ const std::shared_ptr<const mediametrics::Item>& item,
+ const std::string& key, const std::string& eventStr,
+ const std::string& statusString, uid_t uid, const std::string& message,
+ int32_t subCode) const
+{
+ // Note that the prefixes often end with a '.' so we use startsWith.
+ if (!startsWith(key, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK)) return false;
+ if (eventStr == AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE) {
const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
// currently we only send create status events.
@@ -589,11 +686,13 @@
double speed = 0.f; // default is 1.f
ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_PLAYBACK_SPEED, &speed),
"%s: %s missing %s field",
- __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_PLAYBACK_SPEED);
+ __func__,
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_PLAYBACK_SPEED);
double pitch = 0.f; // default is 1.f
ALOGD_IF(!item->get(AMEDIAMETRICS_PROP_PLAYBACK_PITCH, &pitch),
"%s: %s missing %s field",
- __func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_PLAYBACK_PITCH);
+ __func__,
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_PLAYBACK_PITCH);
const auto [ result, str ] = sendToStatsd(AudioTrackStatusFields,
CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED)
, atom_status
@@ -613,7 +712,9 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mStatsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED, str);
+ return true;
}
+ return false;
}
// HELPER METHODS
diff --git a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
index 9b54cf3..a44fcc1 100644
--- a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
+++ b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
@@ -122,6 +122,19 @@
*/
void processStatus(const std::shared_ptr<const mediametrics::Item>& item);
+ // Specific reporting methods
+ bool reportAudioRecordStatus(
+ const std::shared_ptr<const mediametrics::Item>& item,
+ const std::string& key, const std::string& eventStr,
+ const std::string& statusString, uid_t uid, const std::string& message,
+ int32_t subCode) const;
+
+ bool reportAudioTrackStatus(
+ const std::shared_ptr<const mediametrics::Item>& item,
+ const std::string& key, const std::string& eventStr,
+ const std::string& statusString, uid_t uid, const std::string& message,
+ int32_t subCode) const;
+
// HELPER METHODS
/**
* Return the audio thread associated with an audio track name.
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index 390cd5c..b55b601 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -60,7 +60,8 @@
result << " Sample Rate: " << getSampleRate() << "\n";
result << " Channel Count: " << getSamplesPerFrame() << "\n";
result << " Channel Mask: 0x" << std::hex << getChannelMask() << std::dec << "\n";
- result << " Format: " << getFormat() << "\n";
+ result << " Format: " << getFormat()
+ << " (" << audio_format_to_string(getFormat()) << ")\n";
result << " Frames Per Burst: " << mFramesPerBurst << "\n";
result << " Usage: " << getUsage() << "\n";
result << " ContentType: " << getContentType() << "\n";
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index a266d5b..3f18b95 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -183,10 +183,13 @@
? AAUDIO_SESSION_ID_NONE
: (aaudio_session_id_t) sessionId;
setSessionId(actualSessionId);
- ALOGD("%s() deviceId = %d, sessionId = %d", __func__, getDeviceId(), getSessionId());
+
+ ALOGD("%s(format = 0x%X) deviceId = %d, sessionId = %d",
+ __func__, audioFormat, getDeviceId(), getSessionId());
// Create MMAP/NOIRQ buffer.
- if (createMmapBuffer(&mAudioDataFileDescriptor) != AAUDIO_OK) {
+ result = createMmapBuffer(&mAudioDataFileDescriptor);
+ if (result != AAUDIO_OK) {
goto error;
}
@@ -206,12 +209,13 @@
mTimestampGracePeriodMs = ((int64_t) kTimestampGraceBurstCount * mFramesPerBurst
* AAUDIO_MILLIS_PER_SECOND) / getSampleRate();
- ALOGD("%s() actual rate = %d, channels = %d channelMask = %#x, deviceId = %d, capacity = %d\n",
+ ALOGD("%s() got rate = %d, channels = %d channelMask = %#x, deviceId = %d, capacity = %d\n",
__func__, getSampleRate(), getSamplesPerFrame(), getChannelMask(),
deviceId, getBufferCapacity());
- ALOGD("%s() format = 0x%08x, frame size = %d, burst size = %d",
- __func__, getFormat(), calculateBytesPerFrame(), mFramesPerBurst);
+ ALOGD("%s() got format = 0x%X = %s, frame size = %d, burst size = %d",
+ __func__, getFormat(), audio_format_to_string(getFormat()),
+ calculateBytesPerFrame(), mFramesPerBurst);
return result;
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index fb5bfa3..e8c7767 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -337,6 +337,9 @@
}
}
+ if (mFilterCallback != nullptr) {
+ mFilterCallback->detachCallbacks();
+ }
auto res = mFilter->close();
mFilter = nullptr;
mStarted = false;
@@ -470,6 +473,12 @@
}
}
+void TunerFilter::FilterCallback::detachCallbacks() {
+ Mutex::Autolock _l(mCallbackLock);
+ mOriginalCallback = nullptr;
+ mTunerFilterCallback = nullptr;
+}
+
} // namespace tuner
} // namespace tv
} // namespace media
diff --git a/services/tuner/TunerFilter.h b/services/tuner/TunerFilter.h
index 529c191..93d8898 100644
--- a/services/tuner/TunerFilter.h
+++ b/services/tuner/TunerFilter.h
@@ -67,6 +67,7 @@
void sendSharedFilterStatus(int32_t status);
void attachSharedFilterCallback(const shared_ptr<ITunerFilterCallback>& in_cb);
void detachSharedFilterCallback();
+ void detachCallbacks();
private:
shared_ptr<ITunerFilterCallback> mTunerFilterCallback;
diff --git a/services/tuner/hidl/TunerHidlFilter.cpp b/services/tuner/hidl/TunerHidlFilter.cpp
index a5bbf39..6d8ae03 100644
--- a/services/tuner/hidl/TunerHidlFilter.cpp
+++ b/services/tuner/hidl/TunerHidlFilter.cpp
@@ -510,6 +510,9 @@
}
}
+ if (mFilterCallback != nullptr) {
+ mFilterCallback->detachCallbacks();
+ }
HidlResult res = mFilter->close();
mFilter = nullptr;
mFilter_1_1 = nullptr;
@@ -970,6 +973,12 @@
}
}
+void TunerHidlFilter::FilterCallback::detachCallbacks() {
+ Mutex::Autolock _l(mCallbackLock);
+ mOriginalCallback = nullptr;
+ mTunerFilterCallback = nullptr;
+}
+
/////////////// FilterCallback Helper Methods ///////////////////////
void TunerHidlFilter::FilterCallback::getAidlFilterEvent(
const vector<HidlDemuxFilterEvent::Event>& events,
diff --git a/services/tuner/hidl/TunerHidlFilter.h b/services/tuner/hidl/TunerHidlFilter.h
index b8fad22..63c7a1b 100644
--- a/services/tuner/hidl/TunerHidlFilter.h
+++ b/services/tuner/hidl/TunerHidlFilter.h
@@ -129,6 +129,7 @@
void sendSharedFilterStatus(int32_t status);
void attachSharedFilterCallback(const shared_ptr<ITunerFilterCallback>& in_cb);
void detachSharedFilterCallback();
+ void detachCallbacks();
private:
void getAidlFilterEvent(const vector<HidlDemuxFilterEvent::Event>& events,