Merge "Clearkey PRODUCT_PACKAGES makefiles" into tm-dev
diff --git a/apex/Android.bp b/apex/Android.bp
index aa9fd89..570ca01 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -56,6 +56,7 @@
prebuilts: [
"code_coverage.policy",
"com.android.media-mediatranscoding.rc",
+ "com.android.media-mediatranscoding.32rc",
"crash_dump.policy",
"mediaextractor.policy",
"media-linker-config",
@@ -124,6 +125,26 @@
// modified by the Soong or platform compat team.
hidden_api: {
max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+
+ // The following packages contain classes from other modules on the
+ // bootclasspath. That means that the hidden API flags for this module
+ // have to explicitly list every single class this module provides in
+ // that package to differentiate them from the classes provided by other
+ // modules. That can include private classes that are not part of the
+ // API.
+ split_packages: [
+ "android.media",
+ ],
+
+ // The following packages and all their subpackages currently only
+ // contain classes from this bootclasspath_fragment. Listing a package
+ // here won't prevent other bootclasspath modules from adding classes in
+ // any of those packages but it will prevent them from adding those
+ // classes into an API surface, e.g. public, system, etc.. Doing so will
+ // result in a build failure due to inconsistent flags.
+ package_prefixes: [
+ "android.media.internal",
+ ],
},
}
@@ -157,6 +178,7 @@
],
prebuilts: [
"com.android.media.swcodec-mediaswcodec.rc",
+ "com.android.media.swcodec-mediaswcodec.32rc",
"com.android.media.swcodec-ld.config.txt",
"mediaswcodec.policy",
"code_coverage.policy",
@@ -181,17 +203,34 @@
compressible: true,
}
+// Install as mediatranscoding.* and mediaswcodec.* instead of init.*
+// so we are ready for the day we have more than one *rc file within the apex.
+
prebuilt_etc {
name: "com.android.media-mediatranscoding.rc",
src: "mediatranscoding.rc",
- filename: "init.rc",
+ filename: "mediatranscoding.rc",
+ installable: false,
+}
+
+prebuilt_etc {
+ name: "com.android.media-mediatranscoding.32rc",
+ src: "mediatranscoding.32rc",
+ filename: "mediatranscoding.32rc",
installable: false,
}
prebuilt_etc {
name: "com.android.media.swcodec-mediaswcodec.rc",
src: "mediaswcodec.rc",
- filename: "init.rc",
+ filename: "mediaswcodec.rc",
+ installable: false,
+}
+
+prebuilt_etc {
+ name: "com.android.media.swcodec-mediaswcodec.32rc",
+ src: "mediaswcodec.32rc",
+ filename: "mediaswcodec.32rc",
installable: false,
}
diff --git a/apex/manifest.json b/apex/manifest.json
index 2cf7296..752c2b5 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,6 +1,6 @@
{
"name": "com.android.media",
- "version": 339999900,
+ "version": 330100000,
"requireNativeLibs": [
"libandroid.so",
"libbinder_ndk.so",
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index 82463a2..3732a76 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,6 +1,6 @@
{
"name": "com.android.media.swcodec",
- "version": 339999900,
+ "version": 330100000,
"requireNativeLibs": [
":sphal"
]
diff --git a/apex/mediaswcodec.32rc b/apex/mediaswcodec.32rc
index 79aef36..f40d172 100644
--- a/apex/mediaswcodec.32rc
+++ b/apex/mediaswcodec.32rc
@@ -1,3 +1,5 @@
+## for SDK releases >= 32
+##
service media.swcodec /apex/com.android.media.swcodec/bin/mediaswcodec
class main
user mediacodec
diff --git a/apex/mediaswcodec.rc b/apex/mediaswcodec.rc
index 0c9b8c8..46799c7 100644
--- a/apex/mediaswcodec.rc
+++ b/apex/mediaswcodec.rc
@@ -1,3 +1,6 @@
+## for SDK releases 29..31
+## where writepid has not yet been replaced by task_profiles
+##
service media.swcodec /apex/com.android.media.swcodec/bin/mediaswcodec
class main
user mediacodec
diff --git a/apex/mediatranscoding.32rc b/apex/mediatranscoding.32rc
index 5169462..edba9b9 100644
--- a/apex/mediatranscoding.32rc
+++ b/apex/mediatranscoding.32rc
@@ -1,3 +1,6 @@
+## for SDK releases >= 32
+##
+#
# media.transcoding service is defined on com.android.media apex which goes back
# to API29, but we only want it started on API31+ devices. So we declare it as
# "disabled" and start it explicitly on boot.
diff --git a/apex/mediatranscoding.rc b/apex/mediatranscoding.rc
index ae9f8ba..6e453be 100644
--- a/apex/mediatranscoding.rc
+++ b/apex/mediatranscoding.rc
@@ -1,3 +1,7 @@
+## for SDK releases 29..31
+## where writepid has not yet been replaced by task_profiles
+##
+#
# media.transcoding service is defined on com.android.media apex which goes back
# to API29, but we only want it started on API31+ devices. So we declare it as
# "disabled" and start it explicitly on boot.
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index b109904..8088d06 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -118,14 +118,14 @@
return err;
}
- int dynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
- if ((err = parcel->readInt32(&dynamicRangeProfile)) != OK) {
+ int64_t dynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ if ((err = parcel->readInt64(&dynamicRangeProfile)) != OK) {
ALOGE("%s: Failed to read dynamic range profile type from parcel", __FUNCTION__);
return err;
}
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
- if ((err = parcel->readInt32(&streamUseCase)) != OK) {
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ if ((err = parcel->readInt64(&streamUseCase)) != OK) {
ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
return err;
}
@@ -228,12 +228,12 @@
return err;
}
- if ((err = parcel->writeInt32(mDynamicRangeProfile)) != OK) {
+ if ((err = parcel->writeInt64(mDynamicRangeProfile)) != OK) {
ALOGE("%s: Failed to write dynamic range profile type", __FUNCTION__);
return err;
}
- if ((err = parcel->writeInt32(mStreamUseCase)) != OK) {
+ if ((err = parcel->writeInt64(mStreamUseCase)) != OK) {
ALOGE("%s: Failed to write stream use case!", __FUNCTION__);
return err;
}
@@ -375,6 +375,12 @@
return err;
}
+ String16 userTag;
+ if ((err = parcel->readString16(&userTag)) != OK) {
+ ALOGE("%s: Failed to read user tag!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
mCameraId = id;
mFacing = facing;
mNewCameraState = newCameraState;
@@ -389,6 +395,7 @@
mResultErrorCount = resultErrorCount;
mDeviceError = deviceError;
mStreamStats = std::move(streamStats);
+ mUserTag = userTag;
return OK;
}
@@ -471,6 +478,10 @@
return err;
}
+ if ((err = parcel->writeString16(mUserTag)) != OK) {
+ ALOGE("%s: Failed to write user tag!", __FUNCTION__);
+ return err;
+ }
return OK;
}
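
The parcel layout above only stays valid because the reader and the writer are widened to int64 (and gain the trailing user tag) in the same change; a reader that kept readInt32() would consume the wrong number of bytes and misread every later field. A minimal, self-contained round-trip sketch with android::Parcel (not part of the patch; field names are illustrative):

    #include <binder/Parcel.h>
    #include <stdint.h>

    // Round-trip sketch: the reader must consume exactly what the writer wrote,
    // so widening a field to int64_t has to land on both sides at once.
    static bool roundTrip(int64_t streamUseCase, int32_t nextField) {
        android::Parcel parcel;
        parcel.writeInt64(streamUseCase);  // was writeInt32() before this change
        parcel.writeInt32(nextField);

        parcel.setDataPosition(0);
        int64_t useCaseOut = 0;
        int32_t nextOut = 0;
        parcel.readInt64(&useCaseOut);     // readInt32() here would desynchronize the stream
        parcel.readInt32(&nextOut);
        return useCaseOut == streamUseCase && nextOut == nextField;
    }
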
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index 24fa912..b37803a 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -760,7 +760,7 @@
Mutex::Autolock al(sLock);
if (sGlobalVendorTagDescriptorCache == NULL) {
ALOGE("%s: Vendor tag descriptor cache not initialized.", __FUNCTION__);
- return VENDOR_TAG_NAME_ERR;
+ return VENDOR_TAG_TYPE_ERR;
}
return sGlobalVendorTagDescriptorCache->getTagType(tag, id);
}
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index ebc09d7..7a8a4ba 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -146,6 +146,20 @@
mSurfaceIdxList.push_back(surfaceIdx);
}
+ int32_t hasUserTag;
+ if ((err = parcel->readInt32(&hasUserTag)) != OK) {
+ ALOGE("%s: Failed to read user tag availability flag", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ if (hasUserTag) {
+ String16 userTag;
+ if ((err = parcel->readString16(&userTag)) != OK) {
+ ALOGE("%s: Failed to read user tag!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ mUserTag = String8(userTag).c_str();
+ }
+
return OK;
}
@@ -213,6 +227,14 @@
return err;
}
}
+
+ if (mUserTag.empty()) {
+ parcel->writeInt32(0);
+ } else {
+ parcel->writeInt32(1);
+ parcel->writeString16(String16(mUserTag.c_str()));
+ }
+
return OK;
}
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 5b8da34..11d4960 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -81,7 +81,7 @@
return mDynamicRangeProfile;
}
-int OutputConfiguration::getStreamUseCase() const {
+int64_t OutputConfiguration::getStreamUseCase() const {
return mStreamUseCase;
}
@@ -192,8 +192,8 @@
return err;
}
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
- if ((err = parcel->readInt32(&streamUseCase)) != OK) {
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ if ((err = parcel->readInt64(&streamUseCase)) != OK) {
ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
return err;
}
@@ -232,8 +232,8 @@
mDynamicRangeProfile = dynamicProfile;
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
- " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %d, timestampBase = %d,"
- " mirrorMode = %d",
+ " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
+ ", timestampBase = %d, mirrorMode = %d",
__FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
String8(mPhysicalCameraId).string(), mIsMultiResolution, mStreamUseCase, timestampBase,
mMirrorMode);
@@ -317,7 +317,7 @@
err = parcel->writeInt64(mDynamicRangeProfile);
if (err != OK) return err;
- err = parcel->writeInt32(mStreamUseCase);
+ err = parcel->writeInt64(mStreamUseCase);
if (err != OK) return err;
err = parcel->writeInt32(mTimestampBase);
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 6452846..e1ec6cf 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -66,7 +66,7 @@
// Dynamic range profile
int64_t mDynamicRangeProfile;
// Stream use case
- int mStreamUseCase;
+ int64_t mStreamUseCase;
CameraStreamStats() :
mWidth(0), mHeight(0), mFormat(0), mMaxPreviewFps(0), mDataSpace(0), mUsage(0),
@@ -136,6 +136,7 @@
// Whether the device runs into an error state
bool mDeviceError;
std::vector<CameraStreamStats> mStreamStats;
+ String16 mUserTag;
// Constructors
CameraSessionStats();
diff --git a/camera/include/camera/camera2/CaptureRequest.h b/camera/include/camera/camera2/CaptureRequest.h
index 506abab..28dbc7c 100644
--- a/camera/include/camera/camera2/CaptureRequest.h
+++ b/camera/include/camera/camera2/CaptureRequest.h
@@ -63,6 +63,8 @@
void* mContext; // arbitrary user context from NDK apps, null for java apps
+ std::string mUserTag; // The string representation of object passed into setTag.
+
/**
* Keep impl up-to-date with CaptureRequest.java in frameworks/base
*/
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 6b0f333..b842885 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -63,7 +63,7 @@
bool isShared() const;
String16 getPhysicalCameraId() const;
bool isMultiResolution() const;
- int getStreamUseCase() const;
+ int64_t getStreamUseCase() const;
int getTimestampBase() const;
int getMirrorMode() const;
@@ -185,7 +185,7 @@
bool mIsMultiResolution;
std::vector<int32_t> mSensorPixelModesUsed;
int64_t mDynamicRangeProfile;
- int mStreamUseCase;
+ int64_t mStreamUseCase;
int mTimestampBase;
int mMirrorMode;
};
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 4c492f0..3f7ff8b 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -521,6 +521,14 @@
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.</p>
+ * <p>When setting the AE metering regions, the application must consider the additional
+ * crop resulting from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AE regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.</p>
* <p>Starting from API level 30, the coordinate system of activeArraySize or
* preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
* pre-zoom field of view. This means that the same aeRegions values at different
@@ -722,6 +730,14 @@
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.</p>
+ * <p>When setting the AF metering regions, the application must consider the additional
+ * crop resulting from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AF regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.</p>
* <p>Starting from API level 30, the coordinate system of activeArraySize or
* preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
* pre-zoom field of view. This means that the same afRegions values at different
@@ -917,6 +933,14 @@
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.</p>
+ * <p>When setting the AWB metering regions, the application must consider the additional
+ * crop resulting from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AWB regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.</p>
* <p>Starting from API level 30, the coordinate system of activeArraySize or
* preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
* pre-zoom field of view. This means that the same awbRegions values at different
@@ -4216,7 +4240,7 @@
/**
* <p>The stream use cases supported by this camera device.</p>
*
- * <p>Type: int32[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)</p>
+ * <p>Type: int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)</p>
*
* <p>This tag may appear in:
* <ul>
@@ -4260,7 +4284,7 @@
* reprocessable session, constrained high speed session, or RAW stream combinations, the
* application should leave stream use cases within the session as DEFAULT.</p>
*/
- ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES = // int32[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES = // int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
ACAMERA_SCALER_START + 25,
ACAMERA_SCALER_END,
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 4cc1292..85ab0c2 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -330,7 +330,8 @@
return ACAMERA_ERROR_UNKNOWN;
}
- mConfiguredOutputs[streamId] = std::make_pair(output->mWindow, outConfigW);
+ mConfiguredOutputs[streamId] =
+ std::move(std::make_pair(std::move(output->mWindow), std::move(outConfigW)));
return ACAMERA_OK;
}
@@ -623,7 +624,8 @@
outConfigInsert.windowHandles[0] = anw;
outConfigInsert.physicalCameraId = outConfig.mPhysicalCameraId;
native_handle_ptr_wrapper wrap(anw);
- outputSet.insert(std::make_pair(anw, outConfigInsertW));
+
+ outputSet.emplace(std::make_pair(std::move(anw), std::move(outConfigInsertW)));
}
std::set<std::pair<native_handle_ptr_wrapper, OutputConfigurationWrapper>> addSet = outputSet;
std::vector<int32_t> deleteList;
@@ -680,7 +682,7 @@
}
// add new streams
- for (auto outputPair : addSet) {
+ for (const auto &outputPair : addSet) {
int streamId;
Status status = Status::UNKNOWN_ERROR;
auto ret = mRemote->createStream(outputPair.second,
@@ -845,12 +847,32 @@
return;
}
- const auto& windowHandles = outputPairIt->second.second.mOutputConfiguration.windowHandles;
- for (const auto& outHandle : windowHandles) {
- for (auto streamAndWindowId : request->mCaptureRequest.streamAndWindowIds) {
- int32_t windowId = streamAndWindowId.windowId;
- if (utils::isWindowNativeHandleEqual(windowHandles[windowId],outHandle)) {
- const native_handle_t* anw = windowHandles[windowId].getNativeHandle();
+ // Get the surfaces corresponding to the error stream id, go through
+ // them and try to match the surfaces in the corresponding
+ // CaptureRequest.
+ const auto& errorWindowHandles =
+ outputPairIt->second.second.mOutputConfiguration.windowHandles;
+ for (const auto& errorWindowHandle : errorWindowHandles) {
+ for (const auto &requestStreamAndWindowId :
+ request->mCaptureRequest.streamAndWindowIds) {
+ // Go through the surfaces in the capture request and see which
+ // ones match the surfaces in the error stream.
+ int32_t requestWindowId = requestStreamAndWindowId.windowId;
+ auto requestSurfacePairIt =
+ mConfiguredOutputs.find(requestStreamAndWindowId.streamId);
+ if (requestSurfacePairIt == mConfiguredOutputs.end()) {
+ ALOGE("%s: Error: request stream id %d does not exist", __FUNCTION__,
+ requestStreamAndWindowId.streamId);
+ setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
+ return;
+ }
+
+ const auto &requestWindowHandles =
+ requestSurfacePairIt->second.second.mOutputConfiguration.windowHandles;
+ if (utils::isWindowNativeHandleEqual(
+ requestWindowHandles[requestWindowId], errorWindowHandle)) {
+ const native_handle_t* anw =
+ requestWindowHandles[requestWindowId].getNativeHandle();
ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
getId(), anw, frameNumber);
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index 6f5820e..62779a4 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -109,8 +109,30 @@
mOutputConfiguration.windowGroupId = -1;
};
- OutputConfigurationWrapper(OutputConfiguration &outputConfiguration)
- : mOutputConfiguration((outputConfiguration)) { }
+ OutputConfigurationWrapper(const OutputConfigurationWrapper &other) {
+ *this = other;
+ }
+
+ // Needed to make sure that the OutputConfiguration inside
+ // OutputConfigurationWrapper, when copied, doesn't invoke hidl_handle's
+ // assignment operator / copy constructor, which would clone the native
+ // handles; that is not what we want for app callbacks that take the
+ // native handle as a parameter.
+ OutputConfigurationWrapper &operator=(const OutputConfigurationWrapper &other) {
+ const OutputConfiguration &outputConfiguration = other.mOutputConfiguration;
+ mOutputConfiguration.rotation = outputConfiguration.rotation;
+ mOutputConfiguration.isDeferred = outputConfiguration.isDeferred;
+ mOutputConfiguration.width = outputConfiguration.width;
+ mOutputConfiguration.height = outputConfiguration.height;
+ mOutputConfiguration.windowGroupId = outputConfiguration.windowGroupId;
+ mOutputConfiguration.windowHandles.resize(outputConfiguration.windowHandles.size());
+ mOutputConfiguration.physicalCameraId = outputConfiguration.physicalCameraId;
+ size_t i = 0;
+ for (const auto &handle : outputConfiguration.windowHandles) {
+ mOutputConfiguration.windowHandles[i++] = handle.getNativeHandle();
+ }
+ return *this;
+ }
bool operator ==(const OutputConfiguration &other) const {
const OutputConfiguration &self = mOutputConfiguration;
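
The hand-written copy operations above re-point each windowHandles entry via getNativeHandle() so that copying the wrapper never goes through the handle type's cloning copy path, as the comment explains. A toy sketch of the same pattern with a stand-in Handle type (purely illustrative; these are not the HIDL types):

    #include <cstddef>
    #include <vector>

    // Stand-in for a handle type whose copy operations "clone" (counted here),
    // while assignment from the raw pointer only re-points the handle.
    struct Handle {
        const int* raw = nullptr;
        static inline int clones = 0;                                  // how many clones happened
        Handle() = default;
        Handle(const Handle& other) : raw(other.raw) { ++clones; }     // simulated clone
        Handle& operator=(const Handle& other) { raw = other.raw; ++clones; return *this; }
        Handle& operator=(const int* p) { raw = p; return *this; }     // re-point, no clone
        const int* getNativeHandle() const { return raw; }
    };

    // Wrapper copies rebuild the handle list from raw pointers, so Handle::clones
    // stays at zero no matter how often the wrapper itself is copied.
    struct Wrapper {
        std::vector<Handle> handles;
        Wrapper() = default;
        Wrapper(const Wrapper& other) { *this = other; }
        Wrapper& operator=(const Wrapper& other) {
            handles.resize(other.handles.size());
            size_t i = 0;
            for (const auto& h : other.handles) {
                handles[i++] = h.getNativeHandle();   // pointer copy, not a clone
            }
            return *this;
        }
    };
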
diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp
index 74e3223..f7989bd 100644
--- a/drm/drmserver/DrmManager.cpp
+++ b/drm/drmserver/DrmManager.cpp
@@ -208,7 +208,11 @@
}
status_t DrmManager::loadPlugIns() {
+#if __LP64__
+ String8 pluginDirPath("/system/lib64/drm");
+#else
String8 pluginDirPath("/system/lib/drm");
+#endif
loadPlugIns(pluginDirPath);
return DRM_NO_ERROR;
}
diff --git a/drm/libmediadrm/CryptoHalAidl.cpp b/drm/libmediadrm/CryptoHalAidl.cpp
index 3dc62e9..bda664a 100644
--- a/drm/libmediadrm/CryptoHalAidl.cpp
+++ b/drm/libmediadrm/CryptoHalAidl.cpp
@@ -353,7 +353,9 @@
err = statusAidlToStatusT(statusAidl);
std::string msgStr(statusAidl.getMessage());
- *errorDetailMsg = toString8(msgStr);
+ if (errorDetailMsg != nullptr) {
+ *errorDetailMsg = toString8(msgStr);
+ }
if (err != OK) {
ALOGE("Failed on decrypt, error description:%s", statusAidl.getDescription().c_str());
return err;
@@ -415,4 +417,4 @@
return DrmUtils::GetLogMessagesAidl<ICryptoPluginAidl>(mPlugin, logs);
}
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/drm/libmediadrm/CryptoHalHidl.cpp b/drm/libmediadrm/CryptoHalHidl.cpp
index cbb6ddf..a290704 100644
--- a/drm/libmediadrm/CryptoHalHidl.cpp
+++ b/drm/libmediadrm/CryptoHalHidl.cpp
@@ -342,7 +342,9 @@
[&](Status_V1_2 status, uint32_t hBytesWritten, hidl_string hDetailedError) {
if (status == Status_V1_2::OK) {
bytesWritten = hBytesWritten;
- *errorDetailMsg = toString8(hDetailedError);
+ if (errorDetailMsg != nullptr) {
+ *errorDetailMsg = toString8(hDetailedError);
+ }
}
err = toStatusT(status);
});
@@ -353,7 +355,9 @@
[&](Status status, uint32_t hBytesWritten, hidl_string hDetailedError) {
if (status == Status::OK) {
bytesWritten = hBytesWritten;
- *errorDetailMsg = toString8(hDetailedError);
+ if (errorDetailMsg != nullptr) {
+ *errorDetailMsg = toString8(hDetailedError);
+ }
}
err = toStatusT(status);
});
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index aa40793..c394d5a 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -286,4 +286,11 @@
return mDrmHalHidl->getLogMessages(logs);
}
+status_t DrmHal::getSupportedSchemes(std::vector<uint8_t> &schemes) const {
+ status_t statusResult;
+ statusResult = mDrmHalAidl->getSupportedSchemes(schemes);
+ if (statusResult == OK) return statusResult;
+ return mDrmHalHidl->getSupportedSchemes(schemes);
+}
+
} // namespace android
diff --git a/drm/libmediadrm/DrmHalAidl.cpp b/drm/libmediadrm/DrmHalAidl.cpp
index 284abd5..bdd83e9 100644
--- a/drm/libmediadrm/DrmHalAidl.cpp
+++ b/drm/libmediadrm/DrmHalAidl.cpp
@@ -1189,6 +1189,25 @@
return serializedMetrics;
}
+status_t DrmHalAidl::getSupportedSchemes(std::vector<uint8_t> &schemes) const {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mFactories.empty()) return UNKNOWN_ERROR;
+ for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
+ CryptoSchemes curSchemes{};
+ auto err = mFactories[i]->getSupportedCryptoSchemes(&curSchemes);
+ if (!err.isOk()) {
+ continue;
+ }
+
+ for (auto uuidObj : curSchemes.uuids) {
+ schemes.insert(schemes.end(), uuidObj.uuid.begin(), uuidObj.uuid.end());
+ }
+ }
+
+ return OK;
+}
+
void DrmHalAidl::cleanup() {
closeOpenSessions();
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index c83b52b..c38dbef 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -20,6 +20,7 @@
#include <aidl/android/media/BnResourceManagerClient.h>
#include <android/binder_manager.h>
#include <android/hardware/drm/1.2/types.h>
+#include <android/hardware/drm/1.3/IDrmFactory.h>
#include <android/hidl/manager/1.2/IServiceManager.h>
#include <hidl/ServiceManagement.h>
#include <media/EventMetric.h>
@@ -1514,4 +1515,23 @@
return DrmUtils::GetLogMessages<drm::V1_4::IDrmPlugin>(mPlugin, logs);
}
+status_t DrmHalHidl::getSupportedSchemes(std::vector<uint8_t> &schemes) const {
+ Mutex::Autolock autoLock(mLock);
+ for (auto &factory : mFactories) {
+ sp<drm::V1_3::IDrmFactory> factoryV1_3 = drm::V1_3::IDrmFactory::castFrom(factory);
+ if (factoryV1_3 == nullptr) {
+ continue;
+ }
+
+ factoryV1_3->getSupportedCryptoSchemes(
+ [&](const hardware::hidl_vec<hardware::hidl_array<uint8_t, 16>>& schemes_hidl) {
+ for (const auto &scheme : schemes_hidl) {
+ schemes.insert(schemes.end(), scheme.data(), scheme.data() + scheme.size());
+ }
+ });
+ }
+
+ return OK;
+}
+
} // namespace android
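
Both getSupportedSchemes() implementations above flatten the supported crypto-scheme UUIDs into one byte vector, 16 bytes per scheme. A minimal sketch of how a caller could regroup that vector into individual UUIDs (the helper is illustrative, not part of the patch):

    #include <algorithm>
    #include <array>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Regroup the flat byte vector filled in by IDrm::getSupportedSchemes()
    // into 16-byte UUIDs; any trailing partial chunk is ignored.
    static std::vector<std::array<uint8_t, 16>> splitSchemes(
            const std::vector<uint8_t>& schemes) {
        std::vector<std::array<uint8_t, 16>> uuids;
        for (size_t i = 0; i + 16 <= schemes.size(); i += 16) {
            std::array<uint8_t, 16> uuid;
            std::copy(schemes.begin() + i, schemes.begin() + i + 16, uuid.begin());
            uuids.push_back(uuid);
        }
        return uuids;
    }
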
diff --git a/drm/libmediadrm/DrmUtils.cpp b/drm/libmediadrm/DrmUtils.cpp
index 731755b..be0cd4b 100644
--- a/drm/libmediadrm/DrmUtils.cpp
+++ b/drm/libmediadrm/DrmUtils.cpp
@@ -177,7 +177,7 @@
[](const char* instance, void* context) {
auto fullName = std::string(IDrmFactoryAidl::descriptor) + "/" + std::string(instance);
auto factory = IDrmFactoryAidl::fromBinder(
- ::ndk::SpAIBinder(AServiceManager_getService(fullName.c_str())));
+ ::ndk::SpAIBinder(AServiceManager_waitForService(fullName.c_str())));
if (factory == nullptr) {
ALOGE("not found IDrmFactory. Instance name:[%s]", fullName.c_str());
return;
diff --git a/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp b/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp
index eabd41f..597b72d 100644
--- a/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp
+++ b/drm/libmediadrm/fuzzer/mediadrm_fuzzer.cpp
@@ -20,6 +20,7 @@
#include <binder/MemoryDealer.h>
#include <hidlmemory/FrameworkUtils.h>
+#include <media/stagefright/foundation/AString.h>
#include <mediadrm/CryptoHal.h>
#include <mediadrm/DrmHal.h>
#include <utils/String8.h>
@@ -401,7 +402,7 @@
.secureMemory = nullptr};
const uint64_t offset = 0;
- AString *errorDetailMsg = nullptr;
+ AString errorDetailMsg;
CryptoPlugin::Mode mode;
bool shouldPassRandomCryptoMode = mFuzzedDataProvider->ConsumeBool();
if (shouldPassRandomCryptoMode) {
@@ -411,7 +412,7 @@
kCryptoMode[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumCryptoMode - 1)];
}
mCrypto->decrypt(keyId, iv, mode, pattern, sourceBuffer, offset, subSamples, numSubSamples,
- destBuffer, errorDetailMsg);
+ destBuffer, &errorDetailMsg);
if (heapSeqNum >= 0) {
mCrypto->unsetHeap(heapSeqNum);
diff --git a/drm/libmediadrm/include/mediadrm/DrmHal.h b/drm/libmediadrm/include/mediadrm/DrmHal.h
index f5e75ac..eab597b 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHal.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHal.h
@@ -117,6 +117,7 @@
Vector<uint8_t> const &sessionId,
const char *playbackId);
virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
+ virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
private:
sp<IDrm> mDrmHalHidl;
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalAidl.h b/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
index e35140e..0f51ce9 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
@@ -105,6 +105,7 @@
bool* required) const;
virtual status_t setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId);
virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const;
+ virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
::ndk::ScopedAStatus onEvent(EventTypeAidl in_eventType,
const std::vector<uint8_t>& in_sessionId,
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalHidl.h b/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
index 94ef285..11f0608 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
@@ -184,6 +184,7 @@
const char *playbackId);
virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const;
+ virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const;
// Methods of IDrmPluginListener
Return<void> sendEvent(EventType eventType,
diff --git a/drm/libmediadrm/include/mediadrm/IDrm.h b/drm/libmediadrm/include/mediadrm/IDrm.h
index a88784d..ee2be6a 100644
--- a/drm/libmediadrm/include/mediadrm/IDrm.h
+++ b/drm/libmediadrm/include/mediadrm/IDrm.h
@@ -165,6 +165,8 @@
virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage> &logs) const = 0;
+ virtual status_t getSupportedSchemes(std::vector<uint8_t> &schemes) const = 0;
+
protected:
IDrm() {}
diff --git a/drm/mediadrm/plugins/TEST_MAPPING b/drm/mediadrm/plugins/TEST_MAPPING
index fd4ef95..9919e90 100644
--- a/drm/mediadrm/plugins/TEST_MAPPING
+++ b/drm/mediadrm/plugins/TEST_MAPPING
@@ -1,19 +1,10 @@
{
"presubmit": [
{
- "name": "CtsMediaDrmTestCases",
+ "name": "CtsMediaDrmFrameworkTestCases",
"options" : [
{
"include-annotation": "android.platform.test.annotations.Presubmit"
- },
- {
- "include-filter": "android.mediadrm.cts.MediaDrmClearkeyTest"
- },
- {
- "include-filter": "android.mediadrm.cts.MediaDrmMetricsTest"
- },
- {
- "include-filter": "android.mediadrm.cts.NativeMediaDrmClearkeyTest"
}
]
}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index bae55d0..ea51e9d 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -83,7 +83,7 @@
void DrmPlugin::initProperties() {
mStringProperties.clear();
mStringProperties[kVendorKey] = kAidlVendorValue;
- mStringProperties[kVersionKey] = kVersionValue;
+ mStringProperties[kVersionKey] = kAidlVersionValue;
mStringProperties[kPluginDescriptionKey] = kAidlPluginDescriptionValue;
mStringProperties[kAlgorithmsKey] = kAidlAlgorithmsValue;
mStringProperties[kListenerTestSupportKey] = kAidlListenerTestSupportValue;
@@ -380,7 +380,7 @@
} else if (name == kDrmErrorTestKey) {
value = mStringProperties[kDrmErrorTestKey];
} else if (name == kAidlVersionKey) {
- value = mStringProperties[kAidlVersionValue];
+ value = mStringProperties[kAidlVersionKey];
} else {
ALOGE("App requested unknown string property %s", name.c_str());
status = Status::ERROR_DRM_CANNOT_HANDLE;
diff --git a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service-lazy.clearkey.rc
index 019c726..c87aabc 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service-lazy.clearkey.rc
@@ -1,9 +1,9 @@
-service vendor.drm-clearkey-service /vendor/bin/hw/android.hardware.drm-service.clearkey
+service vendor.drm-clearkey-service /vendor/bin/hw/android.hardware.drm-service-lazy.clearkey
+ oneshot
disabled
class hal
user media
group mediadrm drmrpc
ioprio rt 4
task_profiles ProcessCapacityHigh
- interface aidl android.hardware.drm.IDrmFactory/clearkey
- interface aidl android.hardware.drm.ICryptoFactory/clearkey
+ interface aidl android.hardware.drm.IDrmFactory/clearkey
\ No newline at end of file
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/AidlClearKeryProperties.h b/drm/mediadrm/plugins/clearkey/aidl/include/AidlClearKeryProperties.h
index 8038108..fb2cceb 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/include/AidlClearKeryProperties.h
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/AidlClearKeryProperties.h
@@ -19,7 +19,7 @@
namespace clearkeydrm {
static const std::string kAidlVendorValue("Google");
-static const std::string kAidlVersionValue("1.0");
+static const std::string kAidlVersionValue("aidl-1");
static const std::string kAidlPluginDescriptionValue("ClearKey CDM");
static const std::string kAidlAlgorithmsValue("");
static const std::string kAidlListenerTestSupportValue("true");
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Android.bp b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
index 02ac943..b82d996 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
@@ -144,7 +144,6 @@
"libclearkeydevicefiles-protos",
"libjsmn",
"libprotobuf-cpp-lite",
- "libutils",
],
shared_libs: [
"android.hidl.allocator@1.0",
@@ -157,6 +156,7 @@
"libhidlbase",
"libhidlmemory",
"liblog",
+ "libutils",
],
fuzz_config: {
cc: [
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index c7985ca..3b0e949 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -261,8 +261,7 @@
CREATE_DUMP_FILE(mInFile);
CREATE_DUMP_FILE(mOutFile);
- gettimeofday(&mTimeStart, nullptr);
- gettimeofday(&mTimeEnd, nullptr);
+ mTimeStart = mTimeEnd = systemTime();
}
C2SoftAomDec::~C2SoftAomDec() {
@@ -463,19 +462,17 @@
int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
if (inSize) {
uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
- int32_t decodeTime = 0;
- int32_t delay = 0;
DUMP_TO_FILE(mOutFile, bitstream, inSize);
- GETTIME(&mTimeStart, nullptr);
- TIME_DIFF(mTimeEnd, mTimeStart, delay);
+ mTimeStart = systemTime();
+ nsecs_t delay = mTimeStart - mTimeEnd;
aom_codec_err_t err =
aom_codec_decode(mCodecCtx, bitstream, inSize, &frameIndex);
- GETTIME(&mTimeEnd, nullptr);
- TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
- ALOGV("decodeTime=%4d delay=%4d\n", decodeTime, delay);
+ mTimeEnd = systemTime();
+ nsecs_t decodeTime = mTimeEnd - mTimeStart;
+ ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
if (err != AOM_CODEC_OK) {
ALOGE("av1 decoder failed to decode frame err: %d", err);
diff --git a/media/codec2/components/aom/C2SoftAomDec.h b/media/codec2/components/aom/C2SoftAomDec.h
index 4c82647..8b953fe 100644
--- a/media/codec2/components/aom/C2SoftAomDec.h
+++ b/media/codec2/components/aom/C2SoftAomDec.h
@@ -17,15 +17,12 @@
#ifndef ANDROID_C2_SOFT_AV1_DEC_H_
#define ANDROID_C2_SOFT_AV1_DEC_H_
+#include <inttypes.h>
+
#include <SimpleC2Component.h>
#include "aom/aom_decoder.h"
#include "aom/aomdx.h"
-#define GETTIME(a, b) gettimeofday(a, b);
-#define TIME_DIFF(start, end, diff) \
- diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
- ((end).tv_usec - (start).tv_usec);
-
namespace android {
struct C2SoftAomDec : public SimpleC2Component {
@@ -60,8 +57,8 @@
char mOutFile[200];
#endif /* FILE_DUMP_ENABLE */
- struct timeval mTimeStart; // Time at the start of decode()
- struct timeval mTimeEnd; // Time at the end of decode()
+ nsecs_t mTimeStart = 0; // Time at the start of decode()
+ nsecs_t mTimeEnd = 0; // Time at the end of decode()
status_t initDecoder();
status_t destroyDecoder();
@@ -85,14 +82,13 @@
#define OUTPUT_DUMP_EXT "av1"
#define GENERATE_FILE_NAMES() \
{ \
- GETTIME(&mTimeStart, NULL); \
- strcpy(mInFile, ""); \
+ nsecs_t now = systemTime(); \
ALOGD("GENERATE_FILE_NAMES"); \
- sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, mTimeStart.tv_sec, \
- mTimeStart.tv_usec, INPUT_DUMP_EXT); \
+ sprintf(mInFile, "%s_%" PRId64 ".%s", INPUT_DUMP_PATH, \
+ now, INPUT_DUMP_EXT); \
strcpy(mOutFile, ""); \
- sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH, \
- mTimeStart.tv_sec, mTimeStart.tv_usec, OUTPUT_DUMP_EXT); \
+ sprintf(mOutFile, "%s_%" PRId64 ".%s", OUTPUT_DUMP_PATH, \
+ now, OUTPUT_DUMP_EXT); \
}
#define CREATE_DUMP_FILE(m_filename) \
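
The gettimeofday()/GETTIME/TIME_DIFF plumbing removed above (and in the other codecs below) reported microseconds; the replacement keeps plain nsecs_t values from systemTime(), which are nanoseconds, so the logged numbers change scale. A standalone sketch of the new pattern, with the decode step left as a placeholder:

    #define LOG_TAG "TimingSketch"
    #include <inttypes.h>
    #include <utils/Log.h>
    #include <utils/Timers.h>   // systemTime(), nsecs_t

    static nsecs_t sTimeEnd = 0;                 // end of the previous call

    static void timedDecodeOnce() {
        nsecs_t timeStart = systemTime();        // nanoseconds, CLOCK_MONOTONIC
        nsecs_t delay = timeStart - sTimeEnd;    // gap since the previous decode ended
        // ... the actual decode call would go here ...
        sTimeEnd = systemTime();
        nsecs_t decodeTime = sTimeEnd - timeStart;
        // Values are nanoseconds now; the old TIME_DIFF() macro produced microseconds.
        ALOGV("decodeTime=%" PRId64 "ns delay=%" PRId64 "ns", decodeTime, delay);
    }
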
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index cc4517d..953afc5 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -670,8 +670,7 @@
void C2SoftAvcDec::resetPlugin() {
mSignalledOutputEos = false;
- gettimeofday(&mTimeStart, nullptr);
- gettimeofday(&mTimeEnd, nullptr);
+ mTimeStart = mTimeEnd = systemTime();
}
status_t C2SoftAvcDec::deleteDecoder() {
@@ -866,14 +865,13 @@
setParams(mStride, IVD_DECODE_HEADER);
}
- WORD32 delay;
- GETTIME(&mTimeStart, nullptr);
- TIME_DIFF(mTimeEnd, mTimeStart, delay);
+ mTimeStart = systemTime();
+ nsecs_t delay = mTimeStart - mTimeEnd;
(void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
- WORD32 decodeTime;
- GETTIME(&mTimeEnd, nullptr);
- TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
- ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
+
+ mTimeEnd = systemTime();
+ nsecs_t decodeTime = mTimeEnd - mTimeStart;
+ ALOGV("decodeTime=%" PRId64 " delay=%" PRId64 " numBytes=%6d", decodeTime, delay,
ps_decode_op->u4_num_bytes_consumed);
}
if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index 59d5184..36a463e 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -18,6 +18,7 @@
#define ANDROID_C2_SOFT_AVC_DEC_H_
#include <sys/time.h>
+#include <inttypes.h>
#include <media/stagefright/foundation/ColorUtils.h>
@@ -43,19 +44,15 @@
#define IVDEXT_CMD_CTL_SET_NUM_CORES \
(IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
#define MIN(a, b) (((a) < (b)) ? (a) : (b))
-#define GETTIME(a, b) gettimeofday(a, b);
-#define TIME_DIFF(start, end, diff) \
- diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
- ((end).tv_usec - (start).tv_usec);
#ifdef FILE_DUMP_ENABLE
#define INPUT_DUMP_PATH "/sdcard/clips/avcd_input"
#define INPUT_DUMP_EXT "h264"
#define GENERATE_FILE_NAMES() { \
- GETTIME(&mTimeStart, NULL); \
+ nsecs_t now = systemTime(); \
strcpy(mInFile, ""); \
- sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
- mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ sprintf(mInFile, "%s_%" PRId64 ".%s", \
+ INPUT_DUMP_PATH, now, \
INPUT_DUMP_EXT); \
}
#define CREATE_DUMP_FILE(m_filename) { \
@@ -183,8 +180,8 @@
} mBitstreamColorAspects;
// profile
- struct timeval mTimeStart;
- struct timeval mTimeEnd;
+ nsecs_t mTimeStart = 0;
+ nsecs_t mTimeEnd = 0;
#ifdef FILE_DUMP_ENABLE
char mInFile[200];
#endif /* FILE_DUMP_ENABLE */
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index d65ffa5..4ffcd59 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -656,8 +656,7 @@
mEntropyMode = DEFAULT_ENTROPY_MODE;
mBframes = DEFAULT_B_FRAMES;
- gettimeofday(&mTimeStart, nullptr);
- gettimeofday(&mTimeEnd, nullptr);
+ mTimeStart = mTimeEnd = systemTime();
}
c2_status_t C2SoftAvcEnc::setDimensions() {
@@ -1650,8 +1649,7 @@
work->worklets.front()->output.flags = work->input.flags;
IV_STATUS_T status;
- WORD32 timeDelay = 0;
- WORD32 timeTaken = 0;
+ nsecs_t timeDelay = 0;
uint64_t workIndex = work->input.ordinal.frameIndex.peekull();
// Initialize encoder if not already initialized
@@ -1817,10 +1815,10 @@
// mInFile, s_encode_ip.s_inp_buf.apv_bufs[0],
// (mHeight * mStride * 3 / 2));
- GETTIME(&mTimeStart, nullptr);
/* Compute time elapsed between end of previous decode()
* to start of current decode() */
- TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+ mTimeStart = systemTime();
+ timeDelay = mTimeStart - mTimeEnd;
status = ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
if (IV_SUCCESS != status) {
@@ -1844,11 +1842,11 @@
mBuffers[ps_encode_ip->s_inp_buf.apv_bufs[0]] = inputBuffer;
}
- GETTIME(&mTimeEnd, nullptr);
/* Compute time taken for decode() */
- TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+ mTimeEnd = systemTime();
+ nsecs_t timeTaken = mTimeEnd - mTimeStart;
- ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+ ALOGV("timeTaken=%" PRId64 "d delay=%" PRId64 " numBytes=%6d", timeTaken, timeDelay,
ps_encode_op->s_out_buf.u4_bytes);
void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index 1fecd9e..293867d 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -18,6 +18,7 @@
#define ANDROID_C2_SOFT_AVC_ENC_H__
#include <map>
+#include <inttypes.h>
#include <utils/Vector.h>
@@ -115,14 +116,6 @@
/** Used to remove warnings about unused parameters */
#define UNUSED(x) ((void)(x))
-/** Get time */
-#define GETTIME(a, b) gettimeofday(a, b);
-
-/** Compute difference between start and end */
-#define TIME_DIFF(start, end, diff) \
- diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
- ((end).tv_usec - (start).tv_usec);
-
#define ive_aligned_malloc(alignment, size) memalign(alignment, size)
#define ive_aligned_free(buf) free(buf)
@@ -148,6 +141,7 @@
virtual ~C2SoftAvcEnc();
private:
+ // RBE What does OMX have to do with the c2 plugin?
// OMX input buffer's timestamp and flags
typedef struct {
int64_t mTimeUs;
@@ -158,8 +152,8 @@
int32_t mStride;
- struct timeval mTimeStart; // Time at the start of decode()
- struct timeval mTimeEnd; // Time at the end of decode()
+ nsecs_t mTimeStart = 0; // Time at the start of decode()
+ nsecs_t mTimeEnd = 0; // Time at the end of decode()
#ifdef FILE_DUMP_ENABLE
char mInFile[200];
@@ -259,14 +253,14 @@
#define OUTPUT_DUMP_EXT "h264"
#define GENERATE_FILE_NAMES() { \
- GETTIME(&mTimeStart, NULL); \
+ nsecs_t now = systemTime(); \
strcpy(mInFile, ""); \
- sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
- mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ sprintf(mInFile, "%s_%" PRId64 ".%s", \
+ INPUT_DUMP_PATH, now, \
INPUT_DUMP_EXT); \
strcpy(mOutFile, ""); \
- sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,\
- mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ sprintf(mOutFile, "%s_%" PRId64 ".%s", \
+ OUTPUT_DUMP_PATH, now, \
OUTPUT_DUMP_EXT); \
}
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 5295822..b30eed5 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -32,6 +32,13 @@
constexpr uint8_t kNeutralUVBitDepth8 = 128;
constexpr uint16_t kNeutralUVBitDepth10 = 512;
+bool isAtLeastT() {
+ char deviceCodeName[PROP_VALUE_MAX];
+ __system_property_get("ro.build.version.codename", deviceCodeName);
+ return android_get_device_api_level() >= __ANDROID_API_T__ ||
+ !strcmp(deviceCodeName, "Tiramisu");
+}
+
void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
size_t srcUStride, size_t srcVStride, size_t dstYStride,
@@ -767,9 +774,9 @@
// Save supported hal pixel formats for bit depth of 10, the first time this is called
if (!mBitDepth10HalPixelFormats.size()) {
std::vector<int> halPixelFormats;
- // TODO(b/178229371) Enable HAL_PIXEL_FORMAT_YCBCR_P010 once framework supports it
- // halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
-
+ if (isAtLeastT()) {
+ halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+ }
// since allowRGBA1010102 can change in each call, but mBitDepth10HalPixelFormats
// is populated only once, allowRGBA1010102 is not considered at this stage.
halPixelFormats.push_back(HAL_PIXEL_FORMAT_RGBA_1010102);
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index d244f45..52ae3b8 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -27,7 +27,7 @@
#include <media/stagefright/foundation/Mutexed.h>
namespace android {
-
+bool isAtLeastT();
void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
size_t srcUStride, size_t srcVStride, size_t dstYStride,
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index e5fbe99..f5c8138 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -190,11 +190,18 @@
.withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
.build());
+ std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+ if (isAtLeastT()) {
+ pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+ }
// TODO: support more formats?
- addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
- .withConstValue(new C2StreamPixelFormatInfo::output(
- 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
- .build());
+ addParameter(
+ DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+ .withDefault(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+ .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+ .build());
}
static C2R SizeSetter(bool mayBlock,
@@ -335,8 +342,7 @@
std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
mIntf(intfImpl),
mCodecCtx(nullptr) {
- gettimeofday(&mTimeStart, nullptr);
- gettimeofday(&mTimeEnd, nullptr);
+ mTimeStart = mTimeEnd = systemTime();
}
C2SoftGav1Dec::~C2SoftGav1Dec() { onRelease(); }
@@ -403,6 +409,7 @@
bool C2SoftGav1Dec::initDecoder() {
mSignalledError = false;
mSignalledOutputEos = false;
+ mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
mCodecCtx.reset(new libgav1::Decoder());
if (mCodecCtx == nullptr) {
@@ -506,19 +513,17 @@
int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
if (inSize) {
uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);
- int32_t decodeTime = 0;
- int32_t delay = 0;
- GETTIME(&mTimeStart, nullptr);
- TIME_DIFF(mTimeEnd, mTimeStart, delay);
+ mTimeStart = systemTime();
+ nsecs_t delay = mTimeStart - mTimeEnd;
const Libgav1StatusCode status =
mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex,
/*buffer_private_data=*/nullptr);
- GETTIME(&mTimeEnd, nullptr);
- TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
- ALOGV("decodeTime=%4d delay=%4d\n", decodeTime, delay);
+ mTimeEnd = systemTime();
+ nsecs_t decodeTime = mTimeEnd - mTimeStart;
+ ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
if (status != kLibgav1StatusOk) {
ALOGE("av1 decoder failed to decode frame. status: %d.", status);
@@ -648,6 +653,24 @@
return false;
}
}
+
+ if (mHalPixelFormat != format) {
+ C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&pixelFormat }, C2_MAY_BLOCK, &failures);
+ if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(pixelFormat));
+ } else {
+ ALOGE("Config update pixelFormat failed");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return UNKNOWN_ERROR;
+ }
+ mHalPixelFormat = format;
+ }
+
C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
// We always create a graphic block that is width aligned to 16 and height
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index 134fa0d..4b13fef 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -17,6 +17,8 @@
#ifndef ANDROID_C2_SOFT_GAV1_DEC_H_
#define ANDROID_C2_SOFT_GAV1_DEC_H_
+#include <inttypes.h>
+
#include <media/stagefright/foundation/ColorUtils.h>
#include <SimpleC2Component.h>
@@ -24,11 +26,6 @@
#include "libgav1/src/gav1/decoder.h"
#include "libgav1/src/gav1/decoder_settings.h"
-#define GETTIME(a, b) gettimeofday(a, b);
-#define TIME_DIFF(start, end, diff) \
- diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
- ((end).tv_usec - (start).tv_usec);
-
namespace android {
struct C2SoftGav1Dec : public SimpleC2Component {
@@ -54,6 +51,7 @@
std::shared_ptr<IntfImpl> mIntf;
std::unique_ptr<libgav1::Decoder> mCodecCtx;
+ uint32_t mHalPixelFormat;
uint32_t mWidth;
uint32_t mHeight;
bool mSignalledOutputEos;
@@ -80,8 +78,8 @@
}
} mBitstreamColorAspects;
- struct timeval mTimeStart; // Time at the start of decode()
- struct timeval mTimeEnd; // Time at the end of decode()
+ nsecs_t mTimeStart = 0; // Time at the start of decode()
+ nsecs_t mTimeEnd = 0; // Time at the end of decode()
bool initDecoder();
void getVuiParams(const libgav1::DecoderBuffer *buffer);
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 2a6adca..5a660c5 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -663,8 +663,7 @@
void C2SoftHevcDec::resetPlugin() {
mSignalledOutputEos = false;
- gettimeofday(&mTimeStart, nullptr);
- gettimeofday(&mTimeEnd, nullptr);
+ mTimeStart = mTimeEnd = systemTime();
}
status_t C2SoftHevcDec::deleteDecoder() {
@@ -858,14 +857,13 @@
/* Decode header and get dimensions */
setParams(mStride, IVD_DECODE_HEADER);
}
- WORD32 delay;
- GETTIME(&mTimeStart, nullptr);
- TIME_DIFF(mTimeEnd, mTimeStart, delay);
+
+ mTimeStart = systemTime();
+ nsecs_t delay = mTimeStart - mTimeEnd;
(void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
- WORD32 decodeTime;
- GETTIME(&mTimeEnd, nullptr);
- TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
- ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
+ mTimeEnd = systemTime();
+ nsecs_t decodeTime = mTimeEnd - mTimeStart;
+ ALOGV("decodeTime=%6" PRId64 " delay=%6" PRId64 " numBytes=%6d", decodeTime, delay,
ps_decode_op->u4_num_bytes_consumed);
if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
ALOGE("allocation failure in decoder");
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index b9296e9..6abf69e 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -20,6 +20,7 @@
#include <media/stagefright/foundation/ColorUtils.h>
#include <atomic>
+#include <inttypes.h>
#include <SimpleC2Component.h>
#include "ihevc_typedefs.h"
@@ -41,10 +42,6 @@
#define IVDEXT_CMD_CTL_SET_NUM_CORES \
(IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
#define MIN(a, b) (((a) < (b)) ? (a) : (b))
-#define GETTIME(a, b) gettimeofday(a, b);
-#define TIME_DIFF(start, end, diff) \
- diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
- ((end).tv_usec - (start).tv_usec);
struct C2SoftHevcDec : public SimpleC2Component {
@@ -142,8 +139,8 @@
} mBitstreamColorAspects;
// profile
- struct timeval mTimeStart;
- struct timeval mTimeEnd;
+ nsecs_t mTimeStart = 0;
+ nsecs_t mTimeEnd = 0;
C2_DO_NOT_COPY(C2SoftHevcDec);
};
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index b7a5686..947e387 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -123,7 +123,7 @@
// matches size limits in codec library
addParameter(
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
- .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+ .withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
.withFields({
C2F(mSize, width).inRange(2, 1920, 2),
C2F(mSize, height).inRange(2, 1088, 2),
@@ -133,7 +133,7 @@
addParameter(
DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
- .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+ .withDefault(new C2StreamFrameRateInfo::output(0u, 1.))
.withFields({C2F(mFrameRate, value).greaterThan(0.)})
.withSetter(
Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
@@ -591,8 +591,7 @@
CREATE_DUMP_FILE(mInFile);
CREATE_DUMP_FILE(mOutFile);
- gettimeofday(&mTimeStart, nullptr);
- gettimeofday(&mTimeEnd, nullptr);
+ mTimeStart = mTimeEnd = systemTime();
}
C2SoftHevcEnc::~C2SoftHevcEnc() {
@@ -1203,11 +1202,11 @@
}
}
- uint64_t timeDelay = 0;
- uint64_t timeTaken = 0;
+ nsecs_t timeDelay = 0;
+ nsecs_t timeTaken = 0;
memset(&s_encode_op, 0, sizeof(s_encode_op));
- GETTIME(&mTimeStart, nullptr);
- TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+ mTimeStart = systemTime();
+ timeDelay = mTimeStart - mTimeEnd;
if (inputBuffer) {
err = ihevce_encode(mCodecCtx, &s_encode_ip, &s_encode_op);
@@ -1222,12 +1221,12 @@
fillEmptyWork(work);
}
- GETTIME(&mTimeEnd, nullptr);
/* Compute time taken for decode() */
- TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+ mTimeEnd = systemTime();
+ timeTaken = mTimeEnd - mTimeStart;
- ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", (int)timeTaken,
- (int)timeDelay, s_encode_op.i4_bytes_generated);
+ ALOGV("timeTaken=%6" PRId64 " delay=%6" PRId64 " numBytes=%6d", timeTaken,
+ timeDelay, s_encode_op.i4_bytes_generated);
if (s_encode_op.i4_bytes_generated) {
finishWork(s_encode_op.u8_pts, work, pool, &s_encode_op);
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
index 4217a8b..ce9cec8 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.h
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -19,6 +19,7 @@
#include <SimpleC2Component.h>
#include <algorithm>
+#include <inttypes.h>
#include <map>
#include <media/stagefright/foundation/ColorUtils.h>
#include <utils/Vector.h>
@@ -27,14 +28,6 @@
namespace android {
-/** Get time */
-#define GETTIME(a, b) gettimeofday(a, b)
-
-/** Compute difference between start and end */
-#define TIME_DIFF(start, end, diff) \
- diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
- ((end).tv_usec - (start).tv_usec);
-
#define CODEC_MAX_CORES 4
#define MAX_B_FRAMES 1
#define MAX_RC_LOOKAHEAD 1
@@ -102,8 +95,8 @@
#endif /* FILE_DUMP_ENABLE */
// profile
- struct timeval mTimeStart;
- struct timeval mTimeEnd;
+ nsecs_t mTimeStart = 0;
+ nsecs_t mTimeEnd = 0;
c2_status_t initEncParams();
c2_status_t initEncoder();
@@ -127,14 +120,12 @@
#define OUTPUT_DUMP_EXT "h265"
#define GENERATE_FILE_NAMES() \
{ \
- GETTIME(&mTimeStart, NULL); \
- strcpy(mInFile, ""); \
+ nsecs_t now = systemTime(); \
ALOGD("GENERATE_FILE_NAMES"); \
- sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, mTimeStart.tv_sec, \
- mTimeStart.tv_usec, INPUT_DUMP_EXT); \
- strcpy(mOutFile, ""); \
- sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH, \
- mTimeStart.tv_sec, mTimeStart.tv_usec, OUTPUT_DUMP_EXT); \
+ sprintf(mInFile, "%s_%" PRId64 ".%s", INPUT_DUMP_PATH, \
+ now, INPUT_DUMP_EXT); \
+ sprintf(mOutFile, "%s_%" PRId64 ".%s", OUTPUT_DUMP_PATH, \
+ now, OUTPUT_DUMP_EXT); \
}
#define CREATE_DUMP_FILE(m_filename) \
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 5f9b30b..9a41910 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -731,8 +731,7 @@
void C2SoftMpeg2Dec::resetPlugin() {
mSignalledOutputEos = false;
- gettimeofday(&mTimeStart, nullptr);
- gettimeofday(&mTimeEnd, nullptr);
+ mTimeStart = mTimeEnd = systemTime();
}
status_t C2SoftMpeg2Dec::deleteDecoder() {
@@ -929,14 +928,12 @@
}
// If input dump is enabled, then write to file
DUMP_TO_FILE(mInFile, s_decode_ip.pv_stream_buffer, s_decode_ip.u4_num_Bytes);
- WORD32 delay;
- GETTIME(&mTimeStart, nullptr);
- TIME_DIFF(mTimeEnd, mTimeStart, delay);
+ mTimeStart = systemTime();
+ nsecs_t delay = mTimeStart - mTimeEnd;
(void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
- WORD32 decodeTime;
- GETTIME(&mTimeEnd, nullptr);
- TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
- ALOGV("decodeTime=%6d delay=%6d numBytes=%6d ", decodeTime, delay,
+
+ mTimeEnd = systemTime();
+ nsecs_t decodeTime = mTimeEnd - mTimeStart;
+ ALOGV("decodeTime=%" PRId64 " delay=%" PRId64 " numBytes=%6d ", decodeTime, delay,
s_decode_op.u4_num_bytes_consumed);
if (IMPEG2D_UNSUPPORTED_DIMENSIONS == s_decode_op.u4_error_code) {
ALOGV("unsupported resolution : %dx%d", s_decode_op.u4_pic_wd, s_decode_op.u4_pic_ht);
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
index 8a29c14..3965bcc 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
@@ -18,6 +18,7 @@
#define ANDROID_C2_SOFT_MPEG2_DEC_H_
#include <atomic>
+#include <inttypes.h>
#include <SimpleC2Component.h>
#include <media/stagefright/foundation/ColorUtils.h>
@@ -42,19 +43,14 @@
#define IVDEXT_CMD_CTL_SET_NUM_CORES \
(IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_SET_NUM_CORES
#define MIN(a, b) (((a) < (b)) ? (a) : (b))
-#define GETTIME(a, b) gettimeofday(a, b);
-#define TIME_DIFF(start, end, diff) \
- diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
- ((end).tv_usec - (start).tv_usec);
#ifdef FILE_DUMP_ENABLE
#define INPUT_DUMP_PATH "/sdcard/clips/mpeg2d_input"
#define INPUT_DUMP_EXT "m2v"
#define GENERATE_FILE_NAMES() { \
- GETTIME(&mTimeStart, NULL); \
- strcpy(mInFile, ""); \
- sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
- mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ nsecs_t now = systemTime(); \
+ sprintf(mInFile, "%s_%" PRId64 ".%s", \
+ INPUT_DUMP_PATH, now, \
INPUT_DUMP_EXT); \
}
#define CREATE_DUMP_FILE(m_filename) { \
@@ -183,8 +179,8 @@
} mBitstreamColorAspects;
// profile
- struct timeval mTimeStart;
- struct timeval mTimeEnd;
+ nsecs_t mTimeStart = 0;
+ nsecs_t mTimeEnd = 0;
#ifdef FILE_DUMP_ENABLE
char mInFile[200];
#endif /* FILE_DUMP_ENABLE */
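
The encoder and decoder hunks above replace gettimeofday()/timeval bookkeeping with a single monotonic nsecs_t from systemTime(), so an interval is just a subtraction of two int64 nanosecond counts. A minimal standalone sketch of the same pattern, using std::chrono as a stand-in for the Android systemTime()/nsecs_t utilities the patch assumes:

    #include <chrono>
    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>

    // Stand-in for Android's systemTime(): a monotonic clock read as int64 nanoseconds.
    static int64_t nowNs() {
        using namespace std::chrono;
        return duration_cast<nanoseconds>(steady_clock::now().time_since_epoch()).count();
    }

    int main() {
        int64_t timeStart = nowNs();
        // ... decode one access unit here ...
        int64_t timeEnd = nowNs();
        int64_t decodeTimeNs = timeEnd - timeStart;  // plain subtraction, no tv_sec/tv_usec math
        printf("decodeTime=%" PRId64 " ns\n", decodeTimeNs);
        return 0;
    }
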
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 5fc89be..e81f044 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -19,7 +19,6 @@
#include <log/log.h>
#include <algorithm>
-
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
@@ -218,11 +217,20 @@
.build());
// TODO: support more formats?
+ std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+#ifdef VP9
+ if (isAtLeastT()) {
+ pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+ }
+#endif
addParameter(
DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
- .withConstValue(new C2StreamPixelFormatInfo::output(
- 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .withDefault(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+ .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
.build());
+
}
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
@@ -424,7 +432,7 @@
#else
mMode = MODE_VP8;
#endif
-
+ mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
mWidth = 320;
mHeight = 240;
mFrameParallelMode = false;
@@ -690,6 +698,24 @@
}
format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
}
+
+ if (mHalPixelFormat != format) {
+ C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&pixelFormat }, C2_MAY_BLOCK, &failures);
+ if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(pixelFormat));
+ } else {
+ ALOGE("Config update pixelFormat failed");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return UNKNOWN_ERROR;
+ }
+ mHalPixelFormat = format;
+ }
+
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format, usage, &block);
if (err != C2_OK) {
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index 2065165..5564766 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -67,6 +67,7 @@
vpx_codec_ctx_t *mCodecCtx;
bool mFrameParallelMode; // Frame parallel is only supported by VP9 decoder.
+ uint32_t mHalPixelFormat;
uint32_t mWidth;
uint32_t mHeight;
bool mSignalledOutputEos;
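
The VPX decoder change above caches the last HAL pixel format it advertised (mHalPixelFormat) and only pushes a C2StreamPixelFormatInfo config update when the value actually changes. A minimal generic sketch of that notify-only-on-change caching pattern in plain C++; the callback and the format constants are illustrative stand-ins, not the Codec2 interfaces themselves:

    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>
    #include <functional>
    #include <utility>

    // Remembers the last pixel format pushed downstream; re-notifies only when it changes.
    class FormatNotifier {
    public:
        explicit FormatNotifier(std::function<bool(uint32_t)> notify)
            : mNotify(std::move(notify)) {}

        // Returns false if the downstream config update was rejected.
        bool update(uint32_t format) {
            if (format == mCurrent) return true;  // unchanged: nothing to push
            if (!mNotify(format)) return false;   // downstream refused the update
            mCurrent = format;
            return true;
        }

    private:
        uint32_t mCurrent = 0;  // 0 == not yet advertised
        std::function<bool(uint32_t)> mNotify;
    };

    int main() {
        FormatNotifier notifier([](uint32_t f) {
            printf("config update -> 0x%" PRIx32 "\n", f);
            return true;
        });
        notifier.update(0x23);  // illustrative value for YCBCR_420_888: notifies
        notifier.update(0x23);  // same format again: silent
        notifier.update(0x36);  // illustrative value for YCBCR_P010: notifies
        return 0;
    }
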
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index 3adc212..147a52e 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -38,6 +38,12 @@
"-Wall",
"-Werror",
],
+
+ fuzz_config: {
+ cc: [
+ "wonsik@google.com",
+ ],
+ },
}
cc_fuzz {
diff --git a/media/codec2/hidl/1.2/utils/Component.cpp b/media/codec2/hidl/1.2/utils/Component.cpp
index 8924e6d..7994d32 100644
--- a/media/codec2/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hidl/1.2/utils/Component.cpp
@@ -520,6 +520,37 @@
if (res != C2_OK) {
mInit = res;
}
+
+ struct ListenerDeathRecipient : public HwDeathRecipient {
+ ListenerDeathRecipient(const wp<Component>& comp)
+ : component{comp} {
+ }
+
+ virtual void serviceDied(
+ uint64_t /* cookie */,
+ const wp<::android::hidl::base::V1_0::IBase>& /* who */
+ ) override {
+ auto strongComponent = component.promote();
+ if (strongComponent) {
+ LOG(INFO) << "Client died ! release the component !!";
+ strongComponent->release();
+ } else {
+ LOG(ERROR) << "Client died ! no component to release !!";
+ }
+ }
+
+ wp<Component> component;
+ };
+
+ mDeathRecipient = new ListenerDeathRecipient(self);
+ Return<bool> transStatus = mListener->linkToDeath(
+ mDeathRecipient, 0);
+ if (!transStatus.isOk()) {
+ LOG(ERROR) << "Listener linkToDeath() transaction failed.";
+ }
+ if (!static_cast<bool>(transStatus)) {
+ LOG(DEBUG) << "Listener linkToDeath() call failed.";
+ }
}
Component::~Component() {
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
index 7937664..d0972ee 100644
--- a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -142,6 +142,10 @@
friend struct ComponentStore;
struct Listener;
+
+ using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
+ sp<HwDeathRecipient> mDeathRecipient;
+
};
} // namespace utils
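
The Component change above registers a death recipient that holds only a weak pointer to the component and releases it if the listener's process dies. A small standard-C++ analogue of that shape, using std::weak_ptr in place of wp<> and a plain callback in place of the HIDL linkToDeath machinery; it is a sketch of the ownership pattern, not the HIDL API:

    #include <cstdio>
    #include <memory>
    #include <utility>

    struct Component {
        void release() { printf("releasing component resources\n"); }
    };

    // Analogue of the patch's ListenerDeathRecipient: keep only a weak reference so the
    // recipient itself never extends the component's lifetime.
    struct DeathRecipient {
        explicit DeathRecipient(std::weak_ptr<Component> comp) : component(std::move(comp)) {}

        void onClientDied() {
            if (auto strong = component.lock()) {  // same idea as wp<>::promote()
                strong->release();
            } else {
                printf("client died, but component is already gone\n");
            }
        }

        std::weak_ptr<Component> component;
    };

    int main() {
        auto comp = std::make_shared<Component>();
        DeathRecipient recipient(comp);
        recipient.onClientDied();  // component still alive: release() runs
        comp.reset();
        recipient.onClientDied();  // component gone: nothing to release
        return 0;
    }
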
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index a73b493..c36ae94 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -67,6 +67,7 @@
"libstagefright_codecbase",
"libstagefright_foundation",
"libstagefright_omx",
+ "libstagefright_surface_utils",
"libstagefright_xmlparser",
"libui",
"libutils",
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index def8a18..88fe1f3 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -212,9 +212,8 @@
(OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
&usage, sizeof(usage));
- mSource->configure(
- mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace));
- return OK;
+ return GetStatus(mSource->configure(
+ mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace)));
}
void disconnect() override {
@@ -872,6 +871,11 @@
}
config->mTunneled = true;
}
+
+ int32_t pushBlankBuffersOnStop = 0;
+ if (msg->findInt32(KEY_PUSH_BLANK_BUFFERS_ON_STOP, &pushBlankBuffersOnStop)) {
+ config->mPushBlankBuffersOnStop = pushBlankBuffersOnStop == 1;
+ }
}
}
setSurface(surface);
@@ -1527,6 +1531,9 @@
using namespace android::hardware::graphics::bufferqueue::V1_0::utils;
typedef android::hardware::media::omx::V1_0::Status OmxStatus;
android::sp<IOmx> omx = IOmx::getService();
+ if (omx == nullptr) {
+ return nullptr;
+ }
typedef android::hardware::graphics::bufferqueue::V1_0::
IGraphicBufferProducer HGraphicBufferProducer;
typedef android::hardware::media::omx::V1_0::
@@ -1804,9 +1811,16 @@
if (tryAndReportOnError(setRunning) != OK) {
return;
}
+
+ err2 = mChannel->requestInitialInputBuffers();
+
+ if (err2 != OK) {
+ ALOGE("Initial request for Input Buffers failed");
+ mCallback->onError(err2, ACTION_CODE_FATAL);
+ return;
+ }
mCallback->onStartCompleted();
- (void)mChannel->requestInitialInputBuffers();
}
void CCodec::initiateShutdown(bool keepComponentAllocated) {
@@ -1832,7 +1846,13 @@
}
state->set(STOPPING);
}
-
+ {
+ Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+ const std::unique_ptr<Config> &config = *configLocked;
+ if (config->mPushBlankBuffersOnStop) {
+ mChannel->pushBlankBufferToOutputSurface();
+ }
+ }
mChannel->reset();
(new AMessage(kWhatStop, this))->post();
}
@@ -1920,6 +1940,13 @@
config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
}
}
+ {
+ Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+ const std::unique_ptr<Config> &config = *configLocked;
+ if (config->mPushBlankBuffersOnStop) {
+ mChannel->pushBlankBufferToOutputSurface();
+ }
+ }
mChannel->reset();
// thiz holds strong ref to this while the thread is running.
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 99aa593..a086128 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -15,8 +15,11 @@
*/
//#define LOG_NDEBUG 0
+#include <utils/Errors.h>
#define LOG_TAG "CCodecBufferChannel"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
+#include <utils/Trace.h>
#include <algorithm>
#include <atomic>
@@ -44,9 +47,9 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/SkipCutBuffer.h>
+#include <media/stagefright/SurfaceUtils.h>
#include <media/MediaCodecBuffer.h>
#include <mediadrm/ICrypto.h>
#include <system/window.h>
@@ -327,6 +330,8 @@
}
c2_status_t err = C2_OK;
if (!items.empty()) {
+ ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+ "CCodecBufferChannel::queue(%s@ts=%lld)", mName, (long long)timeUs).c_str());
{
Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
PipelineWatcher::Clock::time_point now = PipelineWatcher::Clock::now();
@@ -924,6 +929,11 @@
hdr.validTypes |= HdrMetadata::CTA861_3;
hdr.cta8613 = cta861_meta;
}
+
+ // does not have valid info
+ if (!(hdr.validTypes & (HdrMetadata::SMPTE2086 | HdrMetadata::CTA861_3))) {
+ hdrStaticInfo.reset();
+ }
}
if (hdrDynamicInfo
&& hdrDynamicInfo->m.type_ == C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40) {
@@ -1892,7 +1902,7 @@
int32_t flags = 0;
if (worklet->output.flags & C2FrameData::FLAG_END_OF_STREAM) {
- flags |= MediaCodec::BUFFER_FLAG_EOS;
+ flags |= BUFFER_FLAG_END_OF_STREAM;
ALOGV("[%s] onWorkDone: output EOS", mName);
}
@@ -1909,6 +1919,8 @@
// When using input surface we need to restore the original input timestamp.
timestamp = work->input.ordinal.customOrdinal;
}
+ ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+ "CCodecBufferChannel::onWorkDone(%s@ts=%lld)", mName, timestamp.peekll()).c_str());
ALOGV("[%s] onWorkDone: input %lld, codec %lld => output %lld => %lld",
mName,
work->input.ordinal.customOrdinal.peekll(),
@@ -1930,7 +1942,7 @@
sp<MediaCodecBuffer> outBuffer;
if (output->buffers && output->buffers->registerCsd(initData, &index, &outBuffer) == OK) {
outBuffer->meta()->setInt64("timeUs", timestamp.peek());
- outBuffer->meta()->setInt32("flags", MediaCodec::BUFFER_FLAG_CODECCONFIG);
+ outBuffer->meta()->setInt32("flags", BUFFER_FLAG_CODEC_CONFIG);
ALOGV("[%s] onWorkDone: csd index = %zu [%p]", mName, index, outBuffer.get());
output.unlock();
@@ -1966,7 +1978,7 @@
switch (info->coreIndex().coreIndex()) {
case C2StreamPictureTypeMaskInfo::CORE_INDEX:
if (((C2StreamPictureTypeMaskInfo *)info.get())->value & C2Config::SYNC_FRAME) {
- flags |= MediaCodec::BUFFER_FLAG_SYNCFRAME;
+ flags |= BUFFER_FLAG_KEY_FRAME;
}
break;
default:
@@ -2170,4 +2182,13 @@
}
}
+status_t CCodecBufferChannel::pushBlankBufferToOutputSurface() {
+ Mutexed<OutputSurface>::Locked output(mOutputSurface);
+ sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(output->surface.get());
+ if (nativeWindow == nullptr) {
+ return INVALID_OPERATION;
+ }
+ return pushBlankBuffersToNativeWindow(nativeWindow.get());
+}
+
} // namespace android
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 26eef30..b3a5f4b 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -181,6 +181,11 @@
void setMetaMode(MetaMode mode);
+ /**
+ * Push a blank buffer to the configured native output surface.
+ */
+ status_t pushBlankBufferToOutputSurface();
+
private:
class QueueGuard;
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 20f2ecf..57c70c1 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -22,7 +22,6 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/MediaDefs.h>
-#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/SkipCutBuffer.h>
#include <mediadrm/ICrypto.h>
@@ -34,6 +33,8 @@
namespace {
+constexpr uint32_t PIXEL_FORMAT_UNKNOWN = 0;
+
sp<GraphicBlockBuffer> AllocateInputGraphicBuffer(
const std::shared_ptr<C2BlockPool> &pool,
const sp<AMessage> &format,
@@ -292,7 +293,7 @@
int32_t flags,
const sp<AMessage>& format,
const C2WorkOrdinalStruct& ordinal) {
- bool eos = flags & MediaCodec::BUFFER_FLAG_EOS;
+ bool eos = flags & BUFFER_FLAG_END_OF_STREAM;
if (!buffer && eos) {
// TRICKY: we may be violating ordering of the stash here. Because we
// don't expect any more emplace() calls after this, the ordering should
@@ -300,7 +301,7 @@
mReorderStash.emplace_back(
buffer, notify, timestamp, flags, format, ordinal);
} else {
- flags = flags & ~MediaCodec::BUFFER_FLAG_EOS;
+ flags = flags & ~BUFFER_FLAG_END_OF_STREAM;
auto it = mReorderStash.begin();
for (; it != mReorderStash.end(); ++it) {
if (less(ordinal, it->ordinal)) {
@@ -311,7 +312,7 @@
buffer, notify, timestamp, flags, format, ordinal);
if (eos) {
mReorderStash.back().flags =
- mReorderStash.back().flags | MediaCodec::BUFFER_FLAG_EOS;
+ mReorderStash.back().flags | BUFFER_FLAG_END_OF_STREAM;
}
}
while (!mReorderStash.empty() && mReorderStash.size() > mDepth) {
@@ -348,7 +349,7 @@
// Flushing mReorderStash because no other buffers should come after output
// EOS.
- if (entry.flags & MediaCodec::BUFFER_FLAG_EOS) {
+ if (entry.flags & BUFFER_FLAG_END_OF_STREAM) {
// Flush reorder stash
setReorderDepth(0);
}
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index dd37c4b..132902b 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -324,7 +324,8 @@
: mInputFormat(new AMessage),
mOutputFormat(new AMessage),
mUsingSurface(false),
- mTunneled(false) { }
+ mTunneled(false),
+ mPushBlankBuffersOnStop(false) { }
void CCodecConfig::initializeStandardParams() {
typedef Domain D;
@@ -963,8 +964,6 @@
.limitTo(D::ENCODER & D::VIDEO & D::READ));
/* still to do
- constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
-
not yet used by MediaCodec, but defined as MediaFormat
KEY_AUDIO_SESSION_ID // we use "audio-hw-sync"
KEY_OUTPUT_REORDER_DEPTH
@@ -1896,7 +1895,9 @@
names->clear();
// TODO: expand to standard params
for (const auto &[key, desc] : mVendorParams) {
- names->push_back(key);
+ if (desc->isVisible()) {
+ names->push_back(key);
+ }
}
return OK;
}
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 88e6239..2e7b866 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -148,6 +148,8 @@
bool mTunneled;
sp<NativeHandle> mSidebandHandle;
+ bool mPushBlankBuffersOnStop;
+
CCodecConfig();
/// initializes the members required to manage the format: descriptors, reflector,
@@ -396,4 +398,3 @@
} // namespace android
#endif // C_CODEC_H_
-
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 2d3c70a..c2405e8 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -16,7 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "Codec2Buffer"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
+#include <utils/Trace.h>
#include <aidl/android/hardware/graphics/common/Cta861_3.h>
#include <aidl/android/hardware/graphics/common/Smpte2086.h>
@@ -229,6 +231,7 @@
mAllocatedDepth(0),
mBackBufferSize(0),
mMediaImage(new ABuffer(sizeof(MediaImage2))) {
+ ATRACE_CALL();
if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
mClientColorFormat = COLOR_FormatYUV420Flexible;
}
@@ -581,6 +584,7 @@
* Copy C2GraphicView to MediaImage2.
*/
status_t copyToMediaImage() {
+ ATRACE_CALL();
if (mInitCheck != OK) {
return mInitCheck;
}
@@ -619,7 +623,9 @@
const sp<AMessage> &format,
const std::shared_ptr<C2GraphicBlock> &block,
std::function<sp<ABuffer>(size_t)> alloc) {
+ ATRACE_BEGIN("GraphicBlockBuffer::Allocate block->map()");
C2GraphicView view(block->map().get());
+ ATRACE_END();
if (view.error() != C2_OK) {
ALOGD("C2GraphicBlock::map failed: %d", view.error());
return nullptr;
@@ -664,6 +670,7 @@
}
std::shared_ptr<C2Buffer> GraphicBlockBuffer::asC2Buffer() {
+ ATRACE_CALL();
uint32_t width = mView.width();
uint32_t height = mView.height();
if (!mWrapped) {
@@ -752,8 +759,10 @@
ALOGD("C2Buffer precond fail");
return nullptr;
}
+ ATRACE_BEGIN("ConstGraphicBlockBuffer::Allocate block->map()");
std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
buffer->data().graphicBlocks()[0].map().get()));
+ ATRACE_END();
std::unique_ptr<const C2GraphicView> holder;
GraphicView2MediaImageConverter converter(*view, format, false /* copy */);
@@ -854,11 +863,13 @@
return false;
}
+ ATRACE_BEGIN("ConstGraphicBlockBuffer::canCopy block->map()");
GraphicView2MediaImageConverter converter(
buffer->data().graphicBlocks()[0].map().get(),
// FIXME: format() is not const, but we cannot change it, so do a const cast here
const_cast<ConstGraphicBlockBuffer *>(this)->format(),
true /* copy */);
+ ATRACE_END();
if (converter.initCheck() != OK) {
ALOGD("ConstGraphicBlockBuffer::canCopy: converter init failed: %d", converter.initCheck());
return false;
@@ -973,16 +984,47 @@
return sMapper;
}
-class NativeHandleDeleter {
+class Gralloc4Buffer {
public:
- explicit NativeHandleDeleter(native_handle_t *handle) : mHandle(handle) {}
- ~NativeHandleDeleter() {
- if (mHandle) {
- native_handle_delete(mHandle);
+ Gralloc4Buffer(const C2Handle *const handle) : mBuffer(nullptr) {
+ sp<IMapper4> mapper = GetMapper4();
+ if (!mapper) {
+ return;
+ }
+ // Unwrap raw buffer handle from the C2Handle
+ native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
+ if (!nh) {
+ return;
+ }
+ // Import the raw handle so IMapper can use the buffer. The imported
+ // handle must be freed when the client is done with the buffer.
+ mapper->importBuffer(
+ hardware::hidl_handle(nh),
+ [&](const Error4 &error, void *buffer) {
+ if (error == Error4::NONE) {
+ mBuffer = buffer;
+ }
+ });
+
+ // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
+ // does not clone the fds. Thus we need to delete the handle
+ // without closing it.
+ native_handle_delete(nh);
+ }
+
+ ~Gralloc4Buffer() {
+ sp<IMapper4> mapper = GetMapper4();
+ if (mapper && mBuffer) {
+ // Free the imported buffer handle. This does not release the
+ // underlying buffer itself.
+ mapper->freeBuffer(mBuffer);
}
}
+
+ void *get() const { return mBuffer; }
+ operator bool() const { return (mBuffer != nullptr); }
private:
- native_handle_t *mHandle;
+ void *mBuffer;
};

} // namespace
@@ -992,24 +1034,15 @@
std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo) {
c2_status_t err = C2_OK;
- native_handle_t *nativeHandle = UnwrapNativeCodec2GrallocHandle(handle);
- if (nativeHandle == nullptr) {
- // Nothing to do
- return err;
- }
- // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
- // does not clone the fds. Thus we need to delete the handle
- // without closing it when going out of scope.
- // NativeHandle cannot solve this problem, as it would close and
- // delete the handle, while we need delete only.
- NativeHandleDeleter nhd(nativeHandle);
sp<IMapper4> mapper = GetMapper4();
- if (!mapper) {
+ Gralloc4Buffer buffer(handle);
+ if (!mapper || !buffer) {
// Gralloc4 not supported; nothing to do
return err;
}
Error4 mapperErr = Error4::NONE;
if (staticInfo) {
+ ALOGV("Grabbing static HDR info from gralloc4 metadata");
staticInfo->reset(new C2StreamHdrStaticMetadataInfo::input(0u));
memset(&(*staticInfo)->mastering, 0, sizeof((*staticInfo)->mastering));
(*staticInfo)->maxCll = 0;
@@ -1038,7 +1071,7 @@
mapperErr = Error4::BAD_VALUE;
}
};
- Return<void> ret = mapper->get(nativeHandle, MetadataType_Smpte2086, cb);
+ Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2086, cb);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (mapperErr != Error4::NONE) {
@@ -1059,7 +1092,7 @@
mapperErr = Error4::BAD_VALUE;
}
};
- ret = mapper->get(nativeHandle, MetadataType_Cta861_3, cb);
+ ret = mapper->get(buffer.get(), MetadataType_Cta861_3, cb);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (mapperErr != Error4::NONE) {
@@ -1067,6 +1100,7 @@
}
}
if (dynamicInfo) {
+ ALOGV("Grabbing dynamic HDR info from gralloc4 metadata");
dynamicInfo->reset();
IMapper4::get_cb cb = [&mapperErr, dynamicInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
mapperErr = err;
@@ -1080,7 +1114,7 @@
vec.size(), 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
memcpy((*dynamicInfo)->m.data, vec.data(), vec.size());
};
- Return<void> ret = mapper->get(nativeHandle, MetadataType_Smpte2094_40, cb);
+ Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2094_40, cb);
if (!ret.isOk() || mapperErr != Error4::NONE) {
dynamicInfo->reset();
}
@@ -1094,21 +1128,14 @@
const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
const C2Handle *const handle) {
c2_status_t err = C2_OK;
- native_handle_t *nativeHandle = UnwrapNativeCodec2GrallocHandle(handle);
- if (nativeHandle == nullptr) {
- // Nothing to do
- return err;
- }
- // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
- // does not clone the fds. Thus we need to delete the handle
- // without closing it when going out of scope.
- NativeHandleDeleter nhd(nativeHandle);
sp<IMapper4> mapper = GetMapper4();
- if (!mapper) {
+ Gralloc4Buffer buffer(handle);
+ if (!mapper || !buffer) {
// Gralloc4 not supported; nothing to do
return err;
}
if (staticInfo && *staticInfo) {
+ ALOGV("Setting static HDR info as gralloc4 metadata");
std::optional<Smpte2086> smpte2086 = Smpte2086{
{staticInfo->mastering.red.x, staticInfo->mastering.red.y},
{staticInfo->mastering.green.x, staticInfo->mastering.green.y},
@@ -1118,8 +1145,17 @@
staticInfo->mastering.minLuminance,
};
hidl_vec<uint8_t> vec;
- if (gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
- Return<Error4> ret = mapper->set(nativeHandle, MetadataType_Smpte2086, vec);
+ if (0.0 <= smpte2086->primaryRed.x && smpte2086->primaryRed.x <= 1.0
+ && 0.0 <= smpte2086->primaryRed.y && smpte2086->primaryRed.y <= 1.0
+ && 0.0 <= smpte2086->primaryGreen.x && smpte2086->primaryGreen.x <= 1.0
+ && 0.0 <= smpte2086->primaryGreen.y && smpte2086->primaryGreen.y <= 1.0
+ && 0.0 <= smpte2086->primaryBlue.x && smpte2086->primaryBlue.x <= 1.0
+ && 0.0 <= smpte2086->primaryBlue.y && smpte2086->primaryBlue.y <= 1.0
+ && 0.0 <= smpte2086->whitePoint.x && smpte2086->whitePoint.x <= 1.0
+ && 0.0 <= smpte2086->whitePoint.y && smpte2086->whitePoint.y <= 1.0
+ && 0.0 <= smpte2086->maxLuminance && 0.0 <= smpte2086->minLuminance
+ && gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
+ Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Smpte2086, vec);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (ret != Error4::NONE) {
@@ -1130,8 +1166,9 @@
staticInfo->maxCll,
staticInfo->maxFall,
};
- if (gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
- Return<Error4> ret = mapper->set(nativeHandle, MetadataType_Cta861_3, vec);
+ if (0.0 <= cta861_3->maxContentLightLevel && 0.0 <= cta861_3->maxFrameAverageLightLevel
+ && gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
+ Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Cta861_3, vec);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (ret != Error4::NONE) {
@@ -1139,7 +1176,8 @@
}
}
}
- if (dynamicInfo && *dynamicInfo) {
+ if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
+ ALOGV("Setting dynamic HDR info as gralloc4 metadata");
hidl_vec<uint8_t> vec;
vec.resize(dynamicInfo->flexCount());
memcpy(vec.data(), dynamicInfo->m.data, dynamicInfo->flexCount());
@@ -1153,7 +1191,7 @@
break;
}
if (metadataType) {
- Return<Error4> ret = mapper->set(nativeHandle, *metadataType, vec);
+ Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (ret != Error4::NONE) {
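
The Gralloc4Buffer helper above turns IMapper 4's importBuffer/freeBuffer pair into an RAII object, so every early return in the HDR metadata helpers still frees the imported handle. A compact standalone sketch of the same shape; importRaw/freeRaw are placeholders standing in for the mapper calls:

    #include <cstdio>

    // Placeholder "mapper" calls standing in for IMapper4::importBuffer/freeBuffer.
    static void *importRaw(int handle) { printf("import %d\n", handle); return new int(handle); }
    static void freeRaw(void *buf)     { printf("free\n"); delete static_cast<int *>(buf); }

    class ImportedBuffer {
    public:
        explicit ImportedBuffer(int rawHandle) : mBuffer(importRaw(rawHandle)) {}
        ~ImportedBuffer() { if (mBuffer) freeRaw(mBuffer); }

        ImportedBuffer(const ImportedBuffer &) = delete;
        ImportedBuffer &operator=(const ImportedBuffer &) = delete;

        void *get() const { return mBuffer; }
        explicit operator bool() const { return mBuffer != nullptr; }

    private:
        void *mBuffer;
    };

    int main() {
        ImportedBuffer buffer(42);
        if (!buffer) return 1;  // an early return still runs the destructor
        // ... query or set metadata via buffer.get() ...
        return 0;               // destructor frees the imported handle
    }
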
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 63bd64b..2b8a160 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -281,6 +281,11 @@
}
};
+ // The color format is ordered by preference. The intention here is to advertise:
+ // c2.android.* codecs: YUV420s, Surface, <the rest>
+ // all other codecs: Surface, YUV420s, <the rest>
+ // TODO: get this preference via Codec2 API
+
// vendor video codecs prefer opaque format
if (trait.name.find("android") == std::string::npos) {
addDefaultColorFormat(COLOR_FormatSurface);
@@ -290,9 +295,8 @@
addDefaultColorFormat(COLOR_FormatYUV420SemiPlanar);
addDefaultColorFormat(COLOR_FormatYUV420PackedPlanar);
addDefaultColorFormat(COLOR_FormatYUV420PackedSemiPlanar);
- // framework video encoders must support surface format, though it is unclear
- // that they will be able to map it if it is opaque
- if (encoder && trait.name.find("android") != std::string::npos) {
+ // Android video codecs prefer CPU-readable formats
+ if (trait.name.find("android") != std::string::npos) {
addDefaultColorFormat(COLOR_FormatSurface);
}
for (int32_t colorFormat : supportedColorFormats) {
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index bff9db5..7fc4c27 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -16,7 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "Codec2BufferUtils"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
+#include <utils/Trace.h>
#include <libyuv.h>
@@ -36,8 +38,8 @@
namespace {
/**
- * A flippable, optimizable memcpy. Constructs such as (from ? src : dst) do not work as the results are
- * always const.
+ * A flippable, optimizable memcpy. Constructs such as (from ? src : dst)
+ * do not work as the results are always const.
*/
template<bool ToA, size_t S>
struct MemCopier {
@@ -139,15 +141,18 @@
if (IsNV12(view)) {
if (IsNV12(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV12");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
return OK;
} else if (IsNV21(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV21");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->I420");
if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -155,15 +160,18 @@
}
} else if (IsNV21(view)) {
if (IsNV12(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV12");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV21");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
return OK;
} else if (IsI420(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->I420");
if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -171,22 +179,26 @@
}
} else if (IsI420(view)) {
if (IsNV12(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV12");
if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV21");
if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->I420");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
return OK;
}
}
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: generic");
return _ImageCopy<true>(view, img, imgBase);
}
@@ -210,15 +222,18 @@
int height = view.crop().height;
if (IsNV12(img)) {
if (IsNV12(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV12");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
return OK;
} else if (IsNV21(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV21");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->I420");
if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -226,15 +241,18 @@
}
} else if (IsNV21(img)) {
if (IsNV12(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV12");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV21");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
return OK;
} else if (IsI420(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->I420");
if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -242,22 +260,26 @@
}
} else if (IsI420(img)) {
if (IsNV12(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV12");
if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV21");
if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->I420");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
return OK;
}
}
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: generic");
return _ImageCopy<false>(view, img, imgBase);
}
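
The ImageCopy changes above label each libyuv conversion path with a ScopedTrace so the specific copy (NV12->I420, generic, and so on) shows up as its own trace slice. A minimal RAII analogue of that scoped-section idea in plain C++; on Android the real ScopedTrace/ATRACE macros come from utils/Trace.h, here begin/end are just timed prints:

    #include <chrono>
    #include <cstdio>

    // Plain-C++ stand-in for android::ScopedTrace: names a section for the lifetime of the
    // object and reports how long the section took.
    class ScopedSection {
    public:
        explicit ScopedSection(const char *name)
            : mName(name), mStart(std::chrono::steady_clock::now()) {
            printf("begin %s\n", mName);
        }
        ~ScopedSection() {
            auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                    std::chrono::steady_clock::now() - mStart).count();
            printf("end   %s (%lld us)\n", mName, static_cast<long long>(us));
        }

    private:
        const char *mName;
        std::chrono::steady_clock::time_point mStart;
    };

    static void copyNv12ToI420() {
        ScopedSection trace("ImageCopy: NV12->I420");  // mirrors the labels added above
        // ... per-plane copy would go here ...
    }

    int main() { copyNv12ToI420(); return 0; }
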
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 4047173..1d8aea3 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -78,6 +78,7 @@
export_shared_lib_headers: [
"libbase",
+ "libdmabufheap",
"android.hardware.media.bufferpool@2.0",
],
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index d8d6f06..bc4053d 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -261,7 +261,7 @@
for (const ui::PlaneLayout &plane : planes) {
layout->rootPlanes++;
uint32_t lastOffsetInBits = 0;
- uint32_t rootIx = 0;
+ uint32_t rootIx = layout->numPlanes;
for (const PlaneLayoutComponent &component : plane.components) {
if (!gralloc4::isStandardPlaneLayoutComponentType(component.type)) {
@@ -309,7 +309,6 @@
layout->numPlanes++;
lastOffsetInBits = component.offsetInBits + component.sizeInBits;
- rootIx++;
}
}
return C2_OK;
@@ -699,17 +698,6 @@
C2PlanarLayout::PLANE_V, // rootIx
0, // offset
};
- // handle interleaved formats
- intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
- if (uvOffset > 0 && uvOffset < (intptr_t)ycbcrLayout.chroma_step) {
- layout->rootPlanes = 2;
- layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
- layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
- } else if (uvOffset < 0 && uvOffset > -(intptr_t)ycbcrLayout.chroma_step) {
- layout->rootPlanes = 2;
- layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
- layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
- }
break;
}
@@ -830,17 +818,6 @@
C2PlanarLayout::PLANE_V, // rootIx
0, // offset
};
- // handle interleaved formats
- intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
- if (uvOffset > 0 && uvOffset < (intptr_t)ycbcrLayout.chroma_step) {
- layout->rootPlanes = 2;
- layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
- layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
- } else if (uvOffset < 0 && uvOffset > -(intptr_t)ycbcrLayout.chroma_step) {
- layout->rootPlanes = 2;
- layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
- layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
- }
break;
}
@@ -886,6 +863,29 @@
}
mLocked = true;
+ // handle interleaved formats
+ if (layout->type == C2PlanarLayout::TYPE_YUV && layout->rootPlanes == 3) {
+ intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
+ intptr_t uvColInc = layout->planes[C2PlanarLayout::PLANE_U].colInc;
+ if (uvOffset > 0 && uvOffset < uvColInc) {
+ layout->rootPlanes = 2;
+ layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
+ layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
+ } else if (uvOffset < 0 && uvOffset > -uvColInc) {
+ layout->rootPlanes = 2;
+ layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
+ layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
+ }
+ }
+
+ ALOGV("C2AllocationGralloc::map: layout: type=%d numPlanes=%d rootPlanes=%d",
+ layout->type, layout->numPlanes, layout->rootPlanes);
+ for (int i = 0; i < layout->numPlanes; ++i) {
+ const C2PlaneInfo &plane = layout->planes[i];
+ ALOGV("C2AllocationGralloc::map: plane[%d]: colInc=%d rowInc=%d rootIx=%u offset=%u",
+ i, plane.colInc, plane.rowInc, plane.rootIx, plane.offset);
+ }
+
return C2_OK;
}
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 7b593ee..a6a733e 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -31,6 +31,7 @@
#include <C2HandleIonInternal.h>
#include <android-base/properties.h>
+#include <media/stagefright/foundation/Mutexed.h>
namespace android {
@@ -180,7 +181,7 @@
c2_status_t map(size_t offset, size_t size, C2MemoryUsage usage, C2Fence *fence, void **addr) {
(void)fence; // TODO: wait for fence
*addr = nullptr;
- if (!mMappings.empty()) {
+ if (!mMappings.lock()->empty()) {
ALOGV("multiple map");
// TODO: technically we should return DUPLICATE here, but our block views don't
// actually unmap, so we end up remapping an ion buffer multiple times.
@@ -207,47 +208,44 @@
c2_status_t err = mapInternal(mapSize, mapOffset, alignmentBytes, prot, flags, &(map.addr), addr);
if (map.addr) {
- std::lock_guard<std::mutex> guard(mMutexMappings);
- mMappings.push_back(map);
+ mMappings.lock()->push_back(map);
}
return err;
}
c2_status_t unmap(void *addr, size_t size, C2Fence *fence) {
- if (mMappings.empty()) {
+ Mutexed<std::list<Mapping>>::Locked mappings(mMappings);
+ if (mappings->empty()) {
ALOGD("tried to unmap unmapped buffer");
return C2_NOT_FOUND;
}
- { // Scope for the lock_guard of mMutexMappings.
- std::lock_guard<std::mutex> guard(mMutexMappings);
- for (auto it = mMappings.begin(); it != mMappings.end(); ++it) {
- if (addr != (uint8_t *)it->addr + it->alignmentBytes ||
- size + it->alignmentBytes != it->size) {
- continue;
- }
- int err = munmap(it->addr, it->size);
- if (err != 0) {
- ALOGD("munmap failed");
- return c2_map_errno<EINVAL>(errno);
- }
- if (fence) {
- *fence = C2Fence(); // not using fences
- }
- (void)mMappings.erase(it);
- ALOGV("successfully unmapped: addr=%p size=%zu fd=%d", addr, size,
- mHandle.bufferFd());
- return C2_OK;
+ for (auto it = mappings->begin(); it != mappings->end(); ++it) {
+ if (addr != (uint8_t *)it->addr + it->alignmentBytes ||
+ size + it->alignmentBytes != it->size) {
+ continue;
}
+ int err = munmap(it->addr, it->size);
+ if (err != 0) {
+ ALOGD("munmap failed");
+ return c2_map_errno<EINVAL>(errno);
+ }
+ if (fence) {
+ *fence = C2Fence(); // not using fences
+ }
+ (void)mappings->erase(it);
+ ALOGV("successfully unmapped: addr=%p size=%zu fd=%d", addr, size,
+ mHandle.bufferFd());
+ return C2_OK;
}
ALOGD("unmap failed to find specified map");
return C2_BAD_VALUE;
}
virtual ~Impl() {
- if (!mMappings.empty()) {
+ Mutexed<std::list<Mapping>>::Locked mappings(mMappings);
+ if (!mappings->empty()) {
ALOGD("Dangling mappings!");
- std::lock_guard<std::mutex> guard(mMutexMappings);
- for (const Mapping &map : mMappings) {
+ for (const Mapping &map : *mappings) {
(void)munmap(map.addr, map.size);
}
}
@@ -325,8 +323,7 @@
size_t alignmentBytes;
size_t size;
};
- std::list<Mapping> mMappings;
- std::mutex mMutexMappings;
+ Mutexed<std::list<Mapping>> mMappings;
};
class C2AllocationIon::ImplV2 : public C2AllocationIon::Impl {
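
Both allocator changes above fold the separate mMutexMappings lock into a Mutexed<std::list<Mapping>>, so the mapping list can only be touched while its lock is held, either through the short-lived lock() accessor or a scoped Locked view. A small standalone sketch of the same idea over std::mutex; the real Mutexed comes from stagefright's foundation headers:

    #include <cstdio>
    #include <list>
    #include <mutex>

    // Minimal stand-in for stagefright's Mutexed<T>: the data is only reachable through an
    // accessor that holds the lock for its own lifetime.
    template <typename T>
    class Guarded {
    public:
        class Locked {
        public:
            explicit Locked(Guarded &g) : mLock(g.mMutex), mData(g.mData) {}
            T *operator->() { return &mData; }
            T &operator*() { return mData; }
        private:
            std::unique_lock<std::mutex> mLock;
            T &mData;
        };

        Locked lock() { return Locked(*this); }

    private:
        std::mutex mMutex;
        T mData;
    };

    int main() {
        Guarded<std::list<int>> mappings;
        mappings.lock()->push_back(1);      // short-lived lock, like mMappings.lock()->push_back(map)
        {
            auto locked = mappings.lock();  // scoped lock, like Mutexed<...>::Locked in the patch
            printf("count=%zu empty=%d\n", locked->size(), static_cast<int>(locked->empty()));
        }
        return 0;
    }
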
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
index 1aa3d69..c470171 100644
--- a/media/codec2/vndk/C2DmaBufAllocator.cpp
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -31,6 +31,7 @@
#include <list>
#include <android-base/properties.h>
+#include <media/stagefright/foundation/Mutexed.h>
namespace android {
@@ -161,7 +162,7 @@
size_t alignmentBytes;
size_t size;
};
- std::list<Mapping> mMappings;
+ Mutexed<std::list<Mapping>> mMappings;
// TODO: we could make this encapsulate shared_ptr and copiable
C2_DO_NOT_COPY(C2DmaBufAllocation);
@@ -171,7 +172,7 @@
void** addr) {
(void)fence; // TODO: wait for fence
*addr = nullptr;
- if (!mMappings.empty()) {
+ if (!mMappings.lock()->empty()) {
ALOGV("multiple map");
// TODO: technically we should return DUPLICATE here, but our block views
// don't actually unmap, so we end up remapping the buffer multiple times.
@@ -199,17 +200,18 @@
c2_status_t err =
mapInternal(mapSize, mapOffset, alignmentBytes, prot, flags, &(map.addr), addr);
if (map.addr) {
- mMappings.push_back(map);
+ mMappings.lock()->push_back(map);
}
return err;
}
c2_status_t C2DmaBufAllocation::unmap(void* addr, size_t size, C2Fence* fence) {
- if (mMappings.empty()) {
+ Mutexed<std::list<Mapping>>::Locked mappings(mMappings);
+ if (mappings->empty()) {
ALOGD("tried to unmap unmapped buffer");
return C2_NOT_FOUND;
}
- for (auto it = mMappings.begin(); it != mMappings.end(); ++it) {
+ for (auto it = mappings->begin(); it != mappings->end(); ++it) {
if (addr != (uint8_t*)it->addr + it->alignmentBytes ||
size + it->alignmentBytes != it->size) {
continue;
@@ -222,7 +224,7 @@
if (fence) {
*fence = C2Fence(); // not using fences
}
- (void)mMappings.erase(it);
+ (void)mappings->erase(it);
ALOGV("successfully unmapped: %d", mHandle.bufferFd());
return C2_OK;
}
@@ -253,9 +255,10 @@
}
C2DmaBufAllocation::~C2DmaBufAllocation() {
- if (!mMappings.empty()) {
+ Mutexed<std::list<Mapping>>::Locked mappings(mMappings);
+ if (!mappings->empty()) {
ALOGD("Dangling mappings!");
- for (const Mapping& map : mMappings) {
+ for (const Mapping& map : *mappings) {
int err = munmap(map.addr, map.size);
if (err) ALOGD("munmap failed");
}
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index b2636e9..bec978a 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -101,6 +101,8 @@
uint32_t generationId,
uint64_t consumerUsage);
+ virtual void getConsumerUsage(uint64_t *consumerUsage);
+
private:
const std::shared_ptr<C2Allocator> mAllocator;
const local_id_t mLocalId;
@@ -138,7 +140,6 @@
uint32_t toGeneration, uint64_t toUsage, uint64_t toBqId,
android::sp<android::GraphicBuffer>& graphicBuffer, uint32_t oldGeneration,
std::shared_ptr<C2SurfaceSyncMemory> syncMem);
-
private:
friend struct _C2BlockFactory;
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 01995fd..63b0f39 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -571,19 +571,12 @@
public:
Impl(const std::shared_ptr<C2Allocator> &allocator)
: mInit(C2_OK), mProducerId(0), mGeneration(0),
- mDqFailure(0), mLastDqTs(0), mLastDqLogTs(0),
- mAllocator(allocator) {
+ mConsumerUsage(0), mDqFailure(0), mLastDqTs(0),
+ mLastDqLogTs(0), mAllocator(allocator) {
}
~Impl() {
- bool noInit = false;
for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
- if (!noInit && mProducer) {
- Return<HStatus> transResult =
- mProducer->detachBuffer(static_cast<int32_t>(i));
- noInit = !transResult.isOk() ||
- static_cast<HStatus>(transResult) == HStatus::NO_INIT;
- }
mBuffers[i].clear();
}
}
@@ -692,15 +685,6 @@
{
sp<GraphicBuffer> buffers[NUM_BUFFER_SLOTS];
std::scoped_lock<std::mutex> lock(mMutex);
- bool noInit = false;
- for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
- if (!noInit && mProducer) {
- Return<HStatus> transResult =
- mProducer->detachBuffer(static_cast<int32_t>(i));
- noInit = !transResult.isOk() ||
- static_cast<HStatus>(transResult) == HStatus::NO_INIT;
- }
- }
int32_t oldGeneration = mGeneration;
if (producer) {
mProducer = producer;
@@ -747,6 +731,11 @@
"bqId: %llu migrated buffers # %d",
generation, (unsigned long long)producerId, migrated);
}
+ mConsumerUsage = usage;
+ }
+
+ void getConsumerUsage(uint64_t *consumeUsage) {
+ *consumeUsage = mConsumerUsage;
}
private:
@@ -755,6 +744,7 @@
c2_status_t mInit;
uint64_t mProducerId;
uint32_t mGeneration;
+ uint64_t mConsumerUsage;
OnRenderCallback mRenderCallback;
size_t mDqFailure;
@@ -1086,3 +1076,10 @@
mImpl->setRenderCallback(renderCallback);
}
}
+
+void C2BufferQueueBlockPool::getConsumerUsage(uint64_t *consumeUsage) {
+ if (mImpl) {
+ mImpl->getConsumerUsage(consumeUsage);
+ }
+}
+
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index e55bdc0..2115cc3 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -228,10 +228,10 @@
tv.tv_nsec = timeoutNs % 1000000000;
int ret = syscall(__NR_futex, &mCond, FUTEX_WAIT, waitId, &tv, NULL, 0);
- if (ret == 0 || ret == EAGAIN) {
+ if (ret == 0 || errno == EAGAIN) {
return C2_OK;
}
- if (ret == EINTR || ret == ETIMEDOUT) {
+ if (errno == EINTR || errno == ETIMEDOUT) {
return C2_TIMED_OUT;
}
return C2_BAD_VALUE;
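
The futex fix above matters because syscall(__NR_futex, ...) does not return the error code: on failure it returns -1 and stores the reason in errno, so comparing the return value against EAGAIN/EINTR/ETIMEDOUT could never match. A small standalone illustration of the same rule, assuming a POSIX system and using read(2) on a bad fd in place of the futex wait:

    #include <cerrno>
    #include <cstdio>
    #include <unistd.h>

    int main() {
        char buf[1];
        // Force a failure: fd -1 is never valid.
        ssize_t ret = read(-1, buf, sizeof(buf));

        // Wrong: the return value is just -1, never the error code itself.
        if (ret == EBADF) printf("this never triggers\n");

        // Right: check ret < 0, then consult errno for the reason.
        if (ret < 0 && errno == EBADF) printf("read failed: EBADF (errno=%d)\n", errno);
        return 0;
    }
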
diff --git a/media/codecs/m4v_h263/dec/src/vop.cpp b/media/codecs/m4v_h263/dec/src/vop.cpp
index 7b32498..abc0861 100644
--- a/media/codecs/m4v_h263/dec/src/vop.cpp
+++ b/media/codecs/m4v_h263/dec/src/vop.cpp
@@ -107,26 +107,57 @@
#ifndef PV_TOLERATE_VOL_ERRORS
if (layer) /* */
{
- /* support SSPL0-2 */
- if (tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
- tmpvar != 0xA1 && tmpvar != 0xA2 && tmpvar != 0xA3/* Core SP@L1-L3 */)
- return PV_FAIL;
+ switch (tmpvar)
+ {
+ /* Simple Scalable Profile Levels */
+ case 0x10:
+ case 0x11:
+ case 0x12:
+ /* Core Scalable Profile Levels */
+ case 0xA1:
+ case 0xA2:
+ case 0xA3:
+ // Do Nothing, the cases listed above are supported values
+ break;
+ default:
+ // Unsupported profile level
+ return PV_FAIL;
+ }
}
else
{
- /* support SPL0-3 & SSPL0-2 */
- if (tmpvar != 0x01 && tmpvar != 0x02 && tmpvar != 0x03 && tmpvar != 0x08 &&
- /* While not technically supported, try to decode SPL4&SPL5 files as well. */
- /* We'll fail later if the size is too large. This is to allow playback of */
- /* some <=CIF files generated by other encoders. */
- tmpvar != 0x04 && tmpvar != 0x05 &&
- tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
- tmpvar != 0x21 && tmpvar != 0x22 && /* Core Profile Levels */
- tmpvar != 0xA1 && tmpvar != 0xA2 && tmpvar != 0xA3 &&
- tmpvar != 0xF0 && tmpvar != 0xF1 && /* Advanced Simple Profile Levels*/
- tmpvar != 0xF2 && tmpvar != 0xF3 &&
- tmpvar != 0xF4 && tmpvar != 0xF5)
- return PV_FAIL;
+ switch (tmpvar)
+ {
+ /* Simple Profile Levels */
+ case 0x01:
+ case 0x02:
+ case 0x03:
+ case 0x04:
+ case 0x05:
+ case 0x06:
+ case 0x08:
+ case 0x10:
+ case 0x11:
+ case 0x12:
+ /* Core Profile Levels */
+ case 0x21:
+ case 0x22:
+ case 0xA1:
+ case 0xA2:
+ case 0xA3:
+ /* Advanced Simple Profile Levels*/
+ case 0xF0:
+ case 0xF1:
+ case 0xF2:
+ case 0xF3:
+ case 0xF4:
+ case 0xF5:
+ // Do Nothing, the cases listed above are supported values
+ break;
+ default:
+ // Unsupported profile level
+ return PV_FAIL;
+ }
}
#else
profile = tmpvar;
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index e0cc5bf..eccbf46 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1127,15 +1127,15 @@
void *data;
size_t size;
- if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0,
+ if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2,
&data, &size)
- && size >= 24) {
- const uint8_t *ptr = (const uint8_t *)data + (size - 24);
+ && size >= 5) {
+ const uint8_t *ptr = (const uint8_t *)data;
const uint8_t profile = ptr[2] >> 1;
- const uint8_t bl_compatibility_id = (ptr[4]) >> 4;
+ const uint8_t blCompatibilityId = (ptr[4]) >> 4;
bool create_two_tracks = false;
- if (bl_compatibility_id && bl_compatibility_id != 15) {
+ if (blCompatibilityId && blCompatibilityId != 15) {
create_two_tracks = true;
}
@@ -1168,11 +1168,11 @@
mLastTrack->next = track_b;
track_b->next = NULL;
- // we want to remove the csd-0 key from the metadata, but
+ // we want to remove the csd-2 key from the metadata, but
// don't have an AMediaFormat_* function to do so. Settle
- // for replacing this csd-0 with an empty csd-0.
+ // for replacing this csd-2 with an empty csd-2.
uint8_t emptybuffer[8] = {};
- AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_0,
+ AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_2,
emptybuffer, 0);
if (4 == profile || 7 == profile || 8 == profile ) {
@@ -1184,8 +1184,6 @@
} else if (10 == profile) {
AMediaFormat_setString(track_b->meta,
AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
- AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_0,
- data, size - 24);
} // Should never get to else part
mLastTrack = track_b;
@@ -2618,22 +2616,8 @@
if (mLastTrack == NULL)
return ERROR_MALFORMED;
- void *data = nullptr;
- size_t size = 0;
- if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
- //if csd-0 is already present, then append dvcc
- auto csd0_dvcc = heapbuffer<uint8_t>(size + chunk_data_size);
-
- memcpy(csd0_dvcc.get(), data, size);
- memcpy(csd0_dvcc.get() + size, buffer.get(), chunk_data_size);
-
- AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0,
- csd0_dvcc.get(), size + chunk_data_size);
- } else {
- //if not set csd-0 directly
- AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0,
+ AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2,
buffer.get(), chunk_data_size);
- }
AMediaFormat_setString(mLastTrack->meta, AMEDIAFORMAT_KEY_MIME,
MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
@@ -4511,12 +4495,12 @@
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
void *data;
size_t size;
- if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)
- || size < 24) {
+ if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)
+ || size != 24) {
return NULL;
}
- const uint8_t *ptr = (const uint8_t *)data + (size - 24);
+ const uint8_t *ptr = (const uint8_t *)data;
// dv_major.dv_minor Should be 1.0 or 2.1
if ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1)) {
return NULL;
@@ -4596,7 +4580,7 @@
return ERROR_MALFORMED;
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
- if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
+ if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)) {
return ERROR_MALFORMED;
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)) {
@@ -5172,11 +5156,11 @@
ALOGV("%s DolbyVision stream detected", __FUNCTION__);
void *data;
size_t size;
- CHECK(AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_0, &data, &size));
+ CHECK(AMediaFormat_getBuffer(format, AMEDIAFORMAT_KEY_CSD_2, &data, &size));
- const uint8_t *ptr = (const uint8_t *)data + (size - 24);
+ const uint8_t *ptr = (const uint8_t *)data;
- CHECK(size >= 24);
+ CHECK(size == 24);
// dv_major.dv_minor Should be 1.0 or 2.1
CHECK(!((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1)));
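
The extractor changes above move the Dolby Vision configuration payload into csd-2 and read a few fixed fields out of the 24-byte record: major/minor version in bytes 0 and 1, the profile in the top 7 bits of byte 2, and the BL compatibility id in the top nibble of byte 4. A small sketch of that bit slicing over a stand-in buffer; the field layout is taken from the reads in this patch, and the sample bytes are hypothetical:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    struct DolbyVisionConfig {
        uint8_t major, minor, profile, blCompatibilityId;
    };

    // Extracts the fields this patch reads from a 24-byte dvcC/dvvC payload.
    static bool parseDvConfig(const uint8_t *data, size_t size, DolbyVisionConfig *out) {
        if (size != 24) return false;           // the extractor now insists on exactly 24 bytes
        out->major = data[0];
        out->minor = data[1];
        out->profile = data[2] >> 1;            // top 7 bits of byte 2
        out->blCompatibilityId = data[4] >> 4;  // top nibble of byte 4
        // dv_major.dv_minor should be 1.0 or 2.1
        return (out->major == 1 && out->minor == 0) || (out->major == 2 && out->minor == 1);
    }

    int main() {
        uint8_t dvcc[24] = {1, 0, 8 << 1, 0, 2 << 4};  // hypothetical: profile 8, BL id 2
        DolbyVisionConfig cfg;
        if (parseDvConfig(dvcc, sizeof(dvcc), &cfg)) {
            printf("profile=%u blCompatibilityId=%u\n", cfg.profile, cfg.blCompatibilityId);
        }
        return 0;
    }
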
diff --git a/media/janitors/media_leads_OWNERS b/media/janitors/media_leads_OWNERS
new file mode 100644
index 0000000..b7dbdee
--- /dev/null
+++ b/media/janitors/media_leads_OWNERS
@@ -0,0 +1,9 @@
+# gerrit owner/approvers corresponding to the TLs within the media team
+# loosely (as of 2022/3) fgoldfain@ and direct reports
+arifdikici@google.com
+elaurent@google.com
+fgoldfain@google.com #{LAST_RESORT_SUGGESTION}
+lajos@google.com
+nchalko@google.com
+olly@google.com
+robertshih@google.com
diff --git a/media/libaaudio/TEST_MAPPING b/media/libaaudio/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9f
--- /dev/null
+++ b/media/libaaudio/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+ "presubmit": [
+ {
+ "name": "CtsNativeMediaAAudioTestCases",
+ "options" : [
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+ }
+ ]
+ }
+ ]
+}
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index e2eec7a..2a12191 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -36,11 +36,11 @@
"libaudiomanager",
"libaudiopolicy",
"libaudioclient_aidl_conversion",
+ "libutils",
],
static_libs: [
"android.media.audio.common.types-V1-cpp",
"liblog",
- "libutils",
"libcutils",
"libaaudio",
"libjsoncpp",
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index efa9941..2ff9f5a 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -778,8 +778,16 @@
__INTRODUCED_IN(26);
/**
- * Request an audio device identified device using an ID.
- * On Android, for example, the ID could be obtained from the Java AudioManager.
+ * Request an audio device identified by an ID.
+ *
+ * The ID could be obtained from the Java AudioManager.
+ * AudioManager.getDevices() returns an array of {@link AudioDeviceInfo},
+ * which contains a getId() method. That ID can be passed to this function.
+ *
+ * It is possible that you may not get the device that you requested.
+ * So if it is important to you, you should call
+ * AAudioStream_getDeviceId() after the stream is opened to
+ * verify the actual ID.
*
* The default, if you do not call this function, is {@link #AAUDIO_UNSPECIFIED},
* in which case the primary device will be used.
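
The clarified AAudio documentation above describes a concrete flow: obtain a device id from Java's AudioManager, pass it to the stream builder, and, since the request is only a hint, read the id back from the opened stream to see which device was actually used. A hedged sketch of that flow with the NDK AAudio C API; error handling is trimmed, and the deviceId value would come from AudioDeviceInfo.getId() on the Java side:

    #include <aaudio/AAudio.h>
    #include <cstdio>

    // Open a stream on a requested device and report which device was really chosen.
    static void openOnDevice(int32_t requestedDeviceId) {
        AAudioStreamBuilder *builder = nullptr;
        AAudio_createStreamBuilder(&builder);
        // AAUDIO_UNSPECIFIED (the default) lets the primary device be used.
        AAudioStreamBuilder_setDeviceId(builder, requestedDeviceId);

        AAudioStream *stream = nullptr;
        if (AAudioStreamBuilder_openStream(builder, &stream) == AAUDIO_OK) {
            // The request is only a hint, so verify after opening.
            int32_t actual = AAudioStream_getDeviceId(stream);
            printf("requested %d, got %d\n", requestedDeviceId, actual);
            AAudioStream_close(stream);
        }
        AAudioStreamBuilder_delete(builder);
    }
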
diff --git a/media/libaaudio/scripts/measure_device_power.py b/media/libaaudio/scripts/measure_device_power.py
index 9603f88..1f90933 100755
--- a/media/libaaudio/scripts/measure_device_power.py
+++ b/media/libaaudio/scripts/measure_device_power.py
@@ -79,28 +79,42 @@
SORTED_ENERGY_LIST = sorted(ENERGY_DICTIONARY, key=ENERGY_DICTIONARY.get)
-# Sometimes "adb unroot" returns 1!
+# Sometimes adb returns 1 for no apparent reason.
# So try several times.
# @return 0 on success
-def adbUnroot():
+def adbTryMultiple(command):
returnCode = 1
count = 0
limit = 5
while count < limit and returnCode != 0:
- print(('Try to adb unroot {} of {}'.format(count, limit)))
+ print(('Try to adb {} {} of {}'.format(command, count, limit)))
subprocess.call(["adb", "wait-for-device"])
time.sleep(PRE_DELAY_SECONDS)
- returnCode = subprocess.call(["adb", "unroot"])
+ returnCode = subprocess.call(["adb", command])
print(('returnCode = {}'.format(returnCode)))
count += 1
return returnCode
+# Sometimes "adb root" returns 1!
+# So try several times.
+# @return 0 on success
+def adbRoot():
+ return adbTryMultiple("root")
+
+# Sometimes "adb unroot" returns 1!
+# So try several times.
+# @return 0 on success
+def adbUnroot():
+ return adbTryMultiple("unroot")
+
# @param commandString String containing shell command
# @return Both the stdout and stderr of the commands run
def runCommand(commandString):
print(commandString)
if commandString == "adb unroot":
result = adbUnroot()
+ elif commandString == "adb root":
+ result = adbRoot()
else:
commandArray = commandString.split(' ')
result = subprocess.run(commandArray, check=True, capture_output=True).stdout
@@ -111,6 +125,8 @@
def adbCommand(commandString):
if commandString == "unroot":
result = adbUnroot()
+ elif commandString == "root":
+ result = adbRoot()
else:
print(("adb " + commandString))
commandArray = ["adb"] + commandString.split(' ')
@@ -225,13 +241,13 @@
line = fp.readline()
while line:
command = line.strip()
- if command.endswith('\\'):
- command = command[:-1].strip() # remove \\:
- runCommand(command)
- elif command.startswith("#"):
+ if command.startswith("#"):
# ignore comment
print((command + "\n"))
- comment = command
+ comment = command[1:].strip() # remove leading '#'
+ elif command.endswith('\\'):
+ command = command[:-1].strip() # remove trailing '\'
+ runCommand(command)
elif command:
report = averageEnergyForCommand(command, DEFAULT_NUM_ITERATIONS)
finalReport += comment + ", " + command + ", " + formatEnergyData(report) + "\n"
diff --git a/media/libaaudio/scripts/synthmark_tests.txt b/media/libaaudio/scripts/synthmark_tests.txt
new file mode 100644
index 0000000..8b6d47e
--- /dev/null
+++ b/media/libaaudio/scripts/synthmark_tests.txt
@@ -0,0 +1,50 @@
+# Measure energy consumption with synthmark.
+
+# None
+adb shell synthmark -tj -n1 -N50 -B2 -z0
+adb shell synthmark -tj -n1 -N75 -B2 -z0
+adb shell synthmark -tj -n1 -N100 -B2 -z0
+
+# ADPF PID
+adb shell synthmark -tj -n1 -N50 -B2 -z1
+adb shell synthmark -tj -n1 -N75 -B2 -z1
+adb shell synthmark -tj -n1 -N100 -B2 -z1
+
+# ADPF <400 RR
+# adb root \
+# adb shell setprop vendor.powerhal.adpf.uclamp_min.high_limit 400 \
+# adb shell synthmark -tj -n1 -N50 -B2 -z1
+# adb shell synthmark -tj -n1 -N75 -B2 -z1
+# adb shell synthmark -tj -n1 -N100 -B2 -z1
+
+# ADPF <500 RR
+# adb root \
+# adb shell setprop vendor.powerhal.adpf.uclamp_min.high_limit 500 \
+# adb shell synthmark -tj -n1 -N50 -B2 -z1
+# adb shell synthmark -tj -n1 -N75 -B2 -z1
+# adb shell synthmark -tj -n1 -N100 -B2 -z1
+
+# ADPF <600 RR
+# adb root \
+# adb shell setprop vendor.powerhal.adpf.uclamp_min.high_limit 600 \
+# adb shell synthmark -tj -n1 -N50 -B2 -z1
+# adb shell synthmark -tj -n1 -N75 -B2 -z1
+# adb shell synthmark -tj -n1 -N100 -B2 -z1
+
+# uclamp
+# adb root \
+# adb shell synthmark -tj -n1 -N75 -B2 -u1
+
+# steady
+# adb shell synthmark -tj -n75 -B2 -u0
+
+# CPU affinity
+# adb shell synthmark -tj -n1 -N75 -B2 -u0 -c1
+# adb shell synthmark -tj -n1 -N75 -B2 -u0 -c4
+# adb shell synthmark -tj -n1 -N75 -B2 -u0 -c6
+
+# steady + affinity
+# adb shell synthmark -tj -n75 -B2 -u0 -c1
+# adb shell synthmark -tj -n75 -B2 -u0 -c4
+# adb shell synthmark -tj -n75 -B2 -u0 -c6
+
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index f50b53a..363d219 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -93,7 +93,6 @@
"-Wno-unused-parameter",
"-Wall",
"-Werror",
-
// By default, all symbols are hidden.
// "-fvisibility=hidden",
// AAUDIO_API is used to explicitly export a function or a variable as a visible symbol.
@@ -261,7 +260,7 @@
"binding/aidl/aaudio/IAAudioService.aidl",
],
imports: [
- "android.media.audio.common.types",
+ "android.media.audio.common.types-V1",
"audioclient-types-aidl",
"shared-file-region-aidl",
"framework-permission-aidl",
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.cpp b/media/libaaudio/src/client/AAudioFlowGraph.cpp
index d0c3238..2ed3e3c 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.cpp
+++ b/media/libaaudio/src/client/AAudioFlowGraph.cpp
@@ -35,7 +35,7 @@
#include <flowgraph/SourceI24.h>
#include <flowgraph/SourceI32.h>
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
aaudio_result_t AAudioFlowGraph::configure(audio_format_t sourceFormat,
int32_t sourceChannelCount,
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.h b/media/libaaudio/src/client/AAudioFlowGraph.h
index 00b6575..602c17f 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.h
+++ b/media/libaaudio/src/client/AAudioFlowGraph.h
@@ -72,17 +72,19 @@
void setRampLengthInFrames(int32_t numFrames);
private:
- std::unique_ptr<flowgraph::FlowGraphSourceBuffered> mSource;
- std::unique_ptr<flowgraph::MonoBlend> mMonoBlend;
- std::unique_ptr<flowgraph::ClipToRange> mClipper;
- std::unique_ptr<flowgraph::MonoToMultiConverter> mChannelConverter;
- std::unique_ptr<flowgraph::ManyToMultiConverter> mManyToMultiConverter;
- std::unique_ptr<flowgraph::MultiToManyConverter> mMultiToManyConverter;
- std::vector<std::unique_ptr<flowgraph::RampLinear>> mVolumeRamps;
- std::vector<float> mPanningVolumes;
- float mTargetVolume = 1.0f;
- android::audio_utils::Balance mBalance;
- std::unique_ptr<flowgraph::FlowGraphSink> mSink;
+ std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::FlowGraphSourceBuffered> mSource;
+ std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoBlend> mMonoBlend;
+ std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::ClipToRange> mClipper;
+ std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoToMultiConverter> mChannelConverter;
+ std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::ManyToMultiConverter>
+ mManyToMultiConverter;
+ std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MultiToManyConverter>
+ mMultiToManyConverter;
+ std::vector<std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::RampLinear>> mVolumeRamps;
+ std::vector<float> mPanningVolumes;
+ float mTargetVolume = 1.0f;
+ android::audio_utils::Balance mBalance;
+ std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::FlowGraphSink> mSink;
};
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 450d390..7c7a969 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -202,10 +202,18 @@
break;
case AAUDIO_STREAM_STATE_STARTED:
{
- // Sleep until the readCounter catches up and we only have
- // the getBufferSize() frames of data sitting in the buffer.
- int64_t nextReadPosition = mAudioEndpoint->getDataWriteCounter() - getBufferSize();
- wakeTime = mClockModel.convertPositionToTime(nextReadPosition);
+ // Calculate when there will be room available to write to the buffer.
+ // If the appBufferSize is smaller than the endpointBufferSize then
+ // we will have room to write data beyond the appBufferSize.
+ // That is a technique used to reduce glitches without adding latency.
+ const int32_t appBufferSize = getBufferSize();
+ // The endpoint buffer size is set to the maximum that can be written.
+ // If we use it then we must carve out some room to write data when we wake up.
+ const int32_t endBufferSize = mAudioEndpoint->getBufferSizeInFrames()
+ - getFramesPerBurst();
+ const int32_t bestBufferSize = std::min(appBufferSize, endBufferSize);
+ int64_t targetReadPosition = mAudioEndpoint->getDataWriteCounter() - bestBufferSize;
+ wakeTime = mClockModel.convertPositionToTime(targetReadPosition);
}
break;
default:
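In prose, the new wake-time rule is: sleep until the reader is expected to have drained the FIFO down to min(application buffer size, endpoint buffer size minus one burst) frames, so there is always at least a burst of writable room on wake-up. A minimal arithmetic sketch with made-up values (the real numbers come from the stream configuration and the shared endpoint):

    // Standalone sketch of the wake-up target computed in the hunk above.
    // All values are hypothetical; only the arithmetic mirrors the patch.
    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    int main() {
        const int32_t appBufferSize      = 960;   // frames the app asked to keep queued
        const int32_t endpointBufferSize = 1920;  // frames the endpoint FIFO can hold
        const int32_t framesPerBurst     = 192;   // headroom reserved for one burst
        const int64_t writeCounter       = 48000; // frames written so far

        const int32_t endBufferSize  = endpointBufferSize - framesPerBurst;    // 1728
        const int32_t bestBufferSize = std::min(appBufferSize, endBufferSize); // 960
        const int64_t targetReadPosition = writeCounter - bestBufferSize;      // 47040

        // The thread sleeps until the clock model predicts the reader has
        // reached targetReadPosition, i.e. at most bestBufferSize frames remain.
        printf("wake when the read counter reaches %lld\n",
               static_cast<long long>(targetReadPosition));
        return 0;
    }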
diff --git a/media/libaaudio/src/flowgraph/ChannelCountConverter.cpp b/media/libaaudio/src/flowgraph/ChannelCountConverter.cpp
index 351def2..dc80427 100644
--- a/media/libaaudio/src/flowgraph/ChannelCountConverter.cpp
+++ b/media/libaaudio/src/flowgraph/ChannelCountConverter.cpp
@@ -18,7 +18,7 @@
#include "FlowGraphNode.h"
#include "ChannelCountConverter.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
ChannelCountConverter::ChannelCountConverter(
int32_t inputChannelCount,
diff --git a/media/libaaudio/src/flowgraph/ChannelCountConverter.h b/media/libaaudio/src/flowgraph/ChannelCountConverter.h
index e4b6f4e..858f4d4 100644
--- a/media/libaaudio/src/flowgraph/ChannelCountConverter.h
+++ b/media/libaaudio/src/flowgraph/ChannelCountConverter.h
@@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Change the number of channels without mixing.
@@ -47,6 +47,6 @@
FlowGraphPortFloatOutput output;
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_CHANNEL_COUNT_CONVERTER_H
diff --git a/media/libaaudio/src/flowgraph/ClipToRange.cpp b/media/libaaudio/src/flowgraph/ClipToRange.cpp
index d2f8a02..c6ad0b0 100644
--- a/media/libaaudio/src/flowgraph/ClipToRange.cpp
+++ b/media/libaaudio/src/flowgraph/ClipToRange.cpp
@@ -19,7 +19,7 @@
#include "FlowGraphNode.h"
#include "ClipToRange.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
ClipToRange::ClipToRange(int32_t channelCount)
: FlowGraphFilter(channelCount) {
diff --git a/media/libaaudio/src/flowgraph/ClipToRange.h b/media/libaaudio/src/flowgraph/ClipToRange.h
index 22b7804..2fddeee 100644
--- a/media/libaaudio/src/flowgraph/ClipToRange.h
+++ b/media/libaaudio/src/flowgraph/ClipToRange.h
@@ -23,7 +23,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
// This is 3 dB, (10^(3/20)), to match the maximum headroom in AudioTrack for float data.
// It is designed to allow occasional transient peaks.
@@ -63,6 +63,6 @@
float mMaximum = kDefaultMaxHeadroom;
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_CLIP_TO_RANGE_H
diff --git a/media/libaaudio/src/flowgraph/FlowGraphNode.cpp b/media/libaaudio/src/flowgraph/FlowGraphNode.cpp
index 4c76e77..012abe7 100644
--- a/media/libaaudio/src/flowgraph/FlowGraphNode.cpp
+++ b/media/libaaudio/src/flowgraph/FlowGraphNode.cpp
@@ -19,7 +19,7 @@
#include <sys/types.h>
#include "FlowGraphNode.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
/***************************************************************************/
int32_t FlowGraphNode::pullData(int32_t numFrames, int64_t callCount) {
@@ -68,7 +68,7 @@
: FlowGraphPort(parent, samplesPerFrame)
, mFramesPerBuffer(framesPerBuffer)
, mBuffer(nullptr) {
- size_t numFloats = framesPerBuffer * getSamplesPerFrame();
+ size_t numFloats = static_cast<size_t>(framesPerBuffer) * getSamplesPerFrame();
mBuffer = std::make_unique<float[]>(numFloats);
}
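The static_cast here (and the matching casts in the resampler files later in this change) widens one operand so the multiplication itself happens in size_t rather than 32-bit int. A small sketch with deliberately large, hypothetical values:

    // Sketch only: shows why one operand is widened before the multiply.
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    int main() {
        const int32_t framesPerBuffer = 100000;  // hypothetical, deliberately large
        const int32_t samplesPerFrame = 32768;   // hypothetical

        // Left as int32_t * int32_t, the product 3,276,800,000 would exceed
        // INT32_MAX (2,147,483,647) and overflow before being used as a size.
        // Casting one operand first makes the whole product a size_t multiply.
        const size_t numFloats =
                static_cast<size_t>(framesPerBuffer) * samplesPerFrame;
        printf("numFloats = %zu\n", numFloats);  // 3276800000 on a 64-bit build
        return 0;
    }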
diff --git a/media/libaaudio/src/flowgraph/FlowGraphNode.h b/media/libaaudio/src/flowgraph/FlowGraphNode.h
index 69c83dd..2884c08 100644
--- a/media/libaaudio/src/flowgraph/FlowGraphNode.h
+++ b/media/libaaudio/src/flowgraph/FlowGraphNode.h
@@ -38,13 +38,26 @@
// TODO Review use of raw pointers for connect(). Maybe use smart pointers but need to avoid
// run-time deallocation in audio thread.
-// Set this to 1 if using it inside the Android framework.
-// This code is kept here so that it can be moved easily between Oboe and AAudio.
-#ifndef FLOWGRAPH_ANDROID_INTERNAL
-#define FLOWGRAPH_ANDROID_INTERNAL 0
-#endif
+// Set the flags FLOWGRAPH_ANDROID_INTERNAL and FLOWGRAPH_OUTER_NAMESPACE based on whether the
+// compiler flag __ANDROID_NDK__ is defined. __ANDROID_NDK__ should be defined in Oboe but not in AAudio.

-namespace flowgraph {
+#ifndef FLOWGRAPH_ANDROID_INTERNAL
+#ifdef __ANDROID_NDK__
+#define FLOWGRAPH_ANDROID_INTERNAL 0
+#else
+#define FLOWGRAPH_ANDROID_INTERNAL 1
+#endif // __ANDROID_NDK__
+#endif // FLOWGRAPH_ANDROID_INTERNAL
+
+#ifndef FLOWGRAPH_OUTER_NAMESPACE
+#ifdef __ANDROID_NDK__
+#define FLOWGRAPH_OUTER_NAMESPACE oboe
+#else
+#define FLOWGRAPH_OUTER_NAMESPACE aaudio
+#endif // __ANDROID_NDK__
+#endif // FLOWGRAPH_OUTER_NAMESPACE
+
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
// Default block size that can be overridden when the FlowGraphPortFloat is created.
// If it is too small then we will have too much overhead from switching between nodes.
@@ -432,6 +445,6 @@
FlowGraphPortFloatOutput output;
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif /* FLOWGRAPH_FLOW_GRAPH_NODE_H */
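With FlowGraphNode.h now choosing the outer namespace from __ANDROID_NDK__, the same sources land in aaudio::flowgraph in the platform build and in oboe::flowgraph in an NDK/Oboe build. A hypothetical consumer, just to show how the macro is used; include paths depend on your project layout, and a third project could pass -DFLOWGRAPH_OUTER_NAMESPACE=myapp instead:

    // Hypothetical consumer sketch; class names and connect() usage follow the
    // test code elsewhere in this change, everything else is an assumption.
    #include "flowgraph/FlowGraphNode.h"
    #include "flowgraph/SourceFloat.h"
    #include "flowgraph/SinkI16.h"

    using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;  // aaudio:: or oboe::

    int main() {
        SourceFloat sourceFloat{2};  // stereo float source
        SinkI16 sinkI16{2};          // stereo 16-bit sink
        sourceFloat.output.connect(&sinkI16.input);
        return 0;
    }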
diff --git a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
index b750410..ce2bc82 100644
--- a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
+++ b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
@@ -19,7 +19,7 @@
#include <unistd.h>
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
class FlowgraphUtilities {
public:
diff --git a/media/libaaudio/src/flowgraph/ManyToMultiConverter.cpp b/media/libaaudio/src/flowgraph/ManyToMultiConverter.cpp
index 879685e..4f973bc 100644
--- a/media/libaaudio/src/flowgraph/ManyToMultiConverter.cpp
+++ b/media/libaaudio/src/flowgraph/ManyToMultiConverter.cpp
@@ -18,7 +18,7 @@
#include "ManyToMultiConverter.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
ManyToMultiConverter::ManyToMultiConverter(int32_t channelCount)
: inputs(channelCount)
diff --git a/media/libaaudio/src/flowgraph/ManyToMultiConverter.h b/media/libaaudio/src/flowgraph/ManyToMultiConverter.h
index c7460ff..50644cf 100644
--- a/media/libaaudio/src/flowgraph/ManyToMultiConverter.h
+++ b/media/libaaudio/src/flowgraph/ManyToMultiConverter.h
@@ -23,7 +23,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Combine multiple mono inputs into one interleaved multi-channel output.
@@ -48,6 +48,6 @@
private:
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MANY_TO_MULTI_CONVERTER_H
diff --git a/media/libaaudio/src/flowgraph/MonoBlend.cpp b/media/libaaudio/src/flowgraph/MonoBlend.cpp
index 62e2809..4fd75e1 100644
--- a/media/libaaudio/src/flowgraph/MonoBlend.cpp
+++ b/media/libaaudio/src/flowgraph/MonoBlend.cpp
@@ -18,7 +18,7 @@
#include "MonoBlend.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
MonoBlend::MonoBlend(int32_t channelCount)
: FlowGraphFilter(channelCount)
@@ -43,4 +43,4 @@
}
return numFrames;
-}
\ No newline at end of file
+}
diff --git a/media/libaaudio/src/flowgraph/MonoBlend.h b/media/libaaudio/src/flowgraph/MonoBlend.h
index 7e3c35b..f8d44ff 100644
--- a/media/libaaudio/src/flowgraph/MonoBlend.h
+++ b/media/libaaudio/src/flowgraph/MonoBlend.h
@@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Combine data between multiple channels so each channel is an average
@@ -43,6 +43,6 @@
const float mInvChannelCount;
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MONO_BLEND
diff --git a/media/libaaudio/src/flowgraph/MonoToMultiConverter.cpp b/media/libaaudio/src/flowgraph/MonoToMultiConverter.cpp
index c8d60b9..33eed52 100644
--- a/media/libaaudio/src/flowgraph/MonoToMultiConverter.cpp
+++ b/media/libaaudio/src/flowgraph/MonoToMultiConverter.cpp
@@ -18,7 +18,7 @@
#include "FlowGraphNode.h"
#include "MonoToMultiConverter.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
MonoToMultiConverter::MonoToMultiConverter(int32_t outputChannelCount)
: input(*this, 1)
diff --git a/media/libaaudio/src/flowgraph/MonoToMultiConverter.h b/media/libaaudio/src/flowgraph/MonoToMultiConverter.h
index 6e87ccb..762edb0 100644
--- a/media/libaaudio/src/flowgraph/MonoToMultiConverter.h
+++ b/media/libaaudio/src/flowgraph/MonoToMultiConverter.h
@@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Convert a monophonic stream to a multi-channel interleaved stream
@@ -44,6 +44,6 @@
FlowGraphPortFloatOutput output;
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MONO_TO_MULTI_CONVERTER_H
diff --git a/media/libaaudio/src/flowgraph/MultiToManyConverter.cpp b/media/libaaudio/src/flowgraph/MultiToManyConverter.cpp
index f074364..5cdf594 100644
--- a/media/libaaudio/src/flowgraph/MultiToManyConverter.cpp
+++ b/media/libaaudio/src/flowgraph/MultiToManyConverter.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright 2015 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,7 +18,7 @@
#include "FlowGraphNode.h"
#include "MultiToManyConverter.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
MultiToManyConverter::MultiToManyConverter(int32_t channelCount)
: outputs(channelCount)
@@ -45,4 +45,3 @@
return numFrames;
}
-
diff --git a/media/libaaudio/src/flowgraph/MultiToManyConverter.h b/media/libaaudio/src/flowgraph/MultiToManyConverter.h
index de31475..dee40a2 100644
--- a/media/libaaudio/src/flowgraph/MultiToManyConverter.h
+++ b/media/libaaudio/src/flowgraph/MultiToManyConverter.h
@@ -1,5 +1,5 @@
/*
- * Copyright 2015 The Android Open Source Project
+ * Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Convert a multi-channel interleaved stream to multiple mono-channel
@@ -44,6 +44,6 @@
flowgraph::FlowGraphPortFloatInput input;
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
-#endif //FLOWGRAPH_MULTI_TO_MANY_CONVERTER_H
+#endif //FLOWGRAPH_MULTI_TO_MANY_CONVERTER_H
\ No newline at end of file
diff --git a/media/libaaudio/src/flowgraph/MultiToMonoConverter.cpp b/media/libaaudio/src/flowgraph/MultiToMonoConverter.cpp
index c745108..467f95e 100644
--- a/media/libaaudio/src/flowgraph/MultiToMonoConverter.cpp
+++ b/media/libaaudio/src/flowgraph/MultiToMonoConverter.cpp
@@ -18,7 +18,7 @@
#include "FlowGraphNode.h"
#include "MultiToMonoConverter.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
MultiToMonoConverter::MultiToMonoConverter(int32_t inputChannelCount)
: input(*this, inputChannelCount)
diff --git a/media/libaaudio/src/flowgraph/MultiToMonoConverter.h b/media/libaaudio/src/flowgraph/MultiToMonoConverter.h
index 37c53bd..bf5b7b6 100644
--- a/media/libaaudio/src/flowgraph/MultiToMonoConverter.h
+++ b/media/libaaudio/src/flowgraph/MultiToMonoConverter.h
@@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Convert a multi-channel interleaved stream to a monophonic stream
@@ -44,6 +44,6 @@
FlowGraphPortFloatOutput output;
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MULTI_TO_MONO_CONVERTER_H
diff --git a/media/libaaudio/src/flowgraph/RampLinear.cpp b/media/libaaudio/src/flowgraph/RampLinear.cpp
index 905ae07..80ac72a 100644
--- a/media/libaaudio/src/flowgraph/RampLinear.cpp
+++ b/media/libaaudio/src/flowgraph/RampLinear.cpp
@@ -19,7 +19,7 @@
#include "FlowGraphNode.h"
#include "RampLinear.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
RampLinear::RampLinear(int32_t channelCount)
: FlowGraphFilter(channelCount) {
diff --git a/media/libaaudio/src/flowgraph/RampLinear.h b/media/libaaudio/src/flowgraph/RampLinear.h
index f285704..3839d6e 100644
--- a/media/libaaudio/src/flowgraph/RampLinear.h
+++ b/media/libaaudio/src/flowgraph/RampLinear.h
@@ -23,7 +23,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* When the target is modified then the output will ramp smoothly
@@ -91,6 +91,6 @@
float mLevelTo = 0.0f;
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_RAMP_LINEAR_H
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.cpp b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
index 5c3ed1f..a15fcb8 100644
--- a/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
@@ -16,10 +16,11 @@
#include "SampleRateConverter.h"
-using namespace flowgraph;
-using namespace resampler;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
-SampleRateConverter::SampleRateConverter(int32_t channelCount, MultiChannelResampler &resampler)
+SampleRateConverter::SampleRateConverter(int32_t channelCount,
+ MultiChannelResampler &resampler)
: FlowGraphFilter(channelCount)
, mResampler(resampler) {
setDataPulledAutomatically(false);
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.h b/media/libaaudio/src/flowgraph/SampleRateConverter.h
index 57d76a4..f883e6c 100644
--- a/media/libaaudio/src/flowgraph/SampleRateConverter.h
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.h
@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef OBOE_SAMPLE_RATE_CONVERTER_H
-#define OBOE_SAMPLE_RATE_CONVERTER_H
+#ifndef FLOWGRAPH_SAMPLE_RATE_CONVERTER_H
+#define FLOWGRAPH_SAMPLE_RATE_CONVERTER_H
#include <unistd.h>
#include <sys/types.h>
@@ -23,7 +23,7 @@
#include "FlowGraphNode.h"
#include "resampler/MultiChannelResampler.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
class SampleRateConverter : public FlowGraphFilter {
public:
@@ -58,6 +58,6 @@
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
-#endif //OBOE_SAMPLE_RATE_CONVERTER_H
+#endif //FLOWGRAPH_SAMPLE_RATE_CONVERTER_H
diff --git a/media/libaaudio/src/flowgraph/SinkFloat.cpp b/media/libaaudio/src/flowgraph/SinkFloat.cpp
index 0588848..940a66b 100644
--- a/media/libaaudio/src/flowgraph/SinkFloat.cpp
+++ b/media/libaaudio/src/flowgraph/SinkFloat.cpp
@@ -19,7 +19,7 @@
#include "FlowGraphNode.h"
#include "SinkFloat.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SinkFloat::SinkFloat(int32_t channelCount)
: FlowGraphSink(channelCount) {
diff --git a/media/libaaudio/src/flowgraph/SinkFloat.h b/media/libaaudio/src/flowgraph/SinkFloat.h
index c812373..3be3f5d 100644
--- a/media/libaaudio/src/flowgraph/SinkFloat.h
+++ b/media/libaaudio/src/flowgraph/SinkFloat.h
@@ -23,7 +23,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSink that lets you read data as 32-bit floats.
@@ -40,6 +40,6 @@
}
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_FLOAT_H
diff --git a/media/libaaudio/src/flowgraph/SinkI16.cpp b/media/libaaudio/src/flowgraph/SinkI16.cpp
index da7fd6b..690431c 100644
--- a/media/libaaudio/src/flowgraph/SinkI16.cpp
+++ b/media/libaaudio/src/flowgraph/SinkI16.cpp
@@ -23,7 +23,7 @@
#include <audio_utils/primitives.h>
#endif
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SinkI16::SinkI16(int32_t channelCount)
: FlowGraphSink(channelCount) {}
diff --git a/media/libaaudio/src/flowgraph/SinkI16.h b/media/libaaudio/src/flowgraph/SinkI16.h
index 1e1ce3a..bf124f5 100644
--- a/media/libaaudio/src/flowgraph/SinkI16.h
+++ b/media/libaaudio/src/flowgraph/SinkI16.h
@@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSink that lets you read data as 16-bit signed integers.
@@ -38,6 +38,6 @@
}
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_I16_H
diff --git a/media/libaaudio/src/flowgraph/SinkI24.cpp b/media/libaaudio/src/flowgraph/SinkI24.cpp
index a9fb5d2..d4f68b6 100644
--- a/media/libaaudio/src/flowgraph/SinkI24.cpp
+++ b/media/libaaudio/src/flowgraph/SinkI24.cpp
@@ -25,7 +25,7 @@
#include <audio_utils/primitives.h>
#endif
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SinkI24::SinkI24(int32_t channelCount)
: FlowGraphSink(channelCount) {}
diff --git a/media/libaaudio/src/flowgraph/SinkI24.h b/media/libaaudio/src/flowgraph/SinkI24.h
index 44078a9..6b4135e 100644
--- a/media/libaaudio/src/flowgraph/SinkI24.h
+++ b/media/libaaudio/src/flowgraph/SinkI24.h
@@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSink that lets you read data as packed 24-bit signed integers.
@@ -39,6 +39,6 @@
}
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_I24_H
diff --git a/media/libaaudio/src/flowgraph/SinkI32.cpp b/media/libaaudio/src/flowgraph/SinkI32.cpp
index 9fd4e96..b14b3d2 100644
--- a/media/libaaudio/src/flowgraph/SinkI32.cpp
+++ b/media/libaaudio/src/flowgraph/SinkI32.cpp
@@ -14,15 +14,15 @@
* limitations under the License.
*/
-#if FLOWGRAPH_ANDROID_INTERNAL
-#include <audio_utils/primitives.h>
-#endif
-
#include "FlowGraphNode.h"
#include "FlowgraphUtilities.h"
#include "SinkI32.h"
-using namespace flowgraph;
+#if FLOWGRAPH_ANDROID_INTERNAL
+#include <audio_utils/primitives.h>
+#endif
+
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SinkI32::SinkI32(int32_t channelCount)
: FlowGraphSink(channelCount) {}
diff --git a/media/libaaudio/src/flowgraph/SinkI32.h b/media/libaaudio/src/flowgraph/SinkI32.h
index 7456d5f..35507ea 100644
--- a/media/libaaudio/src/flowgraph/SinkI32.h
+++ b/media/libaaudio/src/flowgraph/SinkI32.h
@@ -21,7 +21,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
class SinkI32 : public FlowGraphSink {
public:
@@ -35,6 +35,6 @@
}
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_I32_H
diff --git a/media/libaaudio/src/flowgraph/SourceFloat.cpp b/media/libaaudio/src/flowgraph/SourceFloat.cpp
index 1b3daf1..a0c8827 100644
--- a/media/libaaudio/src/flowgraph/SourceFloat.cpp
+++ b/media/libaaudio/src/flowgraph/SourceFloat.cpp
@@ -19,7 +19,7 @@
#include "FlowGraphNode.h"
#include "SourceFloat.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SourceFloat::SourceFloat(int32_t channelCount)
: FlowGraphSourceBuffered(channelCount) {
diff --git a/media/libaaudio/src/flowgraph/SourceFloat.h b/media/libaaudio/src/flowgraph/SourceFloat.h
index 4719669..78053e5 100644
--- a/media/libaaudio/src/flowgraph/SourceFloat.h
+++ b/media/libaaudio/src/flowgraph/SourceFloat.h
@@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSource that reads a block of pre-defined float data.
@@ -39,6 +39,6 @@
}
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_FLOAT_H
diff --git a/media/libaaudio/src/flowgraph/SourceI16.cpp b/media/libaaudio/src/flowgraph/SourceI16.cpp
index 8813023..16cd2b3 100644
--- a/media/libaaudio/src/flowgraph/SourceI16.cpp
+++ b/media/libaaudio/src/flowgraph/SourceI16.cpp
@@ -24,7 +24,7 @@
#include <audio_utils/primitives.h>
#endif
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SourceI16::SourceI16(int32_t channelCount)
: FlowGraphSourceBuffered(channelCount) {
diff --git a/media/libaaudio/src/flowgraph/SourceI16.h b/media/libaaudio/src/flowgraph/SourceI16.h
index fe440b2..923890c 100644
--- a/media/libaaudio/src/flowgraph/SourceI16.h
+++ b/media/libaaudio/src/flowgraph/SourceI16.h
@@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSource that reads a block of pre-defined 16-bit integer data.
*/
@@ -37,6 +37,6 @@
}
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_I16_H
diff --git a/media/libaaudio/src/flowgraph/SourceI24.cpp b/media/libaaudio/src/flowgraph/SourceI24.cpp
index 1975878..d54b958 100644
--- a/media/libaaudio/src/flowgraph/SourceI24.cpp
+++ b/media/libaaudio/src/flowgraph/SourceI24.cpp
@@ -17,14 +17,14 @@
#include <algorithm>
#include <unistd.h>
+#include "FlowGraphNode.h"
+#include "SourceI24.h"
+
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
-#include "FlowGraphNode.h"
-#include "SourceI24.h"
-
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
constexpr int kBytesPerI24Packed = 3;
diff --git a/media/libaaudio/src/flowgraph/SourceI24.h b/media/libaaudio/src/flowgraph/SourceI24.h
index 3779534..fb66d4a 100644
--- a/media/libaaudio/src/flowgraph/SourceI24.h
+++ b/media/libaaudio/src/flowgraph/SourceI24.h
@@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSource that reads a block of pre-defined 24-bit packed integer data.
@@ -38,6 +38,6 @@
}
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_I24_H
diff --git a/media/libaaudio/src/flowgraph/SourceI32.cpp b/media/libaaudio/src/flowgraph/SourceI32.cpp
index 4b2e8c4..b1c8f75 100644
--- a/media/libaaudio/src/flowgraph/SourceI32.cpp
+++ b/media/libaaudio/src/flowgraph/SourceI32.cpp
@@ -17,14 +17,14 @@
#include <algorithm>
#include <unistd.h>
+#include "FlowGraphNode.h"
+#include "SourceI32.h"
+
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
-#include "FlowGraphNode.h"
-#include "SourceI32.h"
-
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SourceI32::SourceI32(int32_t channelCount)
: FlowGraphSourceBuffered(channelCount) {
diff --git a/media/libaaudio/src/flowgraph/SourceI32.h b/media/libaaudio/src/flowgraph/SourceI32.h
index b4e0d7b..7109469 100644
--- a/media/libaaudio/src/flowgraph/SourceI32.h
+++ b/media/libaaudio/src/flowgraph/SourceI32.h
@@ -21,7 +21,7 @@
#include "FlowGraphNode.h"
-namespace flowgraph {
+namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
class SourceI32 : public FlowGraphSourceBuffered {
public:
@@ -37,6 +37,6 @@
static constexpr float kScale = 1.0 / (1UL << 31);
};
-} /* namespace flowgraph */
+} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_I32_H
diff --git a/media/libaaudio/src/flowgraph/resampler/HyperbolicCosineWindow.h b/media/libaaudio/src/flowgraph/resampler/HyperbolicCosineWindow.h
index f6479ae..76ec0e7 100644
--- a/media/libaaudio/src/flowgraph/resampler/HyperbolicCosineWindow.h
+++ b/media/libaaudio/src/flowgraph/resampler/HyperbolicCosineWindow.h
@@ -19,7 +19,9 @@
#include <math.h>
-namespace resampler {
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Calculate a HyperbolicCosineWindow window centered at 0.
@@ -64,5 +66,6 @@
double mInverseCoshAlpha = 1.0;
};
-} // namespace resampler
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
#endif //RESAMPLER_HYPERBOLIC_COSINE_WINDOW_H
diff --git a/media/libaaudio/src/flowgraph/resampler/IntegerRatio.cpp b/media/libaaudio/src/flowgraph/resampler/IntegerRatio.cpp
index 4bd75b3..39e9b24 100644
--- a/media/libaaudio/src/flowgraph/resampler/IntegerRatio.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/IntegerRatio.cpp
@@ -16,7 +16,7 @@
#include "IntegerRatio.h"
-using namespace resampler;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
// Enough primes to cover the common sample rates.
static const int kPrimes[] = {
diff --git a/media/libaaudio/src/flowgraph/resampler/IntegerRatio.h b/media/libaaudio/src/flowgraph/resampler/IntegerRatio.h
index 8c044d8..a6b524c 100644
--- a/media/libaaudio/src/flowgraph/resampler/IntegerRatio.h
+++ b/media/libaaudio/src/flowgraph/resampler/IntegerRatio.h
@@ -14,12 +14,14 @@
* limitations under the License.
*/
-#ifndef OBOE_INTEGER_RATIO_H
-#define OBOE_INTEGER_RATIO_H
+#ifndef RESAMPLER_INTEGER_RATIO_H
+#define RESAMPLER_INTEGER_RATIO_H
#include <sys/types.h>
-namespace resampler {
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Represent the ratio of two integers.
@@ -47,6 +49,6 @@
int32_t mDenominator;
};
-} // namespace resampler
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
-#endif //OBOE_INTEGER_RATIO_H
+#endif //RESAMPLER_INTEGER_RATIO_H
diff --git a/media/libaaudio/src/flowgraph/resampler/KaiserWindow.h b/media/libaaudio/src/flowgraph/resampler/KaiserWindow.h
index 73dbc41..f99f9b4 100644
--- a/media/libaaudio/src/flowgraph/resampler/KaiserWindow.h
+++ b/media/libaaudio/src/flowgraph/resampler/KaiserWindow.h
@@ -19,7 +19,9 @@
#include <math.h>
-namespace resampler {
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Calculate a Kaiser window centered at 0.
@@ -83,5 +85,6 @@
double mInverseBesselBeta = 1.0;
};
-} // namespace resampler
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
#endif //RESAMPLER_KAISER_WINDOW_H
diff --git a/media/libaaudio/src/flowgraph/resampler/LinearResampler.cpp b/media/libaaudio/src/flowgraph/resampler/LinearResampler.cpp
index a7748c1..cb4932a 100644
--- a/media/libaaudio/src/flowgraph/resampler/LinearResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/LinearResampler.cpp
@@ -16,7 +16,7 @@
#include "LinearResampler.h"
-using namespace resampler;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
LinearResampler::LinearResampler(const MultiChannelResampler::Builder &builder)
: MultiChannelResampler(builder) {
diff --git a/media/libaaudio/src/flowgraph/resampler/LinearResampler.h b/media/libaaudio/src/flowgraph/resampler/LinearResampler.h
index 6bde81d..5434379 100644
--- a/media/libaaudio/src/flowgraph/resampler/LinearResampler.h
+++ b/media/libaaudio/src/flowgraph/resampler/LinearResampler.h
@@ -14,15 +14,17 @@
* limitations under the License.
*/
-#ifndef OBOE_LINEAR_RESAMPLER_H
-#define OBOE_LINEAR_RESAMPLER_H
+#ifndef RESAMPLER_LINEAR_RESAMPLER_H
+#define RESAMPLER_LINEAR_RESAMPLER_H
#include <memory>
#include <sys/types.h>
#include <unistd.h>
-#include "MultiChannelResampler.h"
-namespace resampler {
+#include "MultiChannelResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Simple resampler that uses bi-linear interpolation.
@@ -40,5 +42,6 @@
std::unique_ptr<float[]> mCurrentFrame;
};
-} // namespace resampler
-#endif //OBOE_LINEAR_RESAMPLER_H
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_LINEAR_RESAMPLER_H
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
index d630520..7193ff3 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
@@ -25,11 +25,12 @@
#include "SincResampler.h"
#include "SincResamplerStereo.h"
-using namespace resampler;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
MultiChannelResampler::MultiChannelResampler(const MultiChannelResampler::Builder &builder)
: mNumTaps(builder.getNumTaps())
- , mX(builder.getChannelCount() * builder.getNumTaps() * 2)
+ , mX(static_cast<size_t>(builder.getChannelCount())
+ * static_cast<size_t>(builder.getNumTaps()) * 2)
, mSingleFrame(builder.getChannelCount())
, mChannelCount(builder.getChannelCount())
{
@@ -110,7 +111,7 @@
if (--mCursor < 0) {
mCursor = getNumTaps() - 1;
}
- float *dest = &mX[mCursor * getChannelCount()];
+ float *dest = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
int offset = getNumTaps() * getChannelCount();
for (int channel = 0; channel < getChannelCount(); channel++) {
// Write twice so we avoid having to wrap when reading.
@@ -130,7 +131,7 @@
int32_t numRows,
double phaseIncrement,
float normalizedCutoff) {
- mCoefficients.resize(getNumTaps() * numRows);
+ mCoefficients.resize(static_cast<size_t>(getNumTaps()) * static_cast<size_t>(numRows));
int coefficientIndex = 0;
double phase = 0.0; // ranges from 0.0 to 1.0, fraction between samples
// Stretch the sinc function for low pass filtering.
@@ -150,7 +151,7 @@
#if MCR_USE_KAISER
float window = mKaiserWindow(tapPhase * numTapsHalfInverse);
#else
- float window = mCoshWindow(tapPhase * numTapsHalfInverse);
+ float window = mCoshWindow(static_cast<double>(tapPhase) * numTapsHalfInverse);
#endif
float coefficient = sinc(radians * cutoffScaler) * window;
mCoefficients.at(coefficientIndex++) = coefficient;
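For readers of the hunk above: each filter tap is a sample of a low-pass sinc, scaled down in frequency by cutoffScaler when down-sampling, and multiplied by a Kaiser or hyperbolic-cosine window. A generic sketch of that product; the phase-to-radians mapping is not shown in this hunk, so treat it as an assumption:

    // Generic windowed-sinc tap, for illustration only; the real code uses the
    // class's own sinc() helper and its Kaiser / cosh window objects.
    #include <cmath>

    static double sinc(double x) {              // assumed sin(x)/x with sinc(0) = 1
        return (std::fabs(x) < 1.0e-9) ? 1.0 : std::sin(x) / x;
    }

    // tapPhase: distance of the tap from the filter center, in samples.
    // cutoffScaler: below 1.0 when down-sampling, to lower the pass band.
    // window: any taper that falls toward zero at the edges of the kernel.
    static double windowedSincTap(double tapPhase, double cutoffScaler,
                                  double (*window)(double)) {
        const double kPi = 3.14159265358979323846;
        const double radians = tapPhase * kPi;  // assumed: zero crossings at integer tap offsets
        return sinc(radians * cutoffScaler) * window(tapPhase);
    }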
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
index da79cad..717f3fd 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
@@ -14,8 +14,8 @@
* limitations under the License.
*/
-#ifndef OBOE_MULTICHANNEL_RESAMPLER_H
-#define OBOE_MULTICHANNEL_RESAMPLER_H
+#ifndef RESAMPLER_MULTICHANNEL_RESAMPLER_H
+#define RESAMPLER_MULTICHANNEL_RESAMPLER_H
#include <memory>
#include <vector>
@@ -34,7 +34,9 @@
#include "HyperbolicCosineWindow.h"
#endif
-namespace resampler {
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
class MultiChannelResampler {
@@ -267,5 +269,6 @@
const int mChannelCount;
};
-} // namespace resampler
-#endif //OBOE_MULTICHANNEL_RESAMPLER_H
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_MULTICHANNEL_RESAMPLER_H
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.cpp b/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.cpp
index aa4ffd9..e47ee8e 100644
--- a/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.cpp
@@ -19,7 +19,7 @@
#include "IntegerRatio.h"
#include "PolyphaseResampler.h"
-using namespace resampler;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
PolyphaseResampler::PolyphaseResampler(const MultiChannelResampler::Builder &builder)
: MultiChannelResampler(builder)
@@ -42,7 +42,7 @@
// Multiply input times windowed sinc function.
float *coefficients = &mCoefficients[mCoefficientCursor];
- float *xFrame = &mX[mCursor * getChannelCount()];
+ float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
for (int i = 0; i < mNumTaps; i++) {
float coefficient = *coefficients++;
for (int channel = 0; channel < getChannelCount(); channel++) {
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.h b/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.h
index 1aeb680..3642fce 100644
--- a/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.h
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResampler.h
@@ -14,16 +14,18 @@
* limitations under the License.
*/
-#ifndef OBOE_POLYPHASE_RESAMPLER_H
-#define OBOE_POLYPHASE_RESAMPLER_H
+#ifndef RESAMPLER_POLYPHASE_RESAMPLER_H
+#define RESAMPLER_POLYPHASE_RESAMPLER_H
#include <memory>
#include <vector>
#include <sys/types.h>
#include <unistd.h>
-#include "MultiChannelResampler.h"
-namespace resampler {
+#include "MultiChannelResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Resampler that is optimized for a reduced ratio of sample rates.
* All of the coefficients for each possible phase value are pre-calculated.
@@ -46,6 +48,6 @@
};
-} // namespace resampler
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
-#endif //OBOE_POLYPHASE_RESAMPLER_H
+#endif //RESAMPLER_POLYPHASE_RESAMPLER_H
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.cpp b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.cpp
index c0e29b7..fdaf13e 100644
--- a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.cpp
@@ -17,7 +17,7 @@
#include <cassert>
#include "PolyphaseResamplerMono.h"
-using namespace resampler;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
#define MONO 1
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.h b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.h
index 0a691a3..fe020b5 100644
--- a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.h
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerMono.h
@@ -14,14 +14,16 @@
* limitations under the License.
*/
-#ifndef OBOE_POLYPHASE_RESAMPLER_MONO_H
-#define OBOE_POLYPHASE_RESAMPLER_MONO_H
+#ifndef RESAMPLER_POLYPHASE_RESAMPLER_MONO_H
+#define RESAMPLER_POLYPHASE_RESAMPLER_MONO_H
#include <sys/types.h>
#include <unistd.h>
-#include "PolyphaseResampler.h"
-namespace resampler {
+#include "PolyphaseResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
class PolyphaseResamplerMono : public PolyphaseResampler {
public:
@@ -34,6 +36,6 @@
void readFrame(float *frame) override;
};
-} // namespace resampler
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
-#endif //OBOE_POLYPHASE_RESAMPLER_MONO_H
+#endif //RESAMPLER_POLYPHASE_RESAMPLER_MONO_H
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.cpp b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.cpp
index e4bef74..b381851 100644
--- a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.cpp
@@ -17,7 +17,7 @@
#include <cassert>
#include "PolyphaseResamplerStereo.h"
-using namespace resampler;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
#define STEREO 2
diff --git a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.h b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.h
index e608483..ee4caba 100644
--- a/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.h
+++ b/media/libaaudio/src/flowgraph/resampler/PolyphaseResamplerStereo.h
@@ -14,14 +14,16 @@
* limitations under the License.
*/
-#ifndef OBOE_POLYPHASE_RESAMPLER_STEREO_H
-#define OBOE_POLYPHASE_RESAMPLER_STEREO_H
+#ifndef RESAMPLER_POLYPHASE_RESAMPLER_STEREO_H
+#define RESAMPLER_POLYPHASE_RESAMPLER_STEREO_H
#include <sys/types.h>
#include <unistd.h>
-#include "PolyphaseResampler.h"
-namespace resampler {
+#include "PolyphaseResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
class PolyphaseResamplerStereo : public PolyphaseResampler {
public:
@@ -34,6 +36,6 @@
void readFrame(float *frame) override;
};
-} // namespace resampler
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
-#endif //OBOE_POLYPHASE_RESAMPLER_STEREO_H
+#endif //RESAMPLER_POLYPHASE_RESAMPLER_STEREO_H
diff --git a/media/libaaudio/src/flowgraph/resampler/README.md b/media/libaaudio/src/flowgraph/resampler/README.md
index 05d8a89..ea319c7 100644
--- a/media/libaaudio/src/flowgraph/resampler/README.md
+++ b/media/libaaudio/src/flowgraph/resampler/README.md
@@ -5,6 +5,17 @@
The converter is based on a sinc function that has been windowed by a hyperbolic cosine.
We found this had fewer artifacts than the more traditional Kaiser window.
+## Building the Resampler
+
+It is part of [Oboe](https://github.com/google/oboe) but has no dependencies on Oboe.
+So the contents of this folder can be used outside of Oboe.
+
+To build it for use outside of Oboe:
+
+1. Copy the "resampler" folder to a folder in your project that is in the include path.
+2. Add all of the \*.cpp files in the resampler folder to your project IDE or Makefile.
+3. In ResamplerDefinitions.h, define RESAMPLER_OUTER_NAMESPACE with your own project name. Alternatively, use -DRESAMPLER_OUTER_NAMESPACE=mynamespace when compiling to avoid modifying the resampler code.
+
## Creating a Resampler
Include the [main header](MultiChannelResampler.h) for the resampler.
@@ -88,4 +99,3 @@
When you are done, you should delete the Resampler to avoid a memory leak.
delete resampler;
-
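To make the new "Building the Resampler" instructions concrete, a hypothetical out-of-tree consumer might look like the sketch below. The -DRESAMPLER_OUTER_NAMESPACE=myproject flag is the mechanism step 3 describes; the make() call and the Quality value follow the "Creating a Resampler" section of this README and should be treated as assumptions rather than as something shown in this hunk.

    // Compile with: -DRESAMPLER_OUTER_NAMESPACE=myproject
    // so the classes land in myproject::resampler instead of aaudio:: or oboe::.
    #include "resampler/MultiChannelResampler.h"

    using namespace RESAMPLER_OUTER_NAMESPACE::resampler;

    int main() {
        MultiChannelResampler *resampler = MultiChannelResampler::make(
                2,      // channel count
                44100,  // input sample rate
                48000,  // output sample rate
                MultiChannelResampler::Quality::Medium);  // assumed enum value
        // ... feed input frames and pull resampled output frames here ...
        delete resampler;  // the README notes it must be deleted when done
        return 0;
    }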
diff --git a/media/libaaudio/src/flowgraph/resampler/ResamplerDefinitions.h b/media/libaaudio/src/flowgraph/resampler/ResamplerDefinitions.h
new file mode 100644
index 0000000..c6791ec
--- /dev/null
+++ b/media/libaaudio/src/flowgraph/resampler/ResamplerDefinitions.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Set the RESAMPLER_OUTER_NAMESPACE flag based on whether the compiler flag
+// __ANDROID_NDK__ is defined. __ANDROID_NDK__ should be defined in Oboe
+// but not in the Android platform build.
+
+#ifndef RESAMPLER_OUTER_NAMESPACE
+#ifdef __ANDROID_NDK__
+#define RESAMPLER_OUTER_NAMESPACE oboe
+#else
+#define RESAMPLER_OUTER_NAMESPACE aaudio
+#endif // __ANDROID_NDK__
+#endif // RESAMPLER_OUTER_NAMESPACE
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp b/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp
index 5e8a9e0..42d0ca2 100644
--- a/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/SincResampler.cpp
@@ -18,7 +18,7 @@
#include <math.h>
#include "SincResampler.h"
-using namespace resampler;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
SincResampler::SincResampler(const MultiChannelResampler::Builder &builder)
: MultiChannelResampler(builder)
@@ -52,10 +52,12 @@
index2 -= mNumRows;
}
- float *coefficients1 = &mCoefficients[index1 * getNumTaps()];
- float *coefficients2 = &mCoefficients[index2 * getNumTaps()];
+ float *coefficients1 = &mCoefficients[static_cast<size_t>(index1)
+ * static_cast<size_t>(getNumTaps())];
+ float *coefficients2 = &mCoefficients[static_cast<size_t>(index2)
+ * static_cast<size_t>(getNumTaps())];
- float *xFrame = &mX[mCursor * getChannelCount()];
+ float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
for (int i = 0; i < mNumTaps; i++) {
float coefficient1 = *coefficients1++;
float coefficient2 = *coefficients2++;
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResampler.h b/media/libaaudio/src/flowgraph/resampler/SincResampler.h
index b235188..05ff092 100644
--- a/media/libaaudio/src/flowgraph/resampler/SincResampler.h
+++ b/media/libaaudio/src/flowgraph/resampler/SincResampler.h
@@ -14,15 +14,17 @@
* limitations under the License.
*/
-#ifndef OBOE_SINC_RESAMPLER_H
-#define OBOE_SINC_RESAMPLER_H
+#ifndef RESAMPLER_SINC_RESAMPLER_H
+#define RESAMPLER_SINC_RESAMPLER_H
#include <memory>
#include <sys/types.h>
#include <unistd.h>
-#include "MultiChannelResampler.h"
-namespace resampler {
+#include "MultiChannelResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Resampler that can interpolate between coefficients.
@@ -43,5 +45,6 @@
double mPhaseScaler = 1.0;
};
-} // namespace resampler
-#endif //OBOE_SINC_RESAMPLER_H
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_SINC_RESAMPLER_H
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp
index ce00302..432137e 100644
--- a/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.cpp
@@ -19,7 +19,7 @@
#include "SincResamplerStereo.h"
-using namespace resampler;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
#define STEREO 2
@@ -54,13 +54,15 @@
// Determine indices into coefficients table.
double tablePhase = getIntegerPhase() * mPhaseScaler;
int index1 = static_cast<int>(floor(tablePhase));
- float *coefficients1 = &mCoefficients[index1 * getNumTaps()];
+ float *coefficients1 = &mCoefficients[static_cast<size_t>(index1)
+ * static_cast<size_t>(getNumTaps())];
int index2 = (index1 + 1);
if (index2 >= mNumRows) { // no guard row needed because we wrap the indices
index2 = 0;
}
- float *coefficients2 = &mCoefficients[index2 * getNumTaps()];
- float *xFrame = &mX[mCursor * getChannelCount()];
+ float *coefficients2 = &mCoefficients[static_cast<size_t>(index2)
+ * static_cast<size_t>(getNumTaps())];
+ float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
for (int i = 0; i < mNumTaps; i++) {
float coefficient1 = *coefficients1++;
float coefficient2 = *coefficients2++;
diff --git a/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.h b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.h
index 7d49ec7..d5576d1 100644
--- a/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.h
+++ b/media/libaaudio/src/flowgraph/resampler/SincResamplerStereo.h
@@ -14,14 +14,16 @@
* limitations under the License.
*/
-#ifndef OBOE_SINC_RESAMPLER_STEREO_H
-#define OBOE_SINC_RESAMPLER_STEREO_H
+#ifndef RESAMPLER_SINC_RESAMPLER_STEREO_H
+#define RESAMPLER_SINC_RESAMPLER_STEREO_H
#include <sys/types.h>
#include <unistd.h>
-#include "SincResampler.h"
-namespace resampler {
+#include "SincResampler.h"
+#include "ResamplerDefinitions.h"
+
+namespace RESAMPLER_OUTER_NAMESPACE::resampler {
class SincResamplerStereo : public SincResampler {
public:
@@ -35,5 +37,6 @@
};
-} // namespace resampler
-#endif //OBOE_SINC_RESAMPLER_STEREO_H
+} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
+
+#endif //RESAMPLER_SINC_RESAMPLER_STEREO_H
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index ed31ec9..1e39e0f 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -96,29 +96,8 @@
setFormat(AUDIO_FORMAT_PCM_FLOAT);
}
- // Maybe change device format to get a FAST path.
- // AudioRecord does not support FAST mode for FLOAT data.
- // TODO AudioRecord should allow FLOAT data paths for FAST tracks.
- // So IF the user asks for low latency FLOAT
- // AND the sampleRate is likely to be compatible with FAST
- // THEN request I16 and convert to FLOAT when passing to user.
- // Note that hard coding 48000 Hz is not ideal because the sampleRate
- // for a FAST path might not be 48000 Hz.
- // It normally is but there is a chance that it is not.
- // And there is no reliable way to know that in advance.
- // Luckily the consequences of a wrong guess are minor.
- // We just may not get a FAST track.
- // But we wouldn't have anyway without this hack.
- constexpr int32_t kMostLikelySampleRateForFast = 48000;
- if (getFormat() == AUDIO_FORMAT_PCM_FLOAT
- && perfMode == AAUDIO_PERFORMANCE_MODE_LOW_LATENCY
- && (audio_channel_count_from_in_mask(channelMask) <= 2) // FAST only for mono and stereo
- && (getSampleRate() == kMostLikelySampleRateForFast
- || getSampleRate() == AAUDIO_UNSPECIFIED)) {
- setDeviceFormat(AUDIO_FORMAT_PCM_16_BIT);
- } else {
- setDeviceFormat(getFormat());
- }
+
+ setDeviceFormat(getFormat());
// To avoid glitching, let AudioFlinger pick the optimal burst size.
uint32_t notificationFrames = 0;
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 4b42203..872faca 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -562,7 +562,9 @@
int32_t AAudioProperty_getMinimumSleepMicros() {
const int32_t minMicros = 1; // arbitrary
// Higher values can increase latency for moderate workloads.
- const int32_t defaultMicros = 1; // arbitrary
+ // Values that are too small can cause the CPU to short-cycle (wake up too
+ // often) if there is a bug in calculating the wakeup times.
+ const int32_t defaultMicros = 100; // arbitrary
const int32_t maxMicros = 200; // arbitrary
int32_t prop = property_get_int32(AAUDIO_PROP_MINIMUM_SLEEP_USEC, defaultMicros);
if (prop < minMicros) {
diff --git a/media/libaaudio/tests/test_flowgraph.cpp b/media/libaaudio/tests/test_flowgraph.cpp
index 0792fc5..913feb0 100644
--- a/media/libaaudio/tests/test_flowgraph.cpp
+++ b/media/libaaudio/tests/test_flowgraph.cpp
@@ -33,26 +33,73 @@
#include "flowgraph/SourceI16.h"
#include "flowgraph/SourceI24.h"
-using namespace flowgraph;
+using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
constexpr int kBytesPerI24Packed = 3;
+// Simple test that tries to reproduce a Clang compiler bug.
+__attribute__((noinline))
+void local_convert_float_to_int16(const float *input,
+ int16_t *output,
+ int count) {
+ for (int i = 0; i < count; i++) {
+ int32_t n = (int32_t) (*input++ * 32768.0f);
+ *output++ = std::min(INT16_MAX, std::max(INT16_MIN, n)); // clip
+ }
+}
+
+TEST(test_flowgraph, local_convert_float_int16) {
+ static constexpr int kNumSamples = 8;
+ static constexpr std::array<float, kNumSamples> input = {
+ 1.0f, 0.5f, -0.25f, -1.0f,
+ 0.0f, 53.9f, -87.2f, -1.02f};
+ static constexpr std::array<int16_t, kNumSamples> expected = {
+ 32767, 16384, -8192, -32768,
+ 0, 32767, -32768, -32768};
+ std::array<int16_t, kNumSamples> output;
+
+ // Do it inline, which will probably work even with the buggy compiler.
+ // This validates the expected data.
+ const float *in = input.data();
+ int16_t *out = output.data();
+ output.fill(777);
+ for (int i = 0; i < kNumSamples; i++) {
+ int32_t n = (int32_t) (*in++ * 32768.0f);
+ *out++ = std::min(INT16_MAX, std::max(INT16_MIN, n)); // clip
+ }
+ for (int i = 0; i < kNumSamples; i++) {
+ EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+ }
+
+ // Convert audio signal using the function.
+ output.fill(777);
+ local_convert_float_to_int16(input.data(), output.data(), kNumSamples);
+ for (int i = 0; i < kNumSamples; i++) {
+ EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+ }
+}
+
TEST(test_flowgraph, module_sinki16) {
- static const float input[] = {1.0f, 0.5f, -0.25f, -1.0f, 0.0f, 53.9f, -87.2f};
- static const int16_t expected[] = {32767, 16384, -8192, -32768, 0, 32767, -32768};
- int16_t output[20];
+ static constexpr int kNumSamples = 8;
+ static constexpr std::array<float, kNumSamples> input = {
+ 1.0f, 0.5f, -0.25f, -1.0f,
+ 0.0f, 53.9f, -87.2f, -1.02f};
+ static constexpr std::array<int16_t, kNumSamples> expected = {
+ 32767, 16384, -8192, -32768,
+ 0, 32767, -32768, -32768};
+ std::array<int16_t, kNumSamples + 10> output; // larger than input
+
SourceFloat sourceFloat{1};
SinkI16 sinkI16{1};
- int numInputFrames = sizeof(input) / sizeof(input[0]);
- sourceFloat.setData(input, numInputFrames);
+ sourceFloat.setData(input.data(), kNumSamples);
sourceFloat.output.connect(&sinkI16.input);
- int numOutputFrames = sizeof(output) / sizeof(int16_t);
- int32_t numRead = sinkI16.read(output, numOutputFrames);
- ASSERT_EQ(numInputFrames, numRead);
+ output.fill(777);
+ int32_t numRead = sinkI16.read(output.data(), output.size());
+ ASSERT_EQ(kNumSamples, numRead);
for (int i = 0; i < numRead; i++) {
- EXPECT_EQ(expected[i], output[i]);
+ EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
}
}
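The expected arrays in both tests follow from scaling by 32768 and clipping to the int16 range: 0.5f * 32768 = 16384 and -0.25f * 32768 = -8192 pass through unchanged; 1.0f * 32768 = 32768 clips to 32767, while -1.0f * 32768 = -32768 just fits; and 53.9f, -87.2f and -1.02f all fall outside the range, so they clip to 32767, -32768 and -32768 respectively.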
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index e890e97..69a9c68 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -345,7 +345,7 @@
"aidl/android/media/TrackSecondaryOutputInfo.aidl",
],
imports: [
- "android.media.audio.common.types",
+ "android.media.audio.common.types-V1",
"framework-permission-aidl",
],
backend: {
@@ -389,7 +389,7 @@
"aidl/android/media/SpatializerHeadTrackingMode.aidl",
],
imports: [
- "android.media.audio.common.types",
+ "android.media.audio.common.types-V1",
"audioclient-types-aidl",
],
backend: {
@@ -432,7 +432,7 @@
"aidl/android/media/IAudioTrackCallback.aidl",
],
imports: [
- "android.media.audio.common.types",
+ "android.media.audio.common.types-V1",
"audioclient-types-aidl",
"av-types-aidl",
"effect-aidl",
@@ -469,7 +469,7 @@
"aidl/android/media/IAudioPolicyServiceClient.aidl",
],
imports: [
- "android.media.audio.common.types",
+ "android.media.audio.common.types-V1",
"audioclient-types-aidl",
"audiopolicy-types-aidl",
"capture_state_listener-aidl",
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index a7b10b2..9a7b9c1 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -959,7 +959,8 @@
audio_output_flags_t flags,
audio_port_handle_t* selectedDeviceId,
audio_port_handle_t* portId,
- std::vector<audio_io_handle_t>* secondaryOutputs) {
+ std::vector<audio_io_handle_t>* secondaryOutputs,
+ bool *isSpatialized) {
if (attr == nullptr) {
ALOGE("%s NULL audio attributes", __func__);
return BAD_VALUE;
@@ -1012,6 +1013,7 @@
*portId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(responseAidl.portId));
*secondaryOutputs = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<audio_io_handle_t>>(
responseAidl.secondaryOutputs, aidl2legacy_int32_t_audio_io_handle_t));
+ *isSpatialized = responseAidl.isSpatialized;
return OK;
}
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index d8c18c0..3751f80 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -1,7 +1,15 @@
{
"presubmit": [
{
- "name": "audio_aidl_conversion_tests"
+ "name": "audio_aidl_conversion_tests"
+ },
+ {
+ "name": "CtsNativeMediaAAudioTestCases",
+ "options" : [
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+ }
+ ]
}
]
}
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
index 963877a..f1848b6 100644
--- a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -31,4 +31,6 @@
int portId;
/** Interpreted as audio_io_handle_t[]. */
int[] secondaryOutputs;
+ /** True if the track is connected to a spatializer mixer and actually spatialized */
+ boolean isSpatialized;
}
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 6afe023..10da028 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -227,4 +227,9 @@
int getAAudioHardwareBurstMinUsec();
void setDeviceConnectedState(in AudioPort devicePort, boolean connected);
+
+ // When adding a new method, please review and update
+ // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
+ // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
+ // AudioFlinger.cpp IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST
}
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index e2ef772..8ac89a8 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -390,4 +390,8 @@
* for the specified audio attributes.
*/
AudioProfile[] getDirectProfilesForAttributes(in AudioAttributesInternal attr);
+
+ // When adding a new method, please review and update
+ // AudioPolicyService.cpp AudioPolicyService::onTransact()
+ // AudioPolicyService.cpp IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
}
diff --git a/media/libaudioclient/aidl/android/media/IEffect.aidl b/media/libaudioclient/aidl/android/media/IEffect.aidl
index 813cd5c..6ec0405 100644
--- a/media/libaudioclient/aidl/android/media/IEffect.aidl
+++ b/media/libaudioclient/aidl/android/media/IEffect.aidl
@@ -62,4 +62,8 @@
* TODO(ytai): Explain how this should be used exactly.
*/
SharedFileRegion getCblk();
+
+ // When adding a new method, please review and update
+ // Effects.cpp AudioFlinger::EffectHandle::onTransact()
+ // Effects.cpp IEFFECT_BINDER_METHOD_MACRO_LIST
}
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index e89ce15..1cc22a0 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -286,7 +286,8 @@
audio_output_flags_t flags,
audio_port_handle_t *selectedDeviceId,
audio_port_handle_t *portId,
- std::vector<audio_io_handle_t> *secondaryOutputs);
+ std::vector<audio_io_handle_t> *secondaryOutputs,
+ bool *isSpatialized);
static status_t startOutput(audio_port_handle_t portId);
static status_t stopOutput(audio_port_handle_t portId);
static void releaseOutput(audio_port_handle_t portId);
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index e047378..3c3715d 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -482,9 +482,9 @@
* Legacy server should implement this interface in order to be wrapped.
*/
class Delegate : public IAudioFlinger {
- protected:
friend class AudioFlingerServerAdapter;
-
+ public:
+ // expose the TransactionCode enum for TimeCheck purposes.
enum class TransactionCode {
CREATE_TRACK = media::BnAudioFlingerService::TRANSACTION_createTrack,
CREATE_RECORD = media::BnAudioFlingerService::TRANSACTION_createRecord,
@@ -553,6 +553,7 @@
SET_DEVICE_CONNECTED_STATE = media::BnAudioFlingerService::TRANSACTION_setDeviceConnectedState,
};
+ protected:
/**
* An optional hook, called on every transaction, allowing additional operations to be
* performed before/after the unparceling of the data and dispatching to the respective
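
Making Delegate::TransactionCode public lets timeout reporting name the binder method being dispatched instead of logging a bare integer. The sketch below is illustrative only (the enum lives on AudioFlingerServerAdapter::Delegate as shown above; the metrics plumbing is omitted): a watchdog can translate a code into a tag for a timeout report.

    // Illustrative sketch: translate a (now public) transaction code into a
    // human-readable tag for a timeout or metrics report.
    #include <media/IAudioFlinger.h>

    namespace {

    using Code = android::AudioFlingerServerAdapter::Delegate::TransactionCode;

    const char* transactionName(Code code) {
        switch (code) {
            case Code::CREATE_TRACK:  return "createTrack";
            case Code::CREATE_RECORD: return "createRecord";
            default:                  return "unknownTransaction";
        }
    }

    }  // namespace
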
diff --git a/media/libaudiofoundation/TEST_MAPPING b/media/libaudiofoundation/TEST_MAPPING
index f6d249a..efe8437 100644
--- a/media/libaudiofoundation/TEST_MAPPING
+++ b/media/libaudiofoundation/TEST_MAPPING
@@ -1,7 +1,15 @@
{
"presubmit": [
{
- "name": "audiofoundation_parcelable_test"
+ "name": "audiofoundation_parcelable_test"
+ },
+ {
+ "name": "CtsNativeMediaAAudioTestCases",
+ "options" : [
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+ }
+ ]
}
]
}
diff --git a/media/libaudiohal/DevicesFactoryHalInterface.cpp b/media/libaudiohal/DevicesFactoryHalInterface.cpp
index 325a547..5ad26fc 100644
--- a/media/libaudiohal/DevicesFactoryHalInterface.cpp
+++ b/media/libaudiohal/DevicesFactoryHalInterface.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <string>
+
#include <media/audiohal/DevicesFactoryHalInterface.h>
#include <media/audiohal/FactoryHalHidl.h>
@@ -21,8 +23,10 @@
// static
sp<DevicesFactoryHalInterface> DevicesFactoryHalInterface::create() {
+ using namespace std::string_literals;
return createPreferredImpl<DevicesFactoryHalInterface>(
- "android.hardware.audio", "IDevicesFactory");
+ std::make_pair("android.hardware.audio"s, "IDevicesFactory"s),
+ std::make_pair("android.hardware.audio.effect"s, "IEffectsFactory"s));
}
} // namespace android
diff --git a/media/libaudiohal/EffectsFactoryHalInterface.cpp b/media/libaudiohal/EffectsFactoryHalInterface.cpp
index bc3b4c1..8a28f64 100644
--- a/media/libaudiohal/EffectsFactoryHalInterface.cpp
+++ b/media/libaudiohal/EffectsFactoryHalInterface.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <string>
+
#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <media/audiohal/FactoryHalHidl.h>
@@ -21,8 +23,10 @@
// static
sp<EffectsFactoryHalInterface> EffectsFactoryHalInterface::create() {
+ using namespace std::string_literals;
return createPreferredImpl<EffectsFactoryHalInterface>(
- "android.hardware.audio.effect", "IEffectsFactory");
+ std::make_pair("android.hardware.audio.effect"s, "IEffectsFactory"s),
+ std::make_pair("android.hardware.audio"s, "IDevicesFactory"s));
}
// static
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
index 804edcc..590fec5 100644
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ b/media/libaudiohal/FactoryHalHidl.cpp
@@ -16,6 +16,10 @@
#define LOG_TAG "FactoryHalHidl"
+#include <algorithm>
+#include <array>
+#include <utility>
+
#include <media/audiohal/FactoryHalHidl.h>
#include <dlfcn.h>
@@ -28,15 +32,16 @@
namespace android::detail {
namespace {
-/** Supported HAL versions, in order of preference.
+/** Supported HAL versions, from most recent to least recent.
*/
-const char* sAudioHALVersions[] = {
- "7.1",
- "7.0",
- "6.0",
- "5.0",
- "4.0",
- nullptr
+#define CONC_VERSION(maj, min) #maj "." #min
+#define DECLARE_VERSION(maj, min) std::make_pair(std::make_pair(maj, min), CONC_VERSION(maj, min))
+static constexpr std::array<std::pair<std::pair<int, int>, const char*>, 5> sAudioHALVersions = {
+ DECLARE_VERSION(7, 1),
+ DECLARE_VERSION(7, 0),
+ DECLARE_VERSION(6, 0),
+ DECLARE_VERSION(5, 0),
+ DECLARE_VERSION(4, 0)
};
bool createHalService(const std::string& version, const std::string& interface,
@@ -94,11 +99,22 @@
} // namespace
-void* createPreferredImpl(const std::string& package, const std::string& interface) {
- for (auto version = detail::sAudioHALVersions; *version != nullptr; ++version) {
- void* rawInterface = nullptr;
- if (hasHalService(package, *version, interface)
- && createHalService(*version, interface, &rawInterface)) {
+void* createPreferredImpl(const InterfaceName& iface, const InterfaceName& siblingIface) {
+ auto findMostRecentVersion = [](const InterfaceName& iface) {
+ return std::find_if(detail::sAudioHALVersions.begin(), detail::sAudioHALVersions.end(),
+ [&](const auto& v) { return hasHalService(iface.first, v.second, iface.second); });
+ };
+ auto ifaceVersionIt = findMostRecentVersion(iface);
+ auto siblingVersionIt = findMostRecentVersion(siblingIface);
+ if (ifaceVersionIt != detail::sAudioHALVersions.end() &&
+ siblingVersionIt != detail::sAudioHALVersions.end() &&
+ // same major version
+ ifaceVersionIt->first.first == siblingVersionIt->first.first) {
+ std::string libraryVersion =
+ ifaceVersionIt->first >= siblingVersionIt->first ?
+ ifaceVersionIt->second : siblingVersionIt->second;
+ void* rawInterface;
+ if (createHalService(libraryVersion, iface.second, &rawInterface)) {
return rawInterface;
}
}
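
The lookup above relies on std::pair comparing lexicographically, so <major, minor> versions order naturally and, given equal majors, the single library that gets loaded is the one matching the newer of the two interfaces. A standalone sketch of that rule, using nothing beyond the standard library:

    #include <algorithm>
    #include <cassert>
    #include <utility>

    using HalVersion = std::pair<int, int>;  // <major, minor>, as in sAudioHALVersions

    // Same rule as the selection above: with matching majors, pick the library
    // that matches the more recent of the two interface versions.
    static HalVersion pickLibraryVersion(HalVersion core, HalVersion effect) {
        assert(core.first == effect.first);  // same major version required
        return std::max(core, effect);       // lexicographic: major, then minor
    }

    int main() {
        assert((std::make_pair(7, 1) > std::make_pair(7, 0)));
        assert((pickLibraryVersion({7, 1}, {7, 0}) == HalVersion(7, 1)));
        return 0;
    }
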
diff --git a/media/libaudiohal/TEST_MAPPING b/media/libaudiohal/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9f
--- /dev/null
+++ b/media/libaudiohal/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+ "presubmit": [
+ {
+ "name": "CtsNativeMediaAAudioTestCases",
+ "options" : [
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+ }
+ ]
+ }
+ ]
+}
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index dd435fe..d30883a 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -10,6 +10,7 @@
filegroup {
name: "audio_core_hal_client_sources",
srcs: [
+ "CoreConversionHelperHidl.cpp",
"DeviceHalHidl.cpp",
"DevicesFactoryHalHidl.cpp",
"StreamHalHidl.cpp",
@@ -20,6 +21,7 @@
name: "audio_effect_hal_client_sources",
srcs: [
"EffectBufferHalHidl.cpp",
+ "EffectConversionHelperHidl.cpp",
"EffectHalHidl.cpp",
"EffectsFactoryHalHidl.cpp",
],
@@ -28,10 +30,6 @@
cc_defaults {
name: "libaudiohal_default",
- srcs: [
- "ConversionHelperHidl.cpp",
- ],
-
cflags: [
"-Wall",
"-Wextra",
@@ -76,6 +74,7 @@
srcs: [
":audio_core_hal_client_sources",
":audio_effect_hal_client_sources",
+ "EffectsFactoryHalHidlEntry.cpp",
],
shared_libs: [
"android.hardware.audio.common@4.0",
@@ -98,6 +97,7 @@
srcs: [
":audio_core_hal_client_sources",
":audio_effect_hal_client_sources",
+ "EffectsFactoryHalHidlEntry.cpp",
],
shared_libs: [
"android.hardware.audio.common@5.0",
@@ -120,6 +120,7 @@
srcs: [
":audio_core_hal_client_sources",
":audio_effect_hal_client_sources",
+ "EffectsFactoryHalHidlEntry.cpp",
],
shared_libs: [
"android.hardware.audio.common@6.0",
@@ -136,20 +137,41 @@
]
}
+cc_library_static {
+ name: "libaudiohal.effect@7.0",
+ defaults: ["libaudiohal_default"],
+ srcs: [
+ ":audio_effect_hal_client_sources",
+ ],
+ static_libs: [
+ "android.hardware.audio.common@7.0",
+ "android.hardware.audio.common@7.0-util",
+ "android.hardware.audio.effect@7.0",
+ "android.hardware.audio.effect@7.0-util",
+ ],
+ cflags: [
+ "-DMAJOR_VERSION=7",
+ "-DMINOR_VERSION=0",
+ "-include common/all-versions/VersionMacro.h",
+ ]
+}
+
cc_library_shared {
name: "libaudiohal@7.0",
defaults: ["libaudiohal_default"],
srcs: [
":audio_core_hal_client_sources",
- ":audio_effect_hal_client_sources",
+ "EffectsFactoryHalHidlEntry.cpp",
],
- shared_libs: [
+ static_libs: [
"android.hardware.audio.common@7.0",
+ "android.hardware.audio.common@7.0-enums",
"android.hardware.audio.common@7.0-util",
"android.hardware.audio.effect@7.0",
"android.hardware.audio.effect@7.0-util",
"android.hardware.audio@7.0",
"android.hardware.audio@7.0-util",
+ "libaudiohal.effect@7.0",
],
cflags: [
"-DMAJOR_VERSION=7",
@@ -163,13 +185,19 @@
defaults: ["libaudiohal_default"],
srcs: [
":audio_core_hal_client_sources",
+ "EffectsFactoryHalHidlEntry.cpp",
],
- shared_libs: [
+ static_libs: [
"android.hardware.audio.common@7.0",
+ "android.hardware.audio.common@7.0-util",
+ "android.hardware.audio.common@7.1-enums",
"android.hardware.audio.common@7.1-util",
+ "android.hardware.audio.effect@7.0",
+ "android.hardware.audio.effect@7.0-util",
"android.hardware.audio@7.0",
"android.hardware.audio@7.1",
"android.hardware.audio@7.1-util",
+ "libaudiohal.effect@7.0",
],
cflags: [
"-DMAJOR_VERSION=7",
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.h b/media/libaudiohal/impl/ConversionHelperHidl.h
index 9368551..6e2c831 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.h
+++ b/media/libaudiohal/impl/ConversionHelperHidl.h
@@ -17,33 +17,25 @@
#ifndef ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
#define ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
-#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/types.h)
+#include <functional>
+
#include <hidl/HidlSupport.h>
#include <system/audio.h>
-#include <utils/String8.h>
-#include <utils/String16.h>
-#include <utils/Vector.h>
-
-using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::ParameterValue;
-using CoreResult = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
-
-using ::android::hardware::Return;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
namespace android {
+template<typename HalResult>
class ConversionHelperHidl {
protected:
- static status_t keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys);
- static status_t parametersFromHal(const String8& kvPairs, hidl_vec<ParameterValue> *hidlParams);
- static void parametersToHal(const hidl_vec<ParameterValue>& parameters, String8 *values);
- static void argsFromHal(const Vector<String16>& args, hidl_vec<hidl_string> *hidlArgs);
+ using HalResultConverter = std::function<status_t(const HalResult&)>;
+ const std::string mClassName;
- ConversionHelperHidl(const char* className);
+ ConversionHelperHidl(std::string_view className, HalResultConverter resultConv)
+ : mClassName(className), mResultConverter(resultConv) {}
template<typename R, typename T>
- status_t processReturn(const char* funcName, const Return<R>& ret, T *retval) {
+ status_t processReturn(const char* funcName,
+ const ::android::hardware::Return<R>& ret, T *retval) {
if (ret.isOk()) {
// This way it also works for enum class to unscoped enum conversion.
*retval = static_cast<T>(static_cast<R>(ret));
@@ -53,35 +45,40 @@
}
template<typename T>
- status_t processReturn(const char* funcName, const Return<T>& ret) {
+ status_t processReturn(const char* funcName, const ::android::hardware::Return<T>& ret) {
if (!ret.isOk()) {
emitError(funcName, ret.description().c_str());
}
return ret.isOk() ? OK : FAILED_TRANSACTION;
}
- status_t processReturn(const char* funcName, const Return<CoreResult>& ret) {
+ status_t processReturn(const char* funcName,
+ const ::android::hardware::Return<HalResult>& ret) {
if (!ret.isOk()) {
emitError(funcName, ret.description().c_str());
}
- return ret.isOk() ? analyzeResult(ret) : FAILED_TRANSACTION;
+ return ret.isOk() ? mResultConverter(ret) : FAILED_TRANSACTION;
}
template<typename T>
status_t processReturn(
- const char* funcName, const Return<T>& ret, CoreResult retval) {
+ const char* funcName, const ::android::hardware::Return<T>& ret, HalResult retval) {
if (!ret.isOk()) {
emitError(funcName, ret.description().c_str());
}
- return ret.isOk() ? analyzeResult(retval) : FAILED_TRANSACTION;
+ return ret.isOk() ? mResultConverter(retval) : FAILED_TRANSACTION;
+ }
+
+ const std::string& getClassName() const {
+ return mClassName;
}
private:
- const char* mClassName;
+ HalResultConverter mResultConverter;
- static status_t analyzeResult(const CoreResult& result);
-
- void emitError(const char* funcName, const char* description);
+ void emitError(const char* funcName, const char* description) {
+ ALOGE("%s %p %s: %s (from rpc)", mClassName.c_str(), this, funcName, description);
+ }
};
} // namespace android
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.cpp b/media/libaudiohal/impl/CoreConversionHelperHidl.cpp
similarity index 87%
rename from media/libaudiohal/impl/ConversionHelperHidl.cpp
rename to media/libaudiohal/impl/CoreConversionHelperHidl.cpp
index 1d34814..2ac8a42 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.cpp
+++ b/media/libaudiohal/impl/CoreConversionHelperHidl.cpp
@@ -21,7 +21,7 @@
#include <media/AudioParameter.h>
#include <utils/Log.h>
-#include "ConversionHelperHidl.h"
+#include "CoreConversionHelperHidl.h"
namespace android {
@@ -29,7 +29,8 @@
using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
// static
-status_t ConversionHelperHidl::keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys) {
+status_t CoreConversionHelperHidl::keysFromHal(
+ const String8& keys, hidl_vec<hidl_string> *hidlKeys) {
AudioParameter halKeys(keys);
if (halKeys.size() == 0) return BAD_VALUE;
hidlKeys->resize(halKeys.size());
@@ -74,7 +75,7 @@
}
// static
-status_t ConversionHelperHidl::parametersFromHal(
+status_t CoreConversionHelperHidl::parametersFromHal(
const String8& kvPairs, hidl_vec<ParameterValue> *hidlParams) {
AudioParameter params(kvPairs);
if (params.size() == 0) return BAD_VALUE;
@@ -90,7 +91,7 @@
}
// static
-void ConversionHelperHidl::parametersToHal(
+void CoreConversionHelperHidl::parametersToHal(
const hidl_vec<ParameterValue>& parameters, String8 *values) {
AudioParameter params;
for (size_t i = 0; i < parameters.size(); ++i) {
@@ -99,12 +100,11 @@
values->setTo(params.toString());
}
-ConversionHelperHidl::ConversionHelperHidl(const char* className)
- : mClassName(className) {
-}
+CoreConversionHelperHidl::CoreConversionHelperHidl(std::string_view className)
+ : ConversionHelperHidl<CoreResult>(className, analyzeResult) {}
// static
-void ConversionHelperHidl::argsFromHal(
+void CoreConversionHelperHidl::argsFromHal(
const Vector<String16>& args, hidl_vec<hidl_string> *hidlArgs) {
hidlArgs->resize(args.size());
for (size_t i = 0; i < args.size(); ++i) {
@@ -113,19 +113,15 @@
}
// static
-status_t ConversionHelperHidl::analyzeResult(const Result& result) {
+status_t CoreConversionHelperHidl::analyzeResult(const CoreResult& result) {
switch (result) {
case Result::OK: return OK;
case Result::INVALID_ARGUMENTS: return BAD_VALUE;
case Result::INVALID_STATE: return NOT_ENOUGH_DATA;
case Result::NOT_INITIALIZED: return NO_INIT;
case Result::NOT_SUPPORTED: return INVALID_OPERATION;
- default: return NO_INIT;
}
-}
-
-void ConversionHelperHidl::emitError(const char* funcName, const char* description) {
- ALOGE("%s %p %s: %s (from rpc)", mClassName, this, funcName, description);
+ return NO_INIT;
}
} // namespace android
diff --git a/media/libaudiohal/impl/CoreConversionHelperHidl.h b/media/libaudiohal/impl/CoreConversionHelperHidl.h
new file mode 100644
index 0000000..a4d76f3
--- /dev/null
+++ b/media/libaudiohal/impl/CoreConversionHelperHidl.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_CORE_CONVERSION_HELPER_HIDL_H
+#define ANDROID_HARDWARE_CORE_CONVERSION_HELPER_HIDL_H
+
+#include "ConversionHelperHidl.h"
+
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/types.h)
+#include <utils/String8.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::ParameterValue;
+using CoreResult = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
+
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+
+namespace android {
+
+class CoreConversionHelperHidl : public ConversionHelperHidl<CoreResult> {
+ protected:
+ static status_t keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys);
+ static status_t parametersFromHal(const String8& kvPairs, hidl_vec<ParameterValue> *hidlParams);
+ static void parametersToHal(const hidl_vec<ParameterValue>& parameters, String8 *values);
+ static void argsFromHal(const Vector<String16>& args, hidl_vec<hidl_string> *hidlArgs);
+
+ CoreConversionHelperHidl(std::string_view className);
+
+ private:
+ static status_t analyzeResult(const CoreResult& result);
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_CORE_CONVERSION_HELPER_HIDL_H
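
The new header shows the shape of the refactor: ConversionHelperHidl is now a template that takes a per-family result converter, and each HAL family derives a thin helper (CoreConversionHelperHidl here, EffectConversionHelperHidl below). A minimal sketch of that wiring for a hypothetical third family, with a simplified result enum standing in for a real HIDL type:

    // Assumes "ConversionHelperHidl.h" and the android namespace, as in the
    // headers above; FakeResult is a stand-in for a generated HIDL Result enum.
    enum class FakeResult { OK, INVALID_ARGUMENTS, NOT_INITIALIZED };

    class FakeConversionHelperHidl : public ConversionHelperHidl<FakeResult> {
      protected:
        static status_t analyzeResult(const FakeResult& result) {
            switch (result) {
                case FakeResult::OK:                return OK;
                case FakeResult::INVALID_ARGUMENTS: return BAD_VALUE;
                case FakeResult::NOT_INITIALIZED:   return NO_INIT;
            }
            return NO_INIT;
        }

        explicit FakeConversionHelperHidl(std::string_view className)
            : ConversionHelperHidl<FakeResult>(className, analyzeResult) {}
    };
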
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 16863e4..0cdf621 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -23,6 +23,7 @@
#include <cutils/properties.h>
#include <hwbinder/IPCThreadState.h>
#include <media/AudioContainers.h>
+#include <mediautils/TimeCheck.h>
#include <utils/Log.h>
#include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
@@ -45,13 +46,16 @@
using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
+#define TIME_CHECK() auto timeCheck = \
+ mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
+
DeviceHalHidl::DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device)
- : ConversionHelperHidl("Device"), mDevice(device) {
+ : CoreConversionHelperHidl("DeviceHalHidl"), mDevice(device) {
}
DeviceHalHidl::DeviceHalHidl(
const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& device)
- : ConversionHelperHidl("Device"),
+ : CoreConversionHelperHidl("DeviceHalHidl"),
#if MAJOR_VERSION <= 6 || (MAJOR_VERSION == 7 && MINOR_VERSION == 0)
mDevice(device),
#endif
@@ -84,22 +88,26 @@
}
status_t DeviceHalHidl::initCheck() {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
return processReturn("initCheck", mDevice->initCheck());
}
status_t DeviceHalHidl::setVoiceVolume(float volume) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
if (mPrimaryDevice == 0) return INVALID_OPERATION;
return processReturn("setVoiceVolume", mPrimaryDevice->setVoiceVolume(volume));
}
status_t DeviceHalHidl::setMasterVolume(float volume) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
return processReturn("setMasterVolume", mDevice->setMasterVolume(volume));
}
status_t DeviceHalHidl::getMasterVolume(float *volume) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
Result retval;
Return<void> ret = mDevice->getMasterVolume(
@@ -113,17 +121,20 @@
}
status_t DeviceHalHidl::setMode(audio_mode_t mode) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
if (mPrimaryDevice == 0) return INVALID_OPERATION;
return processReturn("setMode", mPrimaryDevice->setMode(AudioMode(mode)));
}
status_t DeviceHalHidl::setMicMute(bool state) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
return processReturn("setMicMute", mDevice->setMicMute(state));
}
status_t DeviceHalHidl::getMicMute(bool *state) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
Result retval;
Return<void> ret = mDevice->getMicMute(
@@ -137,11 +148,13 @@
}
status_t DeviceHalHidl::setMasterMute(bool state) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
return processReturn("setMasterMute", mDevice->setMasterMute(state));
}
status_t DeviceHalHidl::getMasterMute(bool *state) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
Result retval;
Return<void> ret = mDevice->getMasterMute(
@@ -155,6 +168,7 @@
}
status_t DeviceHalHidl::setParameters(const String8& kvPairs) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
hidl_vec<ParameterValue> hidlParams;
status_t status = parametersFromHal(kvPairs, &hidlParams);
@@ -165,6 +179,7 @@
}
status_t DeviceHalHidl::getParameters(const String8& keys, String8 *values) {
+ TIME_CHECK();
values->clear();
if (mDevice == 0) return NO_INIT;
hidl_vec<hidl_string> hidlKeys;
@@ -185,6 +200,7 @@
status_t DeviceHalHidl::getInputBufferSize(
const struct audio_config *config, size_t *size) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
AudioConfig hidlConfig;
HidlUtils::audioConfigFromHal(*config, true /*isInput*/, &hidlConfig);
@@ -207,6 +223,7 @@
struct audio_config *config,
const char *address,
sp<StreamOutHalInterface> *outStream) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
DeviceAddress hidlDevice;
if (status_t status = CoreUtils::deviceAddressFromHal(deviceType, address, &hidlDevice);
@@ -263,6 +280,7 @@
audio_devices_t outputDevice,
const char *outputDeviceAddress,
sp<StreamInHalInterface> *inStream) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
DeviceAddress hidlDevice;
if (status_t status = CoreUtils::deviceAddressFromHal(devices, address, &hidlDevice);
@@ -326,6 +344,7 @@
}
status_t DeviceHalHidl::supportsAudioPatches(bool *supportsPatches) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
return processReturn("supportsAudioPatches", mDevice->supportsAudioPatches(), supportsPatches);
}
@@ -336,6 +355,7 @@
unsigned int num_sinks,
const struct audio_port_config *sinks,
audio_patch_handle_t *patch) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
if (patch == nullptr) return BAD_VALUE;
@@ -381,6 +401,7 @@
}
status_t DeviceHalHidl::releaseAudioPatch(audio_patch_handle_t patch) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
return processReturn("releaseAudioPatch", mDevice->releaseAudioPatch(patch));
}
@@ -403,10 +424,12 @@
}
status_t DeviceHalHidl::getAudioPort(struct audio_port *port) {
+ TIME_CHECK();
return getAudioPortImpl(port);
}
status_t DeviceHalHidl::getAudioPort(struct audio_port_v7 *port) {
+ TIME_CHECK();
#if MAJOR_VERSION >= 7
return getAudioPortImpl(port);
#else
@@ -427,6 +450,7 @@
}
status_t DeviceHalHidl::setAudioPortConfig(const struct audio_port_config *config) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
AudioPortConfig hidlConfig;
HidlUtils::audioPortConfigFromHal(*config, &hidlConfig);
@@ -441,6 +465,7 @@
}
#elif MAJOR_VERSION >= 4
status_t DeviceHalHidl::getMicrophones(std::vector<media::MicrophoneInfo> *microphonesInfo) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
Result retval;
Return<void> ret = mDevice->getMicrophones(
@@ -461,6 +486,7 @@
#if MAJOR_VERSION >= 6
status_t DeviceHalHidl::addDeviceEffect(
audio_port_handle_t device, sp<EffectHalInterface> effect) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
return processReturn("addDeviceEffect", mDevice->addDeviceEffect(
static_cast<AudioPortHandle>(device), effect->effectId()));
@@ -475,6 +501,7 @@
#if MAJOR_VERSION >= 6
status_t DeviceHalHidl::removeDeviceEffect(
audio_port_handle_t device, sp<EffectHalInterface> effect) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
return processReturn("removeDeviceEffect", mDevice->removeDeviceEffect(
static_cast<AudioPortHandle>(device), effect->effectId()));
@@ -487,6 +514,7 @@
#endif
status_t DeviceHalHidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
if (supportsSetConnectedState7_1) {
@@ -513,6 +541,7 @@
}
error::Result<audio_hw_sync_t> DeviceHalHidl::getHwAvSync() {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
audio_hw_sync_t value;
Result result;
@@ -525,6 +554,7 @@
}
status_t DeviceHalHidl::dump(int fd, const Vector<String16>& args) {
+ TIME_CHECK();
if (mDevice == 0) return NO_INIT;
native_handle_t* hidlHandle = native_handle_create(1, 0);
hidlHandle->data[0] = fd;
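
Each HAL entry point above now opens with TIME_CHECK(), which creates a scoped object keyed by class and method name via mediautils::makeTimeCheckStatsForClassMethod. The stand-alone sketch below only illustrates the RAII timing idea; the real mediautils::TimeCheck additionally feeds media metrics and can flag hung HAL calls.

    #include <chrono>
    #include <cstdio>
    #include <string>

    // Simplified stand-in for the scoped helper created by TIME_CHECK(): it
    // measures how long the enclosing HAL method ran and reports it on exit.
    class ScopedMethodTimer {
      public:
        explicit ScopedMethodTimer(std::string tag)
            : mTag(std::move(tag)), mStart(std::chrono::steady_clock::now()) {}
        ~ScopedMethodTimer() {
            const auto elapsedMs = std::chrono::duration_cast<std::chrono::milliseconds>(
                    std::chrono::steady_clock::now() - mStart).count();
            std::printf("%s took %lld ms\n", mTag.c_str(),
                        static_cast<long long>(elapsedMs));
        }
      private:
        const std::string mTag;
        const std::chrono::steady_clock::time_point mStart;
    };

    int main() {
        ScopedMethodTimer timeCheck("DeviceHalHidl::setParameters");
        // ... the guarded HAL call would run here ...
        return 0;
    }
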
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 8a97a55..f6519b6 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -22,11 +22,11 @@
#include <media/audiohal/DeviceHalInterface.h>
#include <media/audiohal/EffectHalInterface.h>
-#include "ConversionHelperHidl.h"
+#include "CoreConversionHelperHidl.h"
namespace android {
-class DeviceHalHidl : public DeviceHalInterface, public ConversionHelperHidl
+class DeviceHalHidl : public DeviceHalInterface, public CoreConversionHelperHidl
{
public:
// Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index 8f3c907..4069a6b 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -26,7 +26,6 @@
#include <media/audiohal/hidl/HalDeathHandler.h>
#include <utils/Log.h>
-#include "ConversionHelperHidl.h"
#include "DeviceHalHidl.h"
#include "DevicesFactoryHalHidl.h"
diff --git a/media/libaudiohal/impl/EffectBufferHalHidl.cpp b/media/libaudiohal/impl/EffectBufferHalHidl.cpp
index 65297af..9d5f72e 100644
--- a/media/libaudiohal/impl/EffectBufferHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectBufferHalHidl.cpp
@@ -23,7 +23,6 @@
#include <hidlmemory/mapping.h>
#include <utils/Log.h>
-#include "ConversionHelperHidl.h"
#include "EffectBufferHalHidl.h"
using ::android::hardware::Return;
diff --git a/media/libaudiohal/impl/EffectConversionHelperHidl.cpp b/media/libaudiohal/impl/EffectConversionHelperHidl.cpp
new file mode 100644
index 0000000..9e4f79c
--- /dev/null
+++ b/media/libaudiohal/impl/EffectConversionHelperHidl.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "HalHidl"
+#include <utils/Log.h>
+
+#include "EffectConversionHelperHidl.h"
+
+namespace android {
+
+EffectConversionHelperHidl::EffectConversionHelperHidl(std::string_view className)
+ : ConversionHelperHidl<EffectResult>(className, analyzeResult) {
+}
+
+// static
+status_t EffectConversionHelperHidl::analyzeResult(const EffectResult& result) {
+ switch (result) {
+ case EffectResult::OK: return OK;
+ case EffectResult::INVALID_ARGUMENTS: return BAD_VALUE;
+ case EffectResult::INVALID_STATE: return NOT_ENOUGH_DATA;
+ case EffectResult::NOT_INITIALIZED: return NO_INIT;
+ case EffectResult::NOT_SUPPORTED: return INVALID_OPERATION;
+ case EffectResult::RESULT_TOO_BIG: return NO_MEMORY;
+ }
+ return NO_INIT;
+}
+
+} // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperHidl.h b/media/libaudiohal/impl/EffectConversionHelperHidl.h
new file mode 100644
index 0000000..4371d12
--- /dev/null
+++ b/media/libaudiohal/impl/EffectConversionHelperHidl.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_EFFECT_CONVERSION_HELPER_HIDL_H
+#define ANDROID_HARDWARE_EFFECT_CONVERSION_HELPER_HIDL_H
+
+#include "ConversionHelperHidl.h"
+
+#include PATH(android/hardware/audio/effect/FILE_VERSION/types.h)
+
+using EffectResult = ::android::hardware::audio::effect::CPP_VERSION::Result;
+
+namespace android {
+
+class EffectConversionHelperHidl : public ConversionHelperHidl<EffectResult> {
+ protected:
+ static status_t analyzeResult(const EffectResult& result);
+
+ EffectConversionHelperHidl(std::string_view className);
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_EFFECT_CONVERSION_HELPER_HIDL_H
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index 1bb1e5f..fdfe225 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -21,6 +21,7 @@
#include <cutils/native_handle.h>
#include <hwbinder/IPCThreadState.h>
#include <media/EffectsFactoryApi.h>
+#include <mediautils/TimeCheck.h>
#include <utils/Log.h>
#include <util/EffectUtils.h>
@@ -40,8 +41,12 @@
using namespace ::android::hardware::audio::common::CPP_VERSION;
using namespace ::android::hardware::audio::effect::CPP_VERSION;
+#define TIME_CHECK() auto timeCheck = \
+ mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
+
EffectHalHidl::EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId)
- : mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
+ : EffectConversionHelperHidl("EffectHalHidl"),
+ mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
effect_descriptor_t halDescriptor{};
if (EffectHalHidl::getDescriptor(&halDescriptor) == NO_ERROR) {
mIsInput = (halDescriptor.flags & EFFECT_FLAG_TYPE_PRE_PROC) == EFFECT_FLAG_TYPE_PRE_PROC;
@@ -59,20 +64,9 @@
}
}
-// static
-status_t EffectHalHidl::analyzeResult(const Result& result) {
- switch (result) {
- case Result::OK: return OK;
- case Result::INVALID_ARGUMENTS: return BAD_VALUE;
- case Result::INVALID_STATE: return NOT_ENOUGH_DATA;
- case Result::NOT_INITIALIZED: return NO_INIT;
- case Result::NOT_SUPPORTED: return INVALID_OPERATION;
- case Result::RESULT_TOO_BIG: return NO_MEMORY;
- default: return NO_INIT;
- }
-}
-
status_t EffectHalHidl::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
+ TIME_CHECK();
+
if (!mBuffersChanged) {
if (buffer.get() == nullptr || mInBuffer.get() == nullptr) {
mBuffersChanged = buffer.get() != mInBuffer.get();
@@ -85,6 +79,8 @@
}
status_t EffectHalHidl::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
+ TIME_CHECK();
+
if (!mBuffersChanged) {
if (buffer.get() == nullptr || mOutBuffer.get() == nullptr) {
mBuffersChanged = buffer.get() != mOutBuffer.get();
@@ -97,10 +93,14 @@
}
status_t EffectHalHidl::process() {
+ TIME_CHECK();
+
return processImpl(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_PROCESS));
}
status_t EffectHalHidl::processReverse() {
+ TIME_CHECK();
+
return processImpl(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_PROCESS_REVERSE));
}
@@ -183,6 +183,8 @@
status_t EffectHalHidl::command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
uint32_t *replySize, void *pReplyData) {
+ TIME_CHECK();
+
if (mEffect == 0) return NO_INIT;
// Special cases.
@@ -214,6 +216,8 @@
}
status_t EffectHalHidl::getDescriptor(effect_descriptor_t *pDescriptor) {
+ TIME_CHECK();
+
if (mEffect == 0) return NO_INIT;
Result retval = Result::NOT_INITIALIZED;
Return<void> ret = mEffect->getDescriptor(
@@ -227,12 +231,16 @@
}
status_t EffectHalHidl::close() {
+ TIME_CHECK();
+
if (mEffect == 0) return NO_INIT;
Return<Result> ret = mEffect->close();
return ret.isOk() ? analyzeResult(ret) : FAILED_TRANSACTION;
}
status_t EffectHalHidl::dump(int fd) {
+ TIME_CHECK();
+
if (mEffect == 0) return NO_INIT;
native_handle_t* hidlHandle = native_handle_create(1, 0);
hidlHandle->data[0] = fd;
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index 07745db..e139768 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -23,6 +23,8 @@
#include <fmq/MessageQueue.h>
#include <system/audio_effect.h>
+#include "EffectConversionHelperHidl.h"
+
using ::android::hardware::EventFlag;
using ::android::hardware::MessageQueue;
@@ -31,7 +33,7 @@
using namespace ::android::hardware::audio::effect::CPP_VERSION;
-class EffectHalHidl : public EffectHalInterface
+class EffectHalHidl : public EffectHalInterface, public EffectConversionHelperHidl
{
public:
// Set the input buffer.
@@ -77,8 +79,6 @@
EventFlag* mEfGroup;
bool mIsInput = false;
- static status_t analyzeResult(const Result& result);
-
// Can not be constructed directly by clients.
EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId);
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index 90954b2..d7217fc 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -21,8 +21,9 @@
#include <UuidUtils.h>
#include <util/EffectUtils.h>
+#include <utils/Log.h>
-#include "ConversionHelperHidl.h"
+#include "EffectConversionHelperHidl.h"
#include "EffectBufferHalHidl.h"
#include "EffectHalHidl.h"
#include "EffectsFactoryHalHidl.h"
@@ -38,7 +39,7 @@
using namespace ::android::hardware::audio::effect::CPP_VERSION;
EffectsFactoryHalHidl::EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory)
- : ConversionHelperHidl("EffectsFactory") {
+ : EffectConversionHelperHidl("EffectsFactory") {
ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
mEffectsFactory = effectsFactory;
}
@@ -205,7 +206,10 @@
} // namespace effect
-extern "C" __attribute__((visibility("default"))) void* createIEffectsFactory() {
+// When a shared library is built from a static library, even explicitly
+// exported symbols of the static library are optimized out unless they are
+// actually referenced by the shared library. See EffectsFactoryHalHidlEntry.cpp.
+extern "C" void* createIEffectsFactoryImpl() {
auto service = hardware::audio::effect::CPP_VERSION::IEffectsFactory::getService();
return service ? new effect::EffectsFactoryHalHidl(service) : nullptr;
}
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.h b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
index 7491133..e1882e1 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
@@ -20,16 +20,15 @@
#include PATH(android/hardware/audio/effect/FILE_VERSION/IEffectsFactory.h)
#include <media/audiohal/EffectsFactoryHalInterface.h>
-#include "ConversionHelperHidl.h"
+#include "EffectConversionHelperHidl.h"
namespace android {
namespace effect {
using ::android::hardware::hidl_vec;
-using ::android::ConversionHelperHidl;
using namespace ::android::hardware::audio::effect::CPP_VERSION;
-class EffectsFactoryHalHidl : public EffectsFactoryHalInterface, public ConversionHelperHidl
+class EffectsFactoryHalHidl : public EffectsFactoryHalInterface, public EffectConversionHelperHidl
{
public:
EffectsFactoryHalHidl(sp<IEffectsFactory> effectsFactory);
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp
new file mode 100644
index 0000000..2c6f2c6
--- /dev/null
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidlEntry.cpp
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+extern "C" void* createIEffectsFactoryImpl();
+
+extern "C" __attribute__((visibility("default"))) void* createIEffectsFactory() {
+ return createIEffectsFactoryImpl();
+}
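
This entry translation unit exists only so that createIEffectsFactory stays in the shared library's dynamic symbol table: the loader resolves the factory entry point by name at run time (the real lookup happens inside createHalService in FactoryHalHidl.cpp). A rough sketch of that resolution, with the library name assumed for illustration:

    #include <dlfcn.h>
    #include <cstdio>

    int main() {
        // Library name is illustrative; the loader derives it from the selected
        // HAL version, e.g. libaudiohal@7.1.so.
        void* handle = dlopen("libaudiohal@7.1.so", RTLD_NOW);
        if (handle == nullptr) {
            std::fprintf(stderr, "dlopen failed: %s\n", dlerror());
            return 1;
        }
        using EntryPoint = void* (*)();
        auto createFactory =
                reinterpret_cast<EntryPoint>(dlsym(handle, "createIEffectsFactory"));
        if (createFactory != nullptr) {
            void* factory = createFactory();  // the caller wraps this in an sp<>
            (void)factory;
        }
        dlclose(handle);
        return 0;
    }
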
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 8ba0f72..021ec51 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -22,6 +22,7 @@
#include <media/AudioParameter.h>
#include <mediautils/memory.h>
#include <mediautils/SchedulingPolicyService.h>
+#include <mediautils/TimeCheck.h>
#include <utils/Log.h>
#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamOutCallback.h)
@@ -45,8 +46,11 @@
using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
-StreamHalHidl::StreamHalHidl(IStream *stream)
- : ConversionHelperHidl("Stream"),
+#define TIME_CHECK() auto TimeCheck = \
+ mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
+
+StreamHalHidl::StreamHalHidl(std::string_view className, IStream *stream)
+ : CoreConversionHelperHidl(className),
mStream(stream),
mHalThreadPriority(HAL_THREAD_PRIORITY_DEFAULT),
mCachedBufferSize(0){
@@ -67,6 +71,7 @@
}
status_t StreamHalHidl::getBufferSize(size_t *size) {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
status_t status = processReturn("getBufferSize", mStream->getBufferSize(), size);
if (status == OK) {
@@ -76,6 +81,7 @@
}
status_t StreamHalHidl::getAudioProperties(audio_config_base_t *configBase) {
+ TIME_CHECK();
*configBase = AUDIO_CONFIG_BASE_INITIALIZER;
if (!mStream) return NO_INIT;
#if MAJOR_VERSION <= 6
@@ -105,6 +111,7 @@
}
status_t StreamHalHidl::setParameters(const String8& kvPairs) {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
hidl_vec<ParameterValue> hidlParams;
status_t status = parametersFromHal(kvPairs, &hidlParams);
@@ -114,6 +121,7 @@
}
status_t StreamHalHidl::getParameters(const String8& keys, String8 *values) {
+ TIME_CHECK();
values->clear();
if (!mStream) return NO_INIT;
hidl_vec<hidl_string> hidlKeys;
@@ -134,21 +142,25 @@
}
status_t StreamHalHidl::addEffect(sp<EffectHalInterface> effect) {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
return processReturn("addEffect", mStream->addEffect(effect->effectId()));
}
status_t StreamHalHidl::removeEffect(sp<EffectHalInterface> effect) {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
return processReturn("removeEffect", mStream->removeEffect(effect->effectId()));
}
status_t StreamHalHidl::standby() {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
return processReturn("standby", mStream->standby());
}
status_t StreamHalHidl::dump(int fd, const Vector<String16>& args) {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
native_handle_t* hidlHandle = native_handle_create(1, 0);
hidlHandle->data[0] = fd;
@@ -173,17 +185,20 @@
}
status_t StreamHalHidl::start() {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
return processReturn("start", mStream->start());
}
status_t StreamHalHidl::stop() {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
return processReturn("stop", mStream->stop());
}
status_t StreamHalHidl::createMmapBuffer(int32_t minSizeFrames,
struct audio_mmap_buffer_info *info) {
+ TIME_CHECK();
Result retval;
Return<void> ret = mStream->createMmapBuffer(
minSizeFrames,
@@ -216,6 +231,7 @@
}
status_t StreamHalHidl::getMmapPosition(struct audio_mmap_position *position) {
+ TIME_CHECK();
Result retval;
Return<void> ret = mStream->getMmapPosition(
[&](Result r, const MmapPosition& hidlPosition) {
@@ -244,7 +260,7 @@
status_t StreamHalHidl::getHalPid(pid_t *pid) {
using ::android::hidl::base::V1_0::DebugInfo;
using ::android::hidl::manager::V1_0::IServiceManager;
-
+ TIME_CHECK();
DebugInfo debugInfo;
auto ret = mStream->getDebugInfo([&] (const auto &info) {
debugInfo = info;
@@ -275,6 +291,7 @@
status_t StreamHalHidl::legacyCreateAudioPatch(const struct audio_port_config& port,
std::optional<audio_source_t> source,
audio_devices_t type) {
+ TIME_CHECK();
LOG_ALWAYS_FATAL_IF(port.type != AUDIO_PORT_TYPE_DEVICE, "port type must be device");
unique_malloced_ptr<char> address;
if (strcmp(port.ext.device.address, "") != 0) {
@@ -293,6 +310,7 @@
}
status_t StreamHalHidl::legacyReleaseAudioPatch() {
+ TIME_CHECK();
AudioParameter param;
param.addInt(String8(AudioParameter::keyRouting), 0);
return setParameters(param.toString());
@@ -352,7 +370,8 @@
StreamOutHalHidl::StreamOutHalHidl(
const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream)
- : StreamHalHidl(stream.get()), mStream(stream), mWriterClient(0), mEfGroup(nullptr) {
+ : StreamHalHidl("StreamOutHalHidl", stream.get())
+ , mStream(stream), mWriterClient(0), mEfGroup(nullptr) {
}
StreamOutHalHidl::~StreamOutHalHidl() {
@@ -376,11 +395,13 @@
}
status_t StreamOutHalHidl::getFrameSize(size_t *size) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn("getFrameSize", mStream->getFrameSize(), size);
}
status_t StreamOutHalHidl::getLatency(uint32_t *latency) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
if (mWriterClient == gettid() && mCommandMQ) {
return callWriterThread(
@@ -394,12 +415,14 @@
}
status_t StreamOutHalHidl::setVolume(float left, float right) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn("setVolume", mStream->setVolume(left, right));
}
#if MAJOR_VERSION == 2
status_t StreamOutHalHidl::selectPresentation(int presentationId, int programId) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
std::vector<ParameterValue> parameters;
String8 halParameters;
@@ -410,6 +433,7 @@
}
#elif MAJOR_VERSION >= 4
status_t StreamOutHalHidl::selectPresentation(int presentationId, int programId) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn("selectPresentation",
mStream->selectPresentation(presentationId, programId));
@@ -417,6 +441,7 @@
#endif
status_t StreamOutHalHidl::write(const void *buffer, size_t bytes, size_t *written) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
*written = 0;
@@ -562,6 +587,7 @@
}
status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
Result retval;
Return<void> ret = mStream->getRenderPosition(
@@ -575,6 +601,7 @@
}
status_t StreamOutHalHidl::getNextWriteTimestamp(int64_t *timestamp) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
Result retval;
Return<void> ret = mStream->getNextWriteTimestamp(
@@ -588,6 +615,7 @@
}
status_t StreamOutHalHidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
status_t status = processReturn(
"setCallback", mStream->setCallback(new StreamOutCallback(this)));
@@ -598,6 +626,7 @@
}
status_t StreamOutHalHidl::supportsPauseAndResume(bool *supportsPause, bool *supportsResume) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
Return<void> ret = mStream->supportsPauseAndResume(
[&](bool p, bool r) {
@@ -608,32 +637,38 @@
}
status_t StreamOutHalHidl::pause() {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn("pause", mStream->pause());
}
status_t StreamOutHalHidl::resume() {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn("pause", mStream->resume());
}
status_t StreamOutHalHidl::supportsDrain(bool *supportsDrain) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn("supportsDrain", mStream->supportsDrain(), supportsDrain);
}
status_t StreamOutHalHidl::drain(bool earlyNotify) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn(
"drain", mStream->drain(earlyNotify ? AudioDrain::EARLY_NOTIFY : AudioDrain::ALL));
}
status_t StreamOutHalHidl::flush() {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn("pause", mStream->flush());
}
status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
if (mWriterClient == gettid() && mCommandMQ) {
return callWriterThread(
@@ -667,6 +702,7 @@
#elif MAJOR_VERSION >= 4
status_t StreamOutHalHidl::updateSourceMetadata(
const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
+ TIME_CHECK();
#if MAJOR_VERSION == 4
::android::hardware::audio::CORE_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
#else
@@ -717,6 +753,7 @@
#else
status_t StreamOutHalHidl::getDualMonoMode(audio_dual_mono_mode_t* mode) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
Result retval;
Return<void> ret = mStream->getDualMonoMode(
@@ -730,12 +767,14 @@
}
status_t StreamOutHalHidl::setDualMonoMode(audio_dual_mono_mode_t mode) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn(
"setDualMonoMode", mStream->setDualMonoMode(static_cast<DualMonoMode>(mode)));
}
status_t StreamOutHalHidl::getAudioDescriptionMixLevel(float* leveldB) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
Result retval;
Return<void> ret = mStream->getAudioDescriptionMixLevel(
@@ -749,12 +788,14 @@
}
status_t StreamOutHalHidl::setAudioDescriptionMixLevel(float leveldB) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn(
"setAudioDescriptionMixLevel", mStream->setAudioDescriptionMixLevel(leveldB));
}
status_t StreamOutHalHidl::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
Result retval;
Return<void> ret = mStream->getPlaybackRateParameters(
@@ -775,6 +816,7 @@
}
status_t StreamOutHalHidl::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn(
"setPlaybackRateParameters", mStream->setPlaybackRateParameters(
@@ -809,6 +851,7 @@
status_t StreamOutHalHidl::setEventCallback(
const sp<StreamOutHalInterfaceEventCallback>& callback) {
+ TIME_CHECK();
if (mStream == nullptr) return NO_INIT;
mEventCallback = callback;
status_t status = processReturn(
@@ -823,12 +866,14 @@
using hardware::audio::V7_1::LatencyMode;
status_t StreamOutHalHidl::setLatencyMode(audio_latency_mode_t mode) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn(
"setLatencyMode", mStream->setLatencyMode(static_cast<LatencyMode>(mode)));
};
status_t StreamOutHalHidl::getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
Result retval;
Return<void> ret = mStream->getRecommendedLatencyModes(
@@ -869,6 +914,7 @@
status_t StreamOutHalHidl::setLatencyModeCallback(
const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) {
+ TIME_CHECK();
if (mStream == nullptr) return NO_INIT;
mLatencyModeCallback = callback;
@@ -940,7 +986,8 @@
StreamInHalHidl::StreamInHalHidl(
const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& stream)
- : StreamHalHidl(stream.get()), mStream(stream), mReaderClient(0), mEfGroup(nullptr) {
+ : StreamHalHidl("StreamInHalHidl", stream.get())
+ , mStream(stream), mReaderClient(0), mEfGroup(nullptr) {
}
StreamInHalHidl::~StreamInHalHidl() {
@@ -953,16 +1000,19 @@
}
status_t StreamInHalHidl::getFrameSize(size_t *size) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn("getFrameSize", mStream->getFrameSize(), size);
}
status_t StreamInHalHidl::setGain(float gain) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn("setGain", mStream->setGain(gain));
}
status_t StreamInHalHidl::read(void *buffer, size_t bytes, size_t *read) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
*read = 0;
@@ -1090,11 +1140,13 @@
}
status_t StreamInHalHidl::getInputFramesLost(uint32_t *framesLost) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
return processReturn("getInputFramesLost", mStream->getInputFramesLost(), framesLost);
}
status_t StreamInHalHidl::getCapturePosition(int64_t *frames, int64_t *time) {
+ TIME_CHECK();
if (mStream == 0) return NO_INIT;
if (mReaderClient == gettid() && mCommandMQ) {
ReadParameters params;
@@ -1134,6 +1186,7 @@
#elif MAJOR_VERSION >= 4
status_t StreamInHalHidl::getActiveMicrophones(
std::vector<media::MicrophoneInfo> *microphonesInfo) {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
Result retval;
Return<void> ret = mStream->getActiveMicrophones(
@@ -1152,6 +1205,7 @@
status_t StreamInHalHidl::updateSinkMetadata(const
StreamInHalInterface::SinkMetadata& sinkMetadata) {
+ TIME_CHECK();
#if MAJOR_VERSION == 4
::android::hardware::audio::CORE_TYPES_CPP_VERSION::SinkMetadata hidlMetadata;
#else
@@ -1179,12 +1233,14 @@
}
#else
status_t StreamInHalHidl::setPreferredMicrophoneDirection(audio_microphone_direction_t direction) {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
return processReturn("setPreferredMicrophoneDirection",
mStream->setMicrophoneDirection(static_cast<MicrophoneDirection>(direction)));
}
status_t StreamInHalHidl::setPreferredMicrophoneFieldDimension(float zoom) {
+ TIME_CHECK();
if (!mStream) return NO_INIT;
return processReturn("setPreferredMicrophoneFieldDimension",
mStream->setMicrophoneFieldDimension(zoom));
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 4e80e88..54fbefe 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -28,7 +28,7 @@
#include <media/audiohal/StreamHalInterface.h>
#include <mediautils/Synchronization.h>
-#include "ConversionHelperHidl.h"
+#include "CoreConversionHelperHidl.h"
#include "StreamPowerLog.h"
using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStream;
@@ -45,7 +45,7 @@
class DeviceHalHidl;
-class StreamHalHidl : public virtual StreamHalInterface, public ConversionHelperHidl
+class StreamHalHidl : public virtual StreamHalInterface, public CoreConversionHelperHidl
{
public:
// Return size of input/output buffer in bytes for this stream - eg. 4800.
@@ -97,7 +97,7 @@
protected:
// Subclasses can not be constructed directly by clients.
- explicit StreamHalHidl(IStream *stream);
+ StreamHalHidl(std::string_view className, IStream *stream);
~StreamHalHidl() override;
diff --git a/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h b/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h
index d353ed0..866dd3e 100644
--- a/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h
+++ b/media/libaudiohal/include/media/audiohal/FactoryHalHidl.h
@@ -18,21 +18,42 @@
#define ANDROID_HARDWARE_FACTORY_HAL_HIDL_H
#include <string>
+#include <utility>
#include <utils/StrongPointer.h>
namespace android {
+// The pair of the interface's package name and the interface name,
+// e.g. <"android.hardware.audio", "IDevicesFactory">.
+// Splitting is used for easier construction of versioned names (FQNs).
+using InterfaceName = std::pair<std::string, std::string>;
+
namespace detail {
-void* createPreferredImpl(const std::string& package, const std::string& interface);
+void* createPreferredImpl(const InterfaceName& iface, const InterfaceName& siblingIface);
} // namespace detail
-/** @Return the preferred available implementation or nullptr if none are available. */
+/**
+ * Create a client for the "preferred" (most recent) implementation of an interface
+ * by loading the appropriate version of the shared library containing the implementation.
+ *
+ * In the audio HAL, there are two families of interfaces: core and effects. Both are
+ * packed into the same shared library for memory efficiency. Since the core and the effects
+ * interfaces can have different minor versions on the device, in order to avoid loading multiple
+ * shared libraries the loader function considers which of the two interfaces has the most
+ * recent version. Thus, a pair of interface names must be passed in.
+ *
+ * @param iface the interface that needs to be created.
+ * @param siblingIface the interface which occupies the same shared library.
+ * @return the preferred available implementation or nullptr if none are available.
+ */
template <class Interface>
-static sp<Interface> createPreferredImpl(const std::string& package, const std::string& interface) {
- return sp<Interface>{static_cast<Interface*>(detail::createPreferredImpl(package, interface))};
+static sp<Interface> createPreferredImpl(
+ const InterfaceName& iface, const InterfaceName& siblingIface) {
+ return sp<Interface>{
+ static_cast<Interface*>(detail::createPreferredImpl(iface, siblingIface))};
}
} // namespace android
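
InterfaceName splits the package from the interface so that versioned fully-qualified names can be assembled when probing for a service. The sketch below shows the kind of FQN this enables, assuming the conventional HIDL package@version::Interface format; the real construction lives in FactoryHalHidl.cpp.

    #include <string>
    #include <utility>

    using InterfaceName = std::pair<std::string, std::string>;

    // Builds e.g. "android.hardware.audio@7.1::IDevicesFactory".
    static std::string makeFqn(const InterfaceName& iface, const std::string& version) {
        return iface.first + "@" + version + "::" + iface.second;
    }

    int main() {
        const InterfaceName devicesFactory{"android.hardware.audio", "IDevicesFactory"};
        return makeFqn(devicesFactory, "7.1") ==
                "android.hardware.audio@7.1::IDevicesFactory" ? 0 : 1;
    }
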
diff --git a/media/libaudioprocessing/TEST_MAPPING b/media/libaudioprocessing/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9f
--- /dev/null
+++ b/media/libaudioprocessing/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+ "presubmit": [
+ {
+ "name": "CtsNativeMediaAAudioTestCases",
+ "options" : [
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+ }
+ ]
+ }
+ ]
+}
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index b9d795d..165a8ad 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -44,7 +44,10 @@
"-Wextra",
"-Wall",
],
- shared_libs: ["libutils", "liblog"],
+ shared_libs: [
+ "libutils",
+ "liblog",
+ ],
header_libs: [
"libmedia_helper_headers",
"libaudio_system_headers",
@@ -52,7 +55,7 @@
export_header_lib_headers: [
"libmedia_helper_headers",
],
- clang: true,
+
host_supported: true,
target: {
darwin: {
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 4247375..90472eb 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -140,6 +140,8 @@
#define AMEDIAMETRICS_PROP_INTERVALCOUNT "intervalCount" // int32
#define AMEDIAMETRICS_PROP_LATENCYMS "latencyMs" // double value
#define AMEDIAMETRICS_PROP_LOGSESSIONID "logSessionId" // hex string, "" none
+#define AMEDIAMETRICS_PROP_METHODCODE "methodCode" // int64_t, code identifying the method
+#define AMEDIAMETRICS_PROP_METHODNAME "methodName" // string method name
#define AMEDIAMETRICS_PROP_NAME "name" // string value
#define AMEDIAMETRICS_PROP_ORIGINALFLAGS "originalFlags" // int32
#define AMEDIAMETRICS_PROP_OUTPUTDEVICES "outputDevices" // string value
@@ -224,6 +226,7 @@
#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME "setVolume" // AudioTrack
#define AMEDIAMETRICS_PROP_EVENT_VALUE_START "start" // AudioTrack, AudioRecord
#define AMEDIAMETRICS_PROP_EVENT_VALUE_STOP "stop" // AudioTrack, AudioRecord
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT "timeout" // AudioFlinger, AudioPolicy
#define AMEDIAMETRICS_PROP_EVENT_VALUE_UNDERRUN "underrun" // from Thread
// Possible values for AMEDIAMETRICS_PROP_CALLERNAME
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index a23d1d9..266cb17 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -72,6 +72,7 @@
"media_plugin_headers",
"libmediautils_headers",
"libstagefright_rtsp_headers",
+ "libstagefright_webm_headers",
],
static_libs: [
@@ -79,9 +80,6 @@
"libstagefright_nuplayer",
"libstagefright_rtsp",
"libstagefright_timedtext",
- // this needs it, but it can get it transitively through libstagefright.
- // i'm going to leave it here.
- "libstagefright_webm",
"framework-permission-aidl-cpp",
],
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 55b1ed7..b3f7f25 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -216,7 +216,8 @@
sp<AMessage> format = new AMessage;
status_t err = convertMetaDataToMessage(trackMeta, &format);
if (err != OK) {
- format = NULL;
+ ALOGE("getImageInternal: convertMetaDataToMessage() failed, unable to extract image");
+ return NULL;
}
uint32_t bitDepth = 8;
@@ -400,7 +401,8 @@
sp<AMessage> format = new AMessage;
status_t err = convertMetaDataToMessage(trackMeta, &format);
if (err != OK) {
- format = NULL;
+ ALOGE("getFrameInternal: convertMetaDataToMessage() failed, unable to extract frame");
+ return NULL;
}
Vector<AString> matchingCodecs;
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index e47e7ff..10baec4 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -253,6 +253,40 @@
],
},
}
+
+cc_library_shared {
+ name: "libstagefright_surface_utils",
+
+ srcs: [
+ "SurfaceUtils.cpp",
+ ],
+
+ shared_libs: [
+ "libgui",
+ "liblog",
+ "libui",
+ "libutils",
+ ],
+
+ export_include_dirs: [
+ "include",
+ ],
+
+ cflags: [
+ "-Wno-multichar",
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
+
cc_library {
name: "libstagefright",
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index f81a5eb..63d3180 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -36,6 +36,7 @@
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ALookup.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/ByteUtils.h>
#include <media/stagefright/foundation/ColorUtils.h>
@@ -44,6 +45,7 @@
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
#include <media/mediarecorder.h>
@@ -372,9 +374,7 @@
uint8_t mProfileCompatible;
uint8_t mLevelIdc;
- uint8_t mDoviProfile;
- void *mDoviConfigData;
- size_t mDoviConfigDataSize;
+ int32_t mDoviProfile;
void *mCodecSpecificData;
size_t mCodecSpecificDataSize;
@@ -428,7 +428,7 @@
status_t parseHEVCCodecSpecificData(
const uint8_t *data, size_t size, HevcParameterSets ¶mSets);
- status_t makeDoviCodecSpecificData();
+ status_t getDolbyVisionProfile();
// Track authoring progress status
void trackProgressStatus(int64_t timeUs, status_t err = OK);
@@ -628,14 +628,14 @@
}
const char *MPEG4Writer::Track::getDoviFourCC() const {
- if (mDoviProfile == 5) {
+ if (mDoviProfile == DolbyVisionProfileDvheStn) {
return "dvh1";
- } else if (mDoviProfile == 8) {
+ } else if (mDoviProfile == DolbyVisionProfileDvheSt) {
return "hvc1";
- } else if (mDoviProfile == 9 || mDoviProfile == 32) {
+ } else if (mDoviProfile == DolbyVisionProfileDvavSe) {
return "avc1";
}
- return (const char*)NULL;
+ return nullptr;
}
// static
@@ -693,6 +693,11 @@
}
if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+ // For MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+ // getFourCCForMime() requires profile information
+ // to decide the final FourCC codes.
+ // So we allow the new track to be created now and
+ // assign the FourCC code later using getDoviFourCC().
ALOGV("Add source mime '%s'", mime);
} else if (Track::getFourCCForMime(mime) == NULL) {
ALOGE("Unsupported mime '%s'", mime);
@@ -2173,8 +2178,7 @@
mMinCttsOffsetTimeUs(0),
mMinCttsOffsetTicks(0),
mMaxCttsOffsetTicks(0),
- mDoviConfigData(NULL),
- mDoviConfigDataSize(0),
+ mDoviProfile(0),
mCodecSpecificData(NULL),
mCodecSpecificDataSize(0),
mGotAllCodecSpecificData(false),
@@ -2636,7 +2640,7 @@
!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
mMeta->findData(kKeyHVCC, &type, &data, &size);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
- makeDoviCodecSpecificData();
+ getDolbyVisionProfile();
if (!mMeta->findData(kKeyAVCC, &type, &data, &size) &&
!mMeta->findData(kKeyHVCC, &type, &data, &size)) {
ALOGE("Failed: No HVCC/AVCC for Dolby Vision ..\n");
@@ -2683,10 +2687,6 @@
mCodecSpecificData = NULL;
}
- if (mDoviConfigData != NULL) {
- free(mDoviConfigData);
- mDoviConfigData = NULL;
- }
}
void MPEG4Writer::Track::initTrackingProgressStatus(MetaData *params) {
@@ -3365,34 +3365,37 @@
return OK;
}
-status_t MPEG4Writer::Track::makeDoviCodecSpecificData() {
+status_t MPEG4Writer::Track::getDolbyVisionProfile() {
uint32_t type;
const void *data = NULL;
size_t size = 0;
- if (mDoviConfigData != NULL) {
- ALOGE("Already have Dolby Vision codec specific data");
- return OK;
+ if (!mMeta->findData(kKeyDVCC, &type, &data, &size) &&
+ !mMeta->findData(kKeyDVVC, &type, &data, &size) &&
+ !mMeta->findData(kKeyDVWC, &type, &data, &size)) {
+ ALOGE("Failed getting Dovi config for Dolby Vision %d", (int)size);
+ return ERROR_MALFORMED;
}
+ static const ALookup<uint8_t, int32_t> dolbyVisionProfileMap = {
+ {1, DolbyVisionProfileDvavPen},
+ {3, DolbyVisionProfileDvheDen},
+ {4, DolbyVisionProfileDvheDtr},
+ {5, DolbyVisionProfileDvheStn},
+ {6, DolbyVisionProfileDvheDth},
+ {7, DolbyVisionProfileDvheDtb},
+ {8, DolbyVisionProfileDvheSt},
+ {9, DolbyVisionProfileDvavSe},
+ {10, DolbyVisionProfileDvav110}
+ };
- if (!mMeta->findData(kKeyDVCC, &type, &data, &size)
- && !mMeta->findData(kKeyDVVC, &type, &data, &size)
- && !mMeta->findData(kKeyDVWC, &type, &data, &size)) {
- ALOGE("Failed getting Dovi config for Dolby Vision %d", (int)size);
- return ERROR_MALFORMED;
+ // Dolby Vision profile information is extracted as per
+ // https://dolby.my.salesforce.com/sfc/p/#700000009YuG/a/4u000000l6FB/076wHYEmyEfz09m0V1bo85_25hlUJjaiWTbzorNmYY4
+ uint8_t dv_profile = ((((uint8_t *)data)[2] >> 1) & 0x7f);
+
+ if (!dolbyVisionProfileMap.map(dv_profile, &mDoviProfile)) {
+ ALOGE("Failed to get Dolby Profile from DV Config data");
+ return ERROR_MALFORMED;
}
-
- mDoviConfigData = malloc(size);
- if (mDoviConfigData == NULL) {
- ALOGE("Failed allocating Dolby Vision config data");
- return ERROR_MALFORMED;
- }
-
- mDoviConfigDataSize = size;
- memcpy(mDoviConfigData, data, size);
-
- mDoviProfile = (((char *)data)[2] >> 1) & 0x7f; //getting profile info
-
return OK;
}
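The extraction above depends on the Dolby Vision configuration record layout, where byte 2 carries dv_profile in its upper seven bits and the top bit of dv_level in bit 0. A minimal standalone sketch of just that bit manipulation, using a hypothetical 24-byte record rather than the writer's actual data path:

    #include <cstdint>
    #include <cstdio>

    // Extracts dv_profile from a Dolby Vision configuration record (dvcC/dvvC/dvwC).
    // Byte 2 holds dv_profile in bits 7..1; bit 0 is the top bit of dv_level.
    static uint8_t extractDvProfile(const uint8_t* config) {
        return (config[2] >> 1) & 0x7f;
    }

    int main() {
        // Hypothetical record: dv_major = 1, dv_minor = 0, profile 8 packed into byte 2.
        uint8_t dvcc[24] = {1, 0, static_cast<uint8_t>(8 << 1)};
        std::printf("dv_profile = %d\n", extractDvProfile(dvcc));  // prints 8
        return 0;
    }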
@@ -3542,24 +3545,26 @@
buffer->range_length());
}
if (mIsDovi) {
- err = makeDoviCodecSpecificData();
-
- const void *data = NULL;
- size_t size = 0;
-
- uint32_t type = 0;
- if (mDoviProfile == 9){
- mMeta->findData(kKeyAVCC, &type, &data, &size);
- } else if (mDoviProfile < 9) {
- mMeta->findData(kKeyHVCC, &type, &data, &size);
- }
-
- if (data != NULL && copyCodecSpecificData((uint8_t *)data, size) == OK) {
- mGotAllCodecSpecificData = true;
+ err = getDolbyVisionProfile();
+ if (err == OK) {
+ const void *data = NULL;
+ size_t size = 0;
+ uint32_t type = 0;
+ if (mDoviProfile == DolbyVisionProfileDvavSe) {
+ mMeta->findData(kKeyAVCC, &type, &data, &size);
+ } else if (mDoviProfile < DolbyVisionProfileDvavSe) {
+ mMeta->findData(kKeyHVCC, &type, &data, &size);
+ } else {
+ ALOGW("DV Profiles > DolbyVisionProfileDvavSe are not supported");
+ err = ERROR_MALFORMED;
+ }
+ if (err == OK && data != NULL &&
+ copyCodecSpecificData((uint8_t *)data, size) == OK) {
+ mGotAllCodecSpecificData = true;
+ }
}
}
}
-
buffer->release();
buffer = NULL;
if (OK != err) {
@@ -4429,10 +4434,12 @@
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
writeHvccBox();
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime)) {
- if (mDoviProfile <= 8) {
+ if (mDoviProfile <= DolbyVisionProfileDvheSt) {
writeHvccBox();
- } else if (mDoviProfile == 9 || mDoviProfile == 32) {
+ } else if (mDoviProfile == DolbyVisionProfileDvavSe) {
writeAvccBox();
+ } else {
+ TRESPASS("Unsupported Dolby Vision profile");
}
writeDoviConfigBox();
}
@@ -4482,45 +4489,48 @@
size_t size;
bool found =
meta->findData(kKeyHdrStaticInfo, &type, reinterpret_cast<const void**>(&data), &size);
- if (found && size == 25) {
- uint16_t displayPrimariesRX = U16LE_AT(&data[1]);
- uint16_t displayPrimariesRY = U16LE_AT(&data[3]);
-
- uint16_t displayPrimariesGX = U16LE_AT(&data[5]);
- uint16_t displayPrimariesGY = U16LE_AT(&data[7]);
-
- uint16_t displayPrimariesBX = U16LE_AT(&data[9]);
- uint16_t displayPrimariesBY = U16LE_AT(&data[11]);
-
- uint16_t whitePointX = U16LE_AT(&data[13]);
- uint16_t whitePointY = U16LE_AT(&data[15]);
-
- uint16_t maxDisplayMasteringLuminance = U16LE_AT(&data[17]);
- uint16_t minDisplayMasteringLuminance = U16LE_AT(&data[19]);
-
- uint16_t maxContentLightLevel = U16LE_AT(&data[21]);
- uint16_t maxPicAverageLightLevel = U16LE_AT(&data[23]);
-
- mOwner->beginBox("mdcv");
- mOwner->writeInt16(displayPrimariesGX);
- mOwner->writeInt16(displayPrimariesGY);
- mOwner->writeInt16(displayPrimariesBX);
- mOwner->writeInt16(displayPrimariesBY);
- mOwner->writeInt16(displayPrimariesRX);
- mOwner->writeInt16(displayPrimariesRY);
- mOwner->writeInt16(whitePointX);
- mOwner->writeInt16(whitePointY);
- mOwner->writeInt32(maxDisplayMasteringLuminance * 10000);
- mOwner->writeInt32(minDisplayMasteringLuminance * 10000);
- mOwner->endBox(); // mdcv.
-
- mOwner->beginBox("clli");
- mOwner->writeInt16(maxContentLightLevel);
- mOwner->writeInt16(maxPicAverageLightLevel);
- mOwner->endBox(); // clli.
- } else {
- ALOGW("Ignoring HDR static info with unexpected size %d", (int)size);
+ if (!found) {
+ return; // Nothing to encode.
}
+ if (size != 25) {
+ ALOGW("Ignoring HDR static info with unexpected size %d", (int)size);
+ return;
+ }
+ uint16_t displayPrimariesRX = U16LE_AT(&data[1]);
+ uint16_t displayPrimariesRY = U16LE_AT(&data[3]);
+
+ uint16_t displayPrimariesGX = U16LE_AT(&data[5]);
+ uint16_t displayPrimariesGY = U16LE_AT(&data[7]);
+
+ uint16_t displayPrimariesBX = U16LE_AT(&data[9]);
+ uint16_t displayPrimariesBY = U16LE_AT(&data[11]);
+
+ uint16_t whitePointX = U16LE_AT(&data[13]);
+ uint16_t whitePointY = U16LE_AT(&data[15]);
+
+ uint16_t maxDisplayMasteringLuminance = U16LE_AT(&data[17]);
+ uint16_t minDisplayMasteringLuminance = U16LE_AT(&data[19]);
+
+ uint16_t maxContentLightLevel = U16LE_AT(&data[21]);
+ uint16_t maxPicAverageLightLevel = U16LE_AT(&data[23]);
+
+ mOwner->beginBox("mdcv");
+ mOwner->writeInt16(displayPrimariesGX);
+ mOwner->writeInt16(displayPrimariesGY);
+ mOwner->writeInt16(displayPrimariesBX);
+ mOwner->writeInt16(displayPrimariesBY);
+ mOwner->writeInt16(displayPrimariesRX);
+ mOwner->writeInt16(displayPrimariesRY);
+ mOwner->writeInt16(whitePointX);
+ mOwner->writeInt16(whitePointY);
+ mOwner->writeInt32(maxDisplayMasteringLuminance * 10000);
+ mOwner->writeInt32(minDisplayMasteringLuminance * 10000);
+ mOwner->endBox(); // mdcv.
+
+ mOwner->beginBox("clli");
+ mOwner->writeInt16(maxContentLightLevel);
+ mOwner->writeInt16(maxPicAverageLightLevel);
+ mOwner->endBox(); // clli.
}
void MPEG4Writer::Track::writeAudioFourCCBox() {
@@ -4991,21 +5001,29 @@
}
void MPEG4Writer::Track::writeDoviConfigBox() {
- CHECK(mDoviConfigData);
- CHECK_EQ(mDoviConfigDataSize, 24u);
+ CHECK_NE(mDoviProfile, 0u);
- uint8_t *ptr = (uint8_t *)mDoviConfigData;
- uint8_t profile = (ptr[2] >> 1) & 0x7f;
+ uint32_t type = 0;
+ const void *data = nullptr;
+ size_t size = 0;
+ // check to see which key has the configuration box.
+ if (mMeta->findData(kKeyDVCC, &type, &data, &size) ||
+ mMeta->findData(kKeyDVVC, &type, &data, &size) ||
+ mMeta->findData(kKeyDVWC, &type, &data, &size)) {
- if (profile > 10) {
- mOwner->beginBox("dvwC");
- } else if (profile > 7) {
- mOwner->beginBox("dvvC");
- } else {
- mOwner->beginBox("dvcC");
+ // If this box is present we write it; otherwise this
+ // mp4 will be interpreted as a backward
+ // compatible stream.
+ if (mDoviProfile > DolbyVisionProfileDvav110) {
+ mOwner->beginBox("dvwC");
+ } else if (mDoviProfile > DolbyVisionProfileDvheDtb) {
+ mOwner->beginBox("dvvC");
+ } else {
+ mOwner->beginBox("dvcC");
+ }
+ mOwner->write(data, size);
+ mOwner->endBox(); // dvwC/dvvC/dvcC
}
- mOwner->write(mDoviConfigData, mDoviConfigDataSize);
- mOwner->endBox(); // dvwC/dvvC/dvcC
}
void MPEG4Writer::Track::writeD263Box() {
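As a reading aid for the branch above: the configuration box type follows the same cut points the earlier revision used with raw dv_profile values (above 10 -> dvwC, 8..10 -> dvvC, otherwise dvcC), now expressed through the DolbyVisionProfileDvav110 and DolbyVisionProfileDvheDtb constants. A hedged sketch in terms of the raw profile numbers:

    #include <cstdint>
    #include <string>

    // Maps a raw Dolby Vision profile value to the configuration box FourCC,
    // mirroring the thresholds in writeDoviConfigBox(). Illustrative only; the
    // production code compares MediaCodecConstants enum values instead.
    static std::string doviConfigBoxFourcc(uint8_t rawProfile) {
        if (rawProfile > 10) return "dvwC";
        if (rawProfile > 7)  return "dvvC";
        return "dvcC";
    }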
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 1ec09ea..5a27362 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -186,9 +186,12 @@
// XXX suppress until we get our representation right
static bool kEmitHistogram = false;
+static int64_t getId(IResourceManagerClient const * client) {
+ return (int64_t) client;
+}
static int64_t getId(const std::shared_ptr<IResourceManagerClient> &client) {
- return (int64_t) client.get();
+ return getId(client.get());
}
static bool isResourceError(status_t err) {
@@ -205,12 +208,20 @@
////////////////////////////////////////////////////////////////////////////////
struct ResourceManagerClient : public BnResourceManagerClient {
- explicit ResourceManagerClient(MediaCodec* codec) : mMediaCodec(codec) {}
+ explicit ResourceManagerClient(MediaCodec* codec, int32_t pid) :
+ mMediaCodec(codec), mPid(pid) {}
Status reclaimResource(bool* _aidl_return) override {
sp<MediaCodec> codec = mMediaCodec.promote();
if (codec == NULL) {
- // codec is already gone.
+ // Codec is already gone, so remove the resources as well
+ ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+ std::shared_ptr<IResourceManagerService> service =
+ IResourceManagerService::fromBinder(binder);
+ if (service == nullptr) {
+ ALOGW("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
+ } else {
+ service->removeClient(mPid, getId(this));
+ }
*_aidl_return = true;
return Status::ok();
}
@@ -247,6 +258,7 @@
private:
wp<MediaCodec> mMediaCodec;
+ int32_t mPid;
DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
};
@@ -820,7 +832,7 @@
mGetCodecBase(getCodecBase),
mGetCodecInfo(getCodecInfo) {
mResourceManagerProxy = new ResourceManagerServiceProxy(pid, uid,
- ::ndk::SharedRefBase::make<ResourceManagerClient>(this));
+ ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid));
if (!mGetCodecBase) {
mGetCodecBase = [](const AString &name, const char *owner) {
return GetCodecBase(name, owner);
@@ -1460,9 +1472,14 @@
if (mDomain == DOMAIN_VIDEO) {
// video codec needs dedicated looper
if (mCodecLooper == NULL) {
+ status_t err = OK;
mCodecLooper = new ALooper;
mCodecLooper->setName("CodecLooper");
- mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ if (OK != err) {
+ ALOGE("Codec Looper failed to start");
+ return err;
+ }
}
mCodecLooper->registerHandler(mCodec);
@@ -2151,7 +2168,7 @@
bool reverse) {
AString mediaType;
if (!format->findString("mime", &mediaType)) {
- ALOGW("mapFormat: no mediaType information");
+ ALOGV("mapFormat: no mediaType information");
return;
}
ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
@@ -3048,8 +3065,9 @@
CHECK(msg->findInt32("err", &err));
CHECK(msg->findInt32("actionCode", &actionCode));
- ALOGE("Codec reported err %#x, actionCode %d, while in state %d/%s",
- err, actionCode, mState, stateString(mState).c_str());
+ ALOGE("Codec reported err %#x/%s, actionCode %d, while in state %d/%s",
+ err, StrMediaError(err).c_str(), actionCode,
+ mState, stateString(mState).c_str());
if (err == DEAD_OBJECT) {
mFlags |= kFlagSawMediaServerDie;
mFlags &= ~kFlagIsComponentAllocated;
@@ -3105,10 +3123,8 @@
case STOPPING:
{
if (mFlags & kFlagSawMediaServerDie) {
- bool postPendingReplies = true;
if (mState == RELEASING && !mReplyID) {
ALOGD("Releasing asynchronously, so nothing to reply here.");
- postPendingReplies = false;
}
// MediaServer died, there definitely won't
// be a shutdown complete notification after
@@ -3121,8 +3137,11 @@
if (mState == RELEASING) {
mComponentName.clear();
}
- if (postPendingReplies) {
+ if (mReplyID) {
postPendingRepliesAndDeferredMessages(origin + ":dead");
+ } else {
+ ALOGD("no pending replies: %s:dead following %s",
+ origin.c_str(), mLastReplyOrigin.c_str());
}
sendErrorResponse = false;
} else if (!mReplyID) {
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 1854588..4b6470a 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -243,6 +243,39 @@
}
}
+static const ALookup<uint8_t, int32_t>& getDolbyVisionProfileTable() {
+ static const ALookup<uint8_t, int32_t> profileTable = {
+ {1, DolbyVisionProfileDvavPen},
+ {3, DolbyVisionProfileDvheDen},
+ {4, DolbyVisionProfileDvheDtr},
+ {5, DolbyVisionProfileDvheStn},
+ {6, DolbyVisionProfileDvheDth},
+ {7, DolbyVisionProfileDvheDtb},
+ {8, DolbyVisionProfileDvheSt},
+ {9, DolbyVisionProfileDvavSe},
+ {10, DolbyVisionProfileDvav110},
+ };
+ return profileTable;
+}
+
+static const ALookup<uint8_t, int32_t>& getDolbyVisionLevelsTable() {
+ static const ALookup<uint8_t, int32_t> levelsTable = {
+ {0, DolbyVisionLevelUnknown},
+ {1, DolbyVisionLevelHd24},
+ {2, DolbyVisionLevelHd30},
+ {3, DolbyVisionLevelFhd24},
+ {4, DolbyVisionLevelFhd30},
+ {5, DolbyVisionLevelFhd60},
+ {6, DolbyVisionLevelUhd24},
+ {7, DolbyVisionLevelUhd30},
+ {8, DolbyVisionLevelUhd48},
+ {9, DolbyVisionLevelUhd60},
+ {10, DolbyVisionLevelUhd120},
+ {11, DolbyVisionLevel8k30},
+ {12, DolbyVisionLevel8k60},
+ };
+ return levelsTable;
+}
static void parseDolbyVisionProfileLevelFromDvcc(const uint8_t *ptr, size_t size, sp<AMessage> &format) {
// dv_major.dv_minor Should be 1.0 or 2.1
if (size != 24 || ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1))) {
@@ -262,33 +295,9 @@
// All Dolby Profiles will have profile and level info in MediaFormat
// Profile 8 and 9 will have bl_compatibility_id too.
- const static ALookup<uint8_t, int32_t> profiles{
- {1, DolbyVisionProfileDvavPen},
- {3, DolbyVisionProfileDvheDen},
- {4, DolbyVisionProfileDvheDtr},
- {5, DolbyVisionProfileDvheStn},
- {6, DolbyVisionProfileDvheDth},
- {7, DolbyVisionProfileDvheDtb},
- {8, DolbyVisionProfileDvheSt},
- {9, DolbyVisionProfileDvavSe},
- {10, DolbyVisionProfileDvav110},
- };
+ const ALookup<uint8_t, int32_t> &profiles = getDolbyVisionProfileTable();
+ const ALookup<uint8_t, int32_t> &levels = getDolbyVisionLevelsTable();
- const static ALookup<uint8_t, int32_t> levels{
- {0, DolbyVisionLevelUnknown},
- {1, DolbyVisionLevelHd24},
- {2, DolbyVisionLevelHd30},
- {3, DolbyVisionLevelFhd24},
- {4, DolbyVisionLevelFhd30},
- {5, DolbyVisionLevelFhd60},
- {6, DolbyVisionLevelUhd24},
- {7, DolbyVisionLevelUhd30},
- {8, DolbyVisionLevelUhd48},
- {9, DolbyVisionLevelUhd60},
- {10, DolbyVisionLevelUhd120},
- {11, DolbyVisionLevel8k30},
- {12, DolbyVisionLevel8k60},
- };
// set rpuAssoc
if (rpu_present_flag && el_present_flag && !bl_present_flag) {
format->setInt32("rpuAssoc", 1);
@@ -1516,30 +1525,18 @@
if (meta->findData(kKeyDVCC, &type, &data, &size)
|| meta->findData(kKeyDVVC, &type, &data, &size)
|| meta->findData(kKeyDVWC, &type, &data, &size)) {
- sp<ABuffer> buffer, csdOrg;
- if (msg->findBuffer("csd-0", &csdOrg)) {
- buffer = new (std::nothrow) ABuffer(size + csdOrg->size());
- if (buffer.get() == NULL || buffer->base() == NULL) {
- return NO_MEMORY;
- }
-
- memcpy(buffer->data(), csdOrg->data(), csdOrg->size());
- memcpy(buffer->data() + csdOrg->size(), data, size);
- } else {
- buffer = new (std::nothrow) ABuffer(size);
- if (buffer.get() == NULL || buffer->base() == NULL) {
- return NO_MEMORY;
- }
- memcpy(buffer->data(), data, size);
- }
-
- buffer->meta()->setInt32("csd", true);
- buffer->meta()->setInt64("timeUs", 0);
- msg->setBuffer("csd-0", buffer);
-
const uint8_t *ptr = (const uint8_t *)data;
ALOGV("DV: calling parseDolbyVisionProfileLevelFromDvcc with data size %zu", size);
parseDolbyVisionProfileLevelFromDvcc(ptr, size, msg);
+ sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
+ if (buffer.get() == nullptr || buffer->base() == nullptr) {
+ return NO_MEMORY;
+ }
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-2", buffer);
}
*format = msg;
@@ -2041,133 +2038,146 @@
mime == MEDIA_MIMETYPE_IMAGE_AVIF) {
meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
} else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION) {
- int32_t needCreateDoviCSD = 0;
- int32_t profile = 0;
- uint8_t bl_compatibility = 0;
- if (msg->findInt32("profile", &profile)) {
- if (profile == DolbyVisionProfileDvheSt) {
- profile = 8;
- bl_compatibility = 4;
- } else if (profile == DolbyVisionProfileDvavSe) {
- profile = 9;
- bl_compatibility = 2;
- }
- if (profile == 8 || profile == 9) {
- needCreateDoviCSD = 1;
- }
- } else {
- ALOGW("did not find dolby vision profile");
- }
- // No dovi csd data, need to create it
- if (needCreateDoviCSD) {
- uint8_t dvcc[24];
- int32_t level = 0;
- uint8_t level_val = 0;
+ int32_t profile = -1;
+ uint8_t blCompatibilityId = -1;
+ int32_t level = 0;
+ uint8_t profileVal = -1;
+ uint8_t profileVal1 = -1;
+ uint8_t profileVal2 = -1;
+ constexpr size_t dvccSize = 24;
- if (msg->findInt32("level", &level)) {
- const static ALookup<int32_t, uint8_t> levels {
- {DolbyVisionLevelUnknown, 0},
- {DolbyVisionLevelHd24, 1},
- {DolbyVisionLevelHd30, 2},
- {DolbyVisionLevelFhd24, 3},
- {DolbyVisionLevelFhd30, 4},
- {DolbyVisionLevelFhd60, 5},
- {DolbyVisionLevelUhd24, 6},
- {DolbyVisionLevelUhd30, 7},
- {DolbyVisionLevelUhd48, 8},
- {DolbyVisionLevelUhd60, 9},
- {DolbyVisionLevelUhd120, 10},
- {DolbyVisionLevel8k30, 11},
- {DolbyVisionLevel8k60, 12},
- };
- levels.map(level, &level_val);
- ALOGV("found dolby vision level: %d, value: %d", level, level_val);
+ const ALookup<uint8_t, int32_t> &profiles =
+ getDolbyVisionProfileTable();
+ const ALookup<uint8_t, int32_t> &levels =
+ getDolbyVisionLevelsTable();
+
+ if (!msg->findBuffer("csd-2", &csd2)) {
+ // MP4 extractors are expected to generate csd buffer
+ // some encoders might not be generating it, in which
+ // case we populate the track metadata dv (cc|vc|wc)
+ // from the 'profile' and 'level' info.
+ // This is done according to Dolby Vision ISOBMFF spec
+
+ if (!msg->findInt32("profile", &profile)) {
+ ALOGE("Dolby Vision profile not found");
+ return BAD_VALUE;
}
+ msg->findInt32("level", &level);
+
+ if (profile == DolbyVisionProfileDvheSt) {
+ if (!profiles.rlookup(DolbyVisionProfileDvheSt, &profileVal)) { // dvhe.08
+ ALOGE("Dolby Vision profile lookup error");
+ return BAD_VALUE;
+ }
+ blCompatibilityId = 4;
+ } else if (profile == DolbyVisionProfileDvavSe) {
+ if (!profiles.rlookup(DolbyVisionProfileDvavSe, &profileVal)) { // dvav.09
+ ALOGE("Dolby Vision profile lookup error");
+ return BAD_VALUE;
+ }
+ blCompatibilityId = 2;
+ } else {
+ ALOGE("Dolby Vision profile look up error");
+ return BAD_VALUE;
+ }
+
+ profile = (int32_t) profileVal;
+
+ uint8_t level_val = 0;
+ if (!levels.map(level, &level_val)) {
+ ALOGE("Dolby Vision level lookup error");
+ return BAD_VALUE;
+ }
+
+ std::vector<uint8_t> dvcc(dvccSize);
dvcc[0] = 1; // major version
dvcc[1] = 0; // minor version
- dvcc[2] = (uint8_t)((profile & 0x7f) << 1);// dolby vision profile
+ dvcc[2] = (uint8_t)((profile & 0x7f) << 1); // dolby vision profile
dvcc[2] = (uint8_t)((dvcc[2] | (uint8_t)((level_val >> 5) & 0x1)) & 0xff);
dvcc[3] = (uint8_t)((level_val & 0x1f) << 3); // dolby vision level
dvcc[3] = (uint8_t)(dvcc[3] | (1 << 2)); // rpu_present_flag
dvcc[3] = (uint8_t)(dvcc[3] | (1)); // bl_present_flag
- dvcc[4] = (uint8_t)(bl_compatibility << 4);// bl_compatibility id
+ dvcc[4] = (uint8_t)(blCompatibilityId << 4); // bl_compatibility id
- std::vector<uint8_t> dvcc_data(24);
- memcpy(dvcc_data.data(), dvcc, 24);
- if (profile > 10) {
- meta->setData(kKeyDVWC, kTypeDVWC, dvcc_data.data(), 24);
- } else if (profile > 7) {
- meta->setData(kKeyDVVC, kTypeDVVC, dvcc_data.data(), 24);
+ profiles.rlookup(DolbyVisionProfileDvav110, &profileVal);
+ profiles.rlookup(DolbyVisionProfileDvheDtb, &profileVal1);
+ if (profile > (int32_t) profileVal) {
+ meta->setData(kKeyDVWC, kTypeDVWC, dvcc.data(), dvccSize);
+ } else if (profile > (int32_t) profileVal1) {
+ meta->setData(kKeyDVVC, kTypeDVVC, dvcc.data(), dvccSize);
} else {
- meta->setData(kKeyDVCC, kTypeDVCC, dvcc_data.data(), 24);
+ meta->setData(kKeyDVCC, kTypeDVCC, dvcc.data(), dvccSize);
}
- } else if (csd0size >= 24) { // have dovi csd, just send it out...
- uint8_t *dvconfig = csd0->data() + (csd0size -24);
- profile = dvconfig[2] >> 1;
- if (profile > 10) {
- meta->setData(kKeyDVWC, kTypeDVWC, dvconfig, 24);
- } else if (profile > 7) {
- meta->setData(kKeyDVVC, kTypeDVVC, dvconfig, 24);
- } else {
- meta->setData(kKeyDVCC, kTypeDVCC, dvconfig, 24);
- }
+
} else {
- return BAD_VALUE;
+ // we have csd-2, just use that to populate dvcc
+ if (csd2->size() == dvccSize) {
+ uint8_t *dvcc = csd2->data();
+ profile = dvcc[2] >> 1;
+
+ profiles.rlookup(DolbyVisionProfileDvav110, &profileVal);
+ profiles.rlookup(DolbyVisionProfileDvheDtb, &profileVal1);
+ if (profile > (int32_t) profileVal) {
+ meta->setData(kKeyDVWC, kTypeDVWC, csd2->data(), csd2->size());
+ } else if (profile > (int32_t) profileVal1) {
+ meta->setData(kKeyDVVC, kTypeDVVC, csd2->data(), csd2->size());
+ } else {
+ meta->setData(kKeyDVCC, kTypeDVCC, csd2->data(), csd2->size());
+ }
+
+ } else {
+ ALOGE("Convert MessageToMetadata csd-2 is present but not valid");
+ return BAD_VALUE;
+ }
}
-
- // Send the avc/hevc/av1 csd data...
- if (csd0size >= 24) {
- sp<ABuffer> csd;
- if ( profile > 1 && profile < 9) {
- if (msg->findBuffer("csd-hevc", &csd)) {
- meta->setData(kKeyHVCC, kTypeHVCC, csd->data(), csd->size());
- } else if (csd0size > 24) {
- std::vector<uint8_t> hvcc(csd0size + 1024);
- size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
- meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
- }
- } else if (profile == 9) {
- sp<ABuffer> csd1;
- if (msg->findBuffer("csd-avc", &csd)) {
- meta->setData(kKeyAVCC, kTypeAVCC, csd->data(), csd->size());
- } else if (msg->findBuffer("csd-1", &csd1)) {
- std::vector<char> avcc(csd0size + csd1->size() + 1024);
- size_t outsize = reassembleAVCC(csd0, csd1, avcc.data());
- meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
- } else { // for dolby vision avc, csd0 also holds csd1
- size_t i = 0;
- int csd0realsize = 0;
- do {
- i = findNextNalStartCode(csd0->data() + i,
- csd0->size() - i) - csd0->data();
- if (i > 0) {
- csd0realsize = i;
- break;
- }
- i += 4;
- } while(i < csd0->size());
- // buffer0 -> csd0
- sp<ABuffer> buffer0 = new (std::nothrow) ABuffer(csd0realsize);
- if (buffer0.get() == NULL || buffer0->base() == NULL) {
- return NO_MEMORY;
+ profiles.rlookup(DolbyVisionProfileDvavPen, &profileVal);
+ profiles.rlookup(DolbyVisionProfileDvavSe, &profileVal1);
+ profiles.rlookup(DolbyVisionProfileDvav110, &profileVal2);
+ if ((profile > (int32_t) profileVal) && (profile < (int32_t) profileVal1)) {
+ std::vector<uint8_t> hvcc(csd0size + 1024);
+ size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
+ meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
+ } else if (profile == (int32_t) profileVal2) {
+ meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
+ } else {
+ sp<ABuffer> csd1;
+ if (msg->findBuffer("csd-1", &csd1)) {
+ std::vector<char> avcc(csd0size + csd1->size() + 1024);
+ size_t outsize = reassembleAVCC(csd0, csd1, avcc.data());
+ meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
+ }
+ else {
+ // for dolby vision avc, csd0 also holds csd1
+ size_t i = 0;
+ int csd0realsize = 0;
+ do {
+ i = findNextNalStartCode(csd0->data() + i,
+ csd0->size() - i) - csd0->data();
+ if (i > 0) {
+ csd0realsize = i;
+ break;
}
- memcpy(buffer0->data(), csd0->data(), csd0realsize);
- // buffer1 -> csd1
- sp<ABuffer> buffer1 = new (std::nothrow)
- ABuffer(csd0->size() - csd0realsize);
- if (buffer1.get() == NULL || buffer1->base() == NULL) {
- return NO_MEMORY;
- }
- memcpy(buffer1->data(), csd0->data()+csd0realsize,
- csd0->size() - csd0realsize);
-
- std::vector<char> avcc(csd0->size() + 1024);
- size_t outsize = reassembleAVCC(buffer0, buffer1, avcc.data());
- meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
+ i += 4;
+ } while(i < csd0->size());
+ // buffer0 -> csd0
+ sp<ABuffer> buffer0 = new (std::nothrow) ABuffer(csd0realsize);
+ if (buffer0.get() == NULL || buffer0->base() == NULL) {
+ return NO_MEMORY;
}
- } else if (profile == 10) {
- meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size() - 24);
+ memcpy(buffer0->data(), csd0->data(), csd0realsize);
+ // buffer1 -> csd1
+ sp<ABuffer> buffer1 = new (std::nothrow)
+ ABuffer(csd0->size() - csd0realsize);
+ if (buffer1.get() == NULL || buffer1->base() == NULL) {
+ return NO_MEMORY;
+ }
+ memcpy(buffer1->data(), csd0->data()+csd0realsize,
+ csd0->size() - csd0realsize);
+
+ std::vector<char> avcc(csd0->size() + 1024);
+ size_t outsize = reassembleAVCC(buffer0, buffer1, avcc.data());
+ meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
}
}
} else if (mime == MEDIA_MIMETYPE_VIDEO_VP9) {
@@ -2216,6 +2226,17 @@
meta->setData(kKeyStreamHeader, 'mdat', csd0->data(), csd0->size());
} else if (msg->findBuffer("d263", &csd0)) {
meta->setData(kKeyD263, kTypeD263, csd0->data(), csd0->size());
+ } else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION && msg->findBuffer("csd-2", &csd2)) {
+ meta->setData(kKeyDVCC, kTypeDVCC, csd2->data(), csd2->size());
+
+ // Remove CSD-2 from the data here to avoid duplicate data in meta
+ meta->remove(kKeyOpaqueCSD2);
+
+ if (msg->findBuffer("csd-avc", &csd0)) {
+ meta->setData(kKeyAVCC, kTypeAVCC, csd0->data(), csd0->size());
+ } else if (msg->findBuffer("csd-hevc", &csd0)) {
+ meta->setData(kKeyHVCC, kTypeHVCC, csd0->data(), csd0->size());
+ }
}
// XXX TODO add whatever other keys there are
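The branch above that synthesizes a Dolby Vision configuration record packs the profile, level and base-layer compatibility id into bytes 2-4. A small sketch of just that packing, with hypothetical input values:

    #include <cstdint>
    #include <vector>

    // Builds a minimal 24-byte Dolby Vision configuration record, mirroring the
    // bit packing in convertMessageToMetaData() above. Inputs are hypothetical.
    static std::vector<uint8_t> makeDvcc(uint8_t profile, uint8_t level, uint8_t blCompatibilityId) {
        std::vector<uint8_t> dvcc(24, 0);
        dvcc[0] = 1;  // dv_major
        dvcc[1] = 0;  // dv_minor
        // dv_profile in bits 7..1 of byte 2, top bit of dv_level in bit 0.
        dvcc[2] = static_cast<uint8_t>(((profile & 0x7f) << 1) | ((level >> 5) & 0x1));
        // Remaining dv_level bits, then rpu_present_flag and bl_present_flag.
        dvcc[3] = static_cast<uint8_t>(((level & 0x1f) << 3) | (1 << 2) | 1);
        dvcc[4] = static_cast<uint8_t>(blCompatibilityId << 4);  // bl_compatibility_id
        return dvcc;
    }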
diff --git a/media/libstagefright/VideoFrameScheduler.cpp b/media/libstagefright/VideoFrameScheduler.cpp
index 4e5b5e2..de8e425 100644
--- a/media/libstagefright/VideoFrameScheduler.cpp
+++ b/media/libstagefright/VideoFrameScheduler.cpp
@@ -22,8 +22,8 @@
#include <utils/String16.h>
#include <binder/IServiceManager.h>
-#include <gui/ISurfaceComposer.h>
-#include <ui/DisplayStatInfo.h>
+#include <android/gui/ISurfaceComposer.h>
+#include <android/gui/DisplayStatInfo.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>
@@ -39,21 +39,22 @@
mVsyncTime = 0;
mVsyncPeriod = 0;
+ // TODO(b/220021255): wrap this into SurfaceComposerClient
if (mComposer == NULL) {
- String16 name("SurfaceFlinger");
+ String16 name("SurfaceFlingerAIDL");
sp<IServiceManager> sm = defaultServiceManager();
- mComposer = interface_cast<ISurfaceComposer>(sm->checkService(name));
+ mComposer = interface_cast<gui::ISurfaceComposer>(sm->checkService(name));
}
if (mComposer != NULL) {
- DisplayStatInfo stats;
- status_t res = mComposer->getDisplayStats(NULL /* display */, &stats);
- if (res == OK) {
+ gui::DisplayStatInfo stats;
+ binder::Status status = mComposer->getDisplayStats(nullptr/* display */, &stats);
+ if (status.isOk()) {
ALOGV("vsync time:%lld period:%lld",
(long long)stats.vsyncTime, (long long)stats.vsyncPeriod);
mVsyncTime = stats.vsyncTime;
mVsyncPeriod = stats.vsyncPeriod;
} else {
- ALOGW("getDisplayStats returned %d", res);
+ ALOGW("getDisplayStats returned %d", status.transactionError());
}
} else {
ALOGW("could not get surface mComposer service");
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 1b31392..ca17117 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -120,8 +120,6 @@
},
},
- clang: true,
-
sanitize: {
misc_undefined: [
"unsigned-integer-overflow",
@@ -172,7 +170,7 @@
shared_libs: [
"liblog",
- "libutils", // for sp<>
+ "libutils", // for sp<>
// actually invokes this, but called from folks who already load it
// "libmediandk",
],
@@ -200,8 +198,6 @@
"ColorUtils_fill.cpp",
],
- clang: true,
-
sanitize: {
misc_undefined: [
"unsigned-integer-overflow",
@@ -218,4 +214,3 @@
],
}
-
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
index 14097b0..85fd8b7 100644
--- a/media/libstagefright/httplive/fuzzer/Android.bp
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -40,7 +40,6 @@
"libdatasource",
"libmedia",
"libstagefright",
- "libutils",
],
header_libs: [
"libbase_headers",
@@ -53,6 +52,7 @@
"libstagefright_foundation",
"libhidlbase",
"libhidlmemory",
+ "libutils",
"android.hidl.allocator@1.0",
],
corpus: ["corpus/*"],
diff --git a/media/libstagefright/include/media/stagefright/MediaErrors.h b/media/libstagefright/include/media/stagefright/MediaErrors.h
index d1df2ca..b91c850 100644
--- a/media/libstagefright/include/media/stagefright/MediaErrors.h
+++ b/media/libstagefright/include/media/stagefright/MediaErrors.h
@@ -163,11 +163,28 @@
|| (ERROR_DRM_VENDOR_MIN <= err && err <= ERROR_DRM_VENDOR_MAX);
}
-static inline std::string StrCryptoError(status_t err) {
#define STATUS_CASE(STATUS) \
case STATUS: \
return #STATUS
+static inline std::string StrMediaError(status_t err) {
+ switch(err) {
+ STATUS_CASE(ERROR_ALREADY_CONNECTED);
+ STATUS_CASE(ERROR_NOT_CONNECTED);
+ STATUS_CASE(ERROR_UNKNOWN_HOST);
+ STATUS_CASE(ERROR_CANNOT_CONNECT);
+ STATUS_CASE(ERROR_IO);
+ STATUS_CASE(ERROR_CONNECTION_LOST);
+ STATUS_CASE(ERROR_MALFORMED);
+ STATUS_CASE(ERROR_OUT_OF_RANGE);
+ STATUS_CASE(ERROR_BUFFER_TOO_SMALL);
+ STATUS_CASE(ERROR_UNSUPPORTED);
+ STATUS_CASE(ERROR_END_OF_STREAM);
+ }
+ return statusToString(err);
+}
+
+static inline std::string StrCryptoError(status_t err) {
switch (err) {
STATUS_CASE(ERROR_DRM_UNKNOWN);
STATUS_CASE(ERROR_DRM_NO_LICENSE);
@@ -209,10 +226,10 @@
STATUS_CASE(ERROR_DRM_STORAGE_READ);
STATUS_CASE(ERROR_DRM_STORAGE_WRITE);
STATUS_CASE(ERROR_DRM_ZERO_SUBSAMPLES);
-#undef STATUS_CASE
}
return statusToString(err);
}
+#undef STATUS_CASE
} // namespace android
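A usage sketch for the new helper, along the lines of the call sites added in MediaCodec.cpp and NdkMediaCodec.cpp elsewhere in this change (the log tag is a placeholder):

    #define LOG_TAG "MediaErrorDemo"
    #include <media/stagefright/MediaErrors.h>
    #include <utils/Log.h>

    // Logs a status_t with its symbolic media error name where one exists;
    // StrMediaError() falls back to statusToString() for anything unlisted.
    static void logMediaFailure(android::status_t err) {
        ALOGE("operation failed: %#x/%s", err, android::StrMediaError(err).c_str());
    }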
diff --git a/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h b/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
index fcfcbec..a4f512a 100644
--- a/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
+++ b/media/libstagefright/include/media/stagefright/VideoFrameScheduler.h
@@ -21,7 +21,9 @@
namespace android {
-class ISurfaceComposer;
+namespace gui {
+ class ISurfaceComposer;
+}
struct VideoFrameScheduler : public VideoFrameSchedulerBase {
VideoFrameScheduler();
@@ -32,7 +34,7 @@
private:
void updateVsync() override;
- sp<ISurfaceComposer> mComposer;
+ sp<gui::ISurfaceComposer> mComposer;
};
} // namespace android
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 9a7bad9..3c00a1c 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -87,7 +87,7 @@
enabled: true,
},
double_loadable: true,
- clang: true,
+
cflags: [
"-fvisibility=hidden",
"-Werror=format",
diff --git a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
index 6b1d2a1..1c8eef5 100644
--- a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
@@ -204,7 +204,7 @@
};
bool AMPEG4ElementaryAssembler::initCheck() {
- if(mSizeLength == 0 || mIndexLength == 0 || mIndexDeltaLength == 0) {
+ if(mIsGeneric && (mSizeLength == 0 || mIndexLength == 0 || mIndexDeltaLength == 0)) {
android_errorWriteLog(0x534e4554, "124777537");
return false;
}
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
index ac1e9b1..a8e64b6 100644
--- a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -393,3 +393,51 @@
std::this_thread::sleep_for(std::chrono::milliseconds(100));
looper->stop();
}
+
+TEST(MediaCodecTest, DeadWhileStoppingError) {
+ // Test scenario:
+ //
+ // 1) Client thread calls stop(); MediaCodec looper thread calls
+ // initiateShutdown(); shutdown is being handled at the component thread.
+ // 2) An error occurs while handling initiateShutdown().
+ // 3) MediaCodec looper thread handles the error.
+ // 4) Codec service dies after the error is handled
+ // 5) MediaCodec looper thread handles the death.
+
+ static const AString kCodecName{"test.codec"};
+ static const AString kCodecOwner{"nobody"};
+ static const AString kMediaType{"video/x-test"};
+
+ sp<MockCodec> mockCodec;
+ std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
+ [&mockCodec](const AString &, const char *) {
+ mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
+ // No mock setup, as we don't expect any buffer operations
+ // in this scenario.
+ });
+ ON_CALL(*mockCodec, initiateAllocateComponent(_))
+ .WillByDefault([mockCodec](const sp<AMessage> &) {
+ mockCodec->callback()->onComponentAllocated(kCodecName.c_str());
+ });
+ ON_CALL(*mockCodec, initiateShutdown(_))
+ .WillByDefault([mockCodec](bool) {
+ // 2)
+ mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ // 4)
+ mockCodec->callback()->onError(DEAD_OBJECT, ACTION_CODE_FATAL);
+ // Codec service has died, no callback.
+ });
+ return mockCodec;
+ };
+
+ sp<ALooper> looper{new ALooper};
+ sp<MediaCodec> codec = SetupMediaCodec(
+ kCodecOwner, kCodecName, kMediaType, looper, getCodecBase);
+ ASSERT_NE(nullptr, codec) << "Codec must not be null";
+ ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
+
+ codec->stop();
+ // sleep here so that the looper thread can handle the error
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
+ looper->stop();
+}
diff --git a/media/libstagefright/timedtext/test/Android.bp b/media/libstagefright/timedtext/test/Android.bp
index 58c68ef..ae97c50 100644
--- a/media/libstagefright/timedtext/test/Android.bp
+++ b/media/libstagefright/timedtext/test/Android.bp
@@ -36,7 +36,6 @@
static_libs: [
"libstagefright_timedtext",
- "libstagefright_foundation",
],
header_libs: [
@@ -47,6 +46,7 @@
"liblog",
"libmedia",
"libbinder",
+ "libstagefright_foundation",
],
cflags: [
diff --git a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
index ee7af70..b97f347 100644
--- a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
+++ b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
@@ -174,10 +174,13 @@
params.sampleRate = 16000;
} else {
params.sampleRate = max(1, params.sampleRate);
+ params.channelCount = max(0, params.channelCount);
}
format->setInt32("channel-count", params.channelCount);
format->setInt32("sample-rate", params.sampleRate);
} else if (!strncmp(params.mime, "video/", 6)) {
+ params.width = max(1, params.width);
+ params.height = max(1, params.height);
format->setInt32("width", params.width);
format->setInt32("height", params.height);
}
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 055dd80..afc873c 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -41,8 +41,6 @@
"-Wall",
],
- clang: true,
-
sanitize: {
misc_undefined: [
"unsigned-integer-overflow",
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 537df76..edddaa4 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -33,7 +33,7 @@
shared_libs: [
"android.hardware.media.omx@1.0",
- "libandroidicu",
+ "libicu",
"libfmq",
"libbinder",
"libhidlbase",
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 354971a..38e422d 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -15,6 +15,8 @@
*/
#include <inttypes.h>
+#include <mutex>
+#include <set>
//#define LOG_NDEBUG 0
#define LOG_TAG "NdkMediaCodec"
@@ -42,6 +44,7 @@
static media_status_t translate_error(status_t err) {
+
if (err == OK) {
return AMEDIA_OK;
} else if (err == -EAGAIN) {
@@ -51,7 +54,18 @@
} else if (err == DEAD_OBJECT) {
return AMEDIACODEC_ERROR_RECLAIMED;
}
- ALOGE("sf error code: %d", err);
+
+ {
+ // minimize log flooding. Some CTS behavior made this noisy and apps could do the same.
+ static std::set<status_t> untranslated;
+ static std::mutex mutex;
+ std::lock_guard lg(mutex);
+
+ if (untranslated.find(err) == untranslated.end()) {
+ ALOGE("untranslated sf error code: %d", err);
+ untranslated.insert(err);
+ }
+ }
return AMEDIA_ERROR_UNKNOWN;
}
@@ -263,8 +277,8 @@
break;
}
msg->findString("detail", &detail);
- ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
- err, actionCode, detail.c_str());
+ ALOGE("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)",
+ err, StrMediaError(err).c_str(), actionCode, detail.c_str());
Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
if (mCodec->mAsyncCallback.onAsyncError != NULL) {
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index ba8f199..a38ef57 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -28,11 +28,16 @@
"AImageReaderUtils.cpp",
"BatteryNotifier.cpp",
"ISchedulingPolicyService.cpp",
+ "Library.cpp",
"LimitProcessMemory.cpp",
+ "MediaUtilsDelayed.cpp",
"MemoryLeakTrackUtil.cpp",
+ "MethodStatistics.cpp",
+ "Process.cpp",
"ProcessInfo.cpp",
"SchedulingPolicyService.cpp",
"ServiceUtilities.cpp",
+ "ThreadSnapshot.cpp",
"TimeCheck.cpp",
"TimerThread.cpp",
],
@@ -43,7 +48,7 @@
],
shared_libs: [
"libaudioclient_aidl_conversion",
- "libaudioutils", // for clock.h
+ "libaudioutils", // for clock.h, Statistics.h
"libbinder",
"libcutils",
"liblog",
@@ -75,6 +80,10 @@
"libpermission",
],
+ required: [
+ "libmediautils_delayed", // lazy loaded
+ ],
+
include_dirs: [
// For DEBUGGER_SIGNAL
"system/core/debuggerd/include",
@@ -84,6 +93,23 @@
}
cc_library {
+ name: "libmediautils_delayed", // match with MEDIAUTILS_DELAYED_LIBRARY_NAME
+ srcs: [
+ "MediaUtilsDelayedLibrary.cpp",
+ ],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+ shared_libs: [
+ "liblog",
+ "libutils",
+ "libutilscallstack",
+ ],
+}
+
+cc_library {
name: "libmediautils_vendor",
vendor_available: true, // required for platform/hardware/interfaces
srcs: [
diff --git a/media/utils/Library.cpp b/media/utils/Library.cpp
new file mode 100644
index 0000000..c1e22bf
--- /dev/null
+++ b/media/utils/Library.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Library"
+#include <utils/Log.h>
+#include <mediautils/Library.h>
+
+namespace {
+
+std::string dlerrorIfPresent() {
+ const char *dlerr = dlerror();
+ if (dlerr == nullptr) return "dlerror: none";
+ return std::string("dlerror: '").append(dlerr).append("'");
+}
+
+}
+namespace android::mediautils {
+
+std::shared_ptr<void> loadLibrary(const char *libraryName, int flags) {
+ std::shared_ptr<void> library{
+ dlopen(libraryName, flags),
+ [](void *lib) {
+ if (lib != nullptr) {
+ const int ret = dlclose(lib);
+ ALOGW_IF(ret != 0, "%s: dlclose(%p) == %d, %s",
+ __func__, lib, ret, dlerrorIfPresent().c_str());
+ }
+ }
+ };
+
+ if (!library) {
+ ALOGW("%s: cannot load libraryName %s, %s",
+ __func__, libraryName, dlerrorIfPresent().c_str());
+ return {};
+ }
+ return library;
+}
+
+std::shared_ptr<void> getUntypedObjectFromLibrary(
+ const char *objectName, const std::shared_ptr<void>& library) {
+ if (!library) {
+ ALOGW("%s: null library, cannot load objectName %s", __func__, objectName);
+ return {};
+ }
+ void *ptr = dlsym(library.get(), objectName);
+ if (ptr == nullptr) {
+ ALOGW("%s: cannot load objectName %s, %s",
+ __func__, objectName, dlerrorIfPresent().c_str());
+ return {};
+ }
+
+ // Note: we use the "aliasing" constructor of the std:shared_ptr.
+ //
+ // https://en.cppreference.com/w/cpp/memory/shared_ptr/shared_ptr
+ //
+ return { library, ptr }; // returns shared_ptr to ptr, but ref counted on library.
+}
+
+} // namespace android::mediautils
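MediaUtilsDelayed.cpp below fetches a typed object through getObjectFromLibrary<>, whose declaration lives in mediautils/Library.h and is not part of this diff. It presumably wraps getUntypedObjectFromLibrary() with another aliasing shared_ptr, roughly like this sketch (the template shape, library name and symbol are assumptions):

    #include <dlfcn.h>
    #include <memory>
    #include <mediautils/Library.h>

    // Hypothetical typed wrapper: resolve the symbol untyped, then alias it so the
    // returned pointer keeps the dlopen'ed library refcounted.
    template <typename T>
    std::shared_ptr<T> getTypedObject(const char* name, const std::shared_ptr<void>& library) {
        auto untyped = android::mediautils::getUntypedObjectFromLibrary(name, library);
        if (!untyped) return {};
        return std::shared_ptr<T>(untyped, static_cast<T*>(untyped.get()));
    }

    // Usage sketch with placeholder names:
    //   auto lib = android::mediautils::loadLibrary("libexample_plugin.so", RTLD_NOW);
    //   auto table = getTypedObject<ExampleDispatchTable>("gExampleTable", lib);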
diff --git a/media/utils/MediaUtilsDelayed.cpp b/media/utils/MediaUtilsDelayed.cpp
new file mode 100644
index 0000000..c6c092d
--- /dev/null
+++ b/media/utils/MediaUtilsDelayed.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <mediautils/Library.h>
+#include <mediautils/MediaUtilsDelayed.h>
+#include "MediaUtilsDelayedLibrary.h"
+
+#define LOG_TAG "MediaUtilsDelayed"
+#include <utils/Log.h>
+#include <memory>
+
+namespace android::mediautils {
+
+namespace {
+// Specific implementation details for MediaUtils Delayed Library.
+
+// The following use static Meyer's singleton caches instead of relying on
+// the refcounted management provided above. This is for speed.
+std::shared_ptr<void> getDelayedLibrary() {
+ static std::shared_ptr<void> library = loadLibrary(MEDIAUTILS_DELAYED_LIBRARY_NAME);
+ return library;
+}
+
+// Get the delayed dispatch table. This is refcounted and keeps the underlying library alive.
+std::shared_ptr<delayed_library::DelayedDispatchTable> getDelayedDispatchTable() {
+ static auto delayedDispatchTable =
+ getObjectFromLibrary<delayed_library::DelayedDispatchTable>(
+ MEDIAUTILS_DELAYED_DISPATCH_TABLE_SYMBOL_NAME, getDelayedLibrary());
+ return delayedDispatchTable;
+}
+
+} // namespace
+
+// Public implementations of methods here.
+
+std::string getCallStackStringForTid(pid_t tid) {
+ auto delayedDispatchTable = getDelayedDispatchTable();
+ if (!delayedDispatchTable) return {}; // on failure, return empty string
+ return delayedDispatchTable->getCallStackStringForTid(tid);
+}
+
+} // android::mediautils
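Callers only see the public wrapper; the dlopen of libmediautils_delayed happens on the first call and is cached. A hedged usage sketch (log tag and caller are placeholders):

    #define LOG_TAG "WatchdogDemo"
    #include <string>
    #include <unistd.h>
    #include <utils/Log.h>
    #include <mediautils/MediaUtilsDelayed.h>

    // Dumps the call stack of a possibly stuck thread. libutilscallstack is only
    // pulled in lazily via the dispatch table above; an empty result means the
    // delayed library could not be loaded.
    static void dumpStuckThread(pid_t tid) {
        const std::string stack = android::mediautils::getCallStackStringForTid(tid);
        if (stack.empty()) {
            ALOGW("no call stack available for tid %d", tid);
        } else {
            ALOGW("tid %d call stack:\n%s", tid, stack.c_str());
        }
    }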
diff --git a/media/utils/MediaUtilsDelayedLibrary.cpp b/media/utils/MediaUtilsDelayedLibrary.cpp
new file mode 100644
index 0000000..9054c1a
--- /dev/null
+++ b/media/utils/MediaUtilsDelayedLibrary.cpp
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MediaUtilsDelayedLibrary.h"
+#include <utils/CallStack.h>
+
+// Methods that are dynamically linked.
+namespace {
+
+std::string getCallStackStringForTid(pid_t tid) {
+ android::CallStack cs{};
+ cs.update(0 /* ignoreDepth */, tid);
+ return cs.toString().c_str();
+}
+
+} // namespace
+
+// leave global, this is picked up from dynamic linking
+android::mediautils::delayed_library::DelayedDispatchTable gDelayedDispatchTable {
+ getCallStackStringForTid,
+};
diff --git a/media/utils/MediaUtilsDelayedLibrary.h b/media/utils/MediaUtilsDelayedLibrary.h
new file mode 100644
index 0000000..3d72a3a
--- /dev/null
+++ b/media/utils/MediaUtilsDelayedLibrary.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <unistd.h>
+
+// This should not be directly included by clients.
+// Use MediaUtilsDelayed.h instead.
+
+namespace android::mediautils::delayed_library {
+
+// Use a dispatch table to return methods from the delayed library
+struct DelayedDispatchTable {
+ std::string (*getCallStackStringForTid)(pid_t tid);
+};
+
+// Match with Android.bp and MediaUtilsDelayed.cpp.
+#define MEDIAUTILS_DELAYED_LIBRARY_NAME "libmediautils_delayed.so"
+
+// Match with MediaUtilsDelayed.cpp and MediaUtilsDelayedLibrary.cpp
+#define MEDIAUTILS_DELAYED_DISPATCH_TABLE_SYMBOL_NAME "gDelayedDispatchTable"
+
+} // namespace android::mediautils::delayed_library
diff --git a/media/utils/MethodStatistics.cpp b/media/utils/MethodStatistics.cpp
new file mode 100644
index 0000000..b179b20
--- /dev/null
+++ b/media/utils/MethodStatistics.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <mediautils/MethodStatistics.h>
+
+namespace android::mediautils {
+
+// Repository for MethodStatistics Objects
+
+std::shared_ptr<std::vector<std::string>>
+getStatisticsClassesForModule(std::string_view moduleName) {
+ static const std::map<std::string, std::shared_ptr<std::vector<std::string>>> m {
+ {
+ METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL,
+ std::shared_ptr<std::vector<std::string>>(
+ new std::vector<std::string>{
+ "DeviceHalHidl",
+ "EffectHalHidl",
+ "StreamInHalHidl",
+ "StreamOutHalHidl",
+ })
+ },
+ };
+ auto it = m.find({moduleName.begin(), moduleName.end()});
+ if (it == m.end()) return {};
+ return it->second;
+}
+
+static void addClassesToMap(const std::shared_ptr<std::vector<std::string>> &classNames,
+ std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> &map) {
+ if (classNames) {
+ for (const auto& className : *classNames) {
+ map.emplace(className, std::make_shared<MethodStatistics<std::string>>());
+ }
+ }
+}
+
+// singleton statistics for DeviceHalHidl StreamOutHalHidl StreamInHalHidl
+std::shared_ptr<MethodStatistics<std::string>>
+getStatisticsForClass(std::string_view className) {
+ static const std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> m =
+ // copy elided initialization of map m.
+ [](){
+ std::map<std::string, std::shared_ptr<MethodStatistics<std::string>>> m;
+ addClassesToMap(
+ getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL),
+ m);
+ return m;
+ }();
+
+ auto it = m.find({className.begin(), className.end()});
+ if (it == m.end()) return {};
+ return it->second;
+}
+
+} // android::mediautils
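A hedged sketch of retrieving the shared statistics object for one of the classes registered above; the recording API of MethodStatistics itself is declared in mediautils/MethodStatistics.h, which this diff does not show:

    #include <memory>
    #include <string>
    #include <mediautils/MethodStatistics.h>

    // Returns the process-wide statistics object for a registered audio HAL class,
    // or nullptr for classes outside the audio HIDL module list above.
    static std::shared_ptr<android::mediautils::MethodStatistics<std::string>>
    statsForDeviceHal() {
        return android::mediautils::getStatisticsForClass("DeviceHalHidl");
    }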
diff --git a/media/utils/Process.cpp b/media/utils/Process.cpp
new file mode 100644
index 0000000..8fe8003
--- /dev/null
+++ b/media/utils/Process.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Process"
+#include <utils/Log.h>
+#include <mediautils/Process.h>
+
+#include <android-base/file.h>
+#include <android-base/strings.h>
+#include <cstdlib>
+
+namespace {
+
+void processLine(std::string_view s, std::map<std::string, double>& m) {
+ if (s.empty()) return;
+
+ const size_t colon_pos = s.find(':');
+ if (colon_pos == std::string_view::npos) return;
+
+ const size_t space_pos = s.find(' ');
+ if (space_pos == 0 || space_pos == std::string_view::npos || space_pos > colon_pos) return;
+ std::string key(s.data(), s.data() + space_pos);
+
+ const size_t value_pos = s.find_first_not_of(' ', colon_pos + 1);
+ if (value_pos == std::string_view::npos) return;
+
+ const double value = strtod(s.data() + value_pos, nullptr /* end */);
+ m[std::move(key)] = value;
+}
+
+} // namespace
+
+namespace android::mediautils {
+
+std::string getThreadSchedAsString(pid_t tid) {
+ const pid_t pid = getpid();
+ const std::string path = std::string("/proc/").append(std::to_string(pid))
+ .append("/task/").append(std::to_string(tid)).append("/sched");
+ std::string sched;
+ (void)android::base::ReadFileToString(path.c_str(), &sched);
+ return sched;
+}
+
+std::map<std::string, double> parseThreadSchedString(const std::string& schedString) {
+ std::map<std::string, double> m;
+ if (schedString.empty()) return m;
+ std::vector<std::string> stringlist = android::base::Split(schedString, "\n");
+
+ // OK we use values not strings... m["summary"] = stringlist[0];
+ for (size_t i = 2; i < stringlist.size(); ++i) {
+ processLine(stringlist[i], m);
+ }
+ return m;
+}
+
+} // namespace android::mediautils
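A sketch of how the two helpers compose: read the calling thread's sched file and look up one of the parsed counters. The key name comes from the list used by ThreadSnapshot below; availability of individual keys is kernel-dependent:

    #include <cstdio>
    #include <string>
    #include <unistd.h>
    #include <mediautils/Process.h>

    // Prints one scheduler statistic for the calling thread, if the kernel
    // exposes it in /proc/<pid>/task/<tid>/sched.
    static void printExecRuntime() {
        const std::string sched = android::mediautils::getThreadSchedAsString(gettid());
        const auto values = android::mediautils::parseThreadSchedString(sched);
        if (const auto it = values.find("se.sum_exec_runtime"); it != values.end()) {
            std::printf("se.sum_exec_runtime: %f\n", it->second);
        }
    }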
diff --git a/media/utils/ThreadSnapshot.cpp b/media/utils/ThreadSnapshot.cpp
new file mode 100644
index 0000000..382738e
--- /dev/null
+++ b/media/utils/ThreadSnapshot.cpp
@@ -0,0 +1,145 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ThreadSnapshot"
+#include <utils/Log.h>
+#include <utils/Timers.h>
+#include <mediautils/ThreadSnapshot.h>
+
+#include <mediautils/Process.h>
+
+namespace android::mediautils {
+
+pid_t ThreadSnapshot::getTid() const {
+ std::lock_guard lg(mLock);
+ return mState.mTid;
+}
+
+void ThreadSnapshot::setTid(pid_t tid) {
+ std::lock_guard lg(mLock);
+ if (mState.mTid == tid) return;
+ mState.reset(tid);
+}
+
+void ThreadSnapshot::reset() {
+ std::lock_guard lg(mLock);
+ mState.reset(mState.mTid);
+}
+
+void ThreadSnapshot::onBegin() {
+ std::string sched = getThreadSchedAsString(getTid()); // tid could race here,
+ // accept as benign.
+ std::lock_guard lg(mLock);
+ mState.onBegin(std::move(sched));
+}
+
+void ThreadSnapshot::onEnd() {
+ std::lock_guard lg(mLock);
+ mState.onEnd();
+}
+
+std::string ThreadSnapshot::toString() const {
+ // Make a local copy of the stats data under lock.
+ State state;
+ {
+ std::lock_guard lg(mLock);
+ state = mState;
+ }
+ return state.toString();
+}
+
+void ThreadSnapshot::State::reset(pid_t tid) {
+ mTid = tid;
+ mBeginTimeNs = -2;
+ mEndTimeNs = -1;
+ mCumulativeTimeNs = 0;
+ mBeginSched.clear();
+}
+
+void ThreadSnapshot::State::onBegin(std::string sched) {
+ if (mBeginTimeNs < mEndTimeNs) {
+ mBeginTimeNs = systemTime();
+ mBeginSched = std::move(sched);
+ }
+}
+
+void ThreadSnapshot::State::onEnd() {
+ if (mEndTimeNs < mBeginTimeNs) {
+ mEndTimeNs = systemTime();
+ mCumulativeTimeNs += mEndTimeNs - mBeginTimeNs;
+ }
+}
+
+std::string ThreadSnapshot::State::toString() const {
+ if (mBeginTimeNs < 0) return {}; // never begun.
+
+ // compute time intervals.
+ const int64_t nowNs = systemTime();
+ int64_t cumulativeTimeNs = mCumulativeTimeNs;
+ int64_t diffNs = mEndTimeNs - mBeginTimeNs; // if onEnd() isn't matched, diffNs < 0.
+ if (diffNs < 0) {
+ diffNs = nowNs - mBeginTimeNs;
+ cumulativeTimeNs += diffNs;
+ }
+ // normalization for rate variables
+ const double lastRunPerSec = 1e9 / diffNs;
+ const double totalPerSec = 1e9 / cumulativeTimeNs;
+
+ // HANDLE THE SCHEDULER STATISTICS HERE
+ // current and differential statistics for the scheduler.
+ std::string schedNow = getThreadSchedAsString(mTid);
+ const auto schedMapThen = parseThreadSchedString(mBeginSched);
+ const auto schedMapNow = parseThreadSchedString(schedNow);
+ static const char * schedDiffKeyList[] = {
+ "se.sum_exec_runtime",
+ "se.nr_migrations",
+ "se.statistics.wait_sum",
+ "se.statistics.wait_count",
+ "se.statistics.iowait_sum",
+ "se.statistics.iowait_count",
+ "se.statistics.nr_forced_migrations",
+ "nr_involuntary_switches",
+ };
+
+ // compute differential rate statistics.
+ std::string diffString;
+ for (const auto diffKey : schedDiffKeyList) {
+ if (auto itThen = schedMapThen.find(diffKey);
+ itThen != schedMapThen.end()) {
+
+ if (auto itNow = schedMapNow.find(diffKey);
+ itNow != schedMapNow.end()) {
+ auto diff = itNow->second - itThen->second;
+ diff *= lastRunPerSec;
+ auto total = itNow->second * totalPerSec;
+ diffString.append(diffKey).append(" last-run:")
+ .append(std::to_string(diff))
+ .append(" cumulative:")
+ .append(std::to_string(total))
+ .append("\n");
+ }
+ }
+ }
+
+ if (!diffString.empty()) {
+ schedNow.append("*** per second stats ***\n").append(diffString);
+ }
+
+ // Return snapshot string.
+ return schedNow;
+}
+
+} // android::mediautils
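The snapshot is meant to bracket a monitored operation with onBegin()/onEnd() and report via toString(). A minimal usage sketch, assuming the class is default-constructible (its declaration in mediautils/ThreadSnapshot.h is not part of this diff):

    #include <cstdio>
    #include <unistd.h>
    #include <mediautils/ThreadSnapshot.h>

    // Brackets a section of work on the current thread and prints the scheduler
    // snapshot afterwards. Default construction is an assumption here.
    static void profileSection() {
        android::mediautils::ThreadSnapshot snapshot;
        snapshot.setTid(gettid());

        snapshot.onBegin();
        // ... the monitored operation would run here ...
        snapshot.onEnd();

        std::printf("%s\n", snapshot.toString().c_str());
    }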
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 2b765cc..75a1b22 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -17,23 +17,79 @@
#define LOG_TAG "TimeCheck"
#include <optional>
-#include <sstream>
+#include <android-base/logging.h>
+#include <audio_utils/clock.h>
#include <mediautils/EventLog.h>
+#include <mediautils/MethodStatistics.h>
#include <mediautils/TimeCheck.h>
#include <utils/Log.h>
#include "debuggerd/handler.h"
-namespace android {
+namespace android::mediautils {
-namespace {
-
+/**
+ * Returns the std::string "HH:MM:SS.MSc" from a system_clock time_point.
+ */
std::string formatTime(std::chrono::system_clock::time_point t) {
- auto msSinceEpoch = std::chrono::round<std::chrono::milliseconds>(t.time_since_epoch());
- return (std::ostringstream() << msSinceEpoch.count()).str();
+ auto time_string = audio_utils_time_string_from_ns(
+ std::chrono::nanoseconds(t.time_since_epoch()).count());
+
+ // The time string is 19 characters (including null termination).
+ // Example: "03-27 16:47:06.187"
+ // MM DD HH MM SS MS
+ // We offset by 6 to get HH:MM:SS.MSc
+ //
+ return time_string.time + 6; // offset to remove month/day.
}
-} // namespace
+/**
+ * Finds the end of the common time prefix.
+ *
+ * This provides an option to remove the common time prefix to avoid
+ * unnecessary duplication in the printed strings.
+ *
+ * \param time1 a time string
+ * \param time2 a time string
+ * \return the position where the common time prefix ends. For abbreviated
+ * printing of time2, offset the character pointer by this position.
+ */
+static size_t commonTimePrefixPosition(std::string_view time1, std::string_view time2) {
+ const size_t endPos = std::min(time1.size(), time2.size());
+ size_t i;
+
+ // Find location of the first mismatch between strings
+ for (i = 0; ; ++i) {
+ if (i == endPos) {
+ return i; // strings match completely to the length of one of the strings.
+ }
+ if (time1[i] != time2[i]) {
+ break;
+ }
+ if (time1[i] == '\0') {
+ return i; // "printed" strings match completely. No need to check further.
+ }
+ }
+
+ // Go backwards until we find a delimiter or space.
+ for (; i > 0
+ && isdigit(time1[i]) // still a number
+ && time1[i - 1] != ' '
+ ; --i) {
+ }
+ return i;
+}
+
+/**
+ * Returns the unique suffix of time2 that isn't present in time1.
+ *
+ * If time2 is identical to time1, then an empty string_view is returned.
+ * This method is used to elide the common prefix when printing times.
+ */
+std::string_view timeSuffix(std::string_view time1, std::string_view time2) {
+ const size_t pos = commonTimePrefixPosition(time1, time2);
+ return time2.substr(pos);
+}
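+
+// Illustrative example (values are hypothetical):
+// timeSuffix("10:30:05.123", "10:30:06.456") returns ":06.456",
+// eliding the common "10:30" prefix.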
// Audio HAL server pids vector used to generate audio HAL processes tombstone
// when audioserver watchdog triggers.
@@ -48,7 +104,7 @@
void TimeCheck::accessAudioHalPids(std::vector<pid_t>* pids, bool update) {
static constexpr int kNumAudioHalPidsVectors = 3;
static std::vector<pid_t> audioHalPids[kNumAudioHalPidsVectors];
- static std::atomic<int> curAudioHalPids = 0;
+ static std::atomic<unsigned> curAudioHalPids = 0;
if (update) {
audioHalPids[(curAudioHalPids++ + 1) % kNumAudioHalPidsVectors] = *pids;
@@ -70,27 +126,69 @@
}
/* static */
-TimerThread* TimeCheck::getTimeCheckThread() {
- static TimerThread* sTimeCheckThread = new TimerThread();
+TimerThread& TimeCheck::getTimeCheckThread() {
+ static TimerThread sTimeCheckThread{};
return sTimeCheckThread;
}
-TimeCheck::TimeCheck(const char* tag, uint32_t timeoutMs)
- : mTimerHandle(getTimeCheckThread()->scheduleTask(
- [tag, startTime = std::chrono::system_clock::now()] { crash(tag, startTime); },
- std::chrono::milliseconds(timeoutMs))) {}
-
-TimeCheck::~TimeCheck() {
- getTimeCheckThread()->cancelTask(mTimerHandle);
+/* static */
+std::string TimeCheck::toString() {
+ // Note: the pending and retired queues are individually locked for maximum concurrency,
+ // so the snapshot is not instantaneous at a single point in time.
+ return getTimeCheckThread().toString();
}
-/* static */
-void TimeCheck::crash(const char* tag, std::chrono::system_clock::time_point startTime) {
- std::chrono::system_clock::time_point endTime = std::chrono::system_clock::now();
+TimeCheck::TimeCheck(std::string tag, OnTimerFunc&& onTimer, uint32_t timeoutMs,
+ bool crashOnTimeout)
+ : mTimeCheckHandler(new TimeCheckHandler{
+ std::move(tag), std::move(onTimer), crashOnTimeout,
+ std::chrono::system_clock::now(), gettid()})
+ , mTimerHandle(timeoutMs == 0
+ ? getTimeCheckThread().trackTask(mTimeCheckHandler->tag)
+ : getTimeCheckThread().scheduleTask(
+ mTimeCheckHandler->tag,
+ // Pass in all the arguments by value to this task for safety.
+ // The thread could call the callback before the constructor is finished.
+ // The destructor is not blocked on callback.
+ [ timeCheckHandler = mTimeCheckHandler ] {
+ timeCheckHandler->onTimeout();
+ },
+ std::chrono::milliseconds(timeoutMs))) {}
+
+TimeCheck::~TimeCheck() {
+ if (mTimeCheckHandler) {
+ mTimeCheckHandler->onCancel(mTimerHandle);
+ }
+}
+
+void TimeCheck::TimeCheckHandler::onCancel(TimerThread::Handle timerHandle) const
+{
+ if (TimeCheck::getTimeCheckThread().cancelTask(timerHandle) && onTimer) {
+ const std::chrono::system_clock::time_point endTime = std::chrono::system_clock::now();
+ onTimer(false /* timeout */,
+ std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+ endTime - startTime).count());
+ }
+}
+
+void TimeCheck::TimeCheckHandler::onTimeout() const
+{
+ const std::chrono::system_clock::time_point endTime = std::chrono::system_clock::now();
+ if (onTimer) {
+ onTimer(true /* timeout */,
+ std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+ endTime - startTime).count());
+ }
+
+ if (!crashOnTimeout) return;
+
+ // Generate the TimerThread summary string early before sending signals to the
+ // HAL processes which can affect thread behavior.
+ const std::string summary = getTimeCheckThread().toString(4 /* retiredCount */);
// Generate audio HAL processes tombstones and allow time to complete
// before forcing restart
- std::vector<pid_t> pids = getAudioHalPids();
+ std::vector<pid_t> pids = TimeCheck::getAudioHalPids();
if (pids.size() != 0) {
for (const auto& pid : pids) {
ALOGI("requesting tombstone for pid: %d", pid);
@@ -100,9 +198,45 @@
} else {
ALOGI("No HAL process pid available, skipping tombstones");
}
- LOG_EVENT_STRING(LOGTAG_AUDIO_BINDER_TIMEOUT, tag);
- LOG_ALWAYS_FATAL("TimeCheck timeout for %s (start=%s, end=%s)", tag,
- formatTime(startTime).c_str(), formatTime(endTime).c_str());
+
+ LOG_EVENT_STRING(LOGTAG_AUDIO_BINDER_TIMEOUT, tag.c_str());
+
+ // Create abort message string - caution: this can be very large.
+ const std::string abortMessage = std::string("TimeCheck timeout for ")
+ .append(tag)
+ .append(" scheduled ").append(formatTime(startTime))
+ .append(" on thread ").append(std::to_string(tid)).append("\n")
+ .append(summary);
+
+ // Note: LOG_ALWAYS_FATAL limits the size of the string - per log/log.h:
+ // Log message text may be truncated to less than an
+ // implementation-specific limit (1023 bytes).
+ //
+ // Here, we send the string through android-base/logging.h LOG()
+ // to avoid the size limitation. LOG(FATAL) does an abort whereas
+ // LOG(FATAL_WITHOUT_ABORT) does not abort.
+
+ LOG(FATAL) << abortMessage;
}
-}; // namespace android
+// Automatically create a TimeCheck class for a class and method.
+// This is used for Audio HIDL support.
+mediautils::TimeCheck makeTimeCheckStatsForClassMethod(
+ std::string_view className, std::string_view methodName) {
+ std::shared_ptr<MethodStatistics<std::string>> statistics =
+ mediautils::getStatisticsForClass(className);
+ if (!statistics) return {}; // empty TimeCheck.
+ return mediautils::TimeCheck(
+ std::string(className).append("::").append(methodName),
+ [ clazz = std::string(className), method = std::string(methodName),
+ stats = std::move(statistics) ]
+ (bool timeout, float elapsedMs) {
+ if (timeout) {
+ ; // ignored, there is no timeout value.
+ } else {
+ stats->event(method, elapsedMs);
+ }
+ }, 0 /* timeoutMs */);
+}
+
+} // namespace android::mediautils
diff --git a/media/utils/TimerThread-test.cpp b/media/utils/TimerThread-test.cpp
index ee8a811..93cd64c 100644
--- a/media/utils/TimerThread-test.cpp
+++ b/media/utils/TimerThread-test.cpp
@@ -20,54 +20,71 @@
#include <mediautils/TimerThread.h>
using namespace std::chrono_literals;
+using namespace android::mediautils;
-namespace android {
namespace {
constexpr auto kJitter = 10ms;
+// Each task written by *ToString() will start with a left brace.
+constexpr char REQUEST_START = '{';
+
+inline size_t countChars(std::string_view s, char c) {
+ return std::count(s.begin(), s.end(), c);
+}
+
TEST(TimerThread, Basic) {
std::atomic<bool> taskRan = false;
TimerThread thread;
- thread.scheduleTask([&taskRan] { taskRan = true; }, 100ms);
+ thread.scheduleTask("Basic", [&taskRan] { taskRan = true; }, 100ms);
std::this_thread::sleep_for(100ms - kJitter);
ASSERT_FALSE(taskRan);
std::this_thread::sleep_for(2 * kJitter);
ASSERT_TRUE(taskRan);
+ ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
}
TEST(TimerThread, Cancel) {
std::atomic<bool> taskRan = false;
TimerThread thread;
- TimerThread::Handle handle = thread.scheduleTask([&taskRan] { taskRan = true; }, 100ms);
+ TimerThread::Handle handle =
+ thread.scheduleTask("Cancel", [&taskRan] { taskRan = true; }, 100ms);
std::this_thread::sleep_for(100ms - kJitter);
ASSERT_FALSE(taskRan);
- thread.cancelTask(handle);
+ ASSERT_TRUE(thread.cancelTask(handle));
std::this_thread::sleep_for(2 * kJitter);
ASSERT_FALSE(taskRan);
+ ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
}
TEST(TimerThread, CancelAfterRun) {
std::atomic<bool> taskRan = false;
TimerThread thread;
- TimerThread::Handle handle = thread.scheduleTask([&taskRan] { taskRan = true; }, 100ms);
+ TimerThread::Handle handle =
+ thread.scheduleTask("CancelAfterRun", [&taskRan] { taskRan = true; }, 100ms);
std::this_thread::sleep_for(100ms + kJitter);
ASSERT_TRUE(taskRan);
- thread.cancelTask(handle);
+ ASSERT_FALSE(thread.cancelTask(handle));
+ ASSERT_EQ(1, countChars(thread.retiredToString(), REQUEST_START));
}
TEST(TimerThread, MultipleTasks) {
- std::array<std::atomic<bool>, 6> taskRan;
+ std::array<std::atomic<bool>, 6> taskRan{};
TimerThread thread;
auto startTime = std::chrono::steady_clock::now();
- thread.scheduleTask([&taskRan] { taskRan[0] = true; }, 300ms);
- thread.scheduleTask([&taskRan] { taskRan[1] = true; }, 100ms);
- thread.scheduleTask([&taskRan] { taskRan[2] = true; }, 200ms);
- thread.scheduleTask([&taskRan] { taskRan[3] = true; }, 400ms);
- auto handle4 = thread.scheduleTask([&taskRan] { taskRan[4] = true; }, 200ms);
- thread.scheduleTask([&taskRan] { taskRan[5] = true; }, 200ms);
+ thread.scheduleTask("0", [&taskRan] { taskRan[0] = true; }, 300ms);
+ thread.scheduleTask("1", [&taskRan] { taskRan[1] = true; }, 100ms);
+ thread.scheduleTask("2", [&taskRan] { taskRan[2] = true; }, 200ms);
+ thread.scheduleTask("3", [&taskRan] { taskRan[3] = true; }, 400ms);
+ auto handle4 = thread.scheduleTask("4", [&taskRan] { taskRan[4] = true; }, 200ms);
+ thread.scheduleTask("5", [&taskRan] { taskRan[5] = true; }, 200ms);
+
+ // 6 tasks pending
+ ASSERT_EQ(6, countChars(thread.pendingToString(), REQUEST_START));
+ // 0 tasks completed
+ ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
// Task 1 should trigger around 100ms.
std::this_thread::sleep_until(startTime + 100ms - kJitter);
@@ -123,6 +140,11 @@
ASSERT_FALSE(taskRan[4]);
ASSERT_TRUE(taskRan[5]);
+ // 1 task pending
+ ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+ // 4 tasks ran and 1 cancelled
+ ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
+
// Task 3 should trigger around 400ms.
std::this_thread::sleep_until(startTime + 400ms - kJitter);
ASSERT_TRUE(taskRan[0]);
@@ -132,6 +154,9 @@
ASSERT_FALSE(taskRan[4]);
ASSERT_TRUE(taskRan[5]);
+ // 4 tasks ran and 1 cancelled
+ ASSERT_EQ(4 + 1, countChars(thread.retiredToString(), REQUEST_START));
+
std::this_thread::sleep_until(startTime + 400ms + kJitter);
ASSERT_TRUE(taskRan[0]);
ASSERT_TRUE(taskRan[1]);
@@ -139,8 +164,62 @@
ASSERT_TRUE(taskRan[3]);
ASSERT_FALSE(taskRan[4]);
ASSERT_TRUE(taskRan[5]);
+
+ // 0 tasks pending
+ ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
+ // 5 tasks ran and 1 cancelled
+ ASSERT_EQ(5 + 1, countChars(thread.retiredToString(), REQUEST_START));
}
+TEST(TimerThread, TrackedTasks) {
+ TimerThread thread;
+
+ auto handle0 = thread.trackTask("0");
+ auto handle1 = thread.trackTask("1");
+ auto handle2 = thread.trackTask("2");
+
+ // 3 tasks pending
+ ASSERT_EQ(3, countChars(thread.pendingToString(), REQUEST_START));
+ // 0 tasks retired
+ ASSERT_EQ(0, countChars(thread.retiredToString(), REQUEST_START));
+
+ ASSERT_TRUE(thread.cancelTask(handle0));
+ ASSERT_TRUE(thread.cancelTask(handle1));
+
+ // 1 task pending
+ ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+ // 2 tasks retired
+ ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+
+ // handle1 is stale, cancel returns false.
+ ASSERT_FALSE(thread.cancelTask(handle1));
+
+ // 1 task pending
+ ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+ // 2 tasks retired
+ ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+
+ // Add another tracked task.
+ auto handle3 = thread.trackTask("3");
+
+ // 2 tasks pending
+ ASSERT_EQ(2, countChars(thread.pendingToString(), REQUEST_START));
+ // 2 tasks retired
+ ASSERT_EQ(2, countChars(thread.retiredToString(), REQUEST_START));
+
+ ASSERT_TRUE(thread.cancelTask(handle2));
+
+ // 1 task pending
+ ASSERT_EQ(1, countChars(thread.pendingToString(), REQUEST_START));
+ // 3 tasks retired
+ ASSERT_EQ(3, countChars(thread.retiredToString(), REQUEST_START));
+
+ ASSERT_TRUE(thread.cancelTask(handle3));
+
+ // 0 tasks pending
+ ASSERT_EQ(0, countChars(thread.pendingToString(), REQUEST_START));
+ // 4 tasks retired
+ ASSERT_EQ(4, countChars(thread.retiredToString(), REQUEST_START));
+}
} // namespace
-} // namespace android
diff --git a/media/utils/TimerThread.cpp b/media/utils/TimerThread.cpp
index 3c95798..6de6b13 100644
--- a/media/utils/TimerThread.cpp
+++ b/media/utils/TimerThread.cpp
@@ -17,18 +17,278 @@
#define LOG_TAG "TimerThread"
#include <optional>
+#include <sstream>
+#include <unistd.h>
+#include <vector>
+#include <mediautils/MediaUtilsDelayed.h>
#include <mediautils/TimerThread.h>
#include <utils/ThreadDefs.h>
-namespace android {
+namespace android::mediautils {
-TimerThread::TimerThread() : mThread([this] { threadFunc(); }) {
- pthread_setname_np(mThread.native_handle(), "TimeCheckThread");
- pthread_setschedprio(mThread.native_handle(), PRIORITY_URGENT_AUDIO);
+extern std::string formatTime(std::chrono::system_clock::time_point t);
+extern std::string_view timeSuffix(std::string_view time1, std::string_view time2);
+
+TimerThread::Handle TimerThread::scheduleTask(
+ std::string tag, std::function<void()>&& func, std::chrono::milliseconds timeout) {
+ const auto now = std::chrono::system_clock::now();
+ std::shared_ptr<const Request> request{
+ new Request{ now, now + timeout, gettid(), std::move(tag) }};
+ return mMonitorThread.add(std::move(request), std::move(func), timeout);
}
-TimerThread::~TimerThread() {
+TimerThread::Handle TimerThread::trackTask(std::string tag) {
+ const auto now = std::chrono::system_clock::now();
+ std::shared_ptr<const Request> request{
+ new Request{ now, now, gettid(), std::move(tag) }};
+ return mNoTimeoutMap.add(std::move(request));
+}
+
+bool TimerThread::cancelTask(Handle handle) {
+ std::shared_ptr<const Request> request = mNoTimeoutMap.isValidHandle(handle) ?
+ mNoTimeoutMap.remove(handle) : mMonitorThread.remove(handle);
+ if (!request) return false;
+ mRetiredQueue.add(std::move(request));
+ return true;
+}
+
+std::string TimerThread::toString(size_t retiredCount) const {
+ // Note: These request queues are snapshotted very close together, but
+ // not at "identical" times, as we don't use a class-wide lock.
+
+ std::vector<std::shared_ptr<const Request>> timeoutRequests;
+ std::vector<std::shared_ptr<const Request>> retiredRequests;
+ mTimeoutQueue.copyRequests(timeoutRequests);
+ mRetiredQueue.copyRequests(retiredRequests, retiredCount);
+ std::vector<std::shared_ptr<const Request>> pendingRequests =
+ getPendingRequests();
+
+ struct Analysis analysis = analyzeTimeout(timeoutRequests, pendingRequests);
+ std::string analysisSummary;
+ if (!analysis.summary.empty()) {
+ analysisSummary = std::string("\nanalysis [ ").append(analysis.summary).append(" ]");
+ }
+ std::string timeoutStack;
+ if (analysis.timeoutTid != -1) {
+ timeoutStack = std::string("\ntimeout(")
+ .append(std::to_string(analysis.timeoutTid)).append(") callstack [\n")
+ .append(getCallStackStringForTid(analysis.timeoutTid)).append("]");
+ }
+ std::string blockedStack;
+ if (analysis.HALBlockedTid != -1) {
+ blockedStack = std::string("\nblocked(")
+ .append(std::to_string(analysis.HALBlockedTid)).append(") callstack [\n")
+ .append(getCallStackStringForTid(analysis.HALBlockedTid)).append("]");
+ }
+
+ return std::string("now ")
+ .append(formatTime(std::chrono::system_clock::now()))
+ .append(analysisSummary)
+ .append("\ntimeout [ ")
+ .append(requestsToString(timeoutRequests))
+ .append(" ]\npending [ ")
+ .append(requestsToString(pendingRequests))
+ .append(" ]\nretired [ ")
+ .append(requestsToString(retiredRequests))
+ .append(" ]")
+ .append(timeoutStack)
+ .append(blockedStack);
+}
+
+// A HAL method is one where the substring "Hidl" appears in the class name.
+// The tag should look like: ... Hidl ... :: ...
+// When the audio HAL is updated to AIDL, perhaps we will instead use
+// a global directory of HAL classes.
+//
+// See MethodStatistics.cpp:
+// mediautils::getStatisticsClassesForModule(METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL)
+//
+/* static */
+bool TimerThread::isRequestFromHal(const std::shared_ptr<const Request>& request) {
+ const size_t hidlPos = request->tag.find("Hidl");
+ if (hidlPos == std::string::npos) return false;
+ // There should be a "::" separator after "Hidl", which indicates the substring was part of the class name.
+ const size_t separatorPos = request->tag.find("::", hidlPos);
+ return separatorPos != std::string::npos;
+}
+
+/* static */
+struct TimerThread::Analysis TimerThread::analyzeTimeout(
+ const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
+ const std::vector<std::shared_ptr<const Request>>& pendingRequests) {
+
+ if (timeoutRequests.empty() || pendingRequests.empty()) return {}; // nothing to say.
+
+ // for now look at last timeout (in our case, the only timeout)
+ const std::shared_ptr<const Request> timeout = timeoutRequests.back();
+
+ // pending Requests that are problematic.
+ std::vector<std::shared_ptr<const Request>> pendingExact;
+ std::vector<std::shared_ptr<const Request>> pendingPossible;
+
+ // We look at pending requests that were scheduled no later than kDuration
+ // after the timeout request. This prevents false matches with calls
+ // that naturally block for a short period of time
+ // such as HAL write() and read().
+ //
+ auto constexpr kDuration = std::chrono::milliseconds(1000);
+ for (const auto& pending : pendingRequests) {
+ // If the pending tid is the same as timeout tid, problem identified.
+ if (pending->tid == timeout->tid) {
+ pendingExact.emplace_back(pending);
+ continue;
+ }
+
+ // If the pending request was scheduled within the time limit.
+ if (pending->scheduled - timeout->scheduled < kDuration) {
+ pendingPossible.emplace_back(pending);
+ }
+ }
+
+ struct Analysis analysis{};
+
+ analysis.timeoutTid = timeout->tid;
+ std::string& summary = analysis.summary;
+ if (!pendingExact.empty()) {
+ const auto& request = pendingExact.front();
+ const bool hal = isRequestFromHal(request);
+
+ if (hal) {
+ summary = std::string("Blocked directly due to HAL call: ")
+ .append(request->toString());
+ }
+ }
+ if (summary.empty() && !pendingPossible.empty()) {
+ for (const auto& request : pendingPossible) {
+ const bool hal = isRequestFromHal(request);
+ if (hal) {
+ // The first blocked call is the most likely culprit.
+ // More recent calls might merely be temporarily blocked
+ // calls, such as write() or read(), depending on kDuration.
+ summary = std::string("Blocked possibly due to HAL call: ")
+ .append(request->toString());
+ analysis.HALBlockedTid = request->tid;
+ }
+ }
+ }
+ return analysis;
+}
+
+std::vector<std::shared_ptr<const TimerThread::Request>> TimerThread::getPendingRequests() const {
+ constexpr size_t kEstimatedPendingRequests = 8; // approx 128 byte alloc.
+ std::vector<std::shared_ptr<const Request>> pendingRequests;
+ pendingRequests.reserve(kEstimatedPendingRequests); // preallocate vector out of lock.
+
+ // following are internally locked calls, which add to our local pendingRequests.
+ mMonitorThread.copyRequests(pendingRequests);
+ mNoTimeoutMap.copyRequests(pendingRequests);
+
+ // Sort in order of scheduled time.
+ std::sort(pendingRequests.begin(), pendingRequests.end(),
+ [](const std::shared_ptr<const Request>& r1,
+ const std::shared_ptr<const Request>& r2) {
+ return r1->scheduled < r2->scheduled;
+ });
+ return pendingRequests;
+}
+
+std::string TimerThread::pendingToString() const {
+ return requestsToString(getPendingRequests());
+}
+
+std::string TimerThread::retiredToString(size_t n) const {
+ std::vector<std::shared_ptr<const Request>> retiredRequests;
+ mRetiredQueue.copyRequests(retiredRequests, n);
+
+ // Dump to string
+ return requestsToString(retiredRequests);
+}
+
+std::string TimerThread::timeoutToString(size_t n) const {
+ std::vector<std::shared_ptr<const Request>> timeoutRequests;
+ mTimeoutQueue.copyRequests(timeoutRequests, n);
+
+ // Dump to string
+ return requestsToString(timeoutRequests);
+}
+
+std::string TimerThread::Request::toString() const {
+ const auto scheduledString = formatTime(scheduled);
+ const auto deadlineString = formatTime(deadline);
+ return std::string(tag)
+ .append(" scheduled ").append(scheduledString)
+ .append(" deadline ").append(timeSuffix(scheduledString, deadlineString))
+ .append(" tid ").append(std::to_string(tid));
+}
+
+void TimerThread::RequestQueue::add(std::shared_ptr<const Request> request) {
+ std::lock_guard lg(mRQMutex);
+ mRequestQueue.emplace_back(std::chrono::system_clock::now(), std::move(request));
+ if (mRequestQueue.size() > mRequestQueueMax) {
+ mRequestQueue.pop_front();
+ }
+}
+
+void TimerThread::RequestQueue::copyRequests(
+ std::vector<std::shared_ptr<const Request>>& requests, size_t n) const {
+ std::lock_guard lg(mRQMutex);
+ const size_t size = mRequestQueue.size();
+ size_t i = n >= size ? 0 : size - n;
+ for (; i < size; ++i) {
+ const auto &[time, request] = mRequestQueue[i];
+ requests.emplace_back(request);
+ }
+}
+
+bool TimerThread::NoTimeoutMap::isValidHandle(Handle handle) const {
+ return handle > getIndexedHandle(mNoTimeoutRequests);
+}
+
+TimerThread::Handle TimerThread::NoTimeoutMap::add(std::shared_ptr<const Request> request) {
+ std::lock_guard lg(mNTMutex);
+ // A unique handle is obtained by atomically incrementing mNoTimeoutRequests;
+ // this need not be done under the lock, but we do so anyhow.
+ const Handle handle = getIndexedHandle(mNoTimeoutRequests++);
+ mMap[handle] = request;
+ return handle;
+}
+
+std::shared_ptr<const TimerThread::Request> TimerThread::NoTimeoutMap::remove(Handle handle) {
+ std::lock_guard lg(mNTMutex);
+ auto it = mMap.find(handle);
+ if (it == mMap.end()) return {};
+ auto request = it->second;
+ mMap.erase(it);
+ return request;
+}
+
+void TimerThread::NoTimeoutMap::copyRequests(
+ std::vector<std::shared_ptr<const Request>>& requests) const {
+ std::lock_guard lg(mNTMutex);
+ for (const auto &[handle, request] : mMap) {
+ requests.emplace_back(request);
+ }
+}
+
+TimerThread::Handle TimerThread::MonitorThread::getUniqueHandle_l(
+ std::chrono::milliseconds timeout) {
+ // To avoid key collisions, advance by 1 tick until the key is unique.
+ auto deadline = std::chrono::steady_clock::now() + timeout;
+ for (; mMonitorRequests.find(deadline) != mMonitorRequests.end();
+ deadline += std::chrono::steady_clock::duration(1))
+ ;
+ return deadline;
+}
+
+TimerThread::MonitorThread::MonitorThread(RequestQueue& timeoutQueue)
+ : mTimeoutQueue(timeoutQueue)
+ , mThread([this] { threadFunc(); }) {
+ pthread_setname_np(mThread.native_handle(), "TimerThread");
+ pthread_setschedprio(mThread.native_handle(), PRIORITY_URGENT_AUDIO);
+}
+
+TimerThread::MonitorThread::~MonitorThread() {
{
std::lock_guard _l(mMutex);
mShouldExit = true;
@@ -37,34 +297,26 @@
mThread.join();
}
-TimerThread::Handle TimerThread::scheduleTaskAtDeadline(std::function<void()>&& func,
- TimePoint deadline) {
- std::lock_guard _l(mMutex);
-
- // To avoid key collisions, advance by 1 tick until the key is unique.
- for (; mMonitorRequests.find(deadline) != mMonitorRequests.end();
- deadline += TimePoint::duration(1))
- ;
- mMonitorRequests.emplace(deadline, std::move(func));
- mCond.notify_all();
- return deadline;
-}
-
-void TimerThread::cancelTask(Handle handle) {
- std::lock_guard _l(mMutex);
- mMonitorRequests.erase(handle);
-}
-
-void TimerThread::threadFunc() {
+void TimerThread::MonitorThread::threadFunc() {
std::unique_lock _l(mMutex);
-
while (!mShouldExit) {
if (!mMonitorRequests.empty()) {
- TimePoint nextDeadline = mMonitorRequests.begin()->first;
+ Handle nextDeadline = mMonitorRequests.begin()->first;
if (nextDeadline < std::chrono::steady_clock::now()) {
- // Deadline expired.
- mMonitorRequests.begin()->second();
- mMonitorRequests.erase(mMonitorRequests.begin());
+ // Deadline has expired, handle the request.
+ {
+ auto node = mMonitorRequests.extract(mMonitorRequests.begin());
+ _l.unlock();
+ // We add the Request to the timeout queue early so that it can be dumped out.
+ mTimeoutQueue.add(std::move(node.mapped().first));
+ node.mapped().second(); // Caution: we don't hold lock here - but do we care?
+ // this is the timeout case! We will crash soon,
+ // maybe before returning.
+ // Anything left over is released here, outside the lock.
+ }
+ // reacquire the lock - if something was added, we loop immediately to check.
+ _l.lock();
+ continue;
}
mCond.wait_until(_l, nextDeadline);
} else {
@@ -73,4 +325,35 @@
}
}
-} // namespace android
+TimerThread::Handle TimerThread::MonitorThread::add(
+ std::shared_ptr<const Request> request, std::function<void()>&& func,
+ std::chrono::milliseconds timeout) {
+ std::lock_guard _l(mMutex);
+ const Handle handle = getUniqueHandle_l(timeout);
+ mMonitorRequests.emplace(handle, std::make_pair(std::move(request), std::move(func)));
+ mCond.notify_all();
+ return handle;
+}
+
+std::shared_ptr<const TimerThread::Request> TimerThread::MonitorThread::remove(Handle handle) {
+ std::unique_lock ul(mMutex);
+ const auto it = mMonitorRequests.find(handle);
+ if (it == mMonitorRequests.end()) {
+ return {};
+ }
+ std::shared_ptr<const TimerThread::Request> request = std::move(it->second.first);
+ std::function<void()> func = std::move(it->second.second);
+ mMonitorRequests.erase(it);
+ ul.unlock(); // manually release lock here so func is released outside of lock.
+ return request;
+}
+
+void TimerThread::MonitorThread::copyRequests(
+ std::vector<std::shared_ptr<const Request>>& requests) const {
+ std::lock_guard lg(mMutex);
+ for (const auto &[deadline, monitorpair] : mMonitorRequests) {
+ requests.emplace_back(monitorpair.first);
+ }
+}
+
+} // namespace android::mediautils
diff --git a/media/utils/fuzzers/TimeCheckFuzz.cpp b/media/utils/fuzzers/TimeCheckFuzz.cpp
index eeb6ba6..7966469 100644
--- a/media/utils/fuzzers/TimeCheckFuzz.cpp
+++ b/media/utils/fuzzers/TimeCheckFuzz.cpp
@@ -44,11 +44,11 @@
// 2. We also have setAudioHalPids, which is populated with the pids set
// above.
- android::TimeCheck::setAudioHalPids(pids);
+ android::mediautils::TimeCheck::setAudioHalPids(pids);
std::string name = data_provider.ConsumeRandomLengthString(kMaxStringLen);
// 3. The constructor, which is fuzzed here:
- android::TimeCheck timeCheck(name.c_str(), timeoutMs);
+ android::mediautils::TimeCheck timeCheck(name.c_str(), {} /* onTimer */, timeoutMs);
// We will leave some buffer to avoid sleeping too long
uint8_t sleep_amount_ms = data_provider.ConsumeIntegralInRange<uint8_t>(0, timeoutMs / 2);
diff --git a/media/utils/include/mediautils/Library.h b/media/utils/include/mediautils/Library.h
new file mode 100644
index 0000000..19cfc11
--- /dev/null
+++ b/media/utils/include/mediautils/Library.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <dlfcn.h>
+#include <string>
+#include <unistd.h>
+
+namespace android::mediautils {
+
+/**
+ * Returns a shared pointer to the library instance.
+ *
+ * When the last reference to the library is removed, the library will be dlclose().
+ *
+ * Notes:
+ * 1) The Android bionic linker always uses RTLD_GLOBAL for executable linking
+ * which provides the symbols for other subsequent libraries.
+ *
+ * 2) RTLD_GLOBAL, like RTLD_NODELETE, disables unloading of the library
+ * when the reference count drops to zero.
+ *
+ * 3) RTLD_LOCAL is the default in the absence of RTLD_GLOBAL.
+ * RTLD_LOCAL may be ignored in some situations, for example:
+ * https://stackoverflow.com/questions/56808889/static-objects-destructed-before-dlclose
+ *
+ * 4) We default to use RTLD_LAZY to delay symbol relocations until needed.
+ * This flag may be ignored by Android. RTLD_LAZY may allow
+ * unresolved symbols if not accessed, or symbols added later with another library
+ * loaded with RTLD_GLOBAL. See RTLD_NOW for comparison.
+ *
+ * 5) Avoid both statically loading and dynamically loading the same library.
+ * This is known to cause double free issues as library symbols may map to
+ * the same location. RTLD_DEEPBIND does not appear supported as of T.
+ * https://stackoverflow.com/questions/34073051/when-we-are-supposed-to-use-rtld-deepbind
+ * https://stackoverflow.com/questions/31209693/static-library-linked-two-times
+ *
+ * Details on Android linker and debugging here:
+ * See: adb shell setprop debug.ld.all dlerror,dlopen,dlsym
+ * See: https://android.googlesource.com/platform/bionic/+/master/android-changes-for-ndk-developers.md
+ *
+ * Some other relevant info:
+ * See: Soong double_loadable:true go/double_loadable
+ * See: https://en.wikipedia.org/wiki/One_Definition_Rule#Summary
+ *
+ * TODO(b/228093151): Consider moving to platform/system.
+ *
+ * \param libraryName
+ * \param flags one of the dlopen RTLD_* flags. https://linux.die.net/man/3/dlopen
+ * \return shared_ptr to the library. This will be nullptr if it isn't found.
+ */
+std::shared_ptr<void> loadLibrary(const char *libraryName, int flags = RTLD_LAZY);
+
+/**
+ * Returns a shared pointer to an object in the library
+ *
+ * The object will be a global variable or method in the library.
+ * The object reference counting is aliased to the library shared ptr.
+ *
+ * Note: If any internals of the shared library are exposed, for example by
+ * a method returning a pointer to library globals,
+ * or returning an object whose class definition is from the library,
+ * then the shared_ptr must be kept alive while such references to
+ * library internals exist to prevent library unloading.
+ *
+ * See usage of RTLD_NODELETE as a flag to prevent unloading.
+ *
+ * \param objectName of the library object.
+ * \param library a shared pointer to the library returned by loadLibrary().
+ * \return shared_ptr to the object, but whose refcount is
+ * aliased to the library shared ptr.
+ */
+std::shared_ptr<void> getUntypedObjectFromLibrary(
+ const char *objectName, const std::shared_ptr<void>& library);
+
+/**
+ * Returns a shared pointer to an object in the library
+ *
+ * This is the template typed version of getUntypedObjectFromLibrary().
+ *
+ * \param objectName of the library object.
+ * \param library a shared pointer to the library
+ * \return shared_ptr to the object, but whose refcount is
+ * aliased to the library shared ptr.
+ */
+template <typename T>
+std::shared_ptr<T> getObjectFromLibrary(
+ const char *objectName, const std::shared_ptr<void>& library) {
+ return std::static_pointer_cast<T>(getUntypedObjectFromLibrary(objectName, library));
+}
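+
+// Usage sketch ("libsharedtest.so" and "gTestObject" are illustrative names):
+//
+//   std::shared_ptr<void> library = loadLibrary("libsharedtest.so");
+//   if (library) {
+//       auto object = getObjectFromLibrary<int>("gTestObject", library);
+//       // Keep 'library' (or 'object', which aliases it) alive while the
+//       // symbol is in use to prevent the library from unloading.
+//   }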
+
+} // android::mediautils
diff --git a/media/utils/include/mediautils/MediaUtilsDelayed.h b/media/utils/include/mediautils/MediaUtilsDelayed.h
new file mode 100644
index 0000000..1583e60
--- /dev/null
+++ b/media/utils/include/mediautils/MediaUtilsDelayed.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <unistd.h>
+
+namespace android::mediautils {
+
+// These methods use lazy library loading.
+
+/**
+ * Returns a string callstack from the thread id tid.
+ */
+std::string getCallStackStringForTid(pid_t tid);
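+
+// Usage sketch:
+//
+//   const std::string stack = getCallStackStringForTid(gettid());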
+
+} // android::mediautils
diff --git a/media/utils/include/mediautils/MethodStatistics.h b/media/utils/include/mediautils/MethodStatistics.h
new file mode 100644
index 0000000..700fbaa
--- /dev/null
+++ b/media/utils/include/mediautils/MethodStatistics.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <mutex>
+#include <string>
+#include <vector>
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/Statistics.h>
+
+namespace android::mediautils {
+
+/**
+ * MethodStatistics is used to associate Binder codes
+ * with a method name and execution time statistics.
+ *
+ * This is used to track binder transaction times for
+ * AudioFlinger and AudioPolicy services.
+ *
+ * Here, Code is the enumeration type for the method
+ * lookup.
+ */
+template <typename Code>
+class MethodStatistics {
+public:
+ using FloatType = float;
+ using StatsType = audio_utils::Statistics<FloatType>;
+
+ /**
+ * Method statistics.
+ *
+ * Initialized with the Binder transaction list for tracking AudioFlinger
+ * and AudioPolicyManager execution statistics.
+ */
+ explicit MethodStatistics(
+ const std::initializer_list<std::pair<const Code, std::string>>& methodMap = {})
+ : mMethodMap{methodMap} {}
+
+ /**
+ * Adds a method event, typically execution time in ms.
+ */
+ void event(Code code, FloatType executeMs) {
+ std::lock_guard lg(mLock);
+ mStatisticsMap[code].add(executeMs);
+ }
+
+ /**
+ * Returns the name for the method code.
+ */
+ std::string getMethodForCode(Code code) const {
+ auto it = mMethodMap.find(code);
+ return it == mMethodMap.end() ? std::to_string((int)code) : it->second;
+ }
+
+ /**
+ * Returns the number of times the method was invoked by event().
+ */
+ size_t getMethodCount(Code code) const {
+ std::lock_guard lg(mLock);
+ auto it = mStatisticsMap.find(code);
+ return it == mStatisticsMap.end() ? 0 : it->second.getN();
+ }
+
+ /**
+ * Returns the statistics object for the method.
+ */
+ StatsType getStatistics(Code code) const {
+ std::lock_guard lg(mLock);
+ auto it = mStatisticsMap.find(code);
+ return it == mStatisticsMap.end() ? StatsType{} : it->second;
+ }
+
+ /**
+ * Dumps the current method statistics.
+ */
+ std::string dump() const {
+ std::stringstream ss;
+ std::lock_guard lg(mLock);
+ if constexpr (std::is_same_v<Code, std::string>) {
+ for (const auto &[code, stats] : mStatisticsMap) {
+ ss << code <<
+ " n=" << stats.getN() << " " << stats.toString() << "\n";
+ }
+ } else /* constexpr */ {
+ for (const auto &[code, stats] : mStatisticsMap) {
+ ss << int(code) << " " << getMethodForCode(code) <<
+ " n=" << stats.getN() << " " << stats.toString() << "\n";
+ }
+ }
+ return ss.str();
+ }
+
+private:
+ const std::map<Code, std::string> mMethodMap;
+ mutable std::mutex mLock;
+ std::map<Code, StatsType> mStatisticsMap GUARDED_BY(mLock);
+};
+
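+// Usage sketch (the enum and method names are illustrative):
+//
+//   enum class Code { START = 1, STOP = 2 };
+//   MethodStatistics<Code> stats({{Code::START, "start"}, {Code::STOP, "stop"}});
+//   stats.event(Code::START, 5.2f /* executeMs */);
+//   std::string report = stats.dump();
+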
+// Managed Statistics support.
+// Supported Modules
+#define METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL "AudioHidl"
+
+// Returns a vector of class names for the module, or a nullptr if module not found.
+std::shared_ptr<std::vector<std::string>>
+getStatisticsClassesForModule(std::string_view moduleName);
+
+// Returns a statistics object for that class, or a nullptr if class not found.
+std::shared_ptr<MethodStatistics<std::string>>
+getStatisticsForClass(std::string_view className);
+
+// Only if used, requires IBinder.h to be included at the location of invocation.
+#define METHOD_STATISTICS_BINDER_CODE_NAMES(CODE_TYPE) \
+ {(CODE_TYPE)IBinder::PING_TRANSACTION , "ping"}, \
+ {(CODE_TYPE)IBinder::DUMP_TRANSACTION , "dump"}, \
+ {(CODE_TYPE)IBinder::SHELL_COMMAND_TRANSACTION , "shellCommand"}, \
+ {(CODE_TYPE)IBinder::INTERFACE_TRANSACTION , "getInterfaceDescriptor"}, \
+ {(CODE_TYPE)IBinder::SYSPROPS_TRANSACTION , "SYSPROPS_TRANSACTION"}, \
+ {(CODE_TYPE)IBinder::EXTENSION_TRANSACTION , "EXTENSION_TRANSACTION"}, \
+ {(CODE_TYPE)IBinder::DEBUG_PID_TRANSACTION , "DEBUG_PID_TRANSACTION"}, \
+
+} // android::mediautils
diff --git a/media/utils/include/mediautils/Process.h b/media/utils/include/mediautils/Process.h
new file mode 100644
index 0000000..d249c3a
--- /dev/null
+++ b/media/utils/include/mediautils/Process.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <map>
+#include <string>
+#include <unistd.h>
+
+/*
+ * This header contains utilities to read the linux system /proc/pid files
+ *
+ * The format of this is not guaranteed to be stable, so use for diagnostic purposes only.
+ *
+ * The linux "proc" directory documentation:
+ * https://kernel.org/doc/Documentation/filesystems/proc.txt
+ * https://www.kernel.org/doc/html/latest/filesystems/proc.html?highlight=proc%20pid#chapter-3-per-process-parameters
+ */
+
+namespace android::mediautils {
+
+/**
+ * Return the thread schedule information for tid.
+ *
+ * String will be empty if the process does not have permission to
+ * access the /proc/pid tables, or if not on a Linux device.
+ *
+ * Linux scheduler documentation:
+ * https://www.kernel.org/doc/html/latest/scheduler/index.html
+ * https://man7.org/linux/man-pages/man7/sched.7.html
+ *
+ * Sample as follows:
+
+AudioOut_8D (10800, #threads: 36)
+-------------------------------------------------------------------
+se.exec_start : 8132077.598026
+se.vruntime : 798689.872087
+se.sum_exec_runtime : 136466.957838
+se.nr_migrations : 132487
+se.statistics.sum_sleep_runtime : 5629794.565945
+se.statistics.wait_start : 0.000000
+se.statistics.sleep_start : 8195727.586392
+se.statistics.block_start : 0.000000
+se.statistics.sleep_max : 1995665.869808
+se.statistics.block_max : 0.591675
+se.statistics.exec_max : 2.477580
+se.statistics.slice_max : 0.000000
+se.statistics.wait_max : 8.608642
+se.statistics.wait_sum : 4683.266835
+se.statistics.wait_count : 300964
+se.statistics.iowait_sum : 0.000000
+se.statistics.iowait_count : 0
+se.statistics.nr_migrations_cold : 0
+se.statistics.nr_failed_migrations_affine : 297
+se.statistics.nr_failed_migrations_running : 1412
+se.statistics.nr_failed_migrations_hot : 96
+se.statistics.nr_forced_migrations : 26
+se.statistics.nr_wakeups : 281263
+se.statistics.nr_wakeups_sync : 84
+se.statistics.nr_wakeups_migrate : 132322
+se.statistics.nr_wakeups_local : 2165
+se.statistics.nr_wakeups_remote : 279098
+se.statistics.nr_wakeups_affine : 0
+se.statistics.nr_wakeups_affine_attempts : 0
+se.statistics.nr_wakeups_passive : 0
+se.statistics.nr_wakeups_idle : 0
+avg_atom : 0.453434
+avg_per_cpu : 1.030040
+nr_switches : 300963
+nr_voluntary_switches : 281252
+nr_involuntary_switches : 19711
+se.load.weight : 73477120
+se.avg.load_sum : 58
+se.avg.runnable_sum : 27648
+se.avg.util_sum : 21504
+se.avg.load_avg : 48
+se.avg.runnable_avg : 0
+se.avg.util_avg : 0
+se.avg.last_update_time : 8132075824128
+se.avg.util_est.ewma : 8
+se.avg.util_est.enqueued : 1
+uclamp.min : 0
+uclamp.max : 1024
+effective uclamp.min : 0
+effective uclamp.max : 1024
+policy : 0
+prio : 101
+clock-delta : 163
+*/
+std::string getThreadSchedAsString(pid_t tid);
+
+/**
+ * Returns map for the raw thread schedule string.
+ */
+std::map<std::string, double> parseThreadSchedString(const std::string& schedString);
+
+/**
+ * Returns map for /proc/pid/task/tid/sched
+ */
+inline std::map<std::string, double> getThreadSchedAsMap(pid_t tid) {
+ return parseThreadSchedString(getThreadSchedAsString(tid));
+}
+
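+// Usage sketch (key availability depends on the kernel; treat as diagnostic only):
+//
+//   const auto sched = getThreadSchedAsMap(gettid());
+//   const auto it = sched.find("se.sum_exec_runtime");
+//   const double execRuntime = it != sched.end() ? it->second : 0.;
+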
+// TODO: Extend to other /proc/pid file information.
+//
+// See "ps" command get_ps().
+// https://cs.android.com/android/platform/superproject/+/master:external/toybox/toys/posix/ps.c;l=707
+
+} // android::mediautils
diff --git a/media/utils/include/mediautils/ThreadSnapshot.h b/media/utils/include/mediautils/ThreadSnapshot.h
new file mode 100644
index 0000000..c470822
--- /dev/null
+++ b/media/utils/include/mediautils/ThreadSnapshot.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <mutex>
+#include <string>
+
+#include <android-base/thread_annotations.h>
+
+namespace android::mediautils {
+
+/**
+ * Collect Thread performance statistics.
+ *
+ * An onBegin() and onEnd() signal a continuous "run".
+ * Statistics are returned by toString().
+ */
+class ThreadSnapshot {
+public:
+ explicit ThreadSnapshot(pid_t tid = -1) { mState.reset(tid); };
+
+ // Returns current tid
+ pid_t getTid() const;
+
+ // Sets the tid
+ void setTid(pid_t tid);
+
+ // Reset statistics, keep same tid.
+ void reset();
+
+ // Signal a timing run is beginning
+ void onBegin();
+
+ // Signal a timing run is ending
+ void onEnd();
+
+ // Return the thread snapshot statistics in a string
+ std::string toString() const;
+
+private:
+ mutable std::mutex mLock;
+
+ // State represents our statistics at a given point in time.
+ // It is not thread-safe, so any locking must occur at the caller.
+ struct State {
+ pid_t mTid;
+ int64_t mBeginTimeNs; // when the last run began
+ int64_t mEndTimeNs; // when the last run ended (if less than the begin time, a run is still in progress)
+ int64_t mCumulativeTimeNs;
+
+ // Sched is the scheduler statistics obtained as a string.
+ // This is parsed only when toString() is called.
+ std::string mBeginSched;
+
+ // Clears existing state.
+ void reset(pid_t tid);
+
+ // onBegin() takes a std::string sched that can be captured outside
+ // of the lock.
+ void onBegin(std::string sched);
+ void onEnd();
+ std::string toString() const;
+ };
+
+ // Our current state. We only keep the current running state.
+ State mState GUARDED_BY(mLock);
+};
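+
+// Usage sketch (tid is illustrative):
+//
+//   ThreadSnapshot snapshot(tid);
+//   snapshot.onBegin();
+//   // ... the thread does its work ...
+//   snapshot.onEnd();
+//   const std::string stats = snapshot.toString();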
+
+} // android::mediautils
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index 0d6e80d..ef03aef 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -20,27 +20,88 @@
#include <mediautils/TimerThread.h>
-namespace android {
+namespace android::mediautils {
// A class monitoring execution time for a code block (scoped variable) and causing an assert
// if it exceeds a certain time
class TimeCheck {
public:
+ using OnTimerFunc = std::function<void(bool /* timeout */, float /* elapsedMs */ )>;
+
// The default timeout is chosen to be less than system server watchdog timeout
static constexpr uint32_t kDefaultTimeOutMs = 5000;
- TimeCheck(const char* tag, uint32_t timeoutMs = kDefaultTimeOutMs);
+ /**
+ * TimeCheck is a RAII object which will notify a callback
+ * on timer expiration or when the object is deallocated.
+ *
+ * TimeCheck is used as a watchdog and aborts by default on timer expiration.
+ * When it aborts, it will also send a debugger signal to pids passed in through
+ * setAudioHalPids().
+ *
+ * If the callback function has already been called for a timeout, it will not
+ * be called again on deallocation.
+ *
+ * \param tag string associated with the TimeCheck object.
+ * \param onTimer callback function with 2 parameters
+ * bool timeout (which is true when the TimeCheck object
+ * times out, false when the TimeCheck object is
+ * destroyed or leaves scope before the timer expires.)
+ * float elapsedMs (the elapsed time to this event).
+ * When timeout is true, the callback is invoked on a different thread.
+ * The destructor cancels the callback but is not guaranteed
+ * to block for callback completion if it is already in progress
+ * (for maximum concurrency and reduced deadlock potential), so use proper
+ * lifetime analysis (e.g. shared or weak pointers).
+ * \param timeoutMs timeout in milliseconds.
+ * A zero timeout means no timeout is set -
+ * the callback is called only when
+ * the TimeCheck object is destroyed or leaves scope.
+ * \param crashOnTimeout true if the object issues an abort on timeout.
+ */
+ explicit TimeCheck(std::string tag, OnTimerFunc&& onTimer = {},
+ uint32_t timeoutMs = kDefaultTimeOutMs, bool crashOnTimeout = true);
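+
+ // Usage sketch (names are illustrative, not part of the API):
+ //
+ //   void MyService::doWork() {
+ //       TimeCheck timeCheck("MyService::doWork",
+ //               [](bool timeout, float elapsedMs) { /* record elapsedMs */ },
+ //               3000 /* timeoutMs */, false /* crashOnTimeout */);
+ //       // ... monitored work; the callback fires on timeout or when
+ //       // timeCheck is destroyed, whichever comes first.
+ //   }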
+
+ TimeCheck() = default;
+ // Remove copy constructors as there should only be one call to the destructor.
+ // Move is kept implicitly disabled, but would be logically consistent if enabled.
+ TimeCheck(const TimeCheck& other) = delete;
+ TimeCheck& operator=(const TimeCheck&) = delete;
+
~TimeCheck();
+ static std::string toString();
static void setAudioHalPids(const std::vector<pid_t>& pids);
static std::vector<pid_t> getAudioHalPids();
private:
- static TimerThread* getTimeCheckThread();
- static void accessAudioHalPids(std::vector<pid_t>* pids, bool update);
- static void crash(const char* tag, std::chrono::system_clock::time_point startTime);
+ // Helper class for handling events.
+ // The usage here is const safe.
+ class TimeCheckHandler {
+ public:
+ const std::string tag;
+ const OnTimerFunc onTimer;
+ const bool crashOnTimeout;
+ const std::chrono::system_clock::time_point startTime;
+ const pid_t tid;
- const TimerThread::Handle mTimerHandle;
+ void onCancel(TimerThread::Handle handle) const;
+ void onTimeout() const;
+ };
+
+ static TimerThread& getTimeCheckThread();
+ static void accessAudioHalPids(std::vector<pid_t>* pids, bool update);
+
+ // mTimeCheckHandler is immutable, prefer to be first initialized, last destroyed.
+ // Technically speaking, we do not need a shared_ptr here because TimerThread::cancelTask()
+ // is mutually exclusive of the callback, but the price paid for lifetime safety is minimal.
+ const std::shared_ptr<const TimeCheckHandler> mTimeCheckHandler;
+ const TimerThread::Handle mTimerHandle = TimerThread::INVALID_HANDLE;
};
-}; // namespace android
+// Returns a TimeCheck object that sends info to MethodStatistics
+// obtained from getStatisticsForClass(className).
+TimeCheck makeTimeCheckStatsForClassMethod(
+ std::string_view className, std::string_view methodName);
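+
+// Usage sketch (class/method names are illustrative; the class must be
+// registered so that getStatisticsForClass() returns non-null, otherwise
+// an empty TimeCheck is returned):
+//
+//   auto timeCheck = makeTimeCheckStatsForClassMethod("DeviceHalHidl", "openStream");
+//   // On destruction, the elapsed time is recorded against the method.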
+
+} // namespace android::mediautils
diff --git a/media/utils/include/mediautils/TimerThread.h b/media/utils/include/mediautils/TimerThread.h
index cf457b8..ffee602 100644
--- a/media/utils/include/mediautils/TimerThread.h
+++ b/media/utils/include/mediautils/TimerThread.h
@@ -16,53 +16,255 @@
#pragma once
+#include <atomic>
#include <condition_variable>
+#include <deque>
#include <functional>
#include <map>
#include <mutex>
+#include <string>
#include <thread>
#include <android-base/thread_annotations.h>
-namespace android {
+namespace android::mediautils {
/**
* A thread for deferred execution of tasks, with cancellation.
*/
class TimerThread {
public:
+ // A Handle is a time_point that serves as a unique key. It is ordered.
using Handle = std::chrono::steady_clock::time_point;
- TimerThread();
- ~TimerThread();
+ static inline constexpr Handle INVALID_HANDLE =
+ std::chrono::steady_clock::time_point::min();
/**
- * Schedule a task to be executed in the future (`timeout` duration from now).
- * Returns a handle that can be used for cancellation.
+ * Schedules a task to be executed in the future (`timeout` duration from now).
+ *
+ * \param tag string associated with the task. This need not be unique,
+ * as the Handle returned is used for cancelling.
+ * \param func callback function that is invoked at the timeout.
+ * \param timeout timeout duration which is converted to milliseconds with at
+ * least 45 integer bits.
+ * A timeout of 0 (or negative) means the timer never expires
+ * so func() is never called. These tasks are stored internally
+ * and reported by toString() until manually cancelled.
+ * \returns a handle that can be used for cancellation.
*/
- template <typename R, typename P>
- Handle scheduleTask(std::function<void()>&& func, std::chrono::duration<R, P> timeout) {
- auto deadline = std::chrono::steady_clock::now() + std::chrono::milliseconds(timeout);
- return scheduleTaskAtDeadline(std::move(func), deadline);
+ Handle scheduleTask(
+ std::string tag, std::function<void()>&& func, std::chrono::milliseconds timeout);
+
+ /**
+ * Tracks a task that shows up on toString() until cancelled.
+ *
+ * \param tag string associated with the task.
+ * \returns a handle that can be used for cancellation.
+ */
+ Handle trackTask(std::string tag);
+
+ /**
+ * Cancels a task previously scheduled with scheduleTask()
+ * or trackTask().
+ *
+ * \returns true if cancelled. If the task has already executed
+ * or if the handle doesn't exist, this is a no-op
+ * and returns false.
+ */
+ bool cancelTask(Handle handle);
+
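+ // Usage sketch (tag and timeout are illustrative; assumes
+ // "using namespace std::chrono_literals"):
+ //
+ //   TimerThread timerThread;
+ //   auto handle = timerThread.scheduleTask(
+ //           "MyTask", [] { /* timeout action */ }, 100ms);
+ //   // ... if the work completes in time:
+ //   const bool cancelled = timerThread.cancelTask(handle); // false if already run.
+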
+ std::string toString(size_t retiredCount = SIZE_MAX) const;
+
+ /**
+ * Returns a string representation of the TimerThread queue.
+ *
+ * The queue is dumped in order of scheduling (not deadline).
+ */
+ std::string pendingToString() const;
+
+ /**
+ * Returns a string representation of the last retired tasks.
+ *
+ * These are tasks from trackTask() or scheduleTask() that have
+ * been cancelled.
+ *
+ * They are ordered by the time the task was retired.
+ *
+ * \param n is the maximum number of tasks to dump.
+ */
+ std::string retiredToString(size_t n = SIZE_MAX) const;
+
+
+ /**
+ * Returns a string representation of the last timeout tasks.
+ *
+ * These are tasks from scheduleTask() which have timed out.
+ *
+ * They are ordered by the time the task timed out.
+ *
+ * \param n is the maximum number of tasks to dump.
+ */
+ std::string timeoutToString(size_t n = SIZE_MAX) const;
+
+ /**
+ * Dumps a container with SmartPointer<Request> to a string.
+ *
+ * "{ Request1 } { Request2} ...{ RequestN }"
+ */
+ template <typename T>
+ static std::string requestsToString(const T& containerRequests) {
+ std::string s;
+ // append seems to be faster than stringstream.
+ // https://stackoverflow.com/questions/18892281/most-optimized-way-of-concatenation-in-strings
+ for (const auto& request : containerRequests) {
+ s.append("{ ").append(request->toString()).append(" } ");
+ }
+ // If not empty, there's an extra space at the end, so we trim it off.
+ if (!s.empty()) s.pop_back();
+ return s;
}
- /**
- * Cancel a task, previously scheduled with scheduleTask().
- * If the task has already executed, this is a no-op.
- */
- void cancelTask(Handle handle);
-
private:
- using TimePoint = std::chrono::steady_clock::time_point;
+ // To minimize movement of data, we pass around shared_ptrs to Requests.
+ // These are allocated and deallocated outside of the lock.
+ struct Request {
+ const std::chrono::system_clock::time_point scheduled;
+ const std::chrono::system_clock::time_point deadline; // deadline := scheduled + timeout
+ // if deadline == scheduled, no
+ // timeout, task not executed.
+ const pid_t tid;
+ const std::string tag;
- std::condition_variable mCond;
- std::mutex mMutex;
- std::thread mThread;
- std::map<TimePoint, std::function<void()>> mMonitorRequests GUARDED_BY(mMutex);
- bool mShouldExit GUARDED_BY(mMutex) = false;
+ std::string toString() const;
+ };
- void threadFunc();
- Handle scheduleTaskAtDeadline(std::function<void()>&& func, TimePoint deadline);
+ // Deque of requests, in order of add().
+ // This class is thread-safe.
+ class RequestQueue {
+ public:
+ explicit RequestQueue(size_t maxSize)
+ : mRequestQueueMax(maxSize) {}
+
+ void add(std::shared_ptr<const Request>);
+
+ // Copy up to the last "n" requests added into the vector.
+ void copyRequests(std::vector<std::shared_ptr<const Request>>& requests,
+ size_t n = SIZE_MAX) const;
+
+ private:
+ const size_t mRequestQueueMax;
+ mutable std::mutex mRQMutex;
+ std::deque<std::pair<std::chrono::system_clock::time_point,
+ std::shared_ptr<const Request>>>
+ mRequestQueue GUARDED_BY(mRQMutex);
+ };
+
+ // A storage map of tasks without timeouts. There is no std::function<void()>
+ // required; it just tracks the tasks with the tag, scheduled time and the tid.
+ // These tasks show up on a pendingToString() until manually cancelled.
+ class NoTimeoutMap {
+ // This is a counter of the requests that have no timeout (timeout == 0).
+ std::atomic<size_t> mNoTimeoutRequests{};
+
+ mutable std::mutex mNTMutex;
+ std::map<Handle, std::shared_ptr<const Request>> mMap GUARDED_BY(mNTMutex);
+
+ public:
+ bool isValidHandle(Handle handle) const; // lock free
+ Handle add(std::shared_ptr<const Request> request);
+ std::shared_ptr<const Request> remove(Handle handle);
+ void copyRequests(std::vector<std::shared_ptr<const Request>>& requests) const;
+ };
+
+ // Monitor thread.
+ // This thread manages shared pointers to Requests and a function to
+ // call on timeout.
+ // This class is thread-safe.
+ class MonitorThread {
+ mutable std::mutex mMutex;
+ mutable std::condition_variable mCond;
+
+ // Ordered map of requests based on time of deadline.
+ //
+ std::map<Handle, std::pair<std::shared_ptr<const Request>, std::function<void()>>>
+ mMonitorRequests GUARDED_BY(mMutex);
+
+ RequestQueue& mTimeoutQueue; // locked internally, added to when request times out.
+
+ // Worker thread variables
+ bool mShouldExit GUARDED_BY(mMutex) = false;
+
+ // To avoid a race with initialization,
+ // mThread should be initialized last as the thread is launched immediately.
+ std::thread mThread;
+
+ void threadFunc();
+ Handle getUniqueHandle_l(std::chrono::milliseconds timeout) REQUIRES(mMutex);
+
+ public:
+ MonitorThread(RequestQueue &timeoutQueue);
+ ~MonitorThread();
+
+ Handle add(std::shared_ptr<const Request> request, std::function<void()>&& func,
+ std::chrono::milliseconds timeout);
+ std::shared_ptr<const Request> remove(Handle handle);
+ void copyRequests(std::vector<std::shared_ptr<const Request>>& requests) const;
+ };
+
+ // Analysis contains info deduced by analyzeTimeout().
+ //
+ // Summary is the result string from checking timeoutRequests to see if
+ // any might be caused by blocked calls in pendingRequests.
+ //
+ // Summary string is empty if there is no automatic actionable info.
+ //
+ // timeoutTid is the tid selected from timeoutRequests (if any).
+ //
+ // HALBlockedTid is the tid from pendingRequests believed to be blocked
+ // in a HAL call, causing the timeout.
+ // HALBlockedTid may be INVALID_PID if no suspected tid is found,
+ // and if HALBlockedTid is valid, it will not be the same as timeoutTid.
+ //
+ static constexpr pid_t INVALID_PID = -1;
+ struct Analysis {
+ std::string summary;
+ pid_t timeoutTid = INVALID_PID;
+ pid_t HALBlockedTid = INVALID_PID;
+ };
+
+ // A HAL method is one whose class name contains the substring "Hidl".
+ // The tag should look like: ... Hidl ... :: ...
+ static bool isRequestFromHal(const std::shared_ptr<const Request>& request);
+
+ // Returns analysis from the requests.
+ static Analysis analyzeTimeout(
+ const std::vector<std::shared_ptr<const Request>>& timeoutRequests,
+ const std::vector<std::shared_ptr<const Request>>& pendingRequests);
+
+ std::vector<std::shared_ptr<const Request>> getPendingRequests() const;
+
+ // A no-timeout request is represented by a handle at the end of steady_clock time,
+ // counting down by the number of no-timeout requests previously requested.
+ // We manage them in the NoTimeoutMap, but conceptually they could be scheduled
+ // on the MonitorThread because those time handles won't expire in
+ // the lifetime of the device.
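+ // For example, getIndexedHandle(0) is steady_clock time_point::max() and
+ // getIndexedHandle(1) is one tick earlier.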
+ static inline Handle getIndexedHandle(size_t index) {
+ return std::chrono::time_point<std::chrono::steady_clock>::max() -
+ std::chrono::time_point<std::chrono::steady_clock>::duration(index);
+ }
+
+ static constexpr size_t kRetiredQueueMax = 16;
+ RequestQueue mRetiredQueue{kRetiredQueueMax}; // locked internally
+
+ static constexpr size_t kTimeoutQueueMax = 16;
+ RequestQueue mTimeoutQueue{kTimeoutQueueMax}; // locked internally
+
+ NoTimeoutMap mNoTimeoutMap; // locked internally
+
+ MonitorThread mMonitorThread{mTimeoutQueue}; // This should be initialized last because
+ // the thread is launched immediately.
+ // Locked internally.
};
-} // namespace android
+} // namespace android::mediautils
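+
+// Usage sketch (illustrative only; it mirrors the timecheck_tests and the
+// AudioFlinger/Effects call sites added later in this patch). TimeCheck is an
+// RAII guard whose callback receives (timeout, elapsedMs); the class/method tag
+// below is hypothetical.
+//
+//   {
+//       mediautils::TimeCheck check("ExampleClass::exampleMethod",
+//               [](bool timeout, float elapsedMs) {
+//                   // timeout == true  -> the deadline fired before the scope ended
+//                   // timeout == false -> the scope ended in time; elapsedMs is the duration
+//               },
+//               1000 /* timeoutMs */, false /* crash */);
+//       // ... monitored work ...
+//   } // the destructor invokes the callback with timeout == false if no timeout occurred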
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 6593d56..a6f408d 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -7,6 +7,82 @@
default_applicable_licenses: ["frameworks_av_license"],
}
+cc_test_library {
+ name: "libsharedtest",
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+
+ sanitize: {
+ address: true,
+ cfi: true,
+ integer_overflow: true,
+ memtag_heap: true,
+ },
+
+ shared_libs: [
+ "liblog",
+ ],
+
+ srcs: [
+ "sharedtest.cpp",
+ ],
+}
+
+cc_test {
+ name: "library_tests",
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+
+ sanitize: {
+ address: true,
+ cfi: true,
+ integer_overflow: true,
+ memtag_heap: true,
+ },
+
+ shared_libs: [
+ "libbase",
+ "liblog",
+ "libmediautils",
+ "libutils",
+ ],
+
+ data_libs: [
+ "libsharedtest",
+ ],
+
+ srcs: [
+ "library_tests.cpp",
+ ],
+}
+
+cc_test {
+ name: "media_process_tests",
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+
+ shared_libs: [
+ "liblog",
+ "libmediautils",
+ "libutils",
+ ],
+
+ srcs: [
+ "media_process_tests.cpp",
+ ],
+}
+
cc_test {
name: "media_synchronization_tests",
@@ -26,3 +102,71 @@
"media_synchronization_tests.cpp",
],
}
+
+cc_test {
+ name: "media_threadsnapshot_tests",
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+
+ shared_libs: [
+ "liblog",
+ "libmediautils",
+ "libutils",
+ ],
+
+ srcs: [
+ "media_threadsnapshot_tests.cpp",
+ ],
+}
+
+cc_test {
+ name: "methodstatistics_tests",
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+
+ shared_libs: [
+ "libaudioutils",
+ "liblog",
+ "libmediautils",
+ "libutils",
+ ],
+
+ srcs: [
+ "methodstatistics_tests.cpp",
+ ],
+}
+
+cc_test {
+ name: "timecheck_tests",
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+
+ sanitize: {
+ address: true,
+ cfi: true,
+ integer_overflow: true,
+ memtag_heap: true,
+ },
+
+ shared_libs: [
+ "liblog",
+ "libmediautils",
+ "libutils",
+ ],
+
+ srcs: [
+ "timecheck_tests.cpp",
+ ],
+}
diff --git a/media/utils/tests/library_tests.cpp b/media/utils/tests/library_tests.cpp
new file mode 100644
index 0000000..c5c500c
--- /dev/null
+++ b/media/utils/tests/library_tests.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "library_tests"
+#include <utils/Log.h>
+
+#include <mediautils/Library.h>
+
+#include <android-base/file.h>
+#include <gtest/gtest.h>
+
+using namespace android::mediautils;
+
+namespace {
+
+static int32_t here = 0; // accessed on same thread.
+
+TEST(library_tests, basic) {
+ std::string path = android::base::GetExecutableDirectory() + "/libsharedtest.so";
+ // The flags to loadLibrary should not include RTLD_GLOBAL or RTLD_NODELETE
+ // which prevent unloading.
+ std::shared_ptr<void> library = loadLibrary(path.c_str(), RTLD_LAZY);
+ ASSERT_TRUE(library);
+ ASSERT_EQ(1, library.use_count());
+
+ std::shared_ptr<int32_t*> ptr = getObjectFromLibrary<int32_t*>("gPtr", library);
+ ASSERT_TRUE(ptr);
+ ASSERT_EQ(2, library.use_count());
+
+ ASSERT_EQ(nullptr, *ptr); // original contents are nullptr.
+
+ // There is a static object destructor in libsharedtest.so that will set the
+ // contents of the integer pointer (if non-null) to 1 when called.
+ // This is used to detect that the library is unloaded.
+ *ptr = &here;
+
+ ptr.reset(); // Note: this shared pointer uses library's refcount.
+ ASSERT_EQ(1, library.use_count()); // Verify library's refcount goes down by 1.
+ ASSERT_EQ(0, here); // the shared library's object destructor hasn't been called.
+
+ // use weak_ptr to investigate whether the library is gone.
+ std::weak_ptr<void> wlibrary = library;
+ ASSERT_EQ(1, wlibrary.use_count());
+ library.reset();
+
+ // we should have released the last reference.
+ ASSERT_EQ(0, wlibrary.use_count());
+
+ // The library should unload and the global object destroyed.
+ // Note on Android, specifying RTLD_GLOBAL or RTLD_NODELETE in the flags
+ // will prevent unloading libraries.
+ ASSERT_EQ(1, here);
+}
+
+TEST(library_tests, sad_library) {
+ std::string path = android::base::GetExecutableDirectory()
+ + "/something_random_library_that_doesn't_exit.so";
+
+ std::shared_ptr<void> library = loadLibrary(path.c_str(), RTLD_LAZY);
+ // We shouldn't crash on an invalid library path, just return an empty shared pointer.
+ // Check the logcat for any error details.
+ ASSERT_FALSE(library);
+}
+
+} // namespace
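+
+// The refcount assertions above make sense if loadLibrary()/getObjectFromLibrary()
+// are built on dlopen() plus shared_ptr ownership. A plausible sketch (an
+// assumption for illustration, not the actual mediautils implementation):
+//
+//   std::shared_ptr<void> loadLibrarySketch(const char* path, int flags) {
+//       void* handle = dlopen(path, flags);
+//       if (handle == nullptr) return {};                // empty on failure, see sad_library
+//       return std::shared_ptr<void>(handle, dlclose);   // dlclose() on last reference drop
+//   }
+//
+//   template <typename T>
+//   std::shared_ptr<T> getObjectFromLibrarySketch(
+//           const char* name, const std::shared_ptr<void>& library) {
+//       T* sym = static_cast<T*>(dlsym(library.get(), name));
+//       if (sym == nullptr) return {};
+//       // aliasing constructor: shares the library's refcount but points at the symbol,
+//       // which is why ptr.reset() above drops library.use_count() from 2 to 1.
+//       return std::shared_ptr<T>(library, sym);
+//   }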
diff --git a/media/utils/tests/media_process_tests.cpp b/media/utils/tests/media_process_tests.cpp
new file mode 100644
index 0000000..2ae3f70
--- /dev/null
+++ b/media/utils/tests/media_process_tests.cpp
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <mediautils/Process.h>
+
+#define LOG_TAG "media_process_tests"
+
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android;
+using namespace android::mediautils;
+
+TEST(media_process_tests, basic) {
+ const std::string schedString = getThreadSchedAsString(gettid());
+
+ (void)schedString;
+ // We don't test schedString, only that we haven't crashed.
+ // ASSERT_FALSE(schedString.empty());
+
+ // schedString contents are not normative, so we conjure up our own string.
+ const std::string fakeString = "\
+AudioOut_8D (10800, #threads: 36)\n\
+-------------------------------------------------------------------\n\
+se.exec_start : 8132077.598026\n\
+se.vruntime : 798689.872087\n\
+se.sum_exec_runtime : 136466.957838\n\
+se.nr_migrations : 132487\n\
+se.statistics.sum_sleep_runtime : 5629794.565945\n\
+se.statistics.wait_start : 0.000000\n\
+se.statistics.sleep_start : 8195727.586392\n\
+se.statistics.block_start : 0.000000\n\
+se.statistics.sleep_max : 1995665.869808\n\
+se.statistics.block_max : 0.591675\n\
+se.statistics.exec_max : 2.477580\n\
+se.statistics.slice_max : 0.000000\n\
+se.statistics.wait_max : 8.608642\n\
+se.statistics.wait_sum : 4683.266835\n\
+se.statistics.wait_count : 300964\n\
+se.statistics.iowait_sum : 0.000000\n\
+se.statistics.iowait_count : 0\n\
+se.statistics.nr_migrations_cold : 0\n\
+se.statistics.nr_failed_migrations_affine : 297\n\
+se.statistics.nr_failed_migrations_running : 1412\n\
+se.statistics.nr_failed_migrations_hot : 96\n\
+se.statistics.nr_forced_migrations : 26\n\
+se.statistics.nr_wakeups : 281263\n\
+se.statistics.nr_wakeups_sync : 84\n\
+se.statistics.nr_wakeups_migrate : 132322\n\
+se.statistics.nr_wakeups_local : 2165\n\
+se.statistics.nr_wakeups_remote : 279098\n\
+se.statistics.nr_wakeups_affine : 0\n\
+se.statistics.nr_wakeups_affine_attempts : 0\n\
+se.statistics.nr_wakeups_passive : 0\n\
+se.statistics.nr_wakeups_idle : 0\n\
+avg_atom : 0.453434\n\
+avg_per_cpu : 1.030040\n\
+nr_switches : 300963\n\
+nr_voluntary_switches : 281252\n\
+nr_involuntary_switches : 19711\n\
+se.load.weight : 73477120\n\
+se.avg.load_sum : 58\n\
+se.avg.runnable_sum : 27648\n\
+se.avg.util_sum : 21504\n\
+se.avg.load_avg : 48\n\
+se.avg.runnable_avg : 0\n\
+se.avg.util_avg : 0\n\
+se.avg.last_update_time : 8132075824128\n\
+se.avg.util_est.ewma : 8\n\
+se.avg.util_est.enqueued : 1\n\
+uclamp.min : 0\n\
+uclamp.max : 1024\n\
+effective uclamp.min : 0\n\
+effective uclamp.max : 1024\n\
+policy : 0\n\
+prio : 101\n\
+clock-delta : 163";
+
+ std::map<std::string, double> m = parseThreadSchedString(fakeString);
+
+ auto it = m.find("clock-delta");
+ ASSERT_NE(it, m.end());
+ ASSERT_EQ(it->second, 163);
+
+ it = m.find("se.avg.load_avg");
+ ASSERT_NE(it, m.end());
+ ASSERT_EQ(it->second, 48);
+}
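+
+// parseThreadSchedString() is expected to turn "key : value" lines like the ones
+// above into a std::map<std::string, double>. A minimal sketch of that idea,
+// ignoring the header line for brevity (an illustration only; the mediautils
+// implementation may differ):
+//
+//   std::map<std::string, double> parseSketch(const std::string& s) {
+//       std::map<std::string, double> m;
+//       std::istringstream stream(s);
+//       std::string line;
+//       while (std::getline(stream, line)) {
+//           const auto colon = line.find(':');
+//           if (colon == std::string::npos) continue;      // skip dividers
+//           std::string key = line.substr(0, colon);
+//           key.erase(key.find_last_not_of(' ') + 1);      // trim trailing spaces
+//           char* end = nullptr;
+//           const double value = strtod(line.c_str() + colon + 1, &end);
+//           if (end != line.c_str() + colon + 1) m[key] = value;
+//       }
+//       return m;
+//   }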
diff --git a/media/utils/tests/media_threadsnapshot_tests.cpp b/media/utils/tests/media_threadsnapshot_tests.cpp
new file mode 100644
index 0000000..c7a45e2
--- /dev/null
+++ b/media/utils/tests/media_threadsnapshot_tests.cpp
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <mediautils/ThreadSnapshot.h>
+
+#define LOG_TAG "media_threadsnapshot_tests"
+
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+#include <chrono>
+#include <thread>
+
+using namespace android;
+using namespace android::mediautils;
+
+TEST(media_threadsnapshot_tests, basic) {
+ using namespace std::chrono_literals;
+
+ ThreadSnapshot threadSnapshot(gettid());
+
+ threadSnapshot.onBegin();
+
+ std::string snapshot1 = threadSnapshot.toString();
+
+ std::this_thread::sleep_for(100ms);
+
+ threadSnapshot.onEnd();
+
+ std::string snapshot2 = threadSnapshot.toString();
+
+ // Either we can't get a snapshot, or the snapshots must differ when taken while the thread is running.
+ if (snapshot1.empty()) {
+ ASSERT_TRUE(snapshot2.empty());
+ } else {
+ ASSERT_FALSE(snapshot2.empty());
+ ASSERT_NE(snapshot1, snapshot2);
+ }
+}
diff --git a/media/utils/tests/methodstatistics_tests.cpp b/media/utils/tests/methodstatistics_tests.cpp
new file mode 100644
index 0000000..85c4ad5
--- /dev/null
+++ b/media/utils/tests/methodstatistics_tests.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "methodstatistics_tests"
+
+#include <mediautils/MethodStatistics.h>
+
+#include <atomic>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android::mediautils;
+using CodeType = size_t;
+
+constexpr CodeType HELLO_CODE = 10;
+constexpr const char * HELLO_NAME = "hello";
+constexpr float HELLO_EVENTS[] = { 1.f, 3.f }; // needs lossless average
+
+constexpr CodeType WORLD_CODE = 21;
+constexpr const char * WORLD_NAME = "world";
+
+constexpr CodeType UNKNOWN_CODE = 12345;
+
+TEST(methodstatistics_tests, method_names) {
+ const MethodStatistics<CodeType> methodStatistics{
+ {HELLO_CODE, HELLO_NAME},
+ {WORLD_CODE, WORLD_NAME},
+ };
+
+ ASSERT_EQ(std::string(HELLO_NAME), methodStatistics.getMethodForCode(HELLO_CODE));
+ ASSERT_EQ(std::string(WORLD_NAME), methodStatistics.getMethodForCode(WORLD_CODE));
+ // an unknown code returns itself as a number.
+ ASSERT_EQ(std::to_string(UNKNOWN_CODE), methodStatistics.getMethodForCode(UNKNOWN_CODE));
+}
+
+TEST(methodstatistics_tests, events) {
+ MethodStatistics<CodeType> methodStatistics{
+ {HELLO_CODE, HELLO_NAME},
+ {WORLD_CODE, WORLD_NAME},
+ };
+
+ size_t n = 0;
+ float sum = 0.f;
+ for (const auto event : HELLO_EVENTS) {
+ methodStatistics.event(HELLO_CODE, event);
+ sum += event;
+ ++n;
+ }
+
+ const auto helloStats = methodStatistics.getStatistics(HELLO_CODE);
+ ASSERT_EQ((signed)n, helloStats.getN());
+ ASSERT_EQ(sum / n, helloStats.getMean());
+ ASSERT_EQ(n, methodStatistics.getMethodCount(HELLO_CODE));
+
+ const auto unsetStats = methodStatistics.getStatistics(UNKNOWN_CODE);
+ ASSERT_EQ(0, unsetStats.getN());
+ ASSERT_EQ(0.f, unsetStats.getMean());
+ ASSERT_EQ(0U, methodStatistics.getMethodCount(UNKNOWN_CODE));
+}
diff --git a/media/utils/tests/sharedtest.cpp b/media/utils/tests/sharedtest.cpp
new file mode 100644
index 0000000..3888874
--- /dev/null
+++ b/media/utils/tests/sharedtest.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#define LOG_TAG "sharedtest"
+#include <utils/Log.h>
+
+// Test library which is dynamically loaded by library_tests.
+
+// Static variable construction.
+// Calls A constructor on library load, A destructor on library unload.
+
+int32_t *gPtr = nullptr; // this pointer holds the location that ~A() writes to
+ // when it is called.
+ // We cannot use anything internal to this file as the
+ // data segment may no longer exist after unloading the library.
+struct A {
+ A() {
+ ALOGD("%s: gPtr:%p", __func__, gPtr);
+ }
+
+ ~A() {
+ ALOGD("%s: gPtr:%p", __func__, gPtr);
+ if (gPtr != nullptr) {
+ *gPtr = 1;
+ }
+ }
+} gA;
+
+// __attribute__((constructor)) methods occur before any static variable construction.
+// Libraries that use __attribute__((constructor)) should not rely on global constructors
+// having run inside those methods, because the globals may not be initialized before use.
+// See heapprofd_client_api.
+// NOTE: is this right? Shouldn't it occur after construction?
+ __attribute__((constructor))
+void onConstruction() {
+ ALOGD("%s: in progress", __func__); // for logcat analysis
+}
+
+// __attribute__((destructor)) methods occur before any static variable destruction.
+ __attribute__((destructor))
+void onDestruction() {
+ ALOGD("%s: in progress", __func__); // for logcat analysis
+}
diff --git a/media/utils/tests/timecheck_tests.cpp b/media/utils/tests/timecheck_tests.cpp
new file mode 100644
index 0000000..6ebf44d
--- /dev/null
+++ b/media/utils/tests/timecheck_tests.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "timecheck_tests"
+
+#include <mediautils/TimeCheck.h>
+
+#include <atomic>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android::mediautils;
+using namespace std::chrono_literals;
+
+namespace {
+
+TEST(timecheck_tests, success) {
+ bool timeoutRegistered = false;
+ float elapsedMsRegistered = 0.f;
+ bool event = false;
+
+ {
+ TimeCheck timeCheck("success",
+ [&event, &timeoutRegistered, &elapsedMsRegistered]
+ (bool timeout, float elapsedMs) {
+ timeoutRegistered = timeout;
+ elapsedMsRegistered = elapsedMs;
+ event = true;
+ }, 1000 /* msec */, false /* crash */);
+ }
+ ASSERT_TRUE(event);
+ ASSERT_FALSE(timeoutRegistered);
+ ASSERT_GT(elapsedMsRegistered, 0.f);
+}
+
+TEST(timecheck_tests, timeout) {
+ bool timeoutRegistered = false;
+ float elapsedMsRegistered = 0.f;
+ std::atomic_bool event = false; // seq-cst implies acquire-release
+
+ {
+ TimeCheck timeCheck("timeout",
+ [&event, &timeoutRegistered, &elapsedMsRegistered]
+ (bool timeout, float elapsedMs) {
+ timeoutRegistered = timeout;
+ elapsedMsRegistered = elapsedMs;
+ event = true; // store-release, must be last.
+ }, 1 /* msec */, false /* crash */);
+ std::this_thread::sleep_for(100ms);
+ }
+ ASSERT_TRUE(event); // load-acquire, must be first.
+ ASSERT_TRUE(timeoutRegistered); // only called once on failure, not on dealloc.
+ ASSERT_GT(elapsedMsRegistered, 0.f);
+}
+
+// Note: We do not test TimeCheck crash because TimeCheck is multithreaded and the
+// EXPECT_EXIT() signal catching is imperfect due to the gtest fork.
+
+} // namespace
\ No newline at end of file
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index eb3c164..894b31c 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -79,6 +79,7 @@
#include <media/nbaio/PipeReader.h>
#include <mediautils/BatteryNotifier.h>
#include <mediautils/MemoryLeakTrackUtil.h>
+#include <mediautils/MethodStatistics.h>
#include <mediautils/ServiceUtilities.h>
#include <mediautils/TimeCheck.h>
#include <private/android_filesystem_config.h>
@@ -158,6 +159,92 @@
return sExternalVibratorService;
}
+// Creates an association between Binder method codes and names for IAudioFlinger.
+#define IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST \
+BINDER_METHOD_ENTRY(createTrack) \
+BINDER_METHOD_ENTRY(createRecord) \
+BINDER_METHOD_ENTRY(sampleRate) \
+BINDER_METHOD_ENTRY(format) \
+BINDER_METHOD_ENTRY(frameCount) \
+BINDER_METHOD_ENTRY(latency) \
+BINDER_METHOD_ENTRY(setMasterVolume) \
+BINDER_METHOD_ENTRY(setMasterMute) \
+BINDER_METHOD_ENTRY(masterVolume) \
+BINDER_METHOD_ENTRY(masterMute) \
+BINDER_METHOD_ENTRY(setStreamVolume) \
+BINDER_METHOD_ENTRY(setStreamMute) \
+BINDER_METHOD_ENTRY(streamVolume) \
+BINDER_METHOD_ENTRY(streamMute) \
+BINDER_METHOD_ENTRY(setMode) \
+BINDER_METHOD_ENTRY(setMicMute) \
+BINDER_METHOD_ENTRY(getMicMute) \
+BINDER_METHOD_ENTRY(setRecordSilenced) \
+BINDER_METHOD_ENTRY(setParameters) \
+BINDER_METHOD_ENTRY(getParameters) \
+BINDER_METHOD_ENTRY(registerClient) \
+BINDER_METHOD_ENTRY(getInputBufferSize) \
+BINDER_METHOD_ENTRY(openOutput) \
+BINDER_METHOD_ENTRY(openDuplicateOutput) \
+BINDER_METHOD_ENTRY(closeOutput) \
+BINDER_METHOD_ENTRY(suspendOutput) \
+BINDER_METHOD_ENTRY(restoreOutput) \
+BINDER_METHOD_ENTRY(openInput) \
+BINDER_METHOD_ENTRY(closeInput) \
+BINDER_METHOD_ENTRY(invalidateStream) \
+BINDER_METHOD_ENTRY(setVoiceVolume) \
+BINDER_METHOD_ENTRY(getRenderPosition) \
+BINDER_METHOD_ENTRY(getInputFramesLost) \
+BINDER_METHOD_ENTRY(newAudioUniqueId) \
+BINDER_METHOD_ENTRY(acquireAudioSessionId) \
+BINDER_METHOD_ENTRY(releaseAudioSessionId) \
+BINDER_METHOD_ENTRY(queryNumberEffects) \
+BINDER_METHOD_ENTRY(queryEffect) \
+BINDER_METHOD_ENTRY(getEffectDescriptor) \
+BINDER_METHOD_ENTRY(createEffect) \
+BINDER_METHOD_ENTRY(moveEffects) \
+BINDER_METHOD_ENTRY(loadHwModule) \
+BINDER_METHOD_ENTRY(getPrimaryOutputSamplingRate) \
+BINDER_METHOD_ENTRY(getPrimaryOutputFrameCount) \
+BINDER_METHOD_ENTRY(setLowRamDevice) \
+BINDER_METHOD_ENTRY(getAudioPort) \
+BINDER_METHOD_ENTRY(createAudioPatch) \
+BINDER_METHOD_ENTRY(releaseAudioPatch) \
+BINDER_METHOD_ENTRY(listAudioPatches) \
+BINDER_METHOD_ENTRY(setAudioPortConfig) \
+BINDER_METHOD_ENTRY(getAudioHwSyncForSession) \
+BINDER_METHOD_ENTRY(systemReady) \
+BINDER_METHOD_ENTRY(audioPolicyReady) \
+BINDER_METHOD_ENTRY(frameCountHAL) \
+BINDER_METHOD_ENTRY(getMicrophones) \
+BINDER_METHOD_ENTRY(setMasterBalance) \
+BINDER_METHOD_ENTRY(getMasterBalance) \
+BINDER_METHOD_ENTRY(setEffectSuspended) \
+BINDER_METHOD_ENTRY(setAudioHalPids) \
+BINDER_METHOD_ENTRY(setVibratorInfos) \
+BINDER_METHOD_ENTRY(updateSecondaryOutputs) \
+BINDER_METHOD_ENTRY(getMmapPolicyInfos) \
+BINDER_METHOD_ENTRY(getAAudioMixerBurstCount) \
+BINDER_METHOD_ENTRY(getAAudioHardwareBurstMinUsec) \
+BINDER_METHOD_ENTRY(setDeviceConnectedState) \
+
+// singleton for Binder Method Statistics for IAudioFlinger
+static auto& getIAudioFlingerStatistics() {
+ using Code = android::AudioFlingerServerAdapter::Delegate::TransactionCode;
+
+#pragma push_macro("BINDER_METHOD_ENTRY")
+#undef BINDER_METHOD_ENTRY
+#define BINDER_METHOD_ENTRY(ENTRY) \
+ {(Code)media::BnAudioFlingerService::TRANSACTION_##ENTRY, #ENTRY},
+
+ static mediautils::MethodStatistics<Code> methodStatistics{
+ IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST
+ METHOD_STATISTICS_BINDER_CODE_NAMES(Code)
+ };
+#pragma pop_macro("BINDER_METHOD_ENTRY")
+
+ return methodStatistics;
+}
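+
+// How the X-macro list works: each use of IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST
+// re-expands every BINDER_METHOD_ENTRY(...) with whatever BINDER_METHOD_ENTRY is
+// currently defined to. With the definition above, the first entries expand to
+// pairs such as
+//   {(Code)media::BnAudioFlingerService::TRANSACTION_createTrack, "createTrack"},
+//   {(Code)media::BnAudioFlingerService::TRANSACTION_createRecord, "createRecord"},
+// so MethodStatistics receives a code -> name table without spelling each name
+// twice. The push_macro/pop_macro pair keeps that definition local to the function.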
+
class DevicesFactoryHalCallbackImpl : public DevicesFactoryHalCallback {
public:
void onNewDevicesAvailable() override {
@@ -276,7 +363,7 @@
mMediaLogNotifier->run("MediaLogNotifier");
std::vector<pid_t> halPids;
mDevicesFactoryHal->getHalPids(&halPids);
- TimeCheck::setAudioHalPids(halPids);
+ mediautils::TimeCheck::setAudioHalPids(halPids);
// Notify that we have started (also called when audioserver service restarts)
mediametrics::LogItem(mMetricsId)
@@ -316,7 +403,7 @@
}
status_t AudioFlinger::setAudioHalPids(const std::vector<pid_t>& pids) {
- TimeCheck::setAudioHalPids(pids);
+ mediautils::TimeCheck::setAudioHalPids(pids);
return NO_ERROR;
}
@@ -498,14 +585,14 @@
fullConfig.channel_mask = config->channel_mask;
fullConfig.format = config->format;
std::vector<audio_io_handle_t> secondaryOutputs;
-
+ bool isSpatialized;
ret = AudioSystem::getOutputForAttr(&localAttr, &io,
actualSessionId,
&streamType, client.attributionSource,
&fullConfig,
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
AUDIO_OUTPUT_FLAG_DIRECT),
- deviceId, &portId, &secondaryOutputs);
+ deviceId, &portId, &secondaryOutputs, &isSpatialized);
ALOGW_IF(!secondaryOutputs.empty(),
"%s does not support secondary outputs, ignoring them", __func__);
} else {
@@ -828,6 +915,36 @@
std::string s = GetUnreachableMemoryString(true /* contents */, 100 /* limit */);
write(fd, s.c_str(), s.size());
}
+ {
+ std::string timeCheckStats = getIAudioFlingerStatistics().dump();
+ dprintf(fd, "\nIAudioFlinger binder call profile:\n");
+ write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+
+ extern mediautils::MethodStatistics<int>& getIEffectStatistics();
+ timeCheckStats = getIEffectStatistics().dump();
+ dprintf(fd, "\nIEffect binder call profile:\n");
+ write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+
+ // Automatically fetch HIDL statistics.
+ std::shared_ptr<std::vector<std::string>> hidlClassNames =
+ mediautils::getStatisticsClassesForModule(
+ METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL);
+ if (hidlClassNames) {
+ for (const auto& className : *hidlClassNames) {
+ auto stats = mediautils::getStatisticsForClass(className);
+ if (stats) {
+ timeCheckStats = stats->dump();
+ dprintf(fd, "\n%s binder call profile:\n", className.c_str());
+ write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+ }
+ }
+ }
+
+ timeCheckStats = mediautils::TimeCheck::toString();
+ dprintf(fd, "\nTimeCheck:\n");
+ write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+ dprintf(fd, "\n");
+ }
}
return NO_ERROR;
}
@@ -917,6 +1034,7 @@
audio_stream_type_t streamType;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
std::vector<audio_io_handle_t> secondaryOutputs;
+ bool isSpatialized = false;
// TODO b/182392553: refactor or make clearer
pid_t clientPid =
@@ -960,7 +1078,8 @@
output.selectedDeviceId = input.selectedDeviceId;
lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
adjAttributionSource, &input.config, input.flags,
- &output.selectedDeviceId, &portId, &secondaryOutputs);
+ &output.selectedDeviceId, &portId, &secondaryOutputs,
+ &isSpatialized);
if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
@@ -1026,7 +1145,7 @@
input.notificationsPerBuffer, input.speed,
input.sharedBuffer, sessionId, &output.flags,
callingPid, adjAttributionSource, input.clientInfo.clientTid,
- &lStatus, portId, input.audioTrackCallback);
+ &lStatus, portId, input.audioTrackCallback, isSpatialized);
LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
// we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
@@ -4417,9 +4536,20 @@
break;
}
- std::string tag("IAudioFlinger command " +
- std::to_string(static_cast<std::underlying_type_t<TransactionCode>>(code)));
- TimeCheck check(tag.c_str());
+ const std::string methodName = getIAudioFlingerStatistics().getMethodForCode(code);
+ mediautils::TimeCheck check(
+ std::string("IAudioFlinger::").append(methodName),
+ [code, methodName](bool timeout, float elapsedMs) { // don't move methodName.
+ if (timeout) {
+ mediametrics::LogItem(mMetricsId)
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT)
+ .set(AMEDIAMETRICS_PROP_METHODCODE, int64_t(code))
+ .set(AMEDIAMETRICS_PROP_METHODNAME, methodName.c_str())
+ .record();
+ } else {
+ getIAudioFlingerStatistics().event(code, elapsedMs);
+ }
+ });
// Make sure we connect to Audio Policy Service before calling into AudioFlinger:
// - AudioFlinger can call into Audio Policy Service with its global mutex held
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 59f22eb..8e4383c 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -76,6 +76,7 @@
#include <media/VolumeShaper.h>
#include <mediautils/ServiceUtilities.h>
#include <mediautils/Synchronization.h>
+#include <mediautils/ThreadSnapshot.h>
#include <audio_utils/clock.h>
#include <audio_utils/FdToString.h>
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index b748f9d..e6d7cf7 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -39,7 +39,9 @@
#include <media/ShmemCompat.h>
#include <media/audiohal/EffectHalInterface.h>
#include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <mediautils/MethodStatistics.h>
#include <mediautils/ServiceUtilities.h>
+#include <mediautils/TimeCheck.h>
#include "AudioFlinger.h"
@@ -1751,6 +1753,47 @@
disconnect(false);
}
+// Creates an association between Binder method codes and names for IEffect.
+#define IEFFECT_BINDER_METHOD_MACRO_LIST \
+BINDER_METHOD_ENTRY(enable) \
+BINDER_METHOD_ENTRY(disable) \
+BINDER_METHOD_ENTRY(command) \
+BINDER_METHOD_ENTRY(disconnect) \
+BINDER_METHOD_ENTRY(getCblk) \
+
+// singleton for Binder Method Statistics for IEffect
+mediautils::MethodStatistics<int>& getIEffectStatistics() {
+ using Code = int;
+
+#pragma push_macro("BINDER_METHOD_ENTRY")
+#undef BINDER_METHOD_ENTRY
+#define BINDER_METHOD_ENTRY(ENTRY) \
+ {(Code)media::BnEffect::TRANSACTION_##ENTRY, #ENTRY},
+
+ static mediautils::MethodStatistics<Code> methodStatistics{
+ IEFFECT_BINDER_METHOD_MACRO_LIST
+ METHOD_STATISTICS_BINDER_CODE_NAMES(Code)
+ };
+#pragma pop_macro("BINDER_METHOD_ENTRY")
+
+ return methodStatistics;
+}
+
+status_t AudioFlinger::EffectHandle::onTransact(
+ uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
+ const std::string methodName = getIEffectStatistics().getMethodForCode(code);
+ mediautils::TimeCheck check(
+ std::string("IEffect::").append(methodName),
+ [code](bool timeout, float elapsedMs) {
+ if (timeout) {
+ ; // we don't time out right now on the effect interface.
+ } else {
+ getIEffectStatistics().event(code, elapsedMs);
+ }
+ }, 0 /* timeoutMs */);
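+ // A timeoutMs of 0 only records statistics for the call; no timeout deadline is armed.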
+ return BnEffect::onTransact(code, data, reply, flags);
+}
+
status_t AudioFlinger::EffectHandle::initCheck()
{
return mClient == 0 || mCblkMemory != 0 ? OK : NO_MEMORY;
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index e2bea67..42614cc 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -356,6 +356,8 @@
const sp<media::IEffectClient>& effectClient,
int32_t priority, bool notifyFramesProcessed);
virtual ~EffectHandle();
+ status_t onTransact(
+ uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
virtual status_t initCheck();
// IEffect
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index aecd4d3..6a138bb 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -82,7 +82,8 @@
/** default behaviour is to start when there are as many frames
* ready as possible (aka. Buffer is full). */
size_t frameCountToBeReady = SIZE_MAX,
- float speed = 1.0f);
+ float speed = 1.0f,
+ bool isSpatialized = false);
virtual ~Track();
virtual status_t initCheck() const;
@@ -201,6 +202,7 @@
audio_output_flags_t getOutputFlags() const { return mFlags; }
float getSpeed() const { return mSpeed; }
+ bool isSpatialized() const override { return mIsSpatialized; }
protected:
// for numerous
@@ -351,6 +353,7 @@
audio_output_flags_t mFlags;
TeePatches mTeePatches;
const float mSpeed;
+ const bool mIsSpatialized;
}; // end of Track
diff --git a/services/audioflinger/TEST_MAPPING b/services/audioflinger/TEST_MAPPING
new file mode 100644
index 0000000..3de5a9f
--- /dev/null
+++ b/services/audioflinger/TEST_MAPPING
@@ -0,0 +1,12 @@
+{
+ "presubmit": [
+ {
+ "name": "CtsNativeMediaAAudioTestCases",
+ "options" : [
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+ }
+ ]
+ }
+ ]
+}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index ae5772d..9344e20 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -65,6 +65,7 @@
#include <media/nbaio/PipeReader.h>
#include <media/nbaio/SourceAudioBufferProvider.h>
#include <mediautils/BatteryNotifier.h>
+#include <mediautils/Process.h>
#include <audiomanager/AudioManager.h>
#include <powermanager/PowerManager.h>
@@ -923,6 +924,20 @@
dprintf(fd, " Local log:\n");
mLocalLog.dump(fd, " " /* prefix */, 40 /* lines */);
+
+ // --all additionally dumps the thread scheduling statistics
+ bool dumpAll = false;
+ for (const auto &arg : args) {
+ if (arg == String16("--all")) {
+ dumpAll = true;
+ }
+ }
+ if (dumpAll || type() == SPATIALIZER) {
+ const std::string sched = mThreadSnapshot.toString();
+ if (!sched.empty()) {
+ (void)write(fd, sched.c_str(), sched.size());
+ }
+ }
}
void AudioFlinger::ThreadBase::dumpBase_l(int fd, const Vector<String16>& args __unused)
@@ -2098,6 +2113,7 @@
}
}
run(mThreadName, ANDROID_PRIORITY_URGENT_AUDIO);
+ mThreadSnapshot.setTid(getTid());
}
// ThreadBase virtuals
@@ -2228,7 +2244,8 @@
pid_t tid,
status_t *status,
audio_port_handle_t portId,
- const sp<media::IAudioTrackCallback>& callback)
+ const sp<media::IAudioTrackCallback>& callback,
+ bool isSpatialized)
{
size_t frameCount = *pFrameCount;
size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2520,7 +2537,8 @@
channelMask, frameCount,
nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
sessionId, creatorPid, attributionSource, trackFlags,
- TrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/, speed);
+ TrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/,
+ speed, isSpatialized);
lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
if (lStatus != NO_ERROR) {
@@ -3337,6 +3355,7 @@
mInWrite = false;
if (mStandby) {
mThreadMetrics.logBeginInterval();
+ mThreadSnapshot.onBegin();
mStandby = false;
}
return bytesWritten;
@@ -3822,6 +3841,7 @@
if (!mStandby) {
LOG_AUDIO_STATE();
mThreadMetrics.logEndInterval();
+ mThreadSnapshot.onEnd();
mStandby = true;
}
sendStatistics(false /* force */);
@@ -3882,14 +3902,14 @@
&& effectChain->containsHapticGeneratingEffect_l()) {
activeHapticSessionId = track->sessionId();
isHapticSessionSpatialized =
- mType == SPATIALIZER && track->canBeSpatialized();
+ mType == SPATIALIZER && track->isSpatialized();
break;
}
if (activeHapticSessionId == AUDIO_SESSION_NONE
&& track->getHapticPlaybackEnabled()) {
activeHapticSessionId = track->sessionId();
isHapticSessionSpatialized =
- mType == SPATIALIZER && track->canBeSpatialized();
+ mType == SPATIALIZER && track->isSpatialized();
}
}
}
@@ -5569,7 +5589,7 @@
AudioMixer::TRACK,
AudioMixer::CHANNEL_MASK, (void *)(uintptr_t)track->channelMask());
- if (mType == SPATIALIZER && !track->canBeSpatialized()) {
+ if (mType == SPATIALIZER && !track->isSpatialized()) {
mAudioMixer->setParameter(
trackId,
AudioMixer::TRACK,
@@ -5619,7 +5639,7 @@
if (mMixerBufferEnabled
&& (track->mainBuffer() == mSinkBuffer
|| track->mainBuffer() == mMixerBuffer)) {
- if (mType == SPATIALIZER && !track->canBeSpatialized()) {
+ if (mType == SPATIALIZER && !track->isSpatialized()) {
mAudioMixer->setParameter(
trackId,
AudioMixer::TRACK,
@@ -5957,6 +5977,7 @@
mOutput->standby();
if (!mStandby) {
mThreadMetrics.logEndInterval();
+ mThreadSnapshot.onEnd();
mStandby = true;
}
mBytesWritten = 0;
@@ -6478,6 +6499,7 @@
mOutput->standby();
if (!mStandby) {
mThreadMetrics.logEndInterval();
+ mThreadSnapshot.onEnd();
mStandby = true;
}
mBytesWritten = 0;
@@ -7064,6 +7086,7 @@
}
if (mStandby) {
mThreadMetrics.logBeginInterval();
+ mThreadSnapshot.onBegin();
mStandby = false;
}
return (ssize_t)mSinkBufferSize;
@@ -7589,6 +7612,7 @@
doBroadcast = true;
if (mStandby) {
mThreadMetrics.logBeginInterval();
+ mThreadSnapshot.onBegin();
mStandby = false;
}
activeTrack->mState = TrackBase::ACTIVE;
@@ -8070,6 +8094,7 @@
if (!mStandby) {
inputStandBy();
mThreadMetrics.logEndInterval();
+ mThreadSnapshot.onEnd();
mStandby = true;
}
}
@@ -9455,6 +9480,7 @@
}
if (mStandby) {
mThreadMetrics.logBeginInterval();
+ mThreadSnapshot.onBegin();
mStandby = false;
}
return NO_ERROR;
@@ -9494,6 +9520,7 @@
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
audio_port_handle_t deviceId = mDeviceId;
std::vector<audio_io_handle_t> secondaryOutputs;
+ bool isSpatialized;
ret = AudioSystem::getOutputForAttr(&mAttr, &io,
mSessionId,
&stream,
@@ -9502,7 +9529,8 @@
flags,
&deviceId,
&portId,
- &secondaryOutputs);
+ &secondaryOutputs,
+ &isSpatialized);
ALOGD_IF(!secondaryOutputs.empty(),
"MmapThread::start does not support secondary outputs, ignoring them");
} else {
@@ -9649,6 +9677,7 @@
mHalStream->standby();
if (!mStandby) {
mThreadMetrics.logEndInterval();
+ mThreadSnapshot.onEnd();
mStandby = true;
}
releaseWakeLock();
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 982893d..b2962ed8 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -483,7 +483,7 @@
if (track->isFastTrack()) {
result |= FAST_SESSION; // caution, only represents first track.
}
- if (track->canBeSpatialized()) {
+ if (track->isSpatialized()) {
result |= SPATIALIZED_SESSION; // caution, only first track.
}
break;
@@ -687,6 +687,9 @@
int64_t mLastIoBeginNs = -1;
int64_t mLastIoEndNs = -1;
+ // ThreadSnapshot is thread-safe (internally locked)
+ mediautils::ThreadSnapshot mThreadSnapshot;
+
// This should be read under ThreadBase lock (if not on the threadLoop thread).
audio_utils::Statistics<double> mIoJitterMs{0.995 /* alpha */};
audio_utils::Statistics<double> mProcessTimeMs{0.995 /* alpha */};
@@ -960,7 +963,8 @@
pid_t tid,
status_t *status /*non-NULL*/,
audio_port_handle_t portId,
- const sp<media::IAudioTrackCallback>& callback);
+ const sp<media::IAudioTrackCallback>& callback,
+ bool isSpatialized);
AudioStreamOut* getOutput() const;
AudioStreamOut* clearOutput();
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 98a1bd9..2677ab3 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -107,9 +107,7 @@
audio_attributes_t attributes() const { return mAttr; }
- bool canBeSpatialized() const { return mIsOut && (mAttr.flags
- & (AUDIO_FLAG_CONTENT_SPATIALIZED | AUDIO_FLAG_NEVER_SPATIALIZE)) == 0
- && mChannelCount > 2; }
+ virtual bool isSpatialized() const { return false; }
#ifdef TEE_SINK
void dumpTee(int fd, const std::string &reason) const {
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 279ff3d..6135020 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -633,7 +633,8 @@
track_type type,
audio_port_handle_t portId,
size_t frameCountToBeReady,
- float speed)
+ float speed,
+ bool isSpatialized)
: TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
// TODO: Using unsecurePointer() has some associated security pitfalls
// (see declaration for details).
@@ -667,7 +668,8 @@
mResumeToStopping(false),
mFlushHwPending(false),
mFlags(flags),
- mSpeed(speed)
+ mSpeed(speed),
+ mIsSpatialized(isSpatialized)
{
// client == 0 implies sharedBuffer == 0
ALOG_ASSERT(!(client == 0 && sharedBuffer != 0));
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 09f947c..20d0523 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -143,7 +143,8 @@
audio_port_handle_t *selectedDeviceId,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
- output_type_t *outputType) = 0;
+ output_type_t *outputType,
+ bool *isSpatialized) = 0;
// indicates to the audio policy manager that the output starts being used by corresponding
// stream.
virtual status_t startOutput(audio_port_handle_t portId) = 0;
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index 9b4cc8a..f130f7c 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -11,6 +11,14 @@
"include-filter": "com.google.android.gts.audio.AudioHostTest#testTwoChannelCapturing"
}
]
+ },
+ {
+ "name": "CtsNativeMediaAAudioTestCases",
+ "options" : [
+ {
+ "include-filter": "android.nativemedia.aaudio.AAudioTests#AAudioBasic.*"
+ }
+ ]
}
]
}
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index f57ac64..64c7923 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -438,6 +438,8 @@
uint32_t getRecommendedMuteDurationMs() const override;
+ void setTracksInvalidatedStatusByStrategy(product_strategy_t strategy);
+
const sp<IOProfile> mProfile; // I/O profile this output derives from
audio_io_handle_t mIoHandle; // output handle
uint32_t mLatency; //
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 6f95012..0431619 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -157,6 +157,14 @@
}
uint32_t getActivityCount() const { return mActivityCount; }
+ bool isInvalid() const {
+ return mIsInvalid;
+ }
+
+ void setIsInvalid() {
+ mIsInvalid = true;
+ }
+
private:
const audio_stream_type_t mStream;
const product_strategy_t mStrategy;
@@ -169,6 +177,7 @@
* involved in a duplication.
*/
uint32_t mActivityCount = 0;
+ bool mIsInvalid = false;
};
class RecordClientDescriptor: public ClientDescriptor
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 5c342a1..009fa82 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -726,6 +726,14 @@
return mProfile->recommendedMuteDurationMs;
}
+void SwAudioOutputDescriptor::setTracksInvalidatedStatusByStrategy(product_strategy_t strategy) {
+ for (const auto &client : getClientIterable()) {
+ if (strategy == client->strategy()) {
+ client->setIsInvalid();
+ }
+ }
+}
+
// HwAudioOutputDescriptor implementation
HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<SourceClientDescriptor>& source,
AudioPolicyClientInterface *clientInterface)
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index d1655ef..713b0ac 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -125,7 +125,7 @@
void SourceClientCollection::dump(String8 *dst) const
{
- dst->append("\n Audio sources (%zu):\n", size());
+ dst->appendFormat("\n Audio sources (%zu):\n", size());
for (size_t i = 0; i < size(); i++) {
const std::string prefix = base::StringPrintf(" %zu. ", i + 1);
dst->appendFormat("%s", prefix.c_str());
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 7c6907d..48f7410 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -258,13 +258,14 @@
if (!portId) portId = &localPortId;
*portId = AUDIO_PORT_HANDLE_NONE;
AudioPolicyInterface::output_type_t outputType;
+ bool isSpatialized;
// TODO b/182392769: use attribution source util
AttributionSourceState attributionSource;
attributionSource.uid = 0;
attributionSource.token = sp<BBinder>::make();
if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
- &config, &flags, selectedDeviceId, portId, {}, &outputType) != OK) {
+ &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized) != OK) {
return false;
}
if (*output == AUDIO_IO_HANDLE_NONE || *portId == AUDIO_PORT_HANDLE_NONE) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index ff4705c..49a0dde 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -287,9 +287,12 @@
sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
// close unused outputs after device disconnection or direct outputs that have
// been opened by checkOutputsForDevice() to query dynamic parameters
+ // "outputs" vector never contains duplicated outputs
if ((state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE)
|| (((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) &&
- (desc->mDirectOpenCount == 0))) {
+ (desc->mDirectOpenCount == 0))
+ || (((desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) &&
+ !isOutputOnlyAvailableRouteToSomeDevice(desc))) {
clearAudioSourcesForOutput(output);
closeOutput(output);
}
@@ -935,6 +938,32 @@
ALOGV("setSystemProperty() property %s, value %s", property, value);
}
+// Find an MSD output profile compatible with the parameters passed.
+// When "directOnly" is set, restrict search to profiles for direct outputs.
+sp<IOProfile> AudioPolicyManager::getMsdProfileForOutput(
+ const DeviceVector& devices,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_output_flags_t flags,
+ bool directOnly)
+{
+ flags = getRelevantFlags(flags, directOnly);
+
+ sp<HwModule> msdModule = mHwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_MSD);
+ if (msdModule != nullptr) {
+ // for the msd module check if there are patches to the output devices
+ if (msdHasPatchesToAllDevices(devices.toTypeAddrVector())) {
+ HwModuleCollection modules;
+ modules.add(msdModule);
+ return searchCompatibleProfileHwModules(
+ modules, getMsdAudioOutDevices(), samplingRate, format, channelMask,
+ flags, directOnly);
+ }
+ }
+ return nullptr;
+}
+
// Find an output profile compatible with the parameters passed. When "directOnly" is set, restrict
// search to profiles for direct outputs.
sp<IOProfile> AudioPolicyManager::getProfileForOutput(
@@ -945,45 +974,65 @@
audio_output_flags_t flags,
bool directOnly)
{
+ flags = getRelevantFlags(flags, directOnly);
+
+ return searchCompatibleProfileHwModules(
+ mHwModules, devices, samplingRate, format, channelMask, flags, directOnly);
+}
+
+audio_output_flags_t AudioPolicyManager::getRelevantFlags(
+ audio_output_flags_t flags, bool directOnly) {
if (directOnly) {
- // only retain flags that will drive the direct output profile selection
- // if explicitly requested
- static const uint32_t kRelevantFlags =
+ // only retain flags that will drive the direct output profile selection
+ // if explicitly requested
+ static const uint32_t kRelevantFlags =
(AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD |
- AUDIO_OUTPUT_FLAG_VOIP_RX | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ);
- flags =
- (audio_output_flags_t)((flags & kRelevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
+ AUDIO_OUTPUT_FLAG_VOIP_RX | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ);
+ flags = (audio_output_flags_t)((flags & kRelevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
}
+ return flags;
+}
+
+sp<IOProfile> AudioPolicyManager::searchCompatibleProfileHwModules(
+ const HwModuleCollection& hwModules,
+ const DeviceVector& devices,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_output_flags_t flags,
+ bool directOnly) {
sp<IOProfile> profile;
-
- for (const auto& hwModule : mHwModules) {
+ for (const auto& hwModule : hwModules) {
for (const auto& curProfile : hwModule->getOutputProfiles()) {
- if (!curProfile->isCompatibleProfile(devices,
- samplingRate, NULL /*updatedSamplingRate*/,
- format, NULL /*updatedFormat*/,
- channelMask, NULL /*updatedChannelMask*/,
- flags)) {
+ if (!curProfile->isCompatibleProfile(devices,
+ samplingRate, NULL /*updatedSamplingRate*/,
+ format, NULL /*updatedFormat*/,
+ channelMask, NULL /*updatedChannelMask*/,
+ flags)) {
+ continue;
+ }
+ // reject profiles not corresponding to a device currently available
+ if (!mAvailableOutputDevices.containsAtLeastOne(curProfile->getSupportedDevices())) {
+ continue;
+ }
+ // reject profiles if connected device does not support codec
+ if (!curProfile->devicesSupportEncodedFormats(devices.types())) {
+ continue;
+ }
+ if (!directOnly) {
+ return curProfile;
+ }
+
+ // when searching for direct outputs, if several profiles are compatible, give priority
+ // to one with offload capability
+ if (profile != 0 &&
+ ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0)) {
continue;
- }
- // reject profiles not corresponding to a device currently available
- if (!mAvailableOutputDevices.containsAtLeastOne(curProfile->getSupportedDevices())) {
- continue;
- }
- // reject profiles if connected device does not support codec
- if (!curProfile->devicesSupportEncodedFormats(devices.types())) {
- continue;
- }
- if (!directOnly) return curProfile;
- // when searching for direct outputs, if several profiles are compatible, give priority
- // to one with offload capability
- if (profile != 0 && ((curProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0)) {
- continue;
- }
- profile = curProfile;
- if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
- break;
- }
+ }
+ profile = curProfile;
+ if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
+ break;
+ }
}
}
return profile;
@@ -1076,7 +1125,8 @@
audio_port_handle_t *selectedDeviceId,
bool *isRequestedDeviceForExclusiveUse,
std::vector<sp<AudioPolicyMix>> *secondaryMixes,
- output_type_t *outputType)
+ output_type_t *outputType,
+ bool *isSpatialized)
{
DeviceVector outputDevices;
const audio_port_handle_t requestedPortId = *selectedDeviceId;
@@ -1085,6 +1135,8 @@
mAvailableOutputDevices.getDeviceFromId(requestedPortId);
*outputType = API_OUTPUT_INVALID;
+ *isSpatialized = false;
+
status_t status = getAudioAttributes(resultAttr, attr, *stream);
if (status != NO_ERROR) {
return status;
@@ -1184,7 +1236,7 @@
*output = AUDIO_IO_HANDLE_NONE;
if (!msdDevices.isEmpty()) {
- *output = getOutputForDevices(msdDevices, session, resultAttr, config, flags);
+ *output = getOutputForDevices(msdDevices, session, resultAttr, config, flags, isSpatialized);
if (*output != AUDIO_IO_HANDLE_NONE && setMsdOutputPatches(&outputDevices) == NO_ERROR) {
ALOGV("%s() Using MSD devices %s instead of devices %s",
__func__, msdDevices.toString().c_str(), outputDevices.toString().c_str());
@@ -1194,7 +1246,7 @@
}
if (*output == AUDIO_IO_HANDLE_NONE) {
*output = getOutputForDevices(outputDevices, session, resultAttr, config,
- flags, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
+ flags, isSpatialized, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
}
if (*output == AUDIO_IO_HANDLE_NONE) {
return INVALID_OPERATION;
@@ -1229,7 +1281,8 @@
audio_port_handle_t *selectedDeviceId,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
- output_type_t *outputType)
+ output_type_t *outputType,
+ bool *isSpatialized)
{
// The supplied portId must be AUDIO_PORT_HANDLE_NONE
if (*portId != AUDIO_PORT_HANDLE_NONE) {
@@ -1251,7 +1304,7 @@
status_t status = getOutputForAttrInt(&resultAttr, output, session, attr, stream, uid,
config, flags, selectedDeviceId, &isRequestedDeviceForExclusiveUse,
- secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType);
+ secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType, isSpatialized);
if (status != NO_ERROR) {
return status;
}
@@ -1394,6 +1447,7 @@
const audio_attributes_t *attr,
const audio_config_t *config,
audio_output_flags_t *flags,
+ bool *isSpatialized,
bool forceMutingHaptic)
{
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
@@ -1440,9 +1494,11 @@
*flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_ULTRASOUND);
}
+ *isSpatialized = false;
if (mSpatializerOutput != nullptr
&& canBeSpatializedInt(attr, config,
devices.toTypeAddrVector(), false /* allowCurrentOutputReconfig */)) {
+ *isSpatialized = true;
return mSpatializerOutput->mIoHandle;
}
@@ -3146,9 +3202,10 @@
}
}
}
- return mEffects.registerEffect(desc, io, session, id,
- (strategy == streamToStrategy(AUDIO_STREAM_MUSIC) ||
- strategy == PRODUCT_STRATEGY_NONE));
+ bool isMusicEffect = (session != AUDIO_SESSION_OUTPUT_STAGE)
+ && ((strategy == streamToStrategy(AUDIO_STREAM_MUSIC)
+ || strategy == PRODUCT_STRATEGY_NONE));
+ return mEffects.registerEffect(desc, io, session, id, isMusicEffect);
}
status_t AudioPolicyManager::unregisterEffect(int id)
@@ -3820,7 +3877,22 @@
__FUNCTION__, profile != 0 ? "" : "NOT ",
(profile != 0 ? profile->getTagName().c_str() : "null"),
config.sample_rate, config.format, config.channel_mask, output_flags);
- return (profile != 0);
+
+ // also try the MSD module if compatible profile not found
+ if (profile == nullptr) {
+ profile = getMsdProfileForOutput(outputDevices,
+ config.sample_rate,
+ config.format,
+ config.channel_mask,
+ output_flags,
+ true /* directOnly */);
+ ALOGV("%s() MSD profile %sfound with name: %s, "
+ "sample rate: %u, format: 0x%x, channel_mask: 0x%x, output flags: 0x%x",
+ __FUNCTION__, profile != 0 ? "" : "NOT ",
+ (profile != 0 ? profile->getTagName().c_str() : "null"),
+ config.sample_rate, config.format, config.channel_mask, output_flags);
+ }
+ return (profile != nullptr);
}
bool AudioPolicyManager::isOffloadPossible(const audio_offload_info_t &offloadInfo,
@@ -3901,8 +3973,16 @@
}
flags = (audio_output_flags_t)((flags & relevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
- DeviceVector outputDevices = mEngine->getOutputDevicesForAttributes(*attr);
+ DeviceVector engineOutputDevices = mEngine->getOutputDevicesForAttributes(*attr);
for (const auto& hwModule : mHwModules) {
+ DeviceVector outputDevices = engineOutputDevices;
+ // the MSD module checks for different conditions and output devices
+ if (strcmp(hwModule->getName(), AUDIO_HARDWARE_MODULE_ID_MSD) == 0) {
+ if (!msdHasPatchesToAllDevices(engineOutputDevices.toTypeAddrVector())) {
+ continue;
+ }
+ outputDevices = getMsdAudioOutDevices();
+ }
for (const auto& curProfile : hwModule->getOutputProfiles()) {
if (!curProfile->isCompatibleProfile(outputDevices,
config->sample_rate, nullptr /*updatedSamplingRate*/,
@@ -3929,11 +4009,10 @@
~AUDIO_DIRECT_OFFLOAD_SUPPORTED) |
AUDIO_DIRECT_OFFLOAD_GAPLESS_SUPPORTED);
} else {
- directMode = (audio_direct_mode_t)(directMode |AUDIO_DIRECT_OFFLOAD_SUPPORTED);
+ directMode = (audio_direct_mode_t)(directMode | AUDIO_DIRECT_OFFLOAD_SUPPORTED);
}
} else {
- directMode = (audio_direct_mode_t) (directMode |
- AUDIO_DIRECT_BITSTREAM_SUPPORTED);
+ directMode = (audio_direct_mode_t) (directMode | AUDIO_DIRECT_BITSTREAM_SUPPORTED);
}
}
}
@@ -4394,10 +4473,11 @@
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
bool isRequestedDeviceForExclusiveUse = false;
output_type_t outputType;
+ bool isSpatialized;
getOutputForAttrInt(&resultAttr, &output, AUDIO_SESSION_NONE, &attributes,
&stream, sourceDesc->uid(), &config, &flags,
&selectedDeviceId, &isRequestedDeviceForExclusiveUse,
- nullptr, &outputType);
+ nullptr, &outputType, &isSpatialized);
if (output == AUDIO_IO_HANDLE_NONE) {
ALOGV("%s no output for device %s",
__FUNCTION__, sinkDevice->toString().c_str());
@@ -5284,6 +5364,29 @@
}
}
+
+bool AudioPolicyManager::isOutputOnlyAvailableRouteToSomeDevice(
+ const sp<SwAudioOutputDescriptor>& outputDesc) {
+ if (outputDesc->isDuplicated()) {
+ return false;
+ }
+ DeviceVector devices = outputDesc->supportedDevices();
+ for (size_t i = 0; i < mOutputs.size(); i++) {
+ sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+ if (desc == outputDesc || desc->isDuplicated()) {
+ continue;
+ }
+ DeviceVector sharedDevices = desc->filterSupportedDevices(devices);
+ if (!sharedDevices.isEmpty()
+ && (desc->devicesSupportEncodedFormats(sharedDevices.types())
+ == outputDesc->devicesSupportEncodedFormats(sharedDevices.types()))) {
+ return false;
+ }
+ }
+ return true;
+}
+
+
status_t AudioPolicyManager::getSpatializerOutput(const audio_config_base_t *mixerConfig,
const audio_attributes_t *attr,
audio_io_handle_t *output) {
@@ -5299,80 +5402,67 @@
}
if (!canBeSpatializedInt(
attr, configPtr, devicesTypeAddress)) {
- ALOGW("%s provided attributes or mixer config cannot be spatialized", __func__);
+ ALOGV("%s provided attributes or mixer config cannot be spatialized", __func__);
return BAD_VALUE;
}
sp<IOProfile> profile =
getSpatializerOutputProfile(configPtr, devicesTypeAddress);
if (profile == nullptr) {
- ALOGW("%s no suitable output profile for provided attributes or mixer config", __func__);
+ ALOGV("%s no suitable output profile for provided attributes or mixer config", __func__);
return BAD_VALUE;
}
- if (mSpatializerOutput != nullptr && mSpatializerOutput->mProfile == profile
- && configPtr != nullptr
- && configPtr->channel_mask == mSpatializerOutput->mMixerChannelMask) {
- *output = mSpatializerOutput->mIoHandle;
- ALOGV("%s returns current spatializer output %d", __func__, *output);
- return NO_ERROR;
- }
- mSpatializerOutput.clear();
+ std::vector<sp<SwAudioOutputDescriptor>> spatializerOutputs;
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
- if (!desc->isDuplicated() && desc->mProfile == profile) {
- ALOGV("%s found output %d for spatializer profile", __func__, desc->mIoHandle);
- mSpatializerOutput = desc;
- break;
+ if (!desc->isDuplicated()
+ && (desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) {
+ spatializerOutputs.push_back(desc);
+ ALOGV("%s adding opened spatializer Output %d", __func__, desc->mIoHandle);
}
}
- if (mSpatializerOutput == nullptr) {
- ALOGW("%s no opened spatializer output for profile %s",
- __func__, profile->getName().c_str());
- return BAD_VALUE;
+ mSpatializerOutput.clear();
+ bool outputsChanged = false;
+ for (const auto& desc : spatializerOutputs) {
+ if (desc->mProfile == profile
+ && (configPtr == nullptr
+ || configPtr->channel_mask == desc->mMixerChannelMask)) {
+ mSpatializerOutput = desc;
+ ALOGV("%s reusing current spatializer output %d", __func__, desc->mIoHandle);
+ } else {
+ ALOGV("%s closing spatializerOutput output %d to match channel mask %#x"
+ " and devices %s", __func__, desc->mIoHandle,
+ configPtr != nullptr ? configPtr->channel_mask : 0,
+ devices.toString().c_str());
+ closeOutput(desc->mIoHandle);
+ outputsChanged = true;
+ }
}
- if (configPtr != nullptr
- && configPtr->channel_mask != mSpatializerOutput->mMixerChannelMask) {
- audio_config_base_t savedMixerConfig = {
- .sample_rate = mSpatializerOutput->getSamplingRate(),
- .format = mSpatializerOutput->getFormat(),
- .channel_mask = mSpatializerOutput->mMixerChannelMask,
- };
- DeviceVector savedDevices = mSpatializerOutput->devices();
-
- ALOGV("%s reopening spatializer output to match channel mask %#x (current mask %#x)",
- __func__, configPtr->channel_mask, mSpatializerOutput->mMixerChannelMask);
-
- closeOutput(mSpatializerOutput->mIoHandle);
- //from now on mSpatializerOutput is null
-
+ if (mSpatializerOutput == nullptr) {
sp<SwAudioOutputDescriptor> desc =
openOutputWithProfileAndDevice(profile, devices, mixerConfig);
- if (desc == nullptr) {
- // re open the spatializer output with previous channel mask
- desc = openOutputWithProfileAndDevice(profile, savedDevices, &savedMixerConfig);
- if (desc == nullptr) {
- ALOGE("%s failed to restore mSpatializerOutput with previous config", __func__);
- } else {
- mSpatializerOutput = desc;
- }
- mPreviousOutputs = mOutputs;
- mpClientInterface->onAudioPortListUpdate();
- *output = AUDIO_IO_HANDLE_NONE;
- ALOGW("%s could not open spatializer output with requested config", __func__);
- return BAD_VALUE;
+ if (desc != nullptr) {
+ mSpatializerOutput = desc;
+ outputsChanged = true;
}
- mSpatializerOutput = desc;
- mPreviousOutputs = mOutputs;
- mpClientInterface->onAudioPortListUpdate();
}
checkVirtualizerClientRoutes();
+ if (outputsChanged) {
+ mPreviousOutputs = mOutputs;
+ mpClientInterface->onAudioPortListUpdate();
+ }
+
+ if (mSpatializerOutput == nullptr) {
+ ALOGV("%s could not open spatializer output with requested config", __func__);
+ return BAD_VALUE;
+ }
*output = mSpatializerOutput->mIoHandle;
- ALOGV("%s returns new spatializer output %d", __func__, *output);
- return NO_ERROR;
+ ALOGV("%s returning new spatializer output %d", __func__, *output);
+ return OK;
}
status_t AudioPolicyManager::releaseSpatializerOutput(audio_io_handle_t output) {
@@ -5383,9 +5473,12 @@
return BAD_VALUE;
}
- mSpatializerOutput.clear();
-
- checkVirtualizerClientRoutes();
+ if (!isOutputOnlyAvailableRouteToSomeDevice(mSpatializerOutput)) {
+ ALOGV("%s closing spatializer output %d", __func__, mSpatializerOutput->mIoHandle);
+ closeOutput(mSpatializerOutput->mIoHandle);
+ //from now on mSpatializerOutput is null
+ checkVirtualizerClientRoutes();
+ }
return NO_ERROR;
}
@@ -5462,6 +5555,7 @@
return status;
}
+ mEngine->updateDeviceSelectionCache();
mCommunnicationStrategy = mEngine->getProductStrategyForAttributes(
mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL));
@@ -5661,6 +5755,21 @@
inputDesc->close();
}
}
+
+ // Check if spatializer outputs can be closed until they are needed again.
+ // mOutputs vector never contains duplicated outputs at this point.
+ std::vector<audio_io_handle_t> outputsClosed;
+ for (size_t i = 0; i < mOutputs.size(); i++) {
+ sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+ if ((desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0
+ && !isOutputOnlyAvailableRouteToSomeDevice(desc)) {
+ outputsClosed.push_back(desc->mIoHandle);
+ desc->close();
+ }
+ }
+ for (auto output : outputsClosed) {
+ removeOutput(output);
+ }
}
void AudioPolicyManager::addOutput(audio_io_handle_t output,
@@ -6274,6 +6383,12 @@
for (auto stream : mEngine->getStreamTypesForProductStrategy(psId)) {
mpClientInterface->invalidateStream(stream);
}
+ for (audio_io_handle_t srcOut : srcOutputs) {
+ sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
+ if (desc == nullptr) continue;
+
+ desc->setTracksInvalidatedStatusByStrategy(psId);
+ }
}
}
}
@@ -6349,6 +6464,18 @@
return false;
}
+bool AudioPolicyManager::isHearingAidUsedForComm() const {
+ DeviceVector devices = mEngine->getOutputDevicesForStream(AUDIO_STREAM_VOICE_CALL,
+ true /*fromCache*/);
+ for (const auto &device : devices) {
+ if (device->type() == AUDIO_DEVICE_OUT_HEARING_AID) {
+ return true;
+ }
+ }
+ return false;
+}
+
+
void AudioPolicyManager::checkA2dpSuspend()
{
audio_io_handle_t a2dpOutput = mOutputs.getA2dpOutput();
@@ -7146,13 +7273,15 @@
bool isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (btScoVolSrc == volumeSource);
bool isScoRequested = isScoRequestedForComm();
+ bool isHAUsed = isHearingAidUsedForComm();
+
// do not change in call volume if bluetooth is connected and vice versa
// if sco and call follow same curves, bypass forceUseForComm
if ((callVolSrc != btScoVolSrc) &&
((isVoiceVolSrc && isScoRequested) ||
- (isBtScoVolSrc && !isScoRequested))) {
+ (isBtScoVolSrc && !(isScoRequested || isHAUsed)))) {
ALOGV("%s cannot set volume group %d volume when is%srequested for comm", __func__,
- volumeSource, isScoRequested ? " " : "n ot ");
+ volumeSource, isScoRequested ? " " : " not ");
// Do not return an error here as AudioService will always set both voice call
// and bluetooth SCO volumes due to stream aliasing.
return NO_ERROR;
@@ -7599,7 +7728,10 @@
routedDevices.add(device);
}
for (const auto& client : activeClients) {
- // TODO: b/175343099 only travel the valid client
+ if (client->isInvalid()) {
+ // No need to consider invalidated clients.
+ continue;
+ }
sp<DeviceDescriptor> preferredDevice =
mAvailableOutputDevices.getDeviceFromId(client->preferredDeviceId());
if (mEngine->getOutputDevicesForAttributes(
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 6f8b897..0d9b5bf 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -122,7 +122,8 @@
audio_port_handle_t *selectedDeviceId,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
- output_type_t *outputType) override;
+ output_type_t *outputType,
+ bool *isSpatialized) override;
virtual status_t startOutput(audio_port_handle_t portId);
virtual status_t stopOutput(audio_port_handle_t portId);
virtual bool releaseOutput(audio_port_handle_t portId);
@@ -768,6 +769,15 @@
audio_channel_mask_t channelMask,
audio_output_flags_t flags,
bool directOnly);
+ /**
+ * Same as getProfileForOutput, but it looks for an MSD profile
+ */
+ sp<IOProfile> getMsdProfileForOutput(const DeviceVector &devices,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_output_flags_t flags,
+ bool directOnly);
audio_io_handle_t selectOutputForMusicEffects();
@@ -1032,7 +1042,8 @@
audio_port_handle_t *selectedDeviceId,
bool *isRequestedDeviceForExclusiveUse,
std::vector<sp<AudioPolicyMix>> *secondaryMixes,
- output_type_t *outputType);
+ output_type_t *outputType,
+ bool *isSpatialized);
// internal method to return the output handle for the given device and format
audio_io_handle_t getOutputForDevices(
const DeviceVector &devices,
@@ -1040,6 +1051,7 @@
const audio_attributes_t *attr,
const audio_config_t *config,
audio_output_flags_t *flags,
+ bool *isSpatialized,
bool forceMutingHaptic = false);
// Internal method checking if a direct output can be opened matching the requested
@@ -1085,6 +1097,16 @@
void checkVirtualizerClientRoutes();
/**
+ * @brief Returns true if at least one device can only be reached via the output passed
+ * as argument. Always returns false for duplicated outputs.
+ * This can be used to decide if an output can be closed without preventing
+ * playback to any of its supported devices.
+ * @param outputDesc the output to consider
+ * @return true if at least one device can only be reached via the output.
+ */
+ bool isOutputOnlyAvailableRouteToSomeDevice(const sp<SwAudioOutputDescriptor>& outputDesc);
+
+ /**
* @brief getInputForDevice selects an input handle for a given input device and
* requester context
* @param device to be used by requester, selected by policy mix rules or engine
@@ -1177,6 +1199,8 @@
bool isScoRequestedForComm() const;
+ bool isHearingAidUsedForComm() const;
+
bool areAllActiveTracksRerouted(const sp<SwAudioOutputDescriptor>& output);
/**
@@ -1201,6 +1225,21 @@
// without duplicating them if already present
void addPortProfilesToVector(sp<IOProfile> outputProfile,
AudioProfileVector& audioProfilesVector);
+
+ // Searches for a compatible profile with the sample rate, audio format and channel mask
+ // in the list of passed HwModule(s).
+ // returns a compatible profile if found, nullptr otherwise
+ sp<IOProfile> searchCompatibleProfileHwModules(
+ const HwModuleCollection& hwModules,
+ const DeviceVector& devices,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_output_flags_t flags,
+ bool directOnly);
+
+ // Filters only the relevant flags for getProfileForOutput
+ audio_output_flags_t getRelevantFlags(audio_output_flags_t flags, bool directOnly);
};
};
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index ae4d174..df49bba 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -90,13 +90,13 @@
if (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
&& ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)) {
if (!callAudioInterceptionAllowed(attributionSource)) {
- ALOGE(("permission denied: modify audio routing not allowed "
- "for attributionSource %s"), attributionSource.toString().c_str());
+ ALOGE("%s: call audio interception not allowed for attribution source: %s",
+ __func__, attributionSource.toString().c_str());
return PERMISSION_DENIED;
}
} else if (!modifyAudioRoutingAllowed(attributionSource)) {
- ALOGE(("permission denied: modify audio routing not allowed "
- "for attributionSource %s"), attributionSource.toString().c_str());
+ ALOGE("%s: modify audio routing not allowed for attribution source: %s",
+ __func__, attributionSource.toString().c_str());
return PERMISSION_DENIED;
}
} else {
@@ -383,13 +383,15 @@
AutoCallerClear acc;
AudioPolicyInterface::output_type_t outputType;
+ bool isSpatialized = false;
status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
&stream,
adjAttributionSource,
&config,
&flags, &selectedDeviceId, &portId,
&secondaryOutputs,
- &outputType);
+ &outputType,
+ &isSpatialized);
// FIXME: Introduce a way to check for the telephony device before opening the output
if (result == NO_ERROR) {
@@ -426,7 +428,7 @@
if (result == NO_ERROR) {
sp<AudioPlaybackClient> client =
new AudioPlaybackClient(attr, output, adjAttributionSource, session,
- portId, selectedDeviceId, stream);
+ portId, selectedDeviceId, stream, isSpatialized);
mAudioPlaybackClients.add(portId, client);
_aidl_return->output = VALUE_OR_RETURN_BINDER_STATUS(
@@ -440,6 +442,7 @@
_aidl_return->secondaryOutputs = VALUE_OR_RETURN_BINDER_STATUS(
convertContainer<std::vector<int32_t>>(secondaryOutputs,
legacy2aidl_audio_io_handle_t_int32_t));
+ _aidl_return->isSpatialized = isSpatialized;
}
return binderStatusFromStatusT(result);
}
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 3ee2aa3..18339b0 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -39,6 +39,7 @@
#include <media/AidlConversion.h>
#include <media/AudioEffect.h>
#include <media/AudioParameter.h>
+#include <mediautils/MethodStatistics.h>
#include <mediautils/ServiceUtilities.h>
#include <mediautils/TimeCheck.h>
#include <sensorprivacy/SensorPrivacyManager.h>
@@ -60,6 +61,120 @@
static const String16 sManageAudioPolicyPermission("android.permission.MANAGE_AUDIO_POLICY");
+// Creates an association between Binder transaction codes and method names for IAudioPolicyService.
+#define IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST \
+BINDER_METHOD_ENTRY(onNewAudioModulesAvailable) \
+BINDER_METHOD_ENTRY(setDeviceConnectionState) \
+BINDER_METHOD_ENTRY(getDeviceConnectionState) \
+BINDER_METHOD_ENTRY(handleDeviceConfigChange) \
+BINDER_METHOD_ENTRY(setPhoneState) \
+BINDER_METHOD_ENTRY(setForceUse) \
+BINDER_METHOD_ENTRY(getForceUse) \
+BINDER_METHOD_ENTRY(getOutput) \
+BINDER_METHOD_ENTRY(getOutputForAttr) \
+BINDER_METHOD_ENTRY(startOutput) \
+BINDER_METHOD_ENTRY(stopOutput) \
+BINDER_METHOD_ENTRY(releaseOutput) \
+BINDER_METHOD_ENTRY(getInputForAttr) \
+BINDER_METHOD_ENTRY(startInput) \
+BINDER_METHOD_ENTRY(stopInput) \
+BINDER_METHOD_ENTRY(releaseInput) \
+BINDER_METHOD_ENTRY(initStreamVolume) \
+BINDER_METHOD_ENTRY(setStreamVolumeIndex) \
+BINDER_METHOD_ENTRY(getStreamVolumeIndex) \
+BINDER_METHOD_ENTRY(setVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getMaxVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getMinVolumeIndexForAttributes) \
+BINDER_METHOD_ENTRY(getStrategyForStream) \
+BINDER_METHOD_ENTRY(getDevicesForAttributes) \
+BINDER_METHOD_ENTRY(getOutputForEffect) \
+BINDER_METHOD_ENTRY(registerEffect) \
+BINDER_METHOD_ENTRY(unregisterEffect) \
+BINDER_METHOD_ENTRY(setEffectEnabled) \
+BINDER_METHOD_ENTRY(moveEffectsToIo) \
+BINDER_METHOD_ENTRY(isStreamActive) \
+BINDER_METHOD_ENTRY(isStreamActiveRemotely) \
+BINDER_METHOD_ENTRY(isSourceActive) \
+BINDER_METHOD_ENTRY(queryDefaultPreProcessing) \
+BINDER_METHOD_ENTRY(addSourceDefaultEffect) \
+BINDER_METHOD_ENTRY(addStreamDefaultEffect) \
+BINDER_METHOD_ENTRY(removeSourceDefaultEffect) \
+BINDER_METHOD_ENTRY(removeStreamDefaultEffect) \
+BINDER_METHOD_ENTRY(setSupportedSystemUsages) \
+BINDER_METHOD_ENTRY(setAllowedCapturePolicy) \
+BINDER_METHOD_ENTRY(getOffloadSupport) \
+BINDER_METHOD_ENTRY(isDirectOutputSupported) \
+BINDER_METHOD_ENTRY(listAudioPorts) \
+BINDER_METHOD_ENTRY(getAudioPort) \
+BINDER_METHOD_ENTRY(createAudioPatch) \
+BINDER_METHOD_ENTRY(releaseAudioPatch) \
+BINDER_METHOD_ENTRY(listAudioPatches) \
+BINDER_METHOD_ENTRY(setAudioPortConfig) \
+BINDER_METHOD_ENTRY(registerClient) \
+BINDER_METHOD_ENTRY(setAudioPortCallbacksEnabled) \
+BINDER_METHOD_ENTRY(setAudioVolumeGroupCallbacksEnabled) \
+BINDER_METHOD_ENTRY(acquireSoundTriggerSession) \
+BINDER_METHOD_ENTRY(releaseSoundTriggerSession) \
+BINDER_METHOD_ENTRY(getPhoneState) \
+BINDER_METHOD_ENTRY(registerPolicyMixes) \
+BINDER_METHOD_ENTRY(setUidDeviceAffinities) \
+BINDER_METHOD_ENTRY(removeUidDeviceAffinities) \
+BINDER_METHOD_ENTRY(setUserIdDeviceAffinities) \
+BINDER_METHOD_ENTRY(removeUserIdDeviceAffinities) \
+BINDER_METHOD_ENTRY(startAudioSource) \
+BINDER_METHOD_ENTRY(stopAudioSource) \
+BINDER_METHOD_ENTRY(setMasterMono) \
+BINDER_METHOD_ENTRY(getMasterMono) \
+BINDER_METHOD_ENTRY(getStreamVolumeDB) \
+BINDER_METHOD_ENTRY(getSurroundFormats) \
+BINDER_METHOD_ENTRY(getReportedSurroundFormats) \
+BINDER_METHOD_ENTRY(getHwOffloadFormatsSupportedForBluetoothMedia) \
+BINDER_METHOD_ENTRY(setSurroundFormatEnabled) \
+BINDER_METHOD_ENTRY(setAssistantServicesUids) \
+BINDER_METHOD_ENTRY(setActiveAssistantServicesUids) \
+BINDER_METHOD_ENTRY(setA11yServicesUids) \
+BINDER_METHOD_ENTRY(setCurrentImeUid) \
+BINDER_METHOD_ENTRY(isHapticPlaybackSupported) \
+BINDER_METHOD_ENTRY(isUltrasoundSupported) \
+BINDER_METHOD_ENTRY(listAudioProductStrategies) \
+BINDER_METHOD_ENTRY(getProductStrategyFromAudioAttributes) \
+BINDER_METHOD_ENTRY(listAudioVolumeGroups) \
+BINDER_METHOD_ENTRY(getVolumeGroupFromAudioAttributes) \
+BINDER_METHOD_ENTRY(setRttEnabled) \
+BINDER_METHOD_ENTRY(isCallScreenModeSupported) \
+BINDER_METHOD_ENTRY(setDevicesRoleForStrategy) \
+BINDER_METHOD_ENTRY(removeDevicesRoleForStrategy) \
+BINDER_METHOD_ENTRY(getDevicesForRoleAndStrategy) \
+BINDER_METHOD_ENTRY(setDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(addDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(removeDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(clearDevicesRoleForCapturePreset) \
+BINDER_METHOD_ENTRY(getDevicesForRoleAndCapturePreset) \
+BINDER_METHOD_ENTRY(registerSoundTriggerCaptureStateListener) \
+BINDER_METHOD_ENTRY(getSpatializer) \
+BINDER_METHOD_ENTRY(canBeSpatialized) \
+BINDER_METHOD_ENTRY(getDirectPlaybackSupport) \
+BINDER_METHOD_ENTRY(getDirectProfilesForAttributes) \
+
+// singleton for Binder Method Statistics for IAudioPolicyService
+static auto& getIAudioPolicyServiceStatistics() {
+ using Code = int;
+
+#pragma push_macro("BINDER_METHOD_ENTRY")
+#undef BINDER_METHOD_ENTRY
+#define BINDER_METHOD_ENTRY(ENTRY) \
+ {(Code)media::BnAudioPolicyService::TRANSACTION_##ENTRY, #ENTRY},
+
+ static mediautils::MethodStatistics<Code> methodStatistics{
+ IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
+ METHOD_STATISTICS_BINDER_CODE_NAMES(Code)
+ };
+#pragma pop_macro("BINDER_METHOD_ENTRY")
+
+ return methodStatistics;
+}
+
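
For readers unfamiliar with the construct: the BINDER_METHOD_ENTRY list combined with push_macro/pop_macro above is the classic X-macro pattern, where a single list is expanded under different macro definitions to generate parallel artifacts. A minimal standalone sketch of the same idea, with hypothetical names unrelated to mediautils::MethodStatistics:

    #include <cstdio>
    #include <map>
    #include <string>

    // Single source of truth: one entry per method.
    #define METHOD_LIST           \
        METHOD_ENTRY(openStream)  \
        METHOD_ENTRY(closeStream) \
        METHOD_ENTRY(setVolume)

    // Expansion 1: an enum of codes.
    enum class Code {
    #define METHOD_ENTRY(name) name,
        METHOD_LIST
    #undef METHOD_ENTRY
    };

    // Expansion 2: a code-to-name table generated from the same list.
    static const std::map<Code, std::string> kMethodNames = {
    #define METHOD_ENTRY(name) {Code::name, #name},
        METHOD_LIST
    #undef METHOD_ENTRY
    };

    int main() {
        std::printf("%s\n", kMethodNames.at(Code::setVolume).c_str());  // prints "setVolume"
        return 0;
    }
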
// ----------------------------------------------------------------------------
static AudioPolicyInterface* createAudioPolicyManager(AudioPolicyClientInterface *clientInterface)
@@ -114,6 +229,13 @@
void AudioPolicyService::onFirstRef()
{
+ // Log an AudioPolicy "constructor" mediametrics event on first ref.
+ // This records the time it takes to load the audio modules and devices.
+ mediametrics::Defer defer([beginNs = systemTime()] {
+ mediametrics::LogItem(AMEDIAMETRICS_KEY_AUDIO_POLICY)
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR)
+ .set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
+ .record(); });
{
Mutex::Autolock _l(mLock);
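
The mediametrics::Defer above presumably just runs its lambda when it goes out of scope, so the recorded execution time covers everything the rest of onFirstRef() does. A minimal sketch of that scope-exit timing pattern using only the standard library (ScopeTimer is a made-up name, not the mediametrics API):

    #include <chrono>
    #include <cstdio>
    #include <functional>
    #include <utility>

    // Runs the stored callable in its destructor, i.e. at scope exit.
    class ScopeTimer {
    public:
        explicit ScopeTimer(std::function<void(std::chrono::nanoseconds)> onExit)
            : mOnExit(std::move(onExit)), mBegin(std::chrono::steady_clock::now()) {}
        ~ScopeTimer() {
            mOnExit(std::chrono::duration_cast<std::chrono::nanoseconds>(
                    std::chrono::steady_clock::now() - mBegin));
        }
    private:
        std::function<void(std::chrono::nanoseconds)> mOnExit;
        std::chrono::steady_clock::time_point mBegin;
    };

    void initSomething() {
        ScopeTimer timer([](std::chrono::nanoseconds elapsed) {
            std::printf("init took %lld ns\n", static_cast<long long>(elapsed.count()));
        });
        // ... expensive initialization measured by the timer ...
    }

    int main() {
        initSomething();
        return 0;
    }
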
@@ -383,6 +505,8 @@
{
Mutex::Autolock _l(mLock);
+ ALOGI("%s mSpatializer %p level %d", __func__, mSpatializer.get(), (int)mSpatializer->getLevel());
+
if (mSpatializer != nullptr) {
// Note: mSpatializer != nullptr => mAudioPolicyManager != nullptr
if (mSpatializer->getLevel() != media::SpatializationLevel::NONE) {
@@ -422,11 +546,13 @@
}
}
-size_t AudioPolicyService::countActiveClientsOnOutput_l(audio_io_handle_t output) REQUIRES(mLock) {
+size_t AudioPolicyService::countActiveClientsOnOutput_l(
+ audio_io_handle_t output, bool spatializedOnly) {
size_t count = 0;
for (size_t i = 0; i < mAudioPlaybackClients.size(); i++) {
auto client = mAudioPlaybackClients.valueAt(i);
- if (client->io == output && client->active) {
+ if (client->io == output && client->active
+ && (!spatializedOnly || client->isSpatialized)) {
count++;
}
}
@@ -442,14 +568,21 @@
void AudioPolicyService::doOnUpdateActiveSpatializerTracks()
{
- Mutex::Autolock _l(mLock);
- if (mSpatializer == nullptr) {
- return;
+ sp<Spatializer> spatializer;
+ size_t activeClients;
+ {
+ Mutex::Autolock _l(mLock);
+ if (mSpatializer == nullptr) {
+ return;
+ }
+ spatializer = mSpatializer;
+ activeClients = countActiveClientsOnOutput_l(mSpatializer->getOutput());
}
- mSpatializer->updateActiveTracks(countActiveClientsOnOutput_l(mSpatializer->getOutput()));
+ if (spatializer != nullptr) {
+ spatializer->updateActiveTracks(activeClients);
+ }
}
-
status_t AudioPolicyService::clientCreateAudioPatch(const struct audio_patch *patch,
audio_patch_handle_t *handle,
int delayMs)
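
The doOnUpdateActiveSpatializerTracks() change above follows a common pattern: take the lock only to snapshot the strong pointer and the client count, then call into the other object with the lock released, so mLock is never held across updateActiveTracks(). A generic sketch of the pattern (std::mutex stands in for the Android Mutex; the names are illustrative):

    #include <cstddef>
    #include <memory>
    #include <mutex>

    struct Worker {
        // May take its own locks internally; must not be called while holding Owner::mMutex.
        void update(size_t /*count*/) {}
    };

    class Owner {
    public:
        void onUpdate() {
            std::shared_ptr<Worker> worker;
            size_t count = 0;
            {
                std::lock_guard<std::mutex> lock(mMutex);  // protects the members only
                if (mWorker == nullptr) {
                    return;
                }
                worker = mWorker;   // copy the strong reference under the lock
                count = mCount;     // snapshot the state under the lock
            }
            worker->update(count);  // call out with the lock released
        }
    private:
        std::mutex mMutex;
        std::shared_ptr<Worker> mWorker;
        size_t mCount = 0;
    };

    int main() {
        Owner owner;
        owner.onUpdate();
        return 0;
    }
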
@@ -1061,6 +1194,12 @@
mPackageManager.dump(fd);
dumpReleaseLock(mLock, locked);
+
+ {
+ std::string timeCheckStats = getIAudioPolicyServiceStatistics().dump();
+ dprintf(fd, "\nIAudioPolicyService binder call profile\n");
+ write(fd, timeCheckStats.c_str(), timeCheckStats.size());
+ }
}
return NO_ERROR;
}
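
The dump hook above, together with the getMethodForCode() and event() calls around the transaction dispatch further down, suggests the statistics object only needs three operations: map a code to a name, accumulate a latency sample, and print a summary. A rough sketch of such an aggregator under that assumption (this is not the real mediautils::MethodStatistics implementation):

    #include <cstddef>
    #include <cstdio>
    #include <map>
    #include <sstream>
    #include <string>
    #include <utility>

    template <typename Code>
    class SimpleMethodStatistics {
    public:
        explicit SimpleMethodStatistics(std::map<Code, std::string> names)
            : mNames(std::move(names)) {}

        std::string getMethodForCode(Code code) const {
            auto it = mNames.find(code);
            return it != mNames.end() ? it->second : "unknown";
        }

        void event(Code code, float elapsedMs) {   // accumulate one latency sample
            Stat& s = mStats[code];
            s.count++;
            s.totalMs += elapsedMs;
        }

        std::string dump() const {                 // one summary line per method
            std::ostringstream out;
            for (const auto& entry : mStats) {
                const Stat& s = entry.second;
                out << getMethodForCode(entry.first) << ": " << s.count
                    << " calls, avg " << (s.totalMs / s.count) << " ms\n";
            }
            return out.str();
        }

    private:
        struct Stat { size_t count = 0; float totalMs = 0.f; };
        std::map<Code, std::string> mNames;
        std::map<Code, Stat> mStats;
    };

    int main() {
        SimpleMethodStatistics<int> stats({{1, "getOutputForAttr"}, {2, "startOutput"}});
        stats.event(1, 3.5f);
        stats.event(1, 4.5f);
        std::printf("%s", stats.dump().c_str());   // getOutputForAttr: 2 calls, avg 4 ms
        return 0;
    }
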
@@ -1166,8 +1305,20 @@
break;
}
- std::string tag("IAudioPolicyService command " + std::to_string(code));
- TimeCheck check(tag.c_str());
+ const std::string methodName = getIAudioPolicyServiceStatistics().getMethodForCode(code);
+ mediautils::TimeCheck check(
+ std::string("IAudioPolicyService::").append(methodName),
+ [code, methodName](bool timeout, float elapsedMs) { // don't move methodName.
+ if (timeout) {
+ mediametrics::LogItem(AMEDIAMETRICS_KEY_AUDIO_POLICY)
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT)
+ .set(AMEDIAMETRICS_PROP_METHODCODE, int64_t(code))
+ .set(AMEDIAMETRICS_PROP_METHODNAME, methodName.c_str())
+ .record();
+ } else {
+ getIAudioPolicyServiceStatistics().event(code, elapsedMs);
+ }
+ });
switch (code) {
case SHELL_COMMAND_TRANSACTION: {
@@ -1489,6 +1640,9 @@
}
}
+void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused) {
+}
+
void AudioPolicyService::UidPolicy::updateOverrideUid(uid_t uid, bool active, bool insert) {
updateUid(&mOverrideUids, uid, active, ActivityManager::PROCESS_STATE_UNKNOWN, insert);
}
@@ -1784,12 +1938,16 @@
while (!exitPending())
{
sp<AudioPolicyService> svc;
+ int numTimesBecameEmpty = 0;
while (!mAudioCommands.isEmpty() && !exitPending()) {
nsecs_t curTime = systemTime();
// commands are sorted by increasing time stamp: execute them from index 0 and up
if (mAudioCommands[0]->mTime <= curTime) {
sp<AudioCommand> command = mAudioCommands[0];
mAudioCommands.removeAt(0);
+ if (mAudioCommands.isEmpty()) {
+ ++numTimesBecameEmpty;
+ }
mLastCommand = command;
switch (command->mCommand) {
@@ -2026,8 +2184,9 @@
}
}
- // release delayed commands wake lock if the queue is empty
- if (mAudioCommands.isEmpty()) {
+ // release the delayed commands wake lock as many times as the queue
+ // became empty while popping commands.
+ while (numTimesBecameEmpty--) {
release_wake_lock(mName.string());
}
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 43b579f..d863ff1 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -450,7 +450,8 @@
void onUidGone(uid_t uid, bool disabled) override;
void onUidIdle(uid_t uid, bool disabled) override;
void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
- int32_t capability);
+ int32_t capability) override;
+ void onUidProcAdjChanged(uid_t uid) override;
void addOverrideUid(uid_t uid, bool active) { updateOverrideUid(uid, active, true); }
void removeOverrideUid(uid_t uid) { updateOverrideUid(uid, false, false); }
@@ -970,12 +971,14 @@
AudioPlaybackClient(const audio_attributes_t attributes,
const audio_io_handle_t io, AttributionSourceState attributionSource,
const audio_session_t session, audio_port_handle_t portId,
- audio_port_handle_t deviceId, audio_stream_type_t stream) :
+ audio_port_handle_t deviceId, audio_stream_type_t stream,
+ bool isSpatialized) :
AudioClient(attributes, io, attributionSource, session, portId,
- deviceId), stream(stream) {}
+ deviceId), stream(stream), isSpatialized(isSpatialized) {}
~AudioPlaybackClient() override = default;
const audio_stream_type_t stream;
+ const bool isSpatialized;
};
void getPlaybackClientAndEffects(audio_port_handle_t portId,
@@ -1005,7 +1008,16 @@
void loadAudioPolicyManager();
void unloadAudioPolicyManager();
- size_t countActiveClientsOnOutput_l(audio_io_handle_t output) REQUIRES(mLock);
+ /**
+ * Returns the number of active audio tracks on the specified output mixer.
+ * The query can be specified to only include spatialized audio tracks or consider
+ * all tracks.
+ * @param output the I/O handle of the output mixer to consider
+ * @param spatializedOnly true if only spatialized tracks should be considered
+ * @return the number of active tracks.
+ */
+ size_t countActiveClientsOnOutput_l(
+ audio_io_handle_t output, bool spatializedOnly = true) REQUIRES(mLock);
mutable Mutex mLock; // prevents concurrent access to AudioPolicy manager functions changing
// device connection state or routing
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index d9e89aa..389233e 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -298,9 +298,9 @@
callback = mSpatializerCallback;
if (levelChanged && mEngine != nullptr) {
- setEffectParameter_l(SPATIALIZER_PARAM_LEVEL, std::vector<SpatializationLevel>{level});
+ checkEngineState_l();
}
- checkHeadSensor_l();
+ checkSensorsState_l();
}
if (levelChanged) {
@@ -373,10 +373,8 @@
break;
}
- if (mPoseController != nullptr) {
- mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
- checkHeadSensor_l();
- }
+ checkPoseController_l();
+ checkSensorsState_l();
return Status::ok();
}
@@ -449,9 +447,8 @@
}
std::lock_guard lock(mLock);
mHeadSensor = sensorHandle;
- if (mPoseController != nullptr) {
- checkHeadSensor_l();
- }
+ checkPoseController_l();
+ checkSensorsState_l();
return Status::ok();
}
@@ -462,9 +459,7 @@
}
std::lock_guard lock(mLock);
mScreenSensor = sensorHandle;
- if (mPoseController != nullptr) {
- mPoseController->setScreenSensor(mScreenSensor);
- }
+ checkSensorsState_l();
return Status::ok();
}
@@ -572,9 +567,6 @@
sp<media::ISpatializerHeadTrackingCallback> callback;
{
std::lock_guard lock(mLock);
- if (mActualHeadTrackingMode == SpatializerHeadTrackingMode::DISABLED) {
- return;
- }
callback = mHeadTrackingCallback;
if (mEngine != nullptr) {
setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
@@ -618,6 +610,10 @@
}
}
mActualHeadTrackingMode = spatializerMode;
+ if (mEngine != nullptr) {
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+ }
callback = mHeadTrackingCallback;
}
if (callback != nullptr) {
@@ -626,7 +622,6 @@
}
status_t Spatializer::attachOutput(audio_io_handle_t output, size_t numActiveTracks) {
- std::shared_ptr<SpatializerPoseController> poseController;
bool outputChanged = false;
sp<media::INativeSpatializerCallback> callback;
@@ -652,33 +647,17 @@
return status;
}
- setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
- std::vector<SpatializationLevel>{mLevel});
- setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
-
- mEngine->setEnabled(true);
outputChanged = mOutput != output;
mOutput = output;
+ mNumActiveTracks = numActiveTracks;
+ checkEngineState_l();
if (mSupportsHeadTracking) {
- mPoseController = std::make_shared<SpatializerPoseController>(
- static_cast<SpatializerPoseController::Listener*>(this), 10ms, 50ms);
- LOG_ALWAYS_FATAL_IF(mPoseController == nullptr,
- "%s could not allocate pose controller", __func__);
-
- mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
- mNumActiveTracks = numActiveTracks;
- checkHeadSensor_l();
- mPoseController->setScreenSensor(mScreenSensor);
- mPoseController->setDisplayOrientation(mDisplayOrientation);
- poseController = mPoseController;
+ checkPoseController_l();
+ checkSensorsState_l();
}
callback = mSpatializerCallback;
}
- if (poseController != nullptr) {
- poseController->waitUntilCalculated();
- }
if (outputChanged && callback != nullptr) {
callback->onOutputChanged(output);
@@ -714,22 +693,62 @@
void Spatializer::updateActiveTracks(size_t numActiveTracks) {
std::lock_guard lock(mLock);
- mNumActiveTracks = numActiveTracks;
- checkHeadSensor_l();
+ if (mNumActiveTracks != numActiveTracks) {
+ mNumActiveTracks = numActiveTracks;
+ checkEngineState_l();
+ checkSensorsState_l();
+ }
}
-void Spatializer::checkHeadSensor_l() {
+void Spatializer::checkSensorsState_l() {
if (mSupportsHeadTracking && mPoseController != nullptr) {
- if(mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
+ if (mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
&& mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
&& mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
mPoseController->setHeadSensor(mHeadSensor);
+ mPoseController->setScreenSensor(mScreenSensor);
} else {
mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+ mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
}
}
}
+void Spatializer::checkEngineState_l() {
+ if (mEngine != nullptr) {
+ if (mLevel != SpatializationLevel::NONE && mNumActiveTracks > 0) {
+ mEngine->setEnabled(true);
+ setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
+ std::vector<SpatializationLevel>{mLevel});
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
+ } else {
+ setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
+ std::vector<SpatializationLevel>{SpatializationLevel::NONE});
+ mEngine->setEnabled(false);
+ }
+ }
+}
+
+void Spatializer::checkPoseController_l() {
+ bool isControllerNeeded = mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
+ && mHeadSensor != SpatializerPoseController::INVALID_SENSOR;
+
+ if (isControllerNeeded && mPoseController == nullptr) {
+ mPoseController = std::make_shared<SpatializerPoseController>(
+ static_cast<SpatializerPoseController::Listener*>(this),
+ 10ms, std::nullopt);
+ LOG_ALWAYS_FATAL_IF(mPoseController == nullptr,
+ "%s could not allocate pose controller", __func__);
+ mPoseController->setDisplayOrientation(mDisplayOrientation);
+ } else if (!isControllerNeeded && mPoseController != nullptr) {
+ mPoseController.reset();
+ }
+ if (mPoseController != nullptr) {
+ mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
+ }
+}
+
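
checkPoseController_l() above implements a "keep the resource only while it is needed" policy: the pose controller is created lazily when head tracking can actually run and torn down when it cannot. Stripped of the spatializer specifics, the shape of the logic is roughly as follows (illustrative names only):

    #include <memory>

    struct PoseEstimator {};  // stand-in for the heavyweight resource

    // Creates or destroys the estimator so it exists exactly while it is needed.
    void updateEstimator(bool trackingRequested, bool sensorAvailable,
                         std::unique_ptr<PoseEstimator>& estimator) {
        const bool needed = trackingRequested && sensorAvailable;
        if (needed && estimator == nullptr) {
            estimator = std::make_unique<PoseEstimator>();   // lazy creation
        } else if (!needed && estimator != nullptr) {
            estimator.reset();                               // tear down when idle
        }
    }

    int main() {
        std::unique_ptr<PoseEstimator> estimator;
        updateEstimator(/*trackingRequested=*/true, /*sensorAvailable=*/true, estimator);
        updateEstimator(/*trackingRequested=*/false, /*sensorAvailable=*/true, estimator);
        return 0;
    }
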
void Spatializer::calculateHeadPose() {
ALOGV("%s", __func__);
std::lock_guard lock(mLock);
@@ -746,11 +765,11 @@
switch (event) {
case AudioEffect::EVENT_FRAMES_PROCESSED: {
int frames = info == nullptr ? 0 : *(int*)info;
- ALOGD("%s frames processed %d for me %p", __func__, frames, me);
+ ALOGV("%s frames processed %d for me %p", __func__, frames, me);
me->postFramesProcessedMsg(frames);
} break;
default:
- ALOGD("%s event %d", __func__, event);
+ ALOGV("%s event %d", __func__, event);
break;
}
}
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 4ce99d8..a36ba61 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -276,7 +276,24 @@
void postFramesProcessedMsg(int frames);
- void checkHeadSensor_l() REQUIRES(mLock);
+ /**
+ * Checks if head and screen sensors must be actively monitored based on
+ * spatializer state and playback activity and configures the pose controller
+ * accordingly.
+ */
+ void checkSensorsState_l() REQUIRES(mLock);
+
+ /**
+ * Checks if the head pose controller should be created or destroyed according
+ * to the desired head tracking mode and the availability of a valid head sensor.
+ */
+ void checkPoseController_l() REQUIRES(mLock);
+
+ /**
+ * Checks if the spatializer effect should be enabled based on
+ * playback activity and requested level.
+ */
+ void checkEngineState_l() REQUIRES(mLock);
/** Effect engine descriptor */
const effect_descriptor_t mEngineDescriptor;
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index 58a57ac..0a9f4d9 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -46,9 +46,8 @@
// high will result in high prediction errors whenever the head accelerates (changes velocity).
constexpr auto kPredictionDuration = 50ms;
-// After losing this many consecutive samples from either sensor, we would treat the measurement as
-// stale;
-constexpr auto kMaxLostSamples = 4;
+// After not getting a pose sample for this long, we would treat the measurement as stale.
+constexpr auto kFreshnessTimeout = 50ms;
// Auto-recenter kicks in after the head has been still for this long.
constexpr auto kAutoRecenterWindowDuration = 6s;
@@ -79,14 +78,14 @@
} // namespace
SpatializerPoseController::SpatializerPoseController(Listener* listener,
- std::chrono::microseconds sensorPeriod,
- std::chrono::microseconds maxUpdatePeriod)
+ std::chrono::microseconds sensorPeriod,
+ std::optional<std::chrono::microseconds> maxUpdatePeriod)
: mListener(listener),
mSensorPeriod(sensorPeriod),
mProcessor(createHeadTrackingProcessor(HeadTrackingProcessor::Options{
.maxTranslationalVelocity = kMaxTranslationalVelocity / kTicksPerSecond,
.maxRotationalVelocity = kMaxRotationalVelocity / kTicksPerSecond,
- .freshnessTimeout = Ticks(sensorPeriod * kMaxLostSamples).count(),
+ .freshnessTimeout = Ticks(kFreshnessTimeout).count(),
.predictionDuration = Ticks(kPredictionDuration).count(),
.autoRecenterWindowDuration = Ticks(kAutoRecenterWindowDuration).count(),
.autoRecenterTranslationalThreshold = kAutoRecenterTranslationThreshold,
@@ -102,8 +101,12 @@
std::optional<HeadTrackingMode> modeIfChanged;
{
std::unique_lock lock(mMutex);
- mCondVar.wait_for(lock, maxUpdatePeriod,
- [this] { return mShouldExit || mShouldCalculate; });
+ if (maxUpdatePeriod.has_value()) {
+ mCondVar.wait_for(lock, maxUpdatePeriod.value(),
+ [this] { return mShouldExit || mShouldCalculate; });
+ } else {
+ mCondVar.wait(lock, [this] { return mShouldExit || mShouldCalculate; });
+ }
if (mShouldExit) {
ALOGV("Exiting thread");
return;
diff --git a/services/audiopolicy/service/SpatializerPoseController.h b/services/audiopolicy/service/SpatializerPoseController.h
index 2b5c189..2c6d79a 100644
--- a/services/audiopolicy/service/SpatializerPoseController.h
+++ b/services/audiopolicy/service/SpatializerPoseController.h
@@ -60,10 +60,10 @@
* Ctor.
* sensorPeriod determines how often to receive updates from the sensors (input rate).
* maxUpdatePeriod determines how often to produce an output when calculateAsync() isn't
- * invoked.
+ * invoked; passing nullopt means an output is never produced.
*/
SpatializerPoseController(Listener* listener, std::chrono::microseconds sensorPeriod,
- std::chrono::microseconds maxUpdatePeriod);
+ std::optional<std::chrono::microseconds> maxUpdatePeriod);
/** Dtor. */
~SpatializerPoseController();
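
The std::optional maxUpdatePeriod change boils down to choosing between condition_variable::wait_for and wait at the top of the loop: with a period the thread wakes up periodically to produce an output, without one it sleeps until calculateAsync() (or exit) signals it. A minimal sketch of that wait pattern outside the pose-controller context:

    #include <chrono>
    #include <condition_variable>
    #include <mutex>
    #include <optional>

    struct Waiter {
        std::mutex mutex;
        std::condition_variable cond;
        bool shouldExit = false;
        bool workPending = false;

        // Returns true if there is something to do (work or exit), false on a periodic wakeup.
        bool waitForWork(std::optional<std::chrono::milliseconds> maxWait) {
            std::unique_lock<std::mutex> lock(mutex);
            auto ready = [this] { return shouldExit || workPending; };
            if (maxWait.has_value()) {
                return cond.wait_for(lock, maxWait.value(), ready);  // may time out
            }
            cond.wait(lock, ready);                                  // sleeps until signaled
            return true;
        }
    };

    int main() {
        Waiter w;
        w.workPending = true;
        (void)w.waitForWork(std::chrono::milliseconds(50));  // returns immediately: work pending
        return 0;
    }
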
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 551f5e9..5429176 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -218,13 +218,14 @@
if (!portId) portId = &localPortId;
*portId = AUDIO_PORT_HANDLE_NONE;
AudioPolicyInterface::output_type_t outputType;
+ bool isSpatialized;
// TODO b/182392769: use attribution source util
AttributionSourceState attributionSource = AttributionSourceState();
attributionSource.uid = 0;
attributionSource.token = sp<BBinder>::make();
ASSERT_EQ(OK, mManager->getOutputForAttr(
&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource, &config, &flags,
- selectedDeviceId, portId, {}, &outputType));
+ selectedDeviceId, portId, {}, &outputType, &isSpatialized));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
}
@@ -699,6 +700,158 @@
ASSERT_EQ(countDirectProfilesPrimary, getDirectProfilesForAttributes(attr).size());
}
+TEST_P(AudioPolicyManagerTestMsd, IsDirectPlaybackSupportedWithMsd) {
+ const audio_attributes_t attr = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+ audio_config_base_t directConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+ directConfig.format = AUDIO_FORMAT_DTS;
+ directConfig.sample_rate = 48000;
+ directConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+ audio_config_base_t nonDirectConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+ nonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ nonDirectConfig.sample_rate = 48000;
+ nonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+ audio_config_base_t nonExistentConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+ nonExistentConfig.format = AUDIO_FORMAT_E_AC3;
+ nonExistentConfig.sample_rate = 48000;
+ nonExistentConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+ audio_config_base_t msdDirectConfig1 = AUDIO_CONFIG_BASE_INITIALIZER;
+ msdDirectConfig1.format = AUDIO_FORMAT_AC3;
+ msdDirectConfig1.sample_rate = 48000;
+ msdDirectConfig1.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+ audio_config_base_t msdDirectConfig2 = AUDIO_CONFIG_BASE_INITIALIZER;
+ msdDirectConfig2.format = AUDIO_FORMAT_IEC60958;
+ msdDirectConfig2.sample_rate = 48000;
+ msdDirectConfig2.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+ audio_config_base_t msdNonDirectConfig = AUDIO_CONFIG_BASE_INITIALIZER;
+ msdNonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ msdNonDirectConfig.sample_rate = 96000;
+ msdNonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+ ASSERT_TRUE(mManager->isDirectOutputSupported(directConfig, attr));
+ ASSERT_FALSE(mManager->isDirectOutputSupported(nonDirectConfig, attr));
+ ASSERT_FALSE(mManager->isDirectOutputSupported(nonExistentConfig, attr));
+ // before setting MSD patches the direct MSD configs return false
+ ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig1, attr));
+ ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig2, attr));
+ ASSERT_FALSE(mManager->isDirectOutputSupported(msdNonDirectConfig, attr));
+
+ DeviceVector outputDevices = mManager->getAvailableOutputDevices();
+ // Remove MSD output device to avoid patching to itself
+ outputDevices.remove(mMsdOutputDevice);
+ mManager->setMsdOutputPatches(&outputDevices);
+
+ ASSERT_TRUE(mManager->isDirectOutputSupported(directConfig, attr));
+ ASSERT_FALSE(mManager->isDirectOutputSupported(nonDirectConfig, attr));
+ ASSERT_FALSE(mManager->isDirectOutputSupported(nonExistentConfig, attr));
+ // after setting MSD patches the direct MSD configs return true
+ ASSERT_TRUE(mManager->isDirectOutputSupported(msdDirectConfig1, attr));
+ ASSERT_TRUE(mManager->isDirectOutputSupported(msdDirectConfig2, attr));
+ ASSERT_FALSE(mManager->isDirectOutputSupported(msdNonDirectConfig, attr));
+
+ mManager->releaseMsdOutputPatches(outputDevices);
+
+ ASSERT_TRUE(mManager->isDirectOutputSupported(directConfig, attr));
+ ASSERT_FALSE(mManager->isDirectOutputSupported(nonDirectConfig, attr));
+ ASSERT_FALSE(mManager->isDirectOutputSupported(nonExistentConfig, attr));
+ // after releasing MSD patches the direct MSD configs return false
+ ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig1, attr));
+ ASSERT_FALSE(mManager->isDirectOutputSupported(msdDirectConfig2, attr));
+ ASSERT_FALSE(mManager->isDirectOutputSupported(msdNonDirectConfig, attr));
+}
+
+TEST_P(AudioPolicyManagerTestMsd, GetDirectPlaybackSupportWithMsd) {
+ const audio_attributes_t attr = {
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+
+ audio_config_t directConfig = AUDIO_CONFIG_INITIALIZER;
+ directConfig.format = AUDIO_FORMAT_DTS;
+ directConfig.sample_rate = 48000;
+ directConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+ audio_config_t nonDirectConfig = AUDIO_CONFIG_INITIALIZER;
+ nonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ nonDirectConfig.sample_rate = 48000;
+ nonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+ audio_config_t nonExistentConfig = AUDIO_CONFIG_INITIALIZER;
+ nonExistentConfig.format = AUDIO_FORMAT_E_AC3;
+ nonExistentConfig.sample_rate = 48000;
+ nonExistentConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+ audio_config_t msdDirectConfig1 = AUDIO_CONFIG_INITIALIZER;
+ msdDirectConfig1.format = AUDIO_FORMAT_AC3;
+ msdDirectConfig1.sample_rate = 48000;
+ msdDirectConfig1.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+
+ audio_config_t msdDirectConfig2 = AUDIO_CONFIG_INITIALIZER;
+ msdDirectConfig2.format = AUDIO_FORMAT_IEC60958;
+ msdDirectConfig2.sample_rate = 48000;
+ msdDirectConfig2.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+ audio_config_t msdNonDirectConfig = AUDIO_CONFIG_INITIALIZER;
+ msdNonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ msdNonDirectConfig.sample_rate = 96000;
+ msdNonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+ ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &directConfig));
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &nonDirectConfig));
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &nonExistentConfig));
+ // before setting MSD patches the direct MSD configs return AUDIO_DIRECT_NOT_SUPPORTED
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig1));
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig2));
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &msdNonDirectConfig));
+
+ DeviceVector outputDevices = mManager->getAvailableOutputDevices();
+ // Remove MSD output device to avoid patching to itself
+ outputDevices.remove(mMsdOutputDevice);
+ mManager->setMsdOutputPatches(&outputDevices);
+
+ ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &directConfig));
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &nonDirectConfig));
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &nonExistentConfig));
+ // after setting MSD patches the direct MSD configs return values according to their flags
+ ASSERT_EQ(AUDIO_DIRECT_OFFLOAD_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig1));
+ ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig2));
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &msdNonDirectConfig));
+
+ mManager->releaseMsdOutputPatches(outputDevices);
+
+ ASSERT_EQ(AUDIO_DIRECT_BITSTREAM_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &directConfig));
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &nonDirectConfig));
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &nonExistentConfig));
+ // after releasing MSD patches the direct MSD configs return AUDIO_DIRECT_NOT_SUPPORTED
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig1));
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &msdDirectConfig2));
+ ASSERT_EQ(AUDIO_DIRECT_NOT_SUPPORTED,
+ mManager->getDirectPlaybackSupport(&attr, &msdNonDirectConfig));
+}
+
class AudioPolicyManagerTestWithConfigurationFile : public AudioPolicyManagerTest {
protected:
void SetUpManagerConfig() override;
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 69300be..1e2dccb 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -81,7 +81,6 @@
"device3/Camera3OutputUtils.cpp",
"device3/Camera3DeviceInjectionMethods.cpp",
"device3/UHRCropAndMeteringRegionMapper.cpp",
- "device3/PreviewFrameScheduler.cpp",
"device3/hidl/HidlCamera3Device.cpp",
"device3/hidl/HidlCamera3OfflineSession.cpp",
"device3/hidl/HidlCamera3OutputUtils.cpp",
@@ -112,7 +111,6 @@
],
shared_libs: [
- "libandroid",
"libbase",
"libdl",
"libexif",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index c576162..a965080 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -3670,7 +3670,8 @@
status_t res = mAm.linkToDeath(this);
mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE
| ActivityManager::UID_OBSERVER_IDLE
- | ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE,
+ | ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE
+ | ActivityManager::UID_OBSERVER_PROC_OOM_ADJ,
ActivityManager::PROCESS_STATE_UNKNOWN,
String16("cameraserver"));
if (res == OK) {
@@ -3719,9 +3720,9 @@
bool procStateChange = false;
{
Mutex::Autolock _l(mUidLock);
- if ((mMonitoredUids.find(uid) != mMonitoredUids.end()) &&
- (mMonitoredUids[uid].first != procState)) {
- mMonitoredUids[uid].first = procState;
+ if (mMonitoredUids.find(uid) != mMonitoredUids.end() &&
+ mMonitoredUids[uid].procState != procState) {
+ mMonitoredUids[uid].procState = procState;
procStateChange = true;
}
}
@@ -3734,15 +3735,33 @@
}
}
+void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid) {
+ bool procAdjChange = false;
+ {
+ Mutex::Autolock _l(mUidLock);
+ if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
+ procAdjChange = true;
+ }
+ }
+
+ if (procAdjChange) {
+ sp<CameraService> service = mService.promote();
+ if (service != nullptr) {
+ service->notifyMonitoredUids();
+ }
+ }
+}
+
void CameraService::UidPolicy::registerMonitorUid(uid_t uid) {
Mutex::Autolock _l(mUidLock);
auto it = mMonitoredUids.find(uid);
if (it != mMonitoredUids.end()) {
- it->second.second++;
+ it->second.refCount++;
} else {
- mMonitoredUids.emplace(
- std::pair<uid_t, std::pair<int32_t, size_t>> (uid,
- std::pair<int32_t, size_t> (ActivityManager::PROCESS_STATE_NONEXISTENT, 1)));
+ MonitoredUid monitoredUid;
+ monitoredUid.procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
+ monitoredUid.refCount = 1;
+ mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid));
}
}
@@ -3750,8 +3769,8 @@
Mutex::Autolock _l(mUidLock);
auto it = mMonitoredUids.find(uid);
if (it != mMonitoredUids.end()) {
- it->second.second--;
- if (it->second.second == 0) {
+ it->second.refCount--;
+ if (it->second.refCount == 0) {
mMonitoredUids.erase(it);
}
} else {
@@ -3829,7 +3848,7 @@
int32_t CameraService::UidPolicy::getProcStateLocked(uid_t uid) {
int32_t procState = ActivityManager::PROCESS_STATE_UNKNOWN;
if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
- procState = mMonitoredUids[uid].first;
+ procState = mMonitoredUids[uid].procState;
}
return procState;
}
@@ -4431,8 +4450,9 @@
void CameraService::dumpOpenSessionClientLogs(int fd,
const Vector<String16>& args, const String8& cameraId) {
auto clientDescriptor = mActiveClientManager.get(cameraId);
- dprintf(fd, " Device %s is open. Client instance dump:\n",
- cameraId.string());
+ dprintf(fd, " %s : Device %s is open. Client instance dump:\n",
+ getFormattedCurrentTime().string(),
+ cameraId.string());
dprintf(fd, " Client priority score: %d state: %d\n",
clientDescriptor->getPriority().getScore(),
clientDescriptor->getPriority().getState());
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 6346f50..89a537d 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -698,11 +698,13 @@
bool isUidActive(uid_t uid, String16 callingPackage);
int32_t getProcState(uid_t uid);
- void onUidGone(uid_t uid, bool disabled);
- void onUidActive(uid_t uid);
- void onUidIdle(uid_t uid, bool disabled);
+ // IUidObserver
+ void onUidGone(uid_t uid, bool disabled) override;
+ void onUidActive(uid_t uid) override;
+ void onUidIdle(uid_t uid, bool disabled) override;
void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
- int32_t capability);
+ int32_t capability) override;
+ void onUidProcAdjChanged(uid_t uid) override;
void addOverrideUid(uid_t uid, String16 callingPackage, bool active);
void removeOverrideUid(uid_t uid, String16 callingPackage);
@@ -717,13 +719,18 @@
int32_t getProcStateLocked(uid_t uid);
void updateOverrideUid(uid_t uid, String16 callingPackage, bool active, bool insert);
+ struct MonitoredUid {
+ int32_t procState;
+ size_t refCount;
+ };
+
Mutex mUidLock;
bool mRegistered;
ActivityManager mAm;
wp<CameraService> mService;
std::unordered_set<uid_t> mActiveUids;
- // Monitored uid map to cached procState and refCount pair
- std::unordered_map<uid_t, std::pair<int32_t, size_t>> mMonitoredUids;
+ // Monitored uid map
+ std::unordered_map<uid_t, MonitoredUid> mMonitoredUids;
std::unordered_map<uid_t, bool> mOverrideUids;
}; // class UidPolicy
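
Replacing the pair with a named MonitoredUid struct is purely a readability change: the fields get names at every use site instead of .first/.second. For comparison (uid_t comes from <sys/types.h> on POSIX):

    #include <cstddef>
    #include <cstdint>
    #include <sys/types.h>
    #include <unordered_map>
    #include <utility>

    // Before: the meaning of .first and .second is implicit at every call site.
    std::unordered_map<uid_t, std::pair<int32_t, size_t>> monitoredByPair;

    // After: the fields document themselves.
    struct MonitoredUid {
        int32_t procState;
        size_t refCount;
    };
    std::unordered_map<uid_t, MonitoredUid> monitoredByStruct;

    int main() {
        monitoredByPair[1000] = {0, 1};
        monitoredByStruct[1000] = {/*procState=*/0, /*refCount=*/1};
        return monitoredByStruct[1000].refCount == monitoredByPair[1000].second ? 0 : 1;
    }
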
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index bcba80e..5db3fa6 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -518,6 +518,10 @@
metadataRequestList.push_back(physicalSettingsList);
surfaceMapList.push_back(surfaceMap);
+
+ if (!request.mUserTag.empty()) {
+ mUserTag = request.mUserTag;
+ }
}
mRequestIdCounter++;
@@ -717,8 +721,10 @@
}
*status = false;
+ camera3::metadataGetter getMetadata = [this](const String8 &id, bool /*overrideForPerfClass*/) {
+ return mDevice->infoPhysical(id);};
ret = mProviderManager->isSessionConfigurationSupported(mCameraIdStr.string(),
- sessionConfiguration, mOverrideForPerfClass, status);
+ sessionConfiguration, mOverrideForPerfClass, getMetadata, status);
switch (ret) {
case OK:
// Expected, do nothing.
@@ -864,7 +870,7 @@
bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
bool isMultiResolution = outputConfiguration.isMultiResolution();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
- int streamUseCase = outputConfiguration.getStreamUseCase();
+ int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
@@ -1260,7 +1266,7 @@
}
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
- int streamUseCase = outputConfiguration.getStreamUseCase();
+ int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
int mirrorMode = outputConfiguration.getMirrorMode();
@@ -1629,7 +1635,7 @@
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
- int streamUseCase= outputConfiguration.getStreamUseCase();
+ int64_t streamUseCase= outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
for (auto& bufferProducer : bufferProducers) {
@@ -1964,7 +1970,8 @@
if (remoteCb != 0) {
remoteCb->onDeviceIdle();
}
- Camera2ClientBase::notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
+ Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
+ streamStats, mUserTag);
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 9d1deb1..3af0b80 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -349,6 +349,9 @@
// Override the camera characteristics for performance class primary cameras.
bool mOverrideForPerfClass;
+
+ // The string representation of the object passed into CaptureRequest.setTag.
+ std::string mUserTag;
};
}; // namespace android
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 600bd28..d32b71c 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -117,6 +117,41 @@
// Composite streams should behave accordingly.
void enableErrorState();
+ // Utility class to lock and unlock a GraphicBuffer
+ class GraphicBufferLocker {
+ public:
+ GraphicBufferLocker(sp<GraphicBuffer> buffer) : _buffer(buffer) {}
+
+ status_t lockAsync(void** dstBuffer, int fenceFd) {
+ if (_buffer == nullptr) return BAD_VALUE;
+
+ status_t res = OK;
+ if (!_locked) {
+ res = _buffer->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN,
+ dstBuffer, fenceFd);
+ if (res == OK) {
+ _locked = true;
+ }
+ }
+ return res;
+ }
+
+ ~GraphicBufferLocker() {
+ if (_locked && _buffer != nullptr) {
+ auto res = _buffer->unlock();
+ if (res != OK) {
+ ALOGE("%s: Error trying to unlock buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ }
+ }
+ }
+
+ private:
+ sp<GraphicBuffer> _buffer;
+ bool _locked = false;
+ };
+
+
wp<CameraDeviceBase> mDevice;
wp<camera3::StatusTracker> mStatusTracker;
wp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
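
GraphicBufferLocker above is an RAII guard: lockAsync() is forwarded at most once and the destructor unlocks only if the lock actually succeeded, so the early error returns in the composite streams (as in the DepthCompositeStream and HeicCompositeStream hunks below) can no longer leak a locked buffer. A generic sketch of the same guard shape, without the gralloc specifics:

    #include <cstdio>

    // Stand-in for a resource with explicit lock/unlock, like a GraphicBuffer.
    struct Buffer {
        int lock() { std::puts("locked"); return 0; }
        int unlock() { std::puts("unlocked"); return 0; }
    };

    class BufferLocker {
    public:
        explicit BufferLocker(Buffer* buffer) : mBuffer(buffer) {}
        ~BufferLocker() {
            if (mLocked && mBuffer != nullptr) {
                mBuffer->unlock();   // always balanced, even on early returns
            }
        }
        int lock() {
            if (mBuffer == nullptr) return -1;
            int res = 0;
            if (!mLocked) {
                res = mBuffer->lock();
                if (res == 0) mLocked = true;
            }
            return res;
        }
    private:
        Buffer* mBuffer;
        bool mLocked = false;
    };

    int main() {
        Buffer buffer;
        BufferLocker locker(&buffer);
        if (locker.lock() != 0) {
            return 1;      // early return: nothing to unlock because the lock failed
        }
        // ... use the buffer; the destructor unlocks it on every path ...
        return 0;
    }
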
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index a66a592..aa057c7 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -297,7 +297,8 @@
}
sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
- res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, fenceFd);
+ GraphicBufferLocker gbLocker(gb);
+ res = gbLocker.lockAsync(&dstBuffer, fenceFd);
if (res != OK) {
ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
strerror(-res), res);
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index a73ffb9..5da77d6 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -441,6 +441,10 @@
newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
+ int32_t left, top, right, bottom;
+ if (newFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ newFormat->setRect("crop", 0, 0, mOutputWidth - 1, mOutputHeight - 1);
+ }
}
}
newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
@@ -1130,7 +1134,8 @@
// Copy the content of the file to memory.
sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
void* dstBuffer;
- auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
+ GraphicBufferLocker gbLocker(gb);
+ auto res = gbLocker.lockAsync(&dstBuffer, inputFrame.fenceFd);
if (res != OK) {
ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
strerror(-res), res);
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 6ed3c02..0ac047a 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -332,9 +332,10 @@
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyIdle(
+void Camera2ClientBase<TClientBase>::notifyIdleWithUserTag(
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
- const std::vector<hardware::CameraStreamStats>& streamStats) {
+ const std::vector<hardware::CameraStreamStats>& streamStats,
+ const std::string& userTag) {
if (mDeviceActive) {
status_t res = TClientBase::finishCameraStreamingOps();
if (res != OK) {
@@ -342,7 +343,7 @@
TClientBase::mCameraIdStr.string(), res);
}
CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
- requestCount, resultErrorCount, deviceError, streamStats);
+ requestCount, resultErrorCount, deviceError, userTag, streamStats);
}
mDeviceActive = false;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 6b90f5e..9cba2f1 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -75,9 +75,9 @@
const CaptureResultExtras& resultExtras);
// Returns errors on app ops permission failures
virtual status_t notifyActive(float maxPreviewFps);
- virtual void notifyIdle(int64_t requestCount, int64_t resultErrorCount,
- bool deviceError,
- const std::vector<hardware::CameraStreamStats>& streamStats);
+ virtual void notifyIdle(int64_t /*requestCount*/, int64_t /*resultErrorCount*/,
+ bool /*deviceError*/,
+ const std::vector<hardware::CameraStreamStats>&) {}
virtual void notifyShutter(const CaptureResultExtras& resultExtras,
nsecs_t timestamp);
virtual void notifyAutoFocus(uint8_t newState, int triggerId);
@@ -88,6 +88,11 @@
virtual void notifyRequestQueueEmpty();
virtual void notifyRepeatingRequestError(long lastFrameNumber);
+ void notifyIdleWithUserTag(int64_t requestCount, int64_t resultErrorCount,
+ bool deviceError,
+ const std::vector<hardware::CameraStreamStats>& streamStats,
+ const std::string& userTag);
+
int getCameraId() const;
const sp<CameraDeviceBase>&
getCameraDevice();
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 5883988..05edd6a 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -184,7 +184,7 @@
bool isShared = false, bool isMultiResolution = false,
uint64_t consumerUsage = 0,
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
@@ -205,7 +205,7 @@
bool isShared = false, bool isMultiResolution = false,
uint64_t consumerUsage = 0,
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index c337eda..d545484 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -40,7 +40,6 @@
#include <android-base/logging.h>
#include <cutils/properties.h>
#include <hwbinder/IPCThreadState.h>
-#include <utils/SessionConfigurationUtils.h>
#include <utils/Trace.h>
#include "api2/HeicCompositeStream.h"
@@ -338,14 +337,15 @@
status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
const SessionConfiguration &configuration, bool overrideForPerfClass,
- bool *status /*out*/) const {
+ metadataGetter getMetadata, bool *status /*out*/) const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
auto deviceInfo = findDeviceInfoLocked(id);
if (deviceInfo == nullptr) {
return NAME_NOT_FOUND;
}
- return deviceInfo->isSessionConfigurationSupported(configuration, overrideForPerfClass, status);
+ return deviceInfo->isSessionConfigurationSupported(configuration,
+ overrideForPerfClass, getMetadata, status);
}
status_t CameraProviderManager::getCameraIdIPCTransport(const std::string &id,
@@ -642,7 +642,7 @@
removeRef(DeviceMode::CAMERA, id);
ALOGE("%s: Transaction error opening a session for camera device %s: %s",
__FUNCTION__, id.c_str(), ret.getMessage());
- return DEAD_OBJECT;
+ return AidlProviderInfo::mapToStatusT(ret);
}
return OK;
}
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 3d108bd..d934ae8 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -33,6 +33,7 @@
#include <utils/Errors.h>
#include <android/hardware/ICameraService.h>
#include <utils/IPCTransport.h>
+#include <utils/SessionConfigurationUtils.h>
#include <aidl/android/hardware/camera/provider/ICameraProvider.h>
#include <android/hardware/camera/common/1.0/types.h>
#include <android/hardware/camera/provider/2.5/ICameraProvider.h>
@@ -278,7 +279,7 @@
*/
status_t isSessionConfigurationSupported(const std::string& id,
const SessionConfiguration &configuration,
- bool overrideForPerfClass,
+ bool overrideForPerfClass, camera3::metadataGetter getMetadata,
bool *status /*out*/) const;
/**
@@ -587,6 +588,7 @@
virtual status_t isSessionConfigurationSupported(
const SessionConfiguration &/*configuration*/,
bool /*overrideForPerfClass*/,
+ camera3::metadataGetter /*getMetadata*/,
bool * /*status*/) {
return INVALID_OPERATION;
}
@@ -639,6 +641,7 @@
CameraMetadata *characteristics) const override;
virtual status_t isSessionConfigurationSupported(
const SessionConfiguration &configuration, bool /*overrideForPerfClass*/,
+ camera3::metadataGetter /*getMetadata*/,
bool *status /*out*/) = 0;
virtual status_t filterSmallJpegSizes() override;
virtual void notifyDeviceStateChange(
@@ -658,6 +661,8 @@
// A copy of mCameraCharacteristics without performance class
// override
std::unique_ptr<CameraMetadata> mCameraCharNoPCOverride;
+ // Only contains characteristics for hidden physical cameras,
+ // not for public physical cameras.
std::unordered_map<std::string, CameraMetadata> mPhysicalCameraCharacteristics;
void queryPhysicalCameraIds();
SystemCameraKind getSystemCameraKind();
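With this change the provider no longer builds the physical-camera metadata lookup itself; isSessionConfigurationSupported() now takes a camera3::metadataGetter supplied by the caller, and the lambdas removed from AidlProviderInfo.cpp and HidlProviderInfo.cpp below show what used to be constructed internally. A hedged sketch of the kind of callable a caller might now pass, modeled on those removed lambdas; lookupCharacteristics(), providerManager, cameraId, and sessionConfiguration are hypothetical placeholders, and the std::function signature of metadataGetter is inferred from the removed code rather than quoted from SessionConfigurationUtils.h:

    // Assumed: metadataGetter is a callable taking a camera id and a perf-class
    // override flag and returning CameraMetadata.
    camera3::metadataGetter getMetadata =
            [](const String8& id, bool /*overrideForPerfClass*/) {
        CameraMetadata physicalChars;
        lookupCharacteristics(id.c_str(), &physicalChars);  // hypothetical helper
        return physicalChars;
    };

    bool supported = false;
    status_t res = providerManager->isSessionConfigurationSupported(
            cameraId, sessionConfiguration, /*overrideForPerfClass*/ false,
            getMetadata, &supported);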
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index b2a7fee..f58ed00 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2021 The Android Open Source Project
+ * Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -45,10 +45,32 @@
using ICameraProvider = aidl::android::hardware::camera::provider::ICameraProvider;
using StatusListener = CameraProviderManager::StatusListener;
+static status_t mapExceptionCodeToStatusT(binder_exception_t binderException) {
+ switch (binderException) {
+ case EX_NONE:
+ return OK;
+ case EX_ILLEGAL_ARGUMENT:
+ case EX_NULL_POINTER:
+ case EX_BAD_PARCELABLE:
+ case EX_ILLEGAL_STATE:
+ return BAD_VALUE;
+ case EX_UNSUPPORTED_OPERATION:
+ return INVALID_OPERATION;
+ case EX_TRANSACTION_FAILED:
+ return DEAD_OBJECT;
+ default:
+ return UNKNOWN_ERROR;
+ }
+}
+
status_t AidlProviderInfo::mapToStatusT(const ndk::ScopedAStatus& s) {
using Status = aidl::android::hardware::camera::common::Status;
+ auto exceptionCode = s.getExceptionCode();
+ if (exceptionCode != EX_SERVICE_SPECIFIC) {
+ return mapExceptionCodeToStatusT(exceptionCode);
+ }
Status st = static_cast<Status>(s.getServiceSpecificError());
- switch(st) {
+ switch (st) {
case Status::OK:
return OK;
case Status::ILLEGAL_ARGUMENT:
@@ -671,15 +693,11 @@
}
status_t AidlProviderInfo::AidlDeviceInfo3::isSessionConfigurationSupported(
- const SessionConfiguration &configuration, bool overrideForPerfClass, bool *status) {
+ const SessionConfiguration &configuration, bool overrideForPerfClass,
+ camera3::metadataGetter getMetadata, bool *status) {
camera::device::StreamConfiguration streamConfiguration;
bool earlyExit = false;
- camera3::metadataGetter getMetadata = [this](const String8 &id, bool /*overrideForPerfClass*/) {
- CameraMetadata physicalChars;
- getPhysicalCameraCharacteristics(id.c_str(), &physicalChars);
- return physicalChars;
- };
auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
String8(mId.c_str()), mCameraCharacteristics, getMetadata, mPhysicalIds,
streamConfiguration, overrideForPerfClass, &earlyExit);
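mapToStatusT() now distinguishes binder-level failures from camera-specific errors: any exception code other than EX_SERVICE_SPECIFIC is translated by the new mapExceptionCodeToStatusT(), and only genuine service-specific errors fall through to the Status switch. That is what lets the CameraProviderManager.cpp hunk above return a precise error instead of a blanket DEAD_OBJECT. A hedged call-site sketch; openSession(), device, callbacks, and session are illustrative placeholders, not a quoted API:

    status_t openSessionExample() {
        ndk::ScopedAStatus ret = device->openSession(callbacks, &session);  // hypothetical AIDL call
        if (!ret.isOk()) {
            // EX_TRANSACTION_FAILED -> DEAD_OBJECT, EX_ILLEGAL_ARGUMENT -> BAD_VALUE,
            // otherwise the service-specific camera Status is mapped.
            return AidlProviderInfo::mapToStatusT(ret);
        }
        return OK;
    }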
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
index aa71e85..97a8fed 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
@@ -129,7 +129,7 @@
virtual status_t isSessionConfigurationSupported(
const SessionConfiguration &/*configuration*/,
- bool overrideForPerfClass,
+ bool overrideForPerfClass, camera3::metadataGetter /*getMetadata*/,
bool *status/*status*/);
std::shared_ptr<aidl::android::hardware::camera::device::ICameraDevice>
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 3c5ea75..9cbfbcf 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -878,15 +878,11 @@
}
status_t HidlProviderInfo::HidlDeviceInfo3::isSessionConfigurationSupported(
- const SessionConfiguration &configuration, bool overrideForPerfClass, bool *status) {
+ const SessionConfiguration &configuration, bool overrideForPerfClass,
+ metadataGetter getMetadata, bool *status) {
hardware::camera::device::V3_8::StreamConfiguration streamConfiguration;
bool earlyExit = false;
- camera3::metadataGetter getMetadata = [this](const String8 &id, bool /*overrideForPerfClass*/) {
- CameraMetadata physicalChars;
- getPhysicalCameraCharacteristics(id.c_str(), &physicalChars);
- return physicalChars;
- };
auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
String8(mId.c_str()), mCameraCharacteristics, getMetadata, mPhysicalIds,
streamConfiguration, overrideForPerfClass, &earlyExit);
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
index 4181fea..e0f1646 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
@@ -105,7 +105,7 @@
virtual status_t isSessionConfigurationSupported(
const SessionConfiguration &/*configuration*/,
- bool overrideForPerfClass,
+ bool overrideForPerfClass, camera3::metadataGetter getMetadata,
bool *status/*status*/);
sp<hardware::camera::device::V3_2::ICameraDevice> startDeviceInterface();
};
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 997be51..04e65d4 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -978,7 +978,7 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
- uint64_t consumerUsage, int64_t dynamicRangeProfile, int streamUseCase,
+ uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
int timestampBase, int mirrorMode) {
ATRACE_CALL();
@@ -1013,8 +1013,8 @@
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
- uint64_t consumerUsage, int64_t dynamicRangeProfile, int streamUseCase, int timestampBase,
- int mirrorMode) {
+ uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
+ int timestampBase, int mirrorMode) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
@@ -1022,7 +1022,8 @@
Mutex::Autolock l(mLock);
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
" consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
- " dynamicRangeProfile %" PRIx64 ", streamUseCase %d, timestampBase %d, mirrorMode %d",
+ " dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
+ " mirrorMode %d",
mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode);
@@ -1841,7 +1842,7 @@
streamIds.push_back(stream->getId());
Camera3Stream* camera3Stream = Camera3Stream::cast(stream->asHalStream());
int64_t usage = 0LL;
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
if (camera3Stream != nullptr) {
usage = camera3Stream->getUsage();
streamUseCase = camera3Stream->getStreamUseCase();
@@ -2648,7 +2649,7 @@
status_t Camera3Device::registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
- bool hasAppCallback, nsecs_t maxExpectedDuration,
+ bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
const std::set<std::set<String8>>& physicalCameraIds,
bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
const std::set<std::string>& cameraIdsWithZoom,
@@ -2658,8 +2659,9 @@
ssize_t res;
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
- hasAppCallback, maxExpectedDuration, physicalCameraIds, isStillCapture, isZslCapture,
- rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs, outputSurfaces));
+ hasAppCallback, minExpectedDuration, maxExpectedDuration, physicalCameraIds,
+ isStillCapture, isZslCapture, rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs,
+ outputSurfaces));
if (res < 0) return res;
if (mInFlightMap.size() == 1) {
@@ -2855,6 +2857,7 @@
mInterface(interface),
mListener(nullptr),
mId(getId(parent)),
+ mRequestClearing(false),
mFirstRepeating(false),
mReconfigured(false),
mDoPause(false),
@@ -3088,6 +3091,7 @@
*lastFrameNumber = mRepeatingLastFrameNumber;
}
mRepeatingLastFrameNumber = hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES;
+ mRequestClearing = true;
mRequestSignal.signal();
return OK;
}
@@ -3215,13 +3219,16 @@
return true;
}
-nsecs_t Camera3Device::RequestThread::calculateMaxExpectedDuration(const camera_metadata_t *request) {
- nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
+std::pair<nsecs_t, nsecs_t> Camera3Device::RequestThread::calculateExpectedDurationRange(
+ const camera_metadata_t *request) {
+ std::pair<nsecs_t, nsecs_t> expectedRange(
+ InFlightRequest::kDefaultMinExpectedDuration,
+ InFlightRequest::kDefaultMaxExpectedDuration);
camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
find_camera_metadata_ro_entry(request,
ANDROID_CONTROL_AE_MODE,
&e);
- if (e.count == 0) return maxExpectedDuration;
+ if (e.count == 0) return expectedRange;
switch (e.data.u8[0]) {
case ANDROID_CONTROL_AE_MODE_OFF:
@@ -3229,13 +3236,15 @@
ANDROID_SENSOR_EXPOSURE_TIME,
&e);
if (e.count > 0) {
- maxExpectedDuration = e.data.i64[0];
+ expectedRange.first = e.data.i64[0];
+ expectedRange.second = expectedRange.first;
}
find_camera_metadata_ro_entry(request,
ANDROID_SENSOR_FRAME_DURATION,
&e);
if (e.count > 0) {
- maxExpectedDuration = std::max(e.data.i64[0], maxExpectedDuration);
+ expectedRange.first = std::max(e.data.i64[0], expectedRange.first);
+ expectedRange.second = expectedRange.first;
}
break;
default:
@@ -3243,12 +3252,13 @@
ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
&e);
if (e.count > 1) {
- maxExpectedDuration = 1e9 / e.data.u8[0];
+ expectedRange.first = 1e9 / e.data.i32[1];
+ expectedRange.second = 1e9 / e.data.i32[0];
}
break;
}
- return maxExpectedDuration;
+ return expectedRange;
}
bool Camera3Device::RequestThread::skipHFRTargetFPSUpdate(int32_t tag,
@@ -3863,11 +3873,13 @@
isZslCapture = true;
}
}
+ auto expectedDurationRange = calculateExpectedDurationRange(settings);
res = parent->registerInFlight(halRequest->frame_number,
totalNumBuffers, captureRequest->mResultExtras,
/*hasInput*/halRequest->input_buffer != NULL,
hasCallback,
- calculateMaxExpectedDuration(settings),
+ /*min*/expectedDurationRange.first,
+ /*max*/expectedDurationRange.second,
requestedPhysicalCameras, isStillCapture, isZslCapture,
captureRequest->mRotateAndCropAuto, mPrevCameraIdsWithZoom,
(mUseHalBufManager) ? uniqueSurfaceIdMap :
@@ -4208,7 +4220,9 @@
break;
}
- res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout);
+ if (!mRequestClearing) {
+ res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout);
+ }
if ((mRequestQueue.empty() && mRepeatingRequests.empty()) ||
exitPending()) {
@@ -4230,6 +4244,7 @@
if (parent != nullptr) {
parent->mRequestBufferSM.onRequestThreadPaused();
}
+ mRequestClearing = false;
}
// Stop waiting for now and let thread management happen
return NULL;
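calculateExpectedDurationRange() above replaces the single maximum-duration estimate with a (min, max) pair: with AE off, both ends collapse to the larger of the requested exposure time and frame duration; with AE on, the ends come from the two bounds of ANDROID_CONTROL_AE_TARGET_FPS_RANGE (the change also corrects the old read of that range through data.u8 instead of data.i32). A worked example under assumed values, not taken from any particular device:

    #include <cstdint>
    #include <utility>

    // nsecs_t is int64_t (utils/Timers.h).
    // AE_TARGET_FPS_RANGE = [15, 30]:
    //   min = 1e9 / 30 ~ 33,333,333 ns (fastest expected frame)
    //   max = 1e9 / 15 ~ 66,666,666 ns (slowest expected frame)
    std::pair<int64_t, int64_t> rangeForFpsRange(int32_t minFps, int32_t maxFps) {
        return { static_cast<int64_t>(1e9 / maxFps),
                 static_cast<int64_t>(1e9 / minFps) };
    }

Both values are then carried through registerInFlight() into the InFlightRequest, as the hunks above and the InFlightRequest.h changes below show.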
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index e926b5b..749b342 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -141,7 +141,7 @@
uint64_t consumerUsage = 0,
int64_t dynamicRangeProfile =
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
@@ -156,7 +156,7 @@
uint64_t consumerUsage = 0,
int64_t dynamicRangeProfile =
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
@@ -302,7 +302,6 @@
static const nsecs_t kMinWarnInflightDuration = 5000000000; // 5 s
static const size_t kInFlightWarnLimit = 30;
static const size_t kInFlightWarnLimitHighSpeed = 256; // batch size 32 * pipe depth 8
- static const nsecs_t kDefaultExpectedDuration = 100000000; // 100 ms
static const nsecs_t kMinInflightDuration = 5000000000; // 5 s
static const nsecs_t kBaseGetBufferWait = 3000000000; // 3 sec.
// SCHED_FIFO priority for request submission thread in HFR mode
@@ -956,8 +955,9 @@
// send the requests in mNextRequests to HAL in a batch. Return true = success
bool sendRequestsBatch();
- // Calculate the expected maximum duration for a request
- nsecs_t calculateMaxExpectedDuration(const camera_metadata_t *request);
+ // Calculate the expected (minimum, maximum) duration range for a request
+ std::pair<nsecs_t, nsecs_t> calculateExpectedDurationRange(
+ const camera_metadata_t *request);
// Check and update latest session parameters based on the current request settings.
bool updateSessionParameters(const CameraMetadata& settings);
@@ -982,6 +982,8 @@
Mutex mRequestLock;
Condition mRequestSignal;
+ bool mRequestClearing;
+
Condition mRequestSubmittedSignal;
RequestList mRequestQueue;
RequestList mRepeatingRequests;
@@ -1072,7 +1074,7 @@
status_t registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
- bool callback, nsecs_t maxExpectedDuration,
+ bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
const std::set<std::set<String8>>& physicalCameraIds,
bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
@@ -1323,6 +1325,9 @@
// performance class.
bool mOverrideForPerfClass;
+ // The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
+ nsecs_t mMinExpectedDuration = 0;
+
// Injection camera related methods.
class Camera3DeviceInjectionMethods : public virtual RefBase {
public:
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
index 48e44dc..8cecabd 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.h
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -100,6 +100,7 @@
virtual status_t setBatchSize(size_t batchSize) override;
+ virtual void onMinDurationChanged(nsecs_t /*duration*/) {}
protected:
/**
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 2497c22..b5d0746 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -34,7 +34,7 @@
android_dataspace dataSpace, camera_stream_rotation_t rotation,
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int streamUseCase,
+ int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int64_t streamUseCase,
bool deviceTimeBaseIsRealtime, int timestampBase) :
Camera3Stream(id, type,
width, height, maxSize, format, dataSpace, rotation,
@@ -91,7 +91,7 @@
}
lines.appendFormat(" Dynamic Range Profile: 0x%" PRIx64,
camera_stream::dynamic_range_profile);
- lines.appendFormat(" Stream use case: %d\n", camera_stream::use_case);
+ lines.appendFormat(" Stream use case: %" PRId64 "\n", camera_stream::use_case);
lines.appendFormat(" Frames produced: %d, last timestamp: %" PRId64 " ns\n",
mFrameCount, mLastTimestamp);
lines.appendFormat(" Total buffers: %zu, currently dequeued: %zu\n",
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index e757ec6..f389d53 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -39,7 +39,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index 0f7d145..a799719 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -246,6 +246,9 @@
// For client methods such as disconnect/dump
std::mutex mInterfaceLock;
+ // The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
+ nsecs_t mMinExpectedDuration = 0;
+
// SetErrorInterface
void setErrorState(const char *fmt, ...) override;
void setErrorStateLocked(const char *fmt, ...) override;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 8ae16e5..37f9227 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -47,7 +47,7 @@
nsecs_t timestampOffset, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
int mirrorMode) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
@@ -80,7 +80,7 @@
nsecs_t timestampOffset, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
int mirrorMode) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
@@ -118,7 +118,7 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
int mirrorMode) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
@@ -163,7 +163,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
uint64_t consumerUsage, nsecs_t timestampOffset,
int setId, bool isMultiResolution,
- int64_t dynamicRangeProfile, int streamUseCase,
+ int64_t dynamicRangeProfile, int64_t streamUseCase,
bool deviceTimeBaseIsRealtime, int timestampBase,
int mirrorMode) :
Camera3IOStreamBase(id, type, width, height,
@@ -376,32 +376,24 @@
dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
}
- nsecs_t t = mPreviewFrameScheduler != nullptr ? readoutTimestamp : timestamp;
- t -= mTimestampOffset;
- if (mPreviewFrameScheduler != nullptr) {
- res = mPreviewFrameScheduler->queuePreviewBuffer(t, transform,
- anwBuffer, anwReleaseFence);
- if (res != OK) {
- ALOGE("%s: Stream %d: Error queuing buffer to preview buffer scheduler: %s (%d)",
- __FUNCTION__, mId, strerror(-res), res);
- return res;
- }
- } else {
- setTransform(transform, true/*mayChangeMirror*/);
- res = native_window_set_buffers_timestamp(mConsumer.get(), t);
- if (res != OK) {
- ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
- __FUNCTION__, mId, strerror(-res), res);
- return res;
- }
+ nsecs_t captureTime = (mSyncToDisplay ? readoutTimestamp : timestamp) - mTimestampOffset;
+ nsecs_t presentTime = mSyncToDisplay ?
+ syncTimestampToDisplayLocked(captureTime) : captureTime;
- queueHDRMetadata(anwBuffer->handle, currentConsumer, dynamic_range_profile);
+ setTransform(transform, true/*mayChangeMirror*/);
+ res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
+ __FUNCTION__, mId, strerror(-res), res);
+ return res;
+ }
- res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
- if (shouldLogError(res, state)) {
- ALOGE("%s: Stream %d: Error queueing buffer to native window:"
- " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
- }
+ queueHDRMetadata(anwBuffer->handle, currentConsumer, dynamic_range_profile);
+
+ res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
+ if (shouldLogError(res, state)) {
+ ALOGE("%s: Stream %d: Error queueing buffer to native window:"
+ " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
}
}
mLock.lock();
@@ -476,7 +468,7 @@
return res;
}
- if ((res = configureConsumerQueueLocked(true /*allowPreviewScheduler*/)) != OK) {
+ if ((res = configureConsumerQueueLocked(true /*allowDisplaySync*/)) != OK) {
return res;
}
@@ -500,7 +492,7 @@
return OK;
}
-status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowPreviewScheduler) {
+status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowDisplaySync) {
status_t res;
mTraceFirstBuffer = true;
@@ -590,16 +582,17 @@
int timestampBase = getTimestampBase();
bool isDefaultTimeBase = (timestampBase ==
OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
- if (allowPreviewScheduler) {
+ if (allowDisplaySync) {
// We cannot distinguish between a SurfaceView and an ImageReader of
- // preview buffer format. The PreviewFrameScheduler needs to handle both.
+ // preview buffer format. Frames are synchronized to display in both
+ // cases.
bool forceChoreographer = (timestampBase ==
OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED);
bool defaultToChoreographer = (isDefaultTimeBase && isConsumedByHWComposer() &&
!property_get_bool("camera.disable_preview_scheduler", false));
if (forceChoreographer || defaultToChoreographer) {
- mPreviewFrameScheduler = std::make_unique<PreviewFrameScheduler>(*this, mConsumer);
- mTotalBufferCount += PreviewFrameScheduler::kQueueDepthWatermark;
+ mSyncToDisplay = true;
+ mTotalBufferCount += kDisplaySyncExtraBuffer;
}
}
@@ -1244,6 +1237,11 @@
return OK;
}
+void Camera3OutputStream::onMinDurationChanged(nsecs_t duration) {
+ Mutex::Autolock l(mLock);
+ mMinExpectedDuration = duration;
+}
+
void Camera3OutputStream::returnPrefetchedBuffersLocked() {
std::vector<Surface::BatchBuffer> batchedBuffers;
@@ -1261,9 +1259,61 @@
}
}
-bool Camera3OutputStream::shouldLogError(status_t res) {
- Mutex::Autolock l(mLock);
- return shouldLogError(res, mState);
+nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t) {
+ ParcelableVsyncEventData parcelableVsyncEventData;
+ auto res = mDisplayEventReceiver.getLatestVsyncEventData(&parcelableVsyncEventData);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error getting latest vsync event data: %s (%d)",
+ __FUNCTION__, mId, strerror(-res), res);
+ mLastCaptureTime = t;
+ mLastPresentTime = t;
+ return t;
+ }
+
+ const VsyncEventData& vsyncEventData = parcelableVsyncEventData.vsync;
+ nsecs_t currentTime = systemTime();
+
+ // Reset the capture-to-present time offset if more than 1 second has
+ // elapsed between frames.
+ if (t - mLastCaptureTime > kSpacingResetIntervalNs) {
+ for (size_t i = 0; i < VsyncEventData::kFrameTimelinesLength; i++) {
+ if (vsyncEventData.frameTimelines[i].deadlineTimestamp >= currentTime) {
+ mCaptureToPresentOffset =
+ vsyncEventData.frameTimelines[i].expectedPresentationTime - t;
+ break;
+ }
+ }
+ }
+
+ nsecs_t idealPresentT = t + mCaptureToPresentOffset;
+ nsecs_t expectedPresentT = mLastPresentTime;
+ nsecs_t minDiff = INT64_MAX;
+ // Derive the minimum interval between presentation times from the minimum
+ // expected frame duration.
+ size_t minVsyncs = (mMinExpectedDuration + vsyncEventData.frameInterval - 1) /
+ vsyncEventData.frameInterval - 1;
+ nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval + kTimelineThresholdNs;
+ // Find best timestamp in the vsync timeline:
+ // - closest to the ideal present time,
+ // - deadline timestamp is greater than the current time, and
+ // - the candidate present time is at least minInterval in the future
+ // compared to last present time.
+ for (const auto& vsyncTime : vsyncEventData.frameTimelines) {
+ if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
+ vsyncTime.deadlineTimestamp >= currentTime &&
+ vsyncTime.expectedPresentationTime > mLastPresentTime + minInterval) {
+ expectedPresentT = vsyncTime.expectedPresentationTime;
+ minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
+ }
+ }
+ mLastCaptureTime = t;
+ mLastPresentTime = expectedPresentT;
+
+ // Move the expected presentation time back by 1/3 of the frame interval to
+ // mitigate time drift. If the expected presentation time is used directly,
+ // two consecutive expected presentation times often fall into the same
+ // VSYNC interval.
+ return expectedPresentT - vsyncEventData.frameInterval/3;
}
}; // namespace camera3
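syncTimestampToDisplayLocked() above replaces the PreviewFrameScheduler (deleted further down) with an inline choice of presentation time: it picks the vsync timeline entry closest to the capture time plus mCaptureToPresentOffset whose deadline has not passed and which lies at least minInterval after the previously chosen present time. The spacing arithmetic, worked through with assumed values (a 60 Hz display and 30 fps capture); the helper below mirrors the formula in the change and is only illustrative:

    #include <cstdint>

    // frameInterval        = 16,666,667 ns   (60 Hz display)
    // mMinExpectedDuration = 33,333,333 ns   (30 fps capture)
    // minVsyncs   = ceil(33,333,333 / 16,666,667) - 1 = 2 - 1 = 1
    // minInterval = 1 * 16,666,667 + 1,000,000 (kTimelineThresholdNs) = 17,666,667 ns
    int64_t minPresentInterval(int64_t minExpectedDurationNs, int64_t frameIntervalNs) {
        constexpr int64_t kTimelineThresholdNs = 1000000LL;  // 1 ms, as in the header below
        int64_t minVsyncs =
                (minExpectedDurationNs + frameIntervalNs - 1) / frameIntervalNs - 1;
        return minVsyncs * frameIntervalNs + kTimelineThresholdNs;
    }

Requiring at least one extra vsync of spacing keeps two 30 fps frames from landing in the same 60 Hz vsync slot, and the final "expectedPresentT - frameInterval/3" adjustment guards against drift pushing consecutive frames into one interval.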
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index e777e85..6ea7ef7 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -21,13 +21,13 @@
#include <utils/RefBase.h>
#include <gui/IProducerListener.h>
#include <gui/Surface.h>
+#include <gui/DisplayEventReceiver.h>
#include "utils/LatencyHistogram.h"
#include "Camera3Stream.h"
#include "Camera3IOStreamBase.h"
#include "Camera3OutputStreamInterface.h"
#include "Camera3BufferManager.h"
-#include "PreviewFrameScheduler.h"
namespace android {
@@ -91,7 +91,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
@@ -108,7 +108,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
@@ -124,7 +124,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
@@ -240,12 +240,16 @@
virtual status_t setBatchSize(size_t batchSize = 1) override;
/**
+ * Notify the stream of a change in the minimum frame duration.
+ */
+ virtual void onMinDurationChanged(nsecs_t duration) override;
+
+ /**
* Apply ZSL related consumer usage quirk.
*/
static void applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/);
void setImageDumpMask(int mask) { mImageDumpMask = mask; }
- bool shouldLogError(status_t res);
protected:
Camera3OutputStream(int id, camera_stream_type_t type,
@@ -256,7 +260,7 @@
uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
@@ -278,7 +282,7 @@
status_t getEndpointUsageForSurface(uint64_t *usage,
const sp<Surface>& surface) const;
- status_t configureConsumerQueueLocked(bool allowPreviewScheduler);
+ status_t configureConsumerQueueLocked(bool allowDisplaySync);
// Consumer as the output of camera HAL
sp<Surface> mConsumer;
@@ -392,13 +396,24 @@
void returnPrefetchedBuffersLocked();
+ // Synchronize the camera timestamp to the display; the returned value can
+ // be used as the presentation timestamp.
+ nsecs_t syncTimestampToDisplayLocked(nsecs_t t);
+
static const int32_t kDequeueLatencyBinSize = 5; // in ms
CameraLatencyHistogram mDequeueBufferLatency;
int mImageDumpMask = 0;
- // The preview stream scheduler for re-timing frames
- std::unique_ptr<PreviewFrameScheduler> mPreviewFrameScheduler;
+ nsecs_t mMinExpectedDuration = 0;
+ bool mSyncToDisplay = false;
+ DisplayEventReceiver mDisplayEventReceiver;
+ nsecs_t mLastCaptureTime = 0;
+ nsecs_t mLastPresentTime = 0;
+ nsecs_t mCaptureToPresentOffset = 0;
+ static constexpr size_t kDisplaySyncExtraBuffer = 2;
+ static constexpr nsecs_t kSpacingResetIntervalNs = 1000000000LL; // 1 second
+ static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
}; // class Camera3OutputStream
} // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index e44e795..a6d4b96 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -108,6 +108,14 @@
* instead.
*/
virtual status_t setBatchSize(size_t batchSize = 1) = 0;
+
+ /**
+ * Notify the output stream that the minimum frame duration has changed.
+ *
+ * The minimum frame duration is calculated based on the upper bound of
+ * AE_TARGET_FPS_RANGE in the capture request.
+ */
+ virtual void onMinDurationChanged(nsecs_t duration) = 0;
};
// Helper class to organize a synchronized mapping of stream IDs to stream instances
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index ab25322..ed66df0 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -853,6 +853,13 @@
r.shutterTimestamp = msg.timestamp;
r.shutterReadoutTimestamp = msg.readout_timestamp;
+ if (r.minExpectedDuration != states.minFrameDuration) {
+ for (size_t i = 0; i < states.outputStreams.size(); i++) {
+ auto outputStream = states.outputStreams[i];
+ outputStream->onMinDurationChanged(r.minExpectedDuration);
+ }
+ states.minFrameDuration = r.minExpectedDuration;
+ }
if (r.hasCallback) {
ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
states.cameraId.string(), __FUNCTION__,
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 4d1eb75..dd01408 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -107,6 +107,7 @@
InflightRequestUpdateInterface& inflightIntf;
BufferRecordsInterface& bufferRecordsIntf;
bool legacyClient;
+ nsecs_t& minFrameDuration;
};
void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result);
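The new CaptureOutputStates field is a reference (nsecs_t&) rather than a value: the shutter handler in Camera3OutputUtils.cpp both compares against it and updates it, and the AidlCamera3Device, AidlCamera3OfflineSession, and HidlCamera3Device changes further down bind it to the session-owned mMinExpectedDuration in their braced initializer lists. A minimal sketch of that pattern, with illustrative names only:

    #include <cstdint>

    struct OutputStates {
        int64_t& minFrameDuration;  // refers to storage owned by the device/session
    };

    void onShutterExample() {
        int64_t deviceMinDuration = 0;           // stands in for mMinExpectedDuration
        OutputStates states{deviceMinDuration};  // reference bound at aggregate init

        int64_t requestMinDuration = 33333333;   // from the in-flight request
        if (requestMinDuration != states.minFrameDuration) {
            // ...notify each output stream via onMinDurationChanged()...
            states.minFrameDuration = requestMinDuration;  // writes through to deviceMinDuration
        }
    }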
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 198e32f..d24b527 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -34,7 +34,7 @@
nsecs_t timestampOffset, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool useHalBufManager, int64_t dynamicProfile,
- int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
int mirrorMode) :
Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
@@ -251,7 +251,7 @@
return res;
}
- res = configureConsumerQueueLocked(false/*allowPreviewScheduler*/);
+ res = configureConsumerQueueLocked(false/*allowDisplaySync*/);
if (res != OK) {
ALOGE("Failed to configureConsumerQueueLocked: %s(%d)", strerror(-res), res);
return res;
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 9be0c86..8f7f00b 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -42,7 +42,7 @@
int setId = CAMERA3_STREAM_SET_ID_INVALID,
bool useHalBufManager = false,
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 396b316..7ad6649 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -55,7 +55,7 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
camera_stream(),
mId(id),
mSetId(setId),
@@ -179,7 +179,7 @@
return camera_stream::max_buffers;
}
-int Camera3Stream::getStreamUseCase() const {
+int64_t Camera3Stream::getStreamUseCase() const {
return camera_stream::use_case;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index d1545cc..d429e6c 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -178,7 +178,7 @@
android_dataspace getOriginalDataSpace() const;
int getMaxHalBuffers() const;
const String8& physicalCameraId() const;
- int getStreamUseCase() const;
+ int64_t getStreamUseCase() const;
int getTimestampBase() const;
bool isDeviceTimeBaseRealtime() const;
@@ -509,7 +509,7 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 77c6483..5c333a4 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -66,7 +66,7 @@
std::unordered_set<int32_t> sensor_pixel_modes_used;
int64_t dynamic_range_profile;
- int use_case;
+ int64_t use_case;
} camera_stream_t;
typedef struct camera_stream_buffer {
@@ -111,7 +111,7 @@
bool supportsOffline = false;
std::unordered_set<int32_t> sensorPixelModesUsed;
int64_t dynamicRangeProfile;
- int streamUseCase;
+ int64_t streamUseCase;
int timestampBase;
int mirrorMode;
OutputStreamInfo() :
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 0c97f3e..493a9e2 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -143,6 +143,11 @@
// is not for constrained high speed recording, this flag will also be true.
bool hasCallback;
+ // Minimum expected frame duration for this request
+ // For manual captures, equal to the max of requested exposure time and frame duration
+ // For auto-exposure modes, equal to 1/(higher end of target FPS range)
+ nsecs_t minExpectedDuration;
+
// Maximum expected frame duration for this request.
// For manual captures, equal to the max of requested exposure time and frame duration
// For auto-exposure modes, equal to 1/(lower end of target FPS range)
@@ -187,8 +192,8 @@
// Current output transformation
int32_t transform;
- // TODO: dedupe
- static const nsecs_t kDefaultExpectedDuration = 100000000; // 100 ms
+ static const nsecs_t kDefaultMinExpectedDuration = 33333333; // 33 ms
+ static const nsecs_t kDefaultMaxExpectedDuration = 100000000; // 100 ms
// Default constructor needed by KeyedVector
InFlightRequest() :
@@ -199,7 +204,8 @@
numBuffersLeft(0),
hasInputBuffer(false),
hasCallback(true),
- maxExpectedDuration(kDefaultExpectedDuration),
+ minExpectedDuration(kDefaultMinExpectedDuration),
+ maxExpectedDuration(kDefaultMaxExpectedDuration),
skipResultMetadata(false),
errorBufStrategy(ERROR_BUF_CACHE),
stillCapture(false),
@@ -210,7 +216,7 @@
}
InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
- bool hasAppCallback, nsecs_t maxDuration,
+ bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration,
const std::set<std::set<String8>>& physicalCameraIdSet, bool isStillCapture,
bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
@@ -222,6 +228,7 @@
resultExtras(extras),
hasInputBuffer(hasInput),
hasCallback(hasAppCallback),
+ minExpectedDuration(minDuration),
maxExpectedDuration(maxDuration),
skipResultMetadata(false),
errorBufStrategy(ERROR_BUF_CACHE),
diff --git a/services/camera/libcameraservice/device3/PreviewFrameScheduler.cpp b/services/camera/libcameraservice/device3/PreviewFrameScheduler.cpp
deleted file mode 100644
index 80f27ed..0000000
--- a/services/camera/libcameraservice/device3/PreviewFrameScheduler.cpp
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "Camera3-PreviewFrameScheduler"
-#define ATRACE_TAG ATRACE_TAG_CAMERA
-//#define LOG_NDEBUG 0
-
-#include <utils/Log.h>
-#include <utils/Trace.h>
-
-#include <android/looper.h>
-#include "PreviewFrameScheduler.h"
-#include "Camera3OutputStream.h"
-
-namespace android {
-
-namespace camera3 {
-
-/**
- * Internal Choreographer thread implementation for polling and handling callbacks
- */
-
-// Callback function for Choreographer
-static void frameCallback(const AChoreographerFrameCallbackData* callbackData, void* data) {
- PreviewFrameScheduler* parent = static_cast<PreviewFrameScheduler*>(data);
- if (parent == nullptr) {
- ALOGE("%s: Invalid data for Choreographer callback!", __FUNCTION__);
- return;
- }
-
- size_t length = AChoreographerFrameCallbackData_getFrameTimelinesLength(callbackData);
- std::vector<nsecs_t> timeline(length);
- for (size_t i = 0; i < length; i++) {
- nsecs_t timestamp = AChoreographerFrameCallbackData_getFrameTimelineExpectedPresentationTimeNanos(
- callbackData, i);
- timeline[i] = timestamp;
- }
-
- parent->onNewPresentationTime(timeline);
-
- AChoreographer_postVsyncCallback(AChoreographer_getInstance(), frameCallback, data);
-}
-
-struct ChoreographerThread : public Thread {
- ChoreographerThread();
- status_t start(PreviewFrameScheduler* parent);
- virtual status_t readyToRun() override;
- virtual bool threadLoop() override;
-
-protected:
- virtual ~ChoreographerThread() {}
-
-private:
- ChoreographerThread &operator=(const ChoreographerThread &);
-
- // This only impacts the shutdown time. It won't impact the choreographer
- // callback frequency.
- static constexpr nsecs_t kPollingTimeoutMs = 5;
- PreviewFrameScheduler* mParent = nullptr;
-};
-
-ChoreographerThread::ChoreographerThread() : Thread(false /*canCallJava*/) {
-}
-
-status_t ChoreographerThread::start(PreviewFrameScheduler* parent) {
- mParent = parent;
- return run("PreviewChoreographer");
-}
-
-status_t ChoreographerThread::readyToRun() {
- ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
- if (AChoreographer_getInstance() == NULL) {
- return NO_INIT;
- }
-
- AChoreographer_postVsyncCallback(
- AChoreographer_getInstance(), frameCallback, mParent);
- return OK;
-}
-
-bool ChoreographerThread::threadLoop() {
- if (exitPending()) {
- return false;
- }
- ALooper_pollOnce(kPollingTimeoutMs, nullptr, nullptr, nullptr);
- return true;
-}
-
-/**
- * PreviewFrameScheduler implementation
- */
-
-PreviewFrameScheduler::PreviewFrameScheduler(Camera3OutputStream& parent, sp<Surface> consumer) :
- mParent(parent),
- mConsumer(consumer),
- mChoreographerThread(new ChoreographerThread()) {
-}
-
-PreviewFrameScheduler::~PreviewFrameScheduler() {
- {
- Mutex::Autolock l(mLock);
- mChoreographerThread->requestExit();
- }
- mChoreographerThread->join();
-}
-
-status_t PreviewFrameScheduler::queuePreviewBuffer(nsecs_t timestamp, int32_t transform,
- ANativeWindowBuffer* anwBuffer, int releaseFence) {
- // Start choreographer thread if it's not already running.
- if (!mChoreographerThread->isRunning()) {
- status_t res = mChoreographerThread->start(this);
- if (res != OK) {
- ALOGE("%s: Failed to init choreographer thread!", __FUNCTION__);
- return res;
- }
- }
-
- {
- Mutex::Autolock l(mLock);
- mPendingBuffers.emplace(timestamp, transform, anwBuffer, releaseFence);
-
- // Queue buffer to client right away if pending buffers are more than
- // the queue depth watermark.
- if (mPendingBuffers.size() > kQueueDepthWatermark) {
- auto oldBuffer = mPendingBuffers.front();
- mPendingBuffers.pop();
-
- status_t res = queueBufferToClientLocked(oldBuffer, oldBuffer.timestamp);
- if (res != OK) {
- return res;
- }
-
- // Reset the last capture and presentation time
- mLastCameraCaptureTime = 0;
- mLastCameraPresentTime = 0;
- } else {
- ATRACE_INT(kPendingBufferTraceName, mPendingBuffers.size());
- }
- }
- return OK;
-}
-
-void PreviewFrameScheduler::onNewPresentationTime(const std::vector<nsecs_t>& timeline) {
- ATRACE_CALL();
- Mutex::Autolock l(mLock);
- if (mPendingBuffers.size() > 0) {
- auto nextBuffer = mPendingBuffers.front();
- mPendingBuffers.pop();
-
- // Find the best presentation time by finding the element in the
- // choreographer timeline that's closest to the ideal presentation time.
- // The ideal presentation time is the last presentation time + frame
- // interval.
- nsecs_t cameraInterval = nextBuffer.timestamp - mLastCameraCaptureTime;
- nsecs_t idealPresentTime = (cameraInterval < kSpacingResetIntervalNs) ?
- (mLastCameraPresentTime + cameraInterval) : nextBuffer.timestamp;
- nsecs_t presentTime = *std::min_element(timeline.begin(), timeline.end(),
- [idealPresentTime](nsecs_t p1, nsecs_t p2) {
- return std::abs(p1 - idealPresentTime) < std::abs(p2 - idealPresentTime);
- });
-
- status_t res = queueBufferToClientLocked(nextBuffer, presentTime);
- ATRACE_INT(kPendingBufferTraceName, mPendingBuffers.size());
-
- if (mParent.shouldLogError(res)) {
- ALOGE("%s: Preview Stream: Error queueing buffer to native window:"
- " %s (%d)", __FUNCTION__, strerror(-res), res);
- }
-
- mLastCameraCaptureTime = nextBuffer.timestamp;
- mLastCameraPresentTime = presentTime;
- }
-}
-
-status_t PreviewFrameScheduler::queueBufferToClientLocked(
- const BufferHolder& bufferHolder, nsecs_t timestamp) {
- mParent.setTransform(bufferHolder.transform, true/*mayChangeMirror*/);
-
- status_t res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp);
- if (res != OK) {
- ALOGE("%s: Preview Stream: Error setting timestamp: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
-
- Camera3Stream::queueHDRMetadata(bufferHolder.anwBuffer.get()->handle, mConsumer,
- mParent.getDynamicRangeProfile());
-
- res = mConsumer->queueBuffer(mConsumer.get(), bufferHolder.anwBuffer.get(),
- bufferHolder.releaseFence);
- if (res != OK) {
- close(bufferHolder.releaseFence);
- }
-
- return res;
-}
-
-}; // namespace camera3
-
-}; // namespace android
diff --git a/services/camera/libcameraservice/device3/PreviewFrameScheduler.h b/services/camera/libcameraservice/device3/PreviewFrameScheduler.h
deleted file mode 100644
index c0574fd..0000000
--- a/services/camera/libcameraservice/device3/PreviewFrameScheduler.h
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_PREVIEWFRAMESCHEDULER_H
-#define ANDROID_SERVERS_CAMERA_CAMERA3_PREVIEWFRAMESCHEDULER_H
-
-#include <queue>
-
-#include <android/choreographer.h>
-#include <gui/Surface.h>
-#include <gui/ISurfaceComposer.h>
-#include <utils/Condition.h>
-#include <utils/Mutex.h>
-#include <utils/Looper.h>
-#include <utils/Thread.h>
-#include <utils/Timers.h>
-
-namespace android {
-
-namespace camera3 {
-
-class Camera3OutputStream;
-struct ChoreographerThread;
-
-/***
- * Preview stream scheduler for better preview display synchronization
- *
- * The ideal viewfinder user experience is that frames are presented to the
- * user in the same cadence as outputed by the camera sensor. However, the
- * processing latency between frames could vary, due to factors such
- * as CPU load, differences in request settings, etc. This frame processing
- * latency results in variation in presentation of frames to the user.
- *
- * The PreviewFrameScheduler improves the viewfinder user experience by:
- * 1. Cache preview buffers in the scheduler
- * 2. For each choreographer callback, queue the oldest cached buffer with
- * the best matching presentation timestamp. Frame N's presentation timestamp
- * is the choreographer timeline timestamp closest to (Frame N-1's
- * presentation time + camera capture interval between frame N-1 and frame N).
- * 3. Maintain at most 2 queue-able buffers. If the 3rd preview buffer becomes
- * available, queue the oldest cached buffer to the buffer queue.
- */
-class PreviewFrameScheduler {
- public:
- explicit PreviewFrameScheduler(Camera3OutputStream& parent, sp<Surface> consumer);
- virtual ~PreviewFrameScheduler();
-
- // Queue preview buffer locally
- status_t queuePreviewBuffer(nsecs_t timestamp, int32_t transform,
- ANativeWindowBuffer* anwBuffer, int releaseFence);
-
- // Callback function with a new presentation timeline from choreographer. This
- // will trigger a locally queued buffer be sent to the buffer queue.
- void onNewPresentationTime(const std::vector<nsecs_t>& presentationTimeline);
-
- // Maintain at most 2 queue-able buffers
- static constexpr int32_t kQueueDepthWatermark = 2;
-
- private:
- // structure holding cached preview buffer info
- struct BufferHolder {
- nsecs_t timestamp;
- int32_t transform;
- sp<ANativeWindowBuffer> anwBuffer;
- int releaseFence;
-
- BufferHolder(nsecs_t t, int32_t tr, ANativeWindowBuffer* anwb, int rf) :
- timestamp(t), transform(tr), anwBuffer(anwb), releaseFence(rf) {}
- };
-
- status_t queueBufferToClientLocked(const BufferHolder& bufferHolder,
- nsecs_t presentTime);
-
- static constexpr char kPendingBufferTraceName[] = "pending_preview_buffers";
-
- // Camera capture interval for resetting frame spacing between preview sessions
- static constexpr nsecs_t kSpacingResetIntervalNs = 1000000000L; // 1 second
-
- Camera3OutputStream& mParent;
- sp<ANativeWindow> mConsumer;
- mutable Mutex mLock;
-
- std::queue<BufferHolder> mPendingBuffers;
- nsecs_t mLastCameraCaptureTime = 0;
- nsecs_t mLastCameraPresentTime = 0;
-
- // Choreographer related
- sp<Looper> mLooper;
- sp<ChoreographerThread> mChoreographerThread;
-};
-
-}; //namespace camera3
-}; //namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 529c9f0..f05520f 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -189,7 +189,6 @@
return res;
}
if (session == nullptr) {
- ALOGE("JCLog: null session returned");
SET_ERR("Session iface returned is null");
return INVALID_OPERATION;
}
@@ -372,7 +371,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, *(mInterface), mLegacyClient}, mResultMetadataQueue
+ *this, *(mInterface), mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
};
for (const auto& result : results) {
@@ -413,7 +412,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, *(mInterface), mLegacyClient}, mResultMetadataQueue
+ *this, *(mInterface), mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index 895ce56..336719d 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -123,7 +123,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, mBufferRecords, /*legacyClient*/ false}, mResultMetadataQueue
+ *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -168,7 +168,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
- *this, mBufferRecords, /*legacyClient*/ false}, mResultMetadataQueue
+ *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
index ad4a480..33de2c5 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
@@ -75,7 +75,7 @@
// See explanation for why we need a separate class for this in
// AidlCamera3Device::AidlCameraDeviceCallbacks in AidlCamera3Device.h
class AidlCameraDeviceCallbacks :
- virtual public aidl::android::hardware::camera::device::BnCameraDeviceCallback {
+ public aidl::android::hardware::camera::device::BnCameraDeviceCallback {
public:
AidlCameraDeviceCallbacks(wp<AidlCamera3OfflineSession> parent) : mParent(parent) { }
@@ -112,7 +112,9 @@
offlineSession) :
Camera3OfflineSession(id, inputStream, offlineStreamSet, std::move(bufferRecords),
offlineReqs, offlineStates),
- mSession(offlineSession) { mCallbacks = std::make_shared<AidlCameraDeviceCallbacks>(this);};
+ mSession(offlineSession) {
+ mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
+ };
/**
* End of CameraOfflineSessionBase interface
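
The constructor change above replaces std::make_shared with ndk::SharedRefBase::make when creating the BnCameraDeviceCallback-derived object. NDK AIDL binder classes inherit from ndk::SharedRefBase, which is meant to be instantiated through its make() helper so the base class can record the owning shared_ptr. A minimal sketch of that construction pattern (MyCallbacks is hypothetical; libbinder_ndk headers assumed):

    // Sketch only: assumes libbinder_ndk's <android/binder_interface_utils.h>.
    #include <android/binder_interface_utils.h>
    #include <memory>

    class MyCallbacks : public ::ndk::SharedRefBase {  // stands in for a Bn... stub
    };

    std::shared_ptr<MyCallbacks> makeCallbacks() {
        // SharedRefBase-derived objects are created via SharedRefBase::make(),
        // not std::make_shared, so the base class can track its owning shared_ptr.
        return ::ndk::SharedRefBase::make<MyCallbacks>();
    }
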
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 4894ba9..cf6d462 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -369,7 +369,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient}, mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
};
//HidlCaptureOutputStates hidlStates {
@@ -431,7 +431,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient}, mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
};
for (const auto& result : results) {
@@ -483,7 +483,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient}, mResultMetadataQueue
+ *mInterface, mLegacyClient, mMinExpectedDuration}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
@@ -972,7 +972,7 @@
}
if (src->use_case != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT &&
mHidlSession_3_8 == nullptr) {
- ALOGE("%s: Camera device doesn't support non-default stream use case %d!",
+ ALOGE("%s: Camera device doesn't support non-default stream use case %" PRId64 "!",
__FUNCTION__, src->use_case);
return BAD_VALUE;
}
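
The log fix above matters because use_case is now a 64-bit value: passing an int64_t for a %d conversion in printf-style logging is undefined behavior. A minimal sketch of the portable pattern using the PRId64 macro:

    // Sketch: printing an int64_t portably requires PRId64 from <cinttypes>.
    #include <cinttypes>
    #include <cstdio>

    void logUseCase(int64_t useCase) {
        std::printf("stream use case %" PRId64 "\n", useCase);
    }
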
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index d517c8d..5c97f0e 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -105,7 +105,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- mBufferRecords, /*legacyClient*/ false}, mResultMetadataQueue
+ mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -145,7 +145,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- mBufferRecords, /*legacyClient*/ false}, mResultMetadataQueue
+ mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -180,7 +180,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- mBufferRecords, /*legacyClient*/ false}, mResultMetadataQueue
+ mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
index 1563dcf..60e4e42 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OutputUtils.cpp
@@ -219,6 +219,7 @@
tmpBuffers[i].buffer = convertToHidl(aBuf.buffer, handlesCreated);
tmpBuffers[i].acquireFence = convertToHidl(aBuf.acquireFence, handlesCreated);
tmpBuffers[i].releaseFence = convertToHidl(aBuf.releaseFence, handlesCreated);
+ i++;
}
hBuffersVal.buffers(std::move(tmpBuffers));
}
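
The added i++ above fixes the buffer-conversion loop: the range-for iterated over the source buffers but never advanced the destination index, so every converted buffer overwrote tmpBuffers[0]. The general pattern, as a stand-alone sketch:

    // Stand-alone illustration: a range-for that also maintains a manual output
    // index must advance that index on every iteration.
    #include <cstddef>
    #include <vector>

    std::vector<int> convertAll(const std::vector<int>& src) {
        std::vector<int> dst(src.size());
        std::size_t i = 0;
        for (int v : src) {
            dst[i] = v * 2;   // without the increment below, only dst[0] is written
            i++;
        }
        return dst;
    }
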
diff --git a/services/camera/libcameraservice/tests/PreviewSchedulerTest.cpp b/services/camera/libcameraservice/tests/PreviewSchedulerTest.cpp
deleted file mode 100644
index 025521a..0000000
--- a/services/camera/libcameraservice/tests/PreviewSchedulerTest.cpp
+++ /dev/null
@@ -1,187 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "PreviewSchedulerTest"
-
-#include <chrono>
-#include <thread>
-#include <utility>
-
-#include <gtest/gtest.h>
-#include <utils/Errors.h>
-#include <utils/Log.h>
-#include <utils/Mutex.h>
-
-#include <gui/BufferItemConsumer.h>
-#include <gui/BufferQueue.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <gui/IGraphicBufferConsumer.h>
-#include <gui/Surface.h>
-
-#include "../device3/Camera3OutputStream.h"
-#include "../device3/PreviewFrameScheduler.h"
-
-using namespace android;
-using namespace android::camera3;
-
-// Consumer buffer available listener
-class SimpleListener : public BufferItemConsumer::FrameAvailableListener {
-public:
- SimpleListener(size_t frameCount): mFrameCount(frameCount) {}
-
- void waitForFrames() {
- Mutex::Autolock lock(mMutex);
- while (mFrameCount > 0) {
- mCondition.wait(mMutex);
- }
- }
-
- void onFrameAvailable(const BufferItem& /*item*/) override {
- Mutex::Autolock lock(mMutex);
- if (mFrameCount > 0) {
- mFrameCount--;
- mCondition.signal();
- }
- }
-
- void reset(size_t frameCount) {
- Mutex::Autolock lock(mMutex);
- mFrameCount = frameCount;
- }
-private:
- size_t mFrameCount;
- Mutex mMutex;
- Condition mCondition;
-};
-
-// Test the PreviewFrameScheduler functionality of re-timing buffers
-TEST(PreviewSchedulerTest, BasicPreviewSchedulerTest) {
- const int ID = 0;
- const int FORMAT = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- const uint32_t WIDTH = 640;
- const uint32_t HEIGHT = 480;
- const int32_t TRANSFORM = 0;
- const nsecs_t T_OFFSET = 0;
- const android_dataspace DATASPACE = HAL_DATASPACE_UNKNOWN;
- const camera_stream_rotation_t ROTATION = CAMERA_STREAM_ROTATION_0;
- const String8 PHY_ID;
- const std::unordered_set<int32_t> PIX_MODES;
- const int BUFFER_COUNT = 4;
- const int TOTAL_BUFFER_COUNT = BUFFER_COUNT * 2;
-
- // Create buffer queue
- sp<IGraphicBufferProducer> producer;
- sp<IGraphicBufferConsumer> consumer;
- BufferQueue::createBufferQueue(&producer, &consumer);
- ASSERT_NE(producer, nullptr);
- ASSERT_NE(consumer, nullptr);
- ASSERT_EQ(NO_ERROR, consumer->setDefaultBufferSize(WIDTH, HEIGHT));
-
- // Set up consumer
- sp<BufferItemConsumer> bufferConsumer = new BufferItemConsumer(consumer,
- GRALLOC_USAGE_HW_COMPOSER, BUFFER_COUNT);
- ASSERT_NE(bufferConsumer, nullptr);
- sp<SimpleListener> consumerListener = new SimpleListener(BUFFER_COUNT);
- bufferConsumer->setFrameAvailableListener(consumerListener);
-
- // Set up producer
- sp<Surface> surface = new Surface(producer);
- sp<StubProducerListener> listener = new StubProducerListener();
- ASSERT_EQ(NO_ERROR, surface->connect(NATIVE_WINDOW_API_CPU, listener));
- sp<ANativeWindow> anw(surface);
- ASSERT_EQ(NO_ERROR, native_window_set_buffer_count(anw.get(), TOTAL_BUFFER_COUNT));
-
- // Create Camera3OutputStream and PreviewFrameScheduler
- sp<Camera3OutputStream> stream = new Camera3OutputStream(ID, surface, WIDTH, HEIGHT,
- FORMAT, DATASPACE, ROTATION, T_OFFSET, PHY_ID, PIX_MODES);
- ASSERT_NE(stream, nullptr);
- std::unique_ptr<PreviewFrameScheduler> scheduler =
- std::make_unique<PreviewFrameScheduler>(*stream, surface);
- ASSERT_NE(scheduler, nullptr);
-
- // The pair of nsecs_t: camera timestamp delta (negative means in the past) and frame interval
- const std::pair<nsecs_t, nsecs_t> inputTimestamps[][BUFFER_COUNT] = {
- // 30fps, 33ms interval
- {{-100000000LL, 33333333LL}, {-66666667LL, 33333333LL},
- {-33333333LL, 33333333LL}, {0, 0}},
- // 30fps, variable interval
- {{-100000000LL, 16666667LL}, {-66666667LL, 33333333LL},
- {-33333333LL, 50000000LL}, {0, 0}},
- // 60fps, 16.7ms interval
- {{-50000000LL, 16666667LL}, {-33333333LL, 16666667LL},
- {-16666667LL, 16666667LL}, {0, 0}},
- // 60fps, variable interval
- {{-50000000LL, 8666667LL}, {-33333333LL, 19666667LL},
- {-16666667LL, 20666667LL}, {0, 0}},
- };
-
- // Go through different use cases, and check the buffer timestamp
- size_t iterations = sizeof(inputTimestamps)/sizeof(inputTimestamps[0]);
- for (size_t i = 0; i < iterations; i++) {
- // Space out different test sets to reset the frame scheduler
- nsecs_t timeBase = systemTime() - s2ns(1) * (iterations - i);
- nsecs_t lastQueueTime = 0;
- nsecs_t duration = 0;
- for (size_t j = 0; j < BUFFER_COUNT; j++) {
- ANativeWindowBuffer* buffer = nullptr;
- int fenceFd;
- ASSERT_EQ(NO_ERROR, anw->dequeueBuffer(anw.get(), &buffer, &fenceFd));
-
- // Sleep to space out queuePreviewBuffer
- nsecs_t currentTime = systemTime();
- if (duration > 0 && duration > currentTime - lastQueueTime) {
- std::this_thread::sleep_for(
- std::chrono::nanoseconds(duration + lastQueueTime - currentTime));
- }
- nsecs_t timestamp = timeBase + inputTimestamps[i][j].first;
- ASSERT_EQ(NO_ERROR,
- scheduler->queuePreviewBuffer(timestamp, TRANSFORM, buffer, fenceFd));
-
- lastQueueTime = systemTime();
- duration = inputTimestamps[i][j].second;
- }
-
- // Collect output timestamps, making sure they are either set by
- // producer, or set by the scheduler.
- consumerListener->waitForFrames();
- nsecs_t outputTimestamps[BUFFER_COUNT];
- for (size_t j = 0; j < BUFFER_COUNT; j++) {
- BufferItem bufferItem;
- ASSERT_EQ(NO_ERROR, bufferConsumer->acquireBuffer(&bufferItem, 0/*presentWhen*/));
-
- outputTimestamps[j] = bufferItem.mTimestamp;
- ALOGV("%s: [%zu][%zu]: input: %" PRId64 ", output: %" PRId64, __FUNCTION__,
- i, j, timeBase + inputTimestamps[i][j].first, bufferItem.mTimestamp);
- ASSERT_GT(bufferItem.mTimestamp, inputTimestamps[i][j].first);
-
- ASSERT_EQ(NO_ERROR, bufferConsumer->releaseBuffer(bufferItem));
- }
-
- // Check the output timestamp intervals are aligned with input intervals
- const nsecs_t SHIFT_THRESHOLD = ms2ns(2);
- for (size_t j = 0; j < BUFFER_COUNT - 1; j ++) {
- nsecs_t interval_shift = outputTimestamps[j+1] - outputTimestamps[j] -
- (inputTimestamps[i][j+1].first - inputTimestamps[i][j].first);
- ASSERT_LE(std::abs(interval_shift), SHIFT_THRESHOLD);
- }
-
- consumerListener->reset(BUFFER_COUNT);
- }
-
- // Disconnect the surface
- ASSERT_EQ(NO_ERROR, surface->disconnect(NATIVE_WINDOW_API_CPU));
-}
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 82d58e0..a00b221 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -80,6 +80,7 @@
void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::string& userTag,
const std::vector<hardware::CameraStreamStats>& streamStats) {
Mutex::Autolock l(mLock);
@@ -87,6 +88,7 @@
mSessionStats.mRequestCount = requestCount;
mSessionStats.mResultErrorCount = resultErrorCount;
mSessionStats.mDeviceError = deviceError;
+ mSessionStats.mUserTag = String16(userTag.c_str());
mSessionStats.mStreamStats = streamStats;
updateProxyDeviceState(mSessionStats);
@@ -177,6 +179,7 @@
void CameraServiceProxyWrapper::logIdle(const String8& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::string& userTag,
const std::vector<hardware::CameraStreamStats>& streamStats) {
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
@@ -190,8 +193,9 @@
return;
}
- ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d",
- __FUNCTION__, id.c_str(), requestCount, resultErrorCount, deviceError);
+ ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
+ ", userTag %s", __FUNCTION__, id.c_str(), requestCount, resultErrorCount,
+ deviceError, userTag.c_str());
for (size_t i = 0; i < streamStats.size(); i++) {
ALOGV("%s: streamStats[%zu]: w %d h %d, requestedCount %" PRId64 ", dropCount %"
PRId64 ", startTimeMs %d" ,
@@ -200,7 +204,7 @@
streamStats[i].mStartLatencyMs);
}
- sessionStats->onIdle(requestCount, resultErrorCount, deviceError, streamStats);
+ sessionStats->onIdle(requestCount, resultErrorCount, deviceError, userTag, streamStats);
}
void CameraServiceProxyWrapper::logOpen(const String8& id, int facing,
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index 037316d..6604aa1 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -50,6 +50,7 @@
void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
void onActive(float maxPreviewFps);
void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::string& userTag,
const std::vector<hardware::CameraStreamStats>& streamStats);
};
@@ -86,6 +87,7 @@
// Session state becomes idle
static void logIdle(const String8& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::string& userTag,
const std::vector<hardware::CameraStreamStats>& streamStats);
// Ping camera service proxy for user update
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index af00e81..4090dae 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -313,7 +313,7 @@
}
}
-bool isStreamUseCaseSupported(int streamUseCase,
+bool isStreamUseCaseSupported(int64_t streamUseCase,
const CameraMetadata &deviceInfo) {
camera_metadata_ro_entry_t availableStreamUseCases =
deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);
@@ -324,7 +324,7 @@
}
for (size_t i = 0; i < availableStreamUseCases.count; i++) {
- if (availableStreamUseCases.data.i32[i] == streamUseCase) {
+ if (availableStreamUseCases.data.i64[i] == streamUseCase) {
return true;
}
}
@@ -336,7 +336,7 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int streamUseCase, int timestampBase, int mirrorMode) {
+ int64_t streamUseCase, int timestampBase, int mirrorMode) {
// bufferProducer must be non-null
if (gbp == nullptr) {
String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -452,7 +452,7 @@
}
if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
physicalCameraMetadata)) {
- String8 msg = String8::format("Camera %s: stream use case %d not supported,"
+ String8 msg = String8::format("Camera %s: stream use case %" PRId64 " not supported,"
" failed to create output stream", logicalCameraId.string(), streamUseCase);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
@@ -718,7 +718,7 @@
return res;
}
- int streamUseCase = it.getStreamUseCase();
+ int64_t streamUseCase = it.getStreamUseCase();
int timestampBase = it.getTimestampBase();
int mirrorMode = it.getMirrorMode();
if (deferredConsumer) {
@@ -1040,7 +1040,7 @@
// image
return false;
}
- if (static_cast<int32_t>(streamConfigV38.streams[i].useCase) !=
+ if (static_cast<int64_t>(streamConfigV38.streams[i].useCase) !=
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
// ICameraDevice older than 3.8 doesn't support stream use case
return false;
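
The hunks above widen streamUseCase from int to int64_t and read the availability list via data.i64, because ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES is a 64-bit metadata entry. A minimal sketch of the corrected lookup, assuming the same CameraMetadata helpers used in this file:

    // Sketch mirroring isStreamUseCaseSupported() after this change
    // (assumes the AOSP CameraMetadata / camera_metadata_tags headers).
    bool supportsUseCase(const CameraMetadata& deviceInfo, int64_t streamUseCase) {
        camera_metadata_ro_entry_t entry =
                deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);
        for (size_t i = 0; i < entry.count; i++) {
            if (entry.data.i64[i] == streamUseCase) {  // 64-bit use case values
                return true;
            }
        }
        return false;
    }
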
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 406510f..038c075 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -48,7 +48,7 @@
namespace android {
namespace camera3 {
-typedef std::function<CameraMetadata (const String8 &, int targetSdkVersion)> metadataGetter;
+typedef std::function<CameraMetadata (const String8 &, bool overrideForPerfClass)> metadataGetter;
class StreamConfiguration {
public:
@@ -99,7 +99,7 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int streamUseCase, int timestampBase, int mirrorMode);
+ int64_t streamUseCase, int timestampBase, int mirrorMode);
void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
camera3::camera_stream_rotation_t rotation, String8 physicalId, int32_t groupId,
@@ -114,7 +114,7 @@
// Check if the device supports a given dynamicRangeProfile
bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
-bool isStreamUseCaseSupported(int streamUseCase, const CameraMetadata &deviceInfo);
+bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
void mapStreamInfo(const OutputStreamInfo &streamInfo,
camera3::camera_stream_rotation_t rotation, String8 physicalId,
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
index 7a7707c..c3aac72 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
@@ -60,6 +60,7 @@
mErrorResultCount = 0;
mCounterStopped = false;
mDeviceError = false;
+ mUserTag.clear();
for (auto& streamStats : mStatsMap) {
StreamStats& streamStat = streamStats.second;
streamStat.mRequestedFrameCount = 0;
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.h b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
index c23abb6..2936531 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.h
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
@@ -84,6 +84,7 @@
int64_t mErrorResultCount;
bool mCounterStopped;
bool mDeviceError;
+ std::string mUserTag;
// Map from stream id to stream statistics
std::map<int, StreamStats> mStatsMap;
};
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index aacc2be..99e3691 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -297,33 +297,35 @@
ALOGD("%s", __func__);
// Add action to save AnalyticsState if audioserver is restarted.
- // This triggers on an item of "audio.flinger"
- // with a property "event" set to "AudioFlinger" (the constructor).
+ // This triggers on AudioFlinger or AudioPolicy ctors and onFirstRef,
+ // as well as TimeCheck events.
mActions.addAction(
AMEDIAMETRICS_KEY_AUDIO_FLINGER "." AMEDIAMETRICS_PROP_EVENT,
std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR),
std::make_shared<AnalyticsActions::Function>(
[this](const std::shared_ptr<const android::mediametrics::Item> &item){
- ALOGW("(key=%s) Audioflinger constructor event detected", item->getKey().c_str());
- mPreviousAnalyticsState.set(std::make_shared<AnalyticsState>(
- *mAnalyticsState.get()));
- // Note: get returns shared_ptr temp, whose lifetime is extended
- // to end of full expression.
- mAnalyticsState->clear(); // TODO: filter the analytics state.
- // Perhaps report this.
-
- // Set up a timer to expire the previous audio state to save space.
- // Use the transaction log size as a cookie to see if it is the
- // same as before. A benign race is possible where a state is cleared early.
- const size_t size = mPreviousAnalyticsState->transactionLog().size();
- mTimedAction.postIn(
- std::chrono::seconds(PREVIOUS_STATE_EXPIRE_SEC), [this, size](){
- if (mPreviousAnalyticsState->transactionLog().size() == size) {
- ALOGD("expiring previous audio state after %d seconds.",
- PREVIOUS_STATE_EXPIRE_SEC);
- mPreviousAnalyticsState->clear(); // removes data from the state.
- }
- });
+ mHealth.onAudioServerStart(Health::Module::AUDIOFLINGER, item);
+ }));
+ mActions.addAction(
+ AMEDIAMETRICS_KEY_AUDIO_POLICY "." AMEDIAMETRICS_PROP_EVENT,
+ std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR),
+ std::make_shared<AnalyticsActions::Function>(
+ [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+ mHealth.onAudioServerStart(Health::Module::AUDIOPOLICY, item);
+ }));
+ mActions.addAction(
+ AMEDIAMETRICS_KEY_AUDIO_FLINGER "." AMEDIAMETRICS_PROP_EVENT,
+ std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT),
+ std::make_shared<AnalyticsActions::Function>(
+ [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+ mHealth.onAudioServerTimeout(Health::Module::AUDIOFLINGER, item);
+ }));
+ mActions.addAction(
+ AMEDIAMETRICS_KEY_AUDIO_POLICY "." AMEDIAMETRICS_PROP_EVENT,
+ std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT),
+ std::make_shared<AnalyticsActions::Function>(
+ [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+ mHealth.onAudioServerTimeout(Health::Module::AUDIOPOLICY, item);
}));
// Handle legacy aaudio playback stream statistics
@@ -1390,4 +1392,138 @@
}
}
+// Create new state, typically occurs after an AudioFlinger ctor event.
+void AudioAnalytics::newState()
+{
+ mPreviousAnalyticsState.set(std::make_shared<AnalyticsState>(
+ *mAnalyticsState.get()));
+ // Note: get returns shared_ptr temp, whose lifetime is extended
+ // to end of full expression.
+ mAnalyticsState->clear(); // TODO: filter the analytics state.
+ // Perhaps report this.
+
+ // Set up a timer to expire the previous audio state to save space.
+ // Use the transaction log size as a cookie to see if it is the
+ // same as before. A benign race is possible where a state is cleared early.
+ const size_t size = mPreviousAnalyticsState->transactionLog().size();
+ mTimedAction.postIn(
+ std::chrono::seconds(PREVIOUS_STATE_EXPIRE_SEC), [this, size](){
+ if (mPreviousAnalyticsState->transactionLog().size() == size) {
+ ALOGD("expiring previous audio state after %d seconds.",
+ PREVIOUS_STATE_EXPIRE_SEC);
+ mPreviousAnalyticsState->clear(); // removes data from the state.
+ }
+ });
+}
+
+void AudioAnalytics::Health::onAudioServerStart(Module module,
+ const std::shared_ptr<const android::mediametrics::Item> &item)
+{
+ const auto nowTime = std::chrono::system_clock::now();
+ if (module == Module::AUDIOFLINGER) {
+ {
+ std::lock_guard lg(mLock);
+ // reset state on AudioFlinger construction.
+ // AudioPolicy is created after AudioFlinger.
+ mAudioFlingerCtorTime = nowTime;
+ mSimpleLog.log("AudioFlinger ctor");
+ }
+ mAudioAnalytics.newState();
+ return;
+ }
+ if (module == Module::AUDIOPOLICY) {
+ // A start event occurs when audioserver
+ //
+ // (1) Starts the first time
+ // (2) Restarts because of the TimeCheck watchdog
+ // (3) Restarts not because of the TimeCheck watchdog.
+ int64_t executionTimeNs = 0;
+ (void)item->get(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, &executionTimeNs);
+ const float loadTimeMs = executionTimeNs * 1e-6f;
+ std::lock_guard lg(mLock);
+ const int64_t restarts = mStartCount;
+ if (mStopCount == mStartCount) {
+ mAudioPolicyCtorTime = nowTime;
+ ++mStartCount;
+ if (mStopCount == 0) {
+ // (1) First time initialization.
+ ALOGW("%s: (key=%s) AudioPolicy ctor, loadTimeMs:%f",
+ __func__, item->getKey().c_str(), loadTimeMs);
+ mSimpleLog.log("AudioPolicy ctor, loadTimeMs:%f", loadTimeMs);
+ } else {
+ // (2) Previous failure caught by TimeCheck, so we know how long the restart takes.
+ const float restartMs =
+ std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+ mAudioFlingerCtorTime - mStopTime).count();
+ ALOGW("%s: (key=%s) AudioPolicy ctor, "
+ "restarts:%lld restartMs:%f loadTimeMs:%f",
+ __func__, item->getKey().c_str(),
+ (long long)restarts, restartMs, loadTimeMs);
+ mSimpleLog.log("AudioPolicy ctor restarts:%lld restartMs:%f loadTimeMs:%f",
+ (long long)restarts, restartMs, loadTimeMs);
+ }
+ } else {
+ // (3) Previous failure is NOT due to TimeCheck, so we don't know the restart time.
+ // However, we can estimate the uptime from the time since the previous ctor.
+ const float uptimeMs =
+ std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+ nowTime - mAudioFlingerCtorTime).count();
+ mStopCount = mStartCount;
+ mAudioPolicyCtorTime = nowTime;
+ ++mStartCount;
+
+ ALOGW("%s: (key=%s) AudioPolicy ctor after uncaught failure, "
+ "mStartCount:%lld mStopCount:%lld uptimeMs:%f loadTimeMs:%f",
+ __func__, item->getKey().c_str(),
+ (long long)mStartCount, (long long)mStopCount, uptimeMs, loadTimeMs);
+ mSimpleLog.log("AudioPolicy ctor after uncaught failure, "
+ "restarts:%lld uptimeMs:%f loadTimeMs:%f",
+ (long long)restarts, uptimeMs, loadTimeMs);
+ }
+ }
+}
+
+void AudioAnalytics::Health::onAudioServerTimeout(Module module,
+ const std::shared_ptr<const android::mediametrics::Item> &item)
+{
+ std::string moduleName = getModuleName(module);
+ int64_t methodCode{};
+ std::string methodName;
+ (void)item->get(AMEDIAMETRICS_PROP_METHODCODE, &methodCode);
+ (void)item->get(AMEDIAMETRICS_PROP_METHODNAME, &methodName);
+
+ std::lock_guard lg(mLock);
+ if (mStopCount >= mStartCount) {
+ ALOGD("%s: (key=%s) %s timeout %s(%lld) "
+ "unmatched mStopCount(%lld) >= mStartCount(%lld), ignoring",
+ __func__, item->getKey().c_str(), moduleName.c_str(),
+ methodName.c_str(), (long long)methodCode,
+ (long long)mStopCount, (long long)mStartCount);
+ return;
+ }
+
+ const int64_t restarts = mStartCount - 1;
+ ++mStopCount;
+ mStopTime = std::chrono::system_clock::now();
+ const float uptimeMs = std::chrono::duration_cast<std::chrono::duration<float, std::milli>>(
+ mStopTime - mAudioFlingerCtorTime).count();
+ ALOGW("%s: (key=%s) %s timeout %s(%lld) restarts:%lld uptimeMs:%f",
+ __func__, item->getKey().c_str(), moduleName.c_str(),
+ methodName.c_str(), (long long)methodCode,
+ (long long)restarts, uptimeMs);
+ mSimpleLog.log("%s timeout %s(%lld) restarts:%lld uptimeMs:%f",
+ moduleName.c_str(), methodName.c_str(), (long long)methodCode,
+ (long long)restarts, uptimeMs);
+}
+
+std::pair<std::string, int32_t> AudioAnalytics::Health::dump(
+ int32_t lines, const char *prefix) const
+{
+ std::lock_guard lg(mLock);
+ std::string s = mSimpleLog.dumpToString(prefix == nullptr ? "" : prefix, lines);
+ size_t n = std::count(s.begin(), s.end(), '\n');
+ return { s, n };
+}
+
+
} // namespace android::mediametrics
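
The Health logic added above reduces to two counters: an AudioPolicy ctor event bumps mStartCount, a TimeCheck timeout bumps mStopCount, and a ctor that arrives while mStopCount < mStartCount means the previous audioserver instance died without a recorded stop. A stand-alone sketch of that bookkeeping (names hypothetical, not the real class):

    // Stand-alone sketch of the start/stop bookkeeping above.
    #include <cstdint>

    struct RestartTracker {
        int64_t starts = 0;
        int64_t stops = 0;

        // Returns true if the previous instance stopped without a recorded timeout.
        bool onStart() {
            const bool uncaughtFailure = (stops < starts);
            if (uncaughtFailure) stops = starts;  // resynchronize, as onAudioServerStart() does
            ++starts;
            return uncaughtFailure;
        }

        // Only TimeCheck timeouts record a stop; ignore ones with no matching start.
        void onTimeout() {
            if (stops < starts) ++stops;
        }
    };
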
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 636b343..ff16b9e 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -327,6 +327,15 @@
result << "-- some lines may be truncated --\n";
}
+ const int32_t healthLinesToDump = all ? INT32_MAX : 15;
+ result << "\nHealth Message Log:";
+ const auto [ healthDumpString, healthLines ] =
+ mAudioAnalytics.dumpHealth(healthLinesToDump);
+ result << "\n" << healthDumpString;
+ if (healthLines == healthLinesToDump) {
+ result << "-- some lines may be truncated --\n";
+ }
+
result << "\nLogSessionId:\n"
<< mediametrics::ValidateId::get()->dump();
diff --git a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
index a44fcc1..5ee8c30 100644
--- a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
+++ b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
@@ -83,6 +83,15 @@
return mHeatMap.dump(lines);
}
+ /**
+ * Returns a pair consisting of the dump string and the number of lines in the string.
+ *
+ * Health dump.
+ */
+ std::pair<std::string, int32_t> dumpHealth(int32_t lines = INT32_MAX) const {
+ return mHealth.dump(lines);
+ }
+
void clear() {
// underlying state is locked.
mPreviousAnalyticsState->clear();
@@ -247,6 +256,67 @@
AudioAnalytics &mAudioAnalytics;
} mAAudioStreamInfo{*this};
+ // Create new state, typically occurs after an AudioFlinger ctor event.
+ void newState();
+
+ // Health is a nested class that tracks audioserver health properties
+ class Health {
+ public:
+ explicit Health(AudioAnalytics &audioAnalytics)
+ : mAudioAnalytics(audioAnalytics) {}
+
+ enum class Module {
+ AUDIOFLINGER,
+ AUDIOPOLICY,
+ };
+
+ const char *getModuleName(Module module) {
+ switch (module) {
+ case Module::AUDIOFLINGER: return "AudioFlinger";
+ case Module::AUDIOPOLICY: return "AudioPolicy";
+ }
+ return "Unknown";
+ }
+
+ // Called when we believe audioserver starts (AudioFlinger ctor)
+ void onAudioServerStart(Module module,
+ const std::shared_ptr<const android::mediametrics::Item> &item);
+
+ // Called when we believe audioserver crashes (TimeCheck timeouts).
+ void onAudioServerTimeout(Module module,
+ const std::shared_ptr<const android::mediametrics::Item> &item);
+
+ std::pair<std::string, int32_t> dump(
+ int32_t lines = INT32_MAX, const char *prefix = nullptr) const;
+
+ private:
+ AudioAnalytics& mAudioAnalytics;
+
+ mutable std::mutex mLock;
+
+ // Life cycle of AudioServer
+ // mAudioFlingerCtorTime
+ // mAudioPolicyCtorTime
+ // mAudioPolicyCtorDoneTime
+ // ...
+ // possibly mStopTime (if TimeCheck thread)
+ //
+ // UpTime is measured from mStopTime - mAudioFlingerCtorTime.
+ //
+ // The stop events come from TimeCheck timeout aborts. There may be other
+ // uncaught signals, e.g. SIGSEGV, that cause missing stop events.
+ std::chrono::system_clock::time_point mAudioFlingerCtorTime GUARDED_BY(mLock);
+ std::chrono::system_clock::time_point mAudioPolicyCtorTime GUARDED_BY(mLock);
+ std::chrono::system_clock::time_point mAudioPolicyCtorDoneTime GUARDED_BY(mLock);
+ std::chrono::system_clock::time_point mStopTime GUARDED_BY(mLock);
+
+ // mStartCount and mStopCount track the audioserver start and stop events.
+ int64_t mStartCount GUARDED_BY(mLock) = 0;
+ int64_t mStopCount GUARDED_BY(mLock) = 0;
+
+ SimpleLog mSimpleLog GUARDED_BY(mLock) {64};
+ } mHealth{*this};
+
AudioPowerUsage mAudioPowerUsage;
};
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 17a3a5f..e322d62 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -231,7 +231,7 @@
sessionId = mediametrics::ValidateId::get()->validateId(sessionId);
metrics_proto.set_log_session_id(sessionId);
}
- AStatsEvent_writeString(event, codec.c_str());
+ AStatsEvent_writeString(event, sessionId.c_str());
int32_t channelCount = -1;
if (item->getInt32("android.media.mediacodec.channelCount", &channelCount)) {
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 57be435..b4610bc 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -483,7 +483,7 @@
}
}
if (info.cookie == 0 && client != nullptr) {
- info.cookie = addCookieAndLink_l(client->asBinder(),
+ info.cookie = addCookieAndLink_l(client,
new DeathNotifier(ref<ResourceManagerService>(), pid, clientId));
}
if (mObserverService != nullptr && !resourceAdded.empty()) {
@@ -592,7 +592,7 @@
onLastRemoved(it->second, info);
}
- removeCookieAndUnlink_l(info.client->asBinder(), info.cookie);
+ removeCookieAndUnlink_l(info.client, info.cookie);
if (mObserverService != nullptr && !info.resources.empty()) {
mObserverService->onResourceRemoved(info.uid, pid, info.resources);
@@ -696,11 +696,11 @@
if (clients.size() == 0) {
// if we are here, run the fourth pass to free one codec with the different type.
if (secureCodec != NULL) {
- MediaResource temp(MediaResource::Type::kNonSecureCodec, 1);
+ MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
getClientForResource_l(callingPid, &temp, &clients);
}
if (nonSecureCodec != NULL) {
- MediaResource temp(MediaResource::Type::kSecureCodec, 1);
+ MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
getClientForResource_l(callingPid, &temp, &clients);
}
}
@@ -732,6 +732,7 @@
return true;
}
+ int failedClientPid = -1;
{
Mutex::Autolock lock(mLock);
bool found = false;
@@ -746,11 +747,14 @@
}
}
if (found) {
+ failedClientPid = mMap.keyAt(i);
break;
}
}
- if (!found) {
- ALOGV("didn't find failed client");
+ if (found) {
+ ALOGW("Failed to reclaim resources from client with pid %d", failedClientPid);
+ } else {
+ ALOGW("Failed to reclaim resources from unlocateable client");
}
}
@@ -812,7 +816,7 @@
return Status::fromServiceSpecificError(BAD_VALUE);
}
- uintptr_t cookie = addCookieAndLink_l(client->asBinder(),
+ uintptr_t cookie = addCookieAndLink_l(client,
new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), pid));
mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{cookie, client});
@@ -820,23 +824,29 @@
return Status::ok();
}
-uintptr_t ResourceManagerService::addCookieAndLink_l(::ndk::SpAIBinder binder,
- const sp<DeathNotifier>& notifier) {
+uintptr_t ResourceManagerService::addCookieAndLink_l(
+ const std::shared_ptr<IResourceManagerClient>& client, const sp<DeathNotifier>& notifier) {
+ if (client == nullptr) {
+ return 0;
+ }
std::scoped_lock lock{sCookieLock};
uintptr_t cookie;
// Need to skip cookie 0 (if it wraps around). ResourceInfo has cookie initialized to 0
// indicating the death notifier is not created yet.
while ((cookie = ++sCookieCounter) == 0);
- AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+ AIBinder_linkToDeath(client->asBinder().get(), mDeathRecipient.get(), (void*)cookie);
sCookieToDeathNotifierMap.emplace(cookie, notifier);
return cookie;
}
-void ResourceManagerService::removeCookieAndUnlink_l(::ndk::SpAIBinder binder, uintptr_t cookie) {
+void ResourceManagerService::removeCookieAndUnlink_l(
+ const std::shared_ptr<IResourceManagerClient>& client, uintptr_t cookie) {
std::scoped_lock lock{sCookieLock};
- AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+ if (client != nullptr) {
+ AIBinder_unlinkToDeath(client->asBinder().get(), mDeathRecipient.get(), (void*)cookie);
+ }
sCookieToDeathNotifierMap.erase(cookie);
}
@@ -854,7 +864,7 @@
mProcessInfo->removeProcessInfoOverride(pid);
- removeCookieAndUnlink_l(it->second.client->asBinder(), it->second.cookie);
+ removeCookieAndUnlink_l(it->second.client, it->second.cookie);
mProcessInfoOverrideMap.erase(pid);
}
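
The cookie helpers above now take the IResourceManagerClient directly and guard against a null client; the cookie itself remains a non-zero counter value, since ResourceInfo uses cookie == 0 to mean "no death notifier yet". A tiny sketch of that allocation rule:

    // Sketch of the cookie allocation above: 0 is reserved to mean
    // "no death notifier yet", so the counter skips it when it wraps.
    #include <cstdint>

    static uintptr_t sCounter = 0;  // hypothetical stand-in for sCookieCounter

    uintptr_t nextCookie() {
        uintptr_t cookie;
        while ((cookie = ++sCounter) == 0) {}  // skip 0 on wrap-around
        return cookie;
    }
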
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 6551371..c636a0f 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -166,8 +166,10 @@
void removeProcessInfoOverride(int pid);
void removeProcessInfoOverride_l(int pid);
- uintptr_t addCookieAndLink_l(::ndk::SpAIBinder binder, const sp<DeathNotifier>& notifier);
- void removeCookieAndUnlink_l(::ndk::SpAIBinder binder, uintptr_t cookie);
+ uintptr_t addCookieAndLink_l(const std::shared_ptr<IResourceManagerClient>& client,
+ const sp<DeathNotifier>& notifier);
+ void removeCookieAndUnlink_l(const std::shared_ptr<IResourceManagerClient>& client,
+ uintptr_t cookie);
mutable Mutex mLock;
sp<ProcessInfoInterface> mProcessInfo;
diff --git a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
index 8f25ee6..e4aaea0 100644
--- a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
@@ -227,33 +227,31 @@
mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(kMinThreadPairs, kMaxThreadPairs);
// Make even number of threads
size_t numThreads = numThreadPairs * 2;
- resourceThreadArgs threadArgs;
- vector<MediaResourceParcel> mediaResource;
+ resourceThreadArgs threadArgs[numThreadPairs];
+ vector<MediaResourceParcel> mediaResource[numThreadPairs];
pthread_t pt[numThreads];
- int i;
- for (i = 0; i < numThreads - 1; i += 2) {
- threadArgs.pid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
- threadArgs.uid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+ for (int k = 0; k < numThreadPairs; ++k) {
+ threadArgs[k].pid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+ threadArgs[k].uid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
int32_t mediaResourceType = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(
kMinResourceType, kMaxResourceType);
int32_t mediaResourceSubType = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(
kMinResourceType, kMaxResourceType);
uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
- threadArgs.service = mService;
+ threadArgs[k].service = mService;
shared_ptr<IResourceManagerClient> testClient =
- ::ndk::SharedRefBase::make<TestClient>(threadArgs.pid, mService);
- threadArgs.testClient = testClient;
- threadArgs.testClientId = getId(testClient);
- mediaResource.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
- static_cast<MedResSubType>(mediaResourceSubType),
- mediaResourceValue));
- threadArgs.mediaResource = mediaResource;
- pthread_create(&pt[i], nullptr, addResource, &threadArgs);
- pthread_create(&pt[i + 1], nullptr, removeResource, &threadArgs);
- mediaResource.clear();
+ ::ndk::SharedRefBase::make<TestClient>(threadArgs[k].pid, mService);
+ threadArgs[k].testClient = testClient;
+ threadArgs[k].testClientId = getId(testClient);
+ mediaResource[k].push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
+ static_cast<MedResSubType>(mediaResourceSubType),
+ mediaResourceValue));
+ threadArgs[k].mediaResource = mediaResource[k];
+ pthread_create(&pt[2 * k], nullptr, addResource, &threadArgs[k]);
+ pthread_create(&pt[2 * k + 1], nullptr, removeResource, &threadArgs[k]);
}
- for (i = 0; i < numThreads; ++i) {
+ for (int i = 0; i < numThreads; ++i) {
pthread_join(pt[i], nullptr);
}
@@ -266,14 +264,14 @@
int32_t mediaResourceSubType =
mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(kMinResourceType, kMaxResourceType);
uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
- mediaResource.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
- static_cast<MedResSubType>(mediaResourceSubType),
- mediaResourceValue));
+ vector<MediaResourceParcel> mediaRes;
+ mediaRes.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
+ static_cast<MedResSubType>(mediaResourceSubType),
+ mediaResourceValue));
bool result;
- mService->reclaimResource(pidZero, mediaResource, &result);
- mService->removeResource(pidZero, getId(testClient), mediaResource);
+ mService->reclaimResource(pidZero, mediaRes, &result);
+ mService->removeResource(pidZero, getId(testClient), mediaRes);
mService->removeClient(pidZero, getId(testClient));
- mediaResource.clear();
}
void ResourceManagerServiceFuzzer::setServiceLog() {
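
The first hunk above fixes a data race in the fuzzer: a single resourceThreadArgs (and one shared mediaResource vector) was reused for every pthread_create call, so worker threads read arguments the main loop was still overwriting. Giving each thread pair its own slot removes the race; the general pattern, as a stand-alone sketch:

    // Stand-alone sketch: each worker owns an argument slot that stays valid and
    // untouched until the thread is joined.
    #include <pthread.h>
    #include <vector>

    struct Args { int value; };

    static void* worker(void* p) {
        auto* args = static_cast<Args*>(p);
        (void)args->value;                  // safe: this slot belongs to this thread
        return nullptr;
    }

    void launch(int n) {
        std::vector<Args> args(n);          // one slot per thread, stable addresses
        std::vector<pthread_t> threads(n);
        for (int k = 0; k < n; ++k) {
            args[k].value = k;
            pthread_create(&threads[k], nullptr, worker, &args[k]);
        }
        for (int k = 0; k < n; ++k) {
            pthread_join(threads[k], nullptr);
        }
    }
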
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index ec62d4e..5c1dda1 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -92,11 +92,8 @@
"liblog",
"libtunerservice",
"libutils",
- "tv_tuner_resource_manager_aidl_interface-ndk",
- ],
-
- static_libs: [
"tv_tuner_aidl_interface-ndk",
+ "tv_tuner_resource_manager_aidl_interface-ndk",
],
init_rc: ["mediatuner.rc"],
diff --git a/services/tuner/hidl/TunerHidlFilter.cpp b/services/tuner/hidl/TunerHidlFilter.cpp
index 6d8ae03..fe74a5c 100644
--- a/services/tuner/hidl/TunerHidlFilter.cpp
+++ b/services/tuner/hidl/TunerHidlFilter.cpp
@@ -390,7 +390,9 @@
static_cast<int32_t>(Result::INVALID_STATE));
}
- HidlResult res = mFilter->releaseAvHandle(hidl_handle(makeFromAidl(in_handle)), in_avDataId);
+ hidl_handle handle;
+ handle.setTo(makeFromAidl(in_handle), true);
+ HidlResult res = mFilter->releaseAvHandle(handle, in_avDataId);
if (res != HidlResult::SUCCESS) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
}
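
The TunerHidlFilter change above stops wrapping the handle in a temporary hidl_handle and instead calls setTo(..., /*shouldOwn=*/true), so the wrapper takes ownership of the native_handle created by makeFromAidl and releases it when it goes out of scope rather than leaking it. A minimal sketch of that ownership pattern (libhidl and libcutils headers assumed):

    // Sketch only: assumes libhidl's <hidl/HidlSupport.h> and libcutils.
    #include <cutils/native_handle.h>
    #include <hidl/HidlSupport.h>

    void useOwnedHandle(native_handle_t* raw /* e.g. returned by makeFromAidl() */) {
        ::android::hardware::hidl_handle handle;
        handle.setTo(raw, /*shouldOwn=*/ true);  // 'handle' now releases raw
        // ... pass 'handle' into the HIDL call ...
    }   // raw is released here instead of leaking after the call returns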