Merge "Camera: Update JPEG_ORIENTATION doc to clarify Image behavior" into main
diff --git a/Android.bp b/Android.bp
index 7a2bb9b..72b8721 100644
--- a/Android.bp
+++ b/Android.bp
@@ -105,7 +105,6 @@
aidl_interface {
name: "av-audio-types-aidl",
- unstable: true,
host_supported: true,
vendor_available: true,
double_loadable: true,
@@ -125,4 +124,12 @@
sdk_version: "module_current",
},
},
+ versions_with_info: [
+ {
+ version: "1",
+ imports: ["android.hardware.audio.core-V2"],
+ },
+ ],
+ frozen: true,
+
}
diff --git a/aidl/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl/android/media/audio/IHalAdapterVendorExtension.aidl
index b7a7678..48fb291 100644
--- a/aidl/android/media/audio/IHalAdapterVendorExtension.aidl
+++ b/aidl/android/media/audio/IHalAdapterVendorExtension.aidl
@@ -23,6 +23,8 @@
* is optional. Vendors may provide an implementation on the system_ext
* partition. The default instance of this interface, if provided, must be
* registered prior to the moment when the audio server connects to HAL modules.
+ * Vendors need to set the system property `ro.audio.ihaladaptervendorextension_enabled`
+ * to `true` for the framework to bind to this service.
*
* {@hide}
*/
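A minimal sketch of how a client might honor the property documented above; `vendorExtensionEnabled` is a hypothetical helper name, and the real framework wiring lives in the audio HAL adapter code rather than here:

    #include <android-base/properties.h>

    // Returns true only when the vendor has opted in via the documented system property.
    static bool vendorExtensionEnabled() {
        return android::base::GetBoolProperty(
                "ro.audio.ihaladaptervendorextension_enabled", /*default_value=*/false);
    }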
diff --git a/aidl_api/av-audio-types-aidl/1/.hash b/aidl_api/av-audio-types-aidl/1/.hash
new file mode 100644
index 0000000..0002682
--- /dev/null
+++ b/aidl_api/av-audio-types-aidl/1/.hash
@@ -0,0 +1 @@
+ef1bc5ed9db445fbfc116cdec6e6ad081458ee40
diff --git a/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
new file mode 100644
index 0000000..a9aa2c1
--- /dev/null
+++ b/aidl_api/av-audio-types-aidl/1/android/media/audio/IHalAdapterVendorExtension.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media.audio;
+/* @hide */
+interface IHalAdapterVendorExtension {
+ @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
+ void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
+ android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
+ android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
+ @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
+ enum ParameterScope {
+ MODULE = 0,
+ STREAM = 1,
+ }
+}
diff --git a/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl b/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
new file mode 100644
index 0000000..a9aa2c1
--- /dev/null
+++ b/aidl_api/av-audio-types-aidl/current/android/media/audio/IHalAdapterVendorExtension.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media.audio;
+/* @hide */
+interface IHalAdapterVendorExtension {
+ @utf8InCpp String[] parseVendorParameterIds(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeys);
+ void parseVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in @utf8InCpp String rawKeysAndValues, out android.hardware.audio.core.VendorParameter[] syncParameters, out android.hardware.audio.core.VendorParameter[] asyncParameters);
+ android.hardware.audio.core.VendorParameter[] parseBluetoothA2dpReconfigureOffload(in @utf8InCpp String rawValue);
+ android.hardware.audio.core.VendorParameter[] parseBluetoothLeReconfigureOffload(in @utf8InCpp String rawValue);
+ @utf8InCpp String processVendorParameters(android.media.audio.IHalAdapterVendorExtension.ParameterScope scope, in android.hardware.audio.core.VendorParameter[] parameters);
+ enum ParameterScope {
+ MODULE = 0,
+ STREAM = 1,
+ }
+}
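For orientation, a rough vendor-side skeleton of this interface assuming the NDK AIDL backend; the generated class and method signatures are illustrative and should be checked against the headers the build actually produces:

    #include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>

    class MyVendorExtension
        : public aidl::android::media::audio::BnHalAdapterVendorExtension {
        // Split a raw parameter key string into individual vendor parameter ids.
        ndk::ScopedAStatus parseVendorParameterIds(
                ParameterScope scope, const std::string& rawKeys,
                std::vector<std::string>* parameterIds) override {
            (void)scope;
            *parameterIds = {rawKeys};  // placeholder: treat the whole raw string as one id
            return ndk::ScopedAStatus::ok();
        }
        // The remaining interface methods are omitted for brevity.
    };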
diff --git a/camera/Android.bp b/camera/Android.bp
index 22f1633..d0f8e7e 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -13,6 +13,7 @@
// limitations under the License.
package {
+ default_team: "trendy_team_camera_framework",
default_applicable_licenses: ["frameworks_av_camera_license"],
}
@@ -46,6 +47,7 @@
aconfig_declarations {
name: "camera_platform_flags",
package: "com.android.internal.camera.flags",
+ container: "system",
srcs: ["camera_platform.aconfig"],
}
@@ -64,6 +66,7 @@
name: "camera_headers",
export_include_dirs: ["include"],
}
+
cc_library {
name: "libcamera_client",
@@ -119,10 +122,14 @@
"frameworks/native/include/media/openmax",
],
export_include_dirs: [
- "include",
- "include/camera"
+ "include",
+ "include/camera",
],
- export_shared_lib_headers: ["libcamera_metadata", "libnativewindow", "libgui"],
+ export_shared_lib_headers: [
+ "libcamera_metadata",
+ "libnativewindow",
+ "libgui",
+ ],
cflags: [
"-Werror",
@@ -152,7 +159,7 @@
export_include_dirs: [
"include",
- "include/camera"
+ "include/camera",
],
}
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 0eeeb7f..4bea896 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -278,14 +278,28 @@
CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId);
/**
- * Check whether a particular session configuration with optional session parameters
- * has camera device support.
- *
- * @param cameraId The camera id to query session configuration on
- * @param sessionConfiguration Specific session configuration to be verified.
- * @return true - in case the stream combination is supported.
- * false - in case there is no device support.
- */
+ * Check whether a particular session configuration with optional session parameters
+ * has camera device support.
+ *
+ * @param cameraId The camera id to query session configuration for
+ * @param sessionConfiguration Specific session configuration to be verified.
+ * @return true - in case the stream combination is supported.
+ * false - in case there is no device support.
+ */
boolean isSessionConfigurationWithParametersSupported(@utf8InCpp String cameraId,
in SessionConfiguration sessionConfiguration);
+
+ /**
+ * Get the camera characteristics for a particular session configuration for
+ * the given camera device.
+ *
+ * @param cameraId ID of the device for which the session characteristics must be fetched.
+ * @param sessionConfiguration session configuration for which the characteristics
+ * must be fetched.
+ * @return - characteristics associated with the given session.
+ */
+ CameraMetadataNative getSessionCharacteristics(@utf8InCpp String cameraId,
+ int targetSdkVersion,
+ boolean overrideToPortrait,
+ in SessionConfiguration sessionConfiguration);
}
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 843e0d4..8e1fcc0 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -99,15 +99,6 @@
*/
boolean isSessionConfigurationSupported(in SessionConfiguration sessionConfiguration);
- /**
- * Get the camera characteristics for a particular session configuration
- *
- * @param sessionConfiguration Specific session configuration for which the characteristics
- * are fetched.
- * @return - characteristics associated with the given session.
- */
- CameraMetadataNative getSessionCharacteristics(in SessionConfiguration sessionConfiguration);
-
void deleteStream(int streamId);
/**
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 73b153c..2d1af32 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -25,6 +25,7 @@
#include <binder/Parcel.h>
#include <gui/view/Surface.h>
#include <system/camera_metadata.h>
+#include <system/graphics.h>
#include <utils/String8.h>
@@ -102,6 +103,25 @@
return mUseReadoutTimestamp;
}
+int OutputConfiguration::getFormat() const {
+ return mFormat;
+}
+
+int OutputConfiguration::getDataspace() const {
+ return mDataspace;
+}
+
+int64_t OutputConfiguration::getUsage() const {
+ return mUsage;
+}
+
+bool OutputConfiguration::isComplete() const {
+ return !((mSurfaceType == SURFACE_TYPE_MEDIA_RECORDER ||
+ mSurfaceType == SURFACE_TYPE_MEDIA_CODEC ||
+ mSurfaceType == SURFACE_TYPE_IMAGE_READER) &&
+ mGbps.empty());
+}
+
OutputConfiguration::OutputConfiguration() :
mRotation(INVALID_ROTATION),
mSurfaceSetID(INVALID_SET_ID),
@@ -116,7 +136,10 @@
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
mMirrorMode(MIRROR_MODE_AUTO),
- mUseReadoutTimestamp(false) {
+ mUseReadoutTimestamp(false),
+ mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
+ mDataspace(0),
+ mUsage(0) {
}
OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -234,6 +257,24 @@
return err;
}
+ int format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ if ((err = parcel->readInt32(&format)) != OK) {
+ ALOGE("%s: Failed to read format from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int dataspace = 0;
+ if ((err = parcel->readInt32(&dataspace)) != OK) {
+ ALOGE("%s: Failed to read dataspace from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int64_t usage = 0;
+ if ((err = parcel->readInt64(&usage)) != OK) {
+ ALOGE("%s: Failed to read usage flag from parcel", __FUNCTION__);
+ return err;
+ }
+
mRotation = rotation;
mSurfaceSetID = setID;
mSurfaceType = surfaceType;
@@ -256,13 +297,17 @@
mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
mDynamicRangeProfile = dynamicProfile;
mColorSpace = colorSpace;
+ mFormat = format;
+ mDataspace = dataspace;
+ mUsage = usage;
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
" physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
- ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d",
+ ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d, format = %d, "
+ "dataspace = %d, usage = %" PRId64,
__FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
mPhysicalCameraId.c_str(), mIsMultiResolution, mStreamUseCase, timestampBase,
- mMirrorMode, mUseReadoutTimestamp);
+ mMirrorMode, mUseReadoutTimestamp, mFormat, mDataspace, mUsage);
return err;
}
@@ -283,6 +328,9 @@
mTimestampBase = TIMESTAMP_BASE_DEFAULT;
mMirrorMode = MIRROR_MODE_AUTO;
mUseReadoutTimestamp = false;
+ mFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ mDataspace = 0;
+ mUsage = 0;
}
OutputConfiguration::OutputConfiguration(
@@ -296,7 +344,9 @@
mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
- mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false) { }
+ mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false),
+ mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), mDataspace(0),
+ mUsage(0) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
@@ -362,6 +412,15 @@
err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
if (err != OK) return err;
+ err = parcel->writeInt32(mFormat);
+ if (err != OK) return err;
+
+ err = parcel->writeInt32(mDataspace);
+ if (err != OK) return err;
+
+ err = parcel->writeInt64(mUsage);
+ if (err != OK) return err;
+
return OK;
}
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 076394d..1dc45c3 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -1,8 +1,10 @@
package: "com.android.internal.camera.flags"
+container: "system"
flag {
namespace: "camera_platform"
name: "camera_hsum_permission"
+ is_exported: true
description: "Camera access by headless system user"
bug: "273539631"
}
@@ -10,6 +12,7 @@
flag {
namespace: "camera_platform"
name: "concert_mode"
+ is_exported: true
description: "Introduces a new concert mode camera extension type"
bug: "297083874"
}
@@ -17,12 +20,20 @@
flag {
namespace: "camera_platform"
name: "feature_combination_query"
+ is_exported: true
description: "Query feature combination support and session specific characteristics"
bug: "309627704"
}
flag {
namespace: "camera_platform"
+ name: "watch_foreground_changes"
+ description: "Request AppOps to notify changes in the foreground status of the client"
+ bug: "290086710"
+}
+
+flag {
+ namespace: "camera_platform"
name: "log_ultrawide_usage"
description: "Enable measuring how much usage there is for ultrawide-angle cameras"
bug: "300515796"
@@ -31,6 +42,7 @@
flag {
namespace: "camera_platform"
name: "camera_manual_flash_strength_control"
+ is_exported: true
description: "Flash brightness level control in manual flash mode"
bug: "238348881"
}
@@ -66,6 +78,7 @@
flag {
namespace: "camera_platform"
name: "camera_ae_mode_low_light_boost"
+ is_exported: true
description: "An AE mode that enables increased brightening in low light scenes"
bug: "312803148"
}
@@ -83,3 +96,67 @@
description: "Enable using ro.board.api_level instead of ro.vndk.version to get VNDK version"
bug: "312315580"
}
+
+flag {
+ namespace: "camera_platform"
+ name: "camera_extensions_characteristics_get"
+ is_exported: true
+ description: "Enable get extension specific camera characteristics API"
+ bug: "280649914"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "delay_lazy_hal_instantiation"
+ description: "Only trigger lazy HAL instantiation when the HAL is needed for an operation."
+ bug: "319735068"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "return_buffers_outside_locks"
+ description: "Enable returning graphics buffers to buffer queues without holding the in-flight mutex"
+ bug: "315526878"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "camera_device_setup"
+ is_exported: true
+ description: "Create an intermediate Camera Device class for limited CameraDevice access."
+ bug: "320741775"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "camera_privacy_allowlist"
+ is_exported: true
+ description: "Allowlisting to exempt safety-relevant cameras from privacy control for automotive devices"
+ bug: "282814430"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "extension_10_bit"
+ is_exported: true
+ description: "Enables 10-bit support in the camera extensions."
+ bug: "316375635"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "surface_leak_fix"
+ description: "Address Surface release leaks in CaptureRequest"
+ bug: "324071855"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "concert_mode_api"
+ description: "Covers the eyes free videography public facing API"
+ bug: "297083874"
+}
+
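As a side note on how these declarations are consumed from native code, a hedged sketch of reading one of the new flags through an aconfig-generated C++ library (the exact header and library names depend on how the cc_aconfig_library for this package is set up):

    #include <com_android_internal_camera_flags.h>

    namespace flags = com::android::internal::camera::flags;

    void maybeDeferHalStartup() {
        if (flags::delay_lazy_hal_instantiation()) {
            // Only bring up the lazy HAL once an operation actually needs it.
        }
    }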
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 13b705c..6862cb1 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -13,6 +13,7 @@
// limitations under the License.
package {
+ default_team: "trendy_team_camera_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_camera_license"
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 3f74b4a..83ce39d 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -35,10 +35,13 @@
static const int INVALID_ROTATION;
static const int INVALID_SET_ID;
- enum SurfaceType{
+ enum SurfaceType {
SURFACE_TYPE_UNKNOWN = -1,
SURFACE_TYPE_SURFACE_VIEW = 0,
- SURFACE_TYPE_SURFACE_TEXTURE = 1
+ SURFACE_TYPE_SURFACE_TEXTURE = 1,
+ SURFACE_TYPE_MEDIA_RECORDER = 2,
+ SURFACE_TYPE_MEDIA_CODEC = 3,
+ SURFACE_TYPE_IMAGE_READER = 4
};
enum TimestampBaseType {
TIMESTAMP_BASE_DEFAULT = 0,
@@ -71,6 +74,10 @@
int getTimestampBase() const;
int getMirrorMode() const;
bool useReadoutTimestamp() const;
+ int getFormat() const;
+ int getDataspace() const;
+ int64_t getUsage() const;
+ bool isComplete() const;
// set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
const std::vector<int32_t>& getSensorPixelModesUsed() const;
@@ -98,7 +105,7 @@
OutputConfiguration(const std::vector<sp<IGraphicBufferProducer>>& gbps,
int rotation, const std::string& physicalCameraId,
int surfaceSetID = INVALID_SET_ID,
- int surfaceType = OutputConfiguration::SURFACE_TYPE_UNKNOWN, int width = 0,
+ int surfaceType = SURFACE_TYPE_UNKNOWN, int width = 0,
int height = 0, bool isShared = false);
bool operator == (const OutputConfiguration& other) const {
@@ -118,7 +125,10 @@
mStreamUseCase == other.mStreamUseCase &&
mTimestampBase == other.mTimestampBase &&
mMirrorMode == other.mMirrorMode &&
- mUseReadoutTimestamp == other.mUseReadoutTimestamp);
+ mUseReadoutTimestamp == other.mUseReadoutTimestamp &&
+ mFormat == other.mFormat &&
+ mDataspace == other.mDataspace &&
+ mUsage == other.mUsage);
}
bool operator != (const OutputConfiguration& other) const {
return !(*this == other);
@@ -173,6 +183,15 @@
if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
}
+ if (mFormat != other.mFormat) {
+ return mFormat < other.mFormat;
+ }
+ if (mDataspace != other.mDataspace) {
+ return mDataspace < other.mDataspace;
+ }
+ if (mUsage != other.mUsage) {
+ return mUsage < other.mUsage;
+ }
return gbpsLessThan(other);
}
@@ -203,6 +222,9 @@
int mTimestampBase;
int mMirrorMode;
bool mUseReadoutTimestamp;
+ int mFormat;
+ int mDataspace;
+ int64_t mUsage;
};
} // namespace params
} // namespace camera2
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 165395a..421469a 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -17,6 +17,7 @@
// frameworks/av/include.
package {
+ default_team: "trendy_team_camera_framework",
default_applicable_licenses: ["frameworks_av_camera_ndk_license"],
}
diff --git a/camera/ndk/NdkCameraMetadata.cpp b/camera/ndk/NdkCameraMetadata.cpp
index 7d3a53e..a2dfaee 100644
--- a/camera/ndk/NdkCameraMetadata.cpp
+++ b/camera/ndk/NdkCameraMetadata.cpp
@@ -121,6 +121,18 @@
}
EXPORT
+camera_status_t ACameraMetadata_getTagFromName(
+ const ACameraMetadata* acm, const char* name, uint32_t* tag) {
+ ATRACE_CALL();
+ if (acm == nullptr || name == nullptr || tag == nullptr) {
+ ALOGE("%s: invalid argument! metadata %p, name %p, tag %p",
+ __FUNCTION__, acm, name, tag);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ return acm->getTagFromName(name, tag);
+}
+
+EXPORT
ACameraMetadata* ACameraMetadata_copy(const ACameraMetadata* src) {
ATRACE_CALL();
if (src == nullptr) {
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 5d3b65b..8c3424f 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -255,6 +255,7 @@
template<class T>
void CameraManagerGlobal::registerAvailCallback(const T *callback) {
Mutex::Autolock _l(mLock);
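+ // Connect to the camera service (if needed) before registering, so the initial
+ // availability callbacks sent below reflect the currently known camera statuses.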
+ getCameraServiceLocked();
Callback cb(callback);
auto pair = mCallbacks.insert(cb);
// Send initial callbacks if callback is newly registered
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index b6b8012..69b30f7 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -18,6 +18,8 @@
#define LOG_TAG "ACameraMetadata"
#include "ACameraMetadata.h"
+
+#include <camera_metadata_hidden.h>
#include <utils/Vector.h>
#include <system/graphics.h>
#include <media/NdkImage.h>
@@ -85,6 +87,19 @@
filterDurations(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS);
}
// TODO: filter request/result keys
+ const CameraMetadata& metadata = *mData;
+ const camera_metadata_t *rawMetadata = metadata.getAndLock();
+ metadata_vendor_id_t vendorTagId = get_camera_metadata_vendor_id(rawMetadata);
+ metadata.unlock(rawMetadata);
+ sp<VendorTagDescriptorCache> vtCache = VendorTagDescriptorCache::getGlobalVendorTagCache();
+ if (vtCache == nullptr) {
+ ALOGE("%s: error vendor tag descriptor cache is not initialized", __FUNCTION__);
+ return;
+ }
+ vtCache->getVendorTagDescriptor(vendorTagId, &mVTags);
+ if (mVTags == nullptr) {
+ ALOGE("%s: error retrieving vendor tag descriptor", __FUNCTION__);
+ }
}
bool
@@ -473,6 +488,13 @@
return (*mData);
}
+camera_status_t
+ACameraMetadata::getTagFromName(const char *name, uint32_t *tag) const {
+ Mutex::Autolock _l(mLock);
+ status_t status = CameraMetadata::getTagFromName(name, mVTags.get(), tag);
+ return status == OK ? ACAMERA_OK : ACAMERA_ERROR_METADATA_NOT_FOUND;
+}
+
bool
ACameraMetadata::isLogicalMultiCamera(size_t* count, const char*const** physicalCameraIds) const {
if (mType != ACM_CHARACTERISTICS) {
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 084a60b..e89e620 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -27,9 +27,17 @@
#ifdef __ANDROID_VNDK__
#include <CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTag.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTagSection.h>
+#include <aidl/android/frameworks/cameraservice/common/ProviderIdAndVendorTagSections.h>
+#include <VendorTagDescriptor.h>
using CameraMetadata = android::hardware::camera::common::V1_0::helper::CameraMetadata;
+using ::aidl::android::frameworks::cameraservice::common::ProviderIdAndVendorTagSections;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptor;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
#else
#include <camera/CameraMetadata.h>
+#include <camera/VendorTagDescriptor.h>
#endif
#include <camera/NdkCameraMetadata.h>
@@ -73,6 +81,8 @@
camera_status_t getTags(/*out*/int32_t* numTags,
/*out*/const uint32_t** tags) const;
+ camera_status_t
+ getTagFromName(const char *name, uint32_t *tag) const;
const CameraMetadata& getInternalData() const;
bool isLogicalMultiCamera(size_t* count, const char* const** physicalCameraIds) const;
@@ -134,6 +144,7 @@
std::vector<const char*> mStaticPhysicalCameraIds;
std::vector<String8> mStaticPhysicalCameraIdValues;
+ sp<VendorTagDescriptor> mVTags = nullptr;
};
#endif // _ACAMERA_METADATA_H
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index cf29736..237d07b 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -221,6 +221,24 @@
/*out*/int32_t* numEntries, /*out*/const uint32_t** tags) __INTRODUCED_IN(24);
/**
+ * Look up the tag ID, by name, for a device-specific custom tag that is only usable on
+ * that particular device. The name and type of the tag need to be discovered from some
+ * other source, such as the device manufacturer. The ID value is stable during the
+ * lifetime of an application, but should be queried again after the process restarts.
+ * This method can also look up public tags that exist in the Java API by name; however,
+ * it is simpler and faster to use the tag values already defined in the NDK.
+ * @param metadata The {@link ACameraMetadata} to query the tag value from.
+ * @param name The name of the tag being queried.
+ * @param tag The output tag assigned by this method.
+ *
+ * @return ACAMERA_OK only if the function call was successful.
+ */
+
+camera_status_t
+ACameraMetadata_getTagFromName(const ACameraMetadata* metadata, const char *name, uint32_t *tag) __INTRODUCED_IN(35);
+
+/**
* Create a copy of input {@link ACameraMetadata}.
*
* <p>The returned ACameraMetadata must be freed by the application by {@link ACameraMetadata_free}
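A usage sketch for the new entry point; the vendor tag name below is made up for illustration:

    #include <camera/NdkCameraMetadata.h>

    // Resolve a vendor tag by name, then read it like any other metadata entry.
    static void readVendorTag(const ACameraMetadata* chars) {
        uint32_t tag = 0;
        if (ACameraMetadata_getTagFromName(chars, "com.example.vendorTag", &tag) != ACAMERA_OK) {
            return;  // tag unknown on this device
        }
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars, tag, &entry) == ACAMERA_OK) {
            // entry.type, entry.count and entry.data now describe the tag's value.
        }
    }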
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 2888b39..6c58bcf 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -76,6 +76,7 @@
ACAMERA_AUTOMOTIVE_LENS,
ACAMERA_EXTENSION,
ACAMERA_JPEGR,
+ ACAMERA_EFV,
ACAMERA_SECTION_COUNT,
ACAMERA_VENDOR = 0x8000
@@ -123,6 +124,7 @@
ACAMERA_AUTOMOTIVE_LENS_START = ACAMERA_AUTOMOTIVE_LENS << 16,
ACAMERA_EXTENSION_START = ACAMERA_EXTENSION << 16,
ACAMERA_JPEGR_START = ACAMERA_JPEGR << 16,
+ ACAMERA_EFV_START = ACAMERA_EFV << 16,
ACAMERA_VENDOR_START = ACAMERA_VENDOR << 16
} acamera_metadata_section_start_t;
@@ -2147,7 +2149,7 @@
* </ul>
*/
ACAMERA_CONTROL_SETTINGS_OVERRIDE = // int32 (acamera_metadata_enum_android_control_settings_override_t)
- ACAMERA_CONTROL_START + 49,
+ ACAMERA_CONTROL_START + 52,
/**
* <p>List of available settings overrides supported by the camera device that can
* be used to speed up certain controls.</p>
@@ -2173,7 +2175,7 @@
* @see ACAMERA_CONTROL_SETTINGS_OVERRIDE
*/
ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES = // int32[n]
- ACAMERA_CONTROL_START + 50,
+ ACAMERA_CONTROL_START + 53,
/**
* <p>Automatic crop, pan and zoom to keep objects in the center of the frame.</p>
*
@@ -2200,7 +2202,7 @@
* @see ACAMERA_SCALER_CROP_REGION
*/
ACAMERA_CONTROL_AUTOFRAMING = // byte (acamera_metadata_enum_android_control_autoframing_t)
- ACAMERA_CONTROL_START + 52,
+ ACAMERA_CONTROL_START + 55,
/**
* <p>Whether the camera device supports ACAMERA_CONTROL_AUTOFRAMING.</p>
*
@@ -2216,7 +2218,7 @@
* <p>Will be <code>false</code> if auto-framing is not available.</p>
*/
ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE = // byte (acamera_metadata_enum_android_control_autoframing_available_t)
- ACAMERA_CONTROL_START + 53,
+ ACAMERA_CONTROL_START + 56,
/**
* <p>Current state of auto-framing.</p>
*
@@ -2243,7 +2245,7 @@
* @see ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE
*/
ACAMERA_CONTROL_AUTOFRAMING_STATE = // byte (acamera_metadata_enum_android_control_autoframing_state_t)
- ACAMERA_CONTROL_START + 54,
+ ACAMERA_CONTROL_START + 57,
/**
* <p>The operating luminance range of low light boost measured in lux (lx).</p>
*
@@ -2256,7 +2258,7 @@
*
*/
ACAMERA_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE = // float[2]
- ACAMERA_CONTROL_START + 55,
+ ACAMERA_CONTROL_START + 58,
/**
* <p>Current state of the low light boost AE mode.</p>
*
@@ -2276,7 +2278,7 @@
* 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY'.</p>
*/
ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE = // byte (acamera_metadata_enum_android_control_low_light_boost_state_t)
- ACAMERA_CONTROL_START + 56,
+ ACAMERA_CONTROL_START + 59,
ACAMERA_CONTROL_END,
/**
@@ -4671,7 +4673,7 @@
* application should leave stream use cases within the session as DEFAULT.</p>
*/
ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES = // int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
- ACAMERA_SCALER_START + 25,
+ ACAMERA_SCALER_START + 26,
/**
* <p>The region of the sensor that corresponds to the RAW read out for this
* capture when the stream use case of a RAW stream is set to CROPPED_RAW.</p>
@@ -4707,24 +4709,27 @@
* </ul>
* <p>should be interpreted in the effective after raw crop field-of-view coordinate system.
* In this coordinate system,
- * {preCorrectionActiveArraySize.left, preCorrectionActiveArraySize.top} corresponds to the
+ * {ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.left,
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.top} corresponds to the
* the top left corner of the cropped RAW frame and
- * {preCorrectionActiveArraySize.right, preCorrectionActiveArraySize.bottom} corresponds to
+ * {ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.right,
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.bottom} corresponds to
* the bottom right corner. Client applications must use the values of the keys
* in the CaptureResult metadata if present.</p>
- * <p>Crop regions (android.scaler.CropRegion), AE/AWB/AF regions and face coordinates still
+ * <p>Crop regions ACAMERA_SCALER_CROP_REGION, AE/AWB/AF regions and face coordinates still
* use the ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE coordinate system as usual.</p>
*
* @see ACAMERA_LENS_DISTORTION
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
* @see ACAMERA_LENS_POSE_ROTATION
* @see ACAMERA_LENS_POSE_TRANSLATION
+ * @see ACAMERA_SCALER_CROP_REGION
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
* @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
* @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
*/
ACAMERA_SCALER_RAW_CROP_REGION = // int32[4]
- ACAMERA_SCALER_START + 26,
+ ACAMERA_SCALER_START + 27,
ACAMERA_SCALER_END,
/**
@@ -11526,6 +11531,7 @@
+
__END_DECLS
#endif /* _NDK_CAMERA_METADATA_TAGS_H */
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 4c54658..7d7868b 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -35,6 +35,7 @@
ACameraMetadata_copy;
ACameraMetadata_free;
ACameraMetadata_getAllTags;
+ ACameraMetadata_getTagFromName; # introduced=35
ACameraMetadata_getConstEntry;
ACameraMetadata_isLogicalMultiCamera; # introduced=29
ACameraMetadata_fromCameraMetadata; # introduced=30
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index 3aa7817..099786b 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -396,6 +396,7 @@
template <class T>
void CameraManagerGlobal::registerAvailCallback(const T *callback) {
+ getCameraService();
Mutex::Autolock _l(mLock);
Callback cb(callback);
auto res = mCallbacks.insert(cb);
diff --git a/camera/tests/Android.bp b/camera/tests/Android.bp
index 65b8b41..9aaac6a 100644
--- a/camera/tests/Android.bp
+++ b/camera/tests/Android.bp
@@ -13,6 +13,7 @@
// limitations under the License.
package {
+ default_team: "trendy_team_camera_framework",
// See: http://go/android-license-faq
default_applicable_licenses: [
"frameworks_av_camera_license",
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
index b74b7a1..bd97c39 100644
--- a/camera/tests/fuzzer/Android.bp
+++ b/camera/tests/fuzzer/Android.bp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
package {
+ default_team: "trendy_team_camera_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_camera_license"
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index 0b0d46a..9a06bd2 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -107,6 +107,17 @@
installable: false, // installed in APEX
}
+cc_binary {
+ name: "android.hardware.drm-service-lazy.clearkey.apex",
+ stem: "android.hardware.drm-service-lazy.clearkey",
+ defaults: [
+ "aidl_clearkey_service_defaults",
+ "aidl_clearkey_service_defaults-use-static-deps",
+ ],
+ srcs: ["ServiceLazy.cpp"],
+ installable: false, // installed in APEX
+}
+
phony {
name: "android.hardware.drm@latest-service.clearkey",
required: [
@@ -183,17 +194,63 @@
"android.hardware.drm-service.clearkey.apex.rc",
"android.hardware.drm-service.clearkey.xml"
],
+ overrides: [
+ "android.hardware.drm-service.clearkey",
+ ],
}
prebuilt_etc {
name: "android.hardware.drm-service.clearkey.apex.rc",
- src: "android.hardware.drm-service.clearkey.apex.rc",
+ src: ":gen-android.hardware.drm-service.clearkey.apex.rc",
installable: false,
}
+genrule {
+ name: "gen-android.hardware.drm-service.clearkey.apex.rc",
+ srcs: ["android.hardware.drm-service.clearkey.rc"],
+ out: ["android.hardware.drm-service.clearkey.apex.rc"],
+ cmd: "sed -E 's%/vendor/bin/%/apex/com.android.hardware.drm.clearkey/bin/%' $(in) > $(out)",
+}
+
prebuilt_etc {
name: "android.hardware.drm-service.clearkey.xml",
src: "android.hardware.drm-service.clearkey.xml",
sub_dir: "vintf",
installable: false,
}
+
+apex {
+ name: "com.android.hardware.drm.clearkey.lazy",
+ manifest: "manifest.json",
+ file_contexts: "file_contexts",
+ key: "com.android.hardware.key",
+ certificate: ":com.android.hardware.certificate",
+ vendor: true,
+ updatable: false,
+
+ binaries: [
+ "android.hardware.drm-service-lazy.clearkey.apex",
+ ],
+ prebuilts: [
+ "android.hardware.drm-service-lazy.clearkey.apex.rc",
+ "android.hardware.drm-service.clearkey.xml"
+ ],
+ overrides: [
+ "android.hardware.drm-service.clearkey",
+ "android.hardware.drm-service-lazy.clearkey",
+ "com.android.hardware.drm.clearkey",
+ ],
+}
+
+prebuilt_etc {
+ name: "android.hardware.drm-service-lazy.clearkey.apex.rc",
+ src: ":gen-android.hardware.drm-service-lazy.clearkey.apex.rc",
+ installable: false,
+}
+
+genrule {
+ name: "gen-android.hardware.drm-service-lazy.clearkey.apex.rc",
+ srcs: ["android.hardware.drm-service-lazy.clearkey.rc"],
+ out: ["android.hardware.drm-service-lazy.clearkey.apex.rc"],
+ cmd: "sed -E 's%/vendor/bin/%/apex/com.android.hardware.drm.clearkey/bin/%' $(in) > $(out)",
+}
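To make the genrule rewrites concrete: assuming the source .rc files start the service from the usual /vendor/bin/hw/ path, the sed expression changes the service line roughly as follows (the resulting path matches the hand-written apex.rc that is deleted below):

    before: service vendor.drm-clearkey-service /vendor/bin/hw/android.hardware.drm-service.clearkey
    after:  service vendor.drm-clearkey-service /apex/com.android.hardware.drm.clearkey/bin/hw/android.hardware.drm-service.clearkey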
diff --git a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc
deleted file mode 100644
index f4645b3..0000000
--- a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc
+++ /dev/null
@@ -1,7 +0,0 @@
-service vendor.drm-clearkey-service /apex/com.android.hardware.drm.clearkey/bin/hw/android.hardware.drm-service.clearkey
- class hal
- user media
- group mediadrm drmrpc
- ioprio rt 4
- task_profiles ProcessCapacityHigh
- interface aidl android.hardware.drm.IDrmFactory/clearkey
diff --git a/include/media/Interpolator.h b/include/media/Interpolator.h
index 0ee8779..e26290f 100644
--- a/include/media/Interpolator.h
+++ b/include/media/Interpolator.h
@@ -204,7 +204,7 @@
mInterpolatorType = interpolatorType;
return NO_ERROR;
default:
- ALOGE("invalid interpolatorType: %d", interpolatorType);
+ ALOGE("invalid interpolatorType: %d", static_cast<int>(interpolatorType));
return BAD_VALUE;
}
}
diff --git a/media/Android.mk b/media/Android.mk
deleted file mode 100644
index 220a358..0000000
--- a/media/Android.mk
+++ /dev/null
@@ -1,5 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.conf))
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.xml))
-$(eval $(call declare-1p-copy-files,frameworks/av/media/libstagefright,))
diff --git a/media/aconfig/Android.bp b/media/aconfig/Android.bp
index ee25c03..16beb28 100644
--- a/media/aconfig/Android.bp
+++ b/media/aconfig/Android.bp
@@ -2,6 +2,7 @@
aconfig_declarations {
name: "aconfig_mediacodec_flags",
package: "com.android.media.codec.flags",
+ container: "system",
srcs: ["mediacodec_flags.aconfig"],
}
@@ -28,6 +29,7 @@
aconfig_declarations {
name: "aconfig_codec_fwk_flags",
package: "android.media.codec",
+ container: "system",
srcs: ["codec_fwk.aconfig"],
}
@@ -41,6 +43,7 @@
name: "android.media.codec-aconfig-cc",
min_sdk_version: "30",
vendor_available: true,
+ double_loadable: true,
apex_available: [
"//apex_available:platform",
"com.android.media.swcodec",
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index 183bd99..a2b6a82 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -3,9 +3,18 @@
# !!! Please add flags in alphabetical order. !!!
package: "android.media.codec"
+container: "system"
+
+flag {
+ name: "aidl_hal_input_surface"
+ namespace: "codec_fwk"
+ description: "Feature flags for enabling AIDL HAL InputSurface handling"
+ bug: "201479783"
+}
flag {
name: "dynamic_color_aspects"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for dynamic color aspect support"
bug: "297914560"
@@ -13,14 +22,74 @@
flag {
name: "hlg_editing"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for HLG editing support"
bug: "316397061"
}
flag {
+ name: "in_process_sw_audio_codec"
+ is_exported: true
+ namespace: "codec_fwk"
+ description: "Feature flag for in-process software audio codec API"
+ bug: "297922713"
+}
+
+flag {
+ name: "in_process_sw_audio_codec_support"
+ namespace: "codec_fwk"
+ description: "Feature flag for in-process software audio codec support"
+ bug: "325520135"
+}
+
+flag {
+ name: "large_audio_frame_finish"
+ namespace: "codec_fwk"
+ description: "Implementation flag for large audio frame finishing tasks"
+ bug: "325512893"
+}
+
+flag {
+ name: "native_capabilites"
+ namespace: "codec_fwk"
+ description: "Feature flag for native codec capabilities"
+ bug: "306023029"
+}
+
+flag {
name: "null_output_surface"
+ is_exported: true
+ namespace: "codec_fwk"
+ description: "Feature flag for null output Surface API"
+ bug: "297920102"
+}
+
+flag {
+ name: "null_output_surface_support"
namespace: "codec_fwk"
description: "Feature flag for null output Surface support"
- bug: "297920102"
+ bug: "325550522"
+}
+
+flag {
+ name: "region_of_interest"
+ is_exported: true
+ namespace: "codec_fwk"
+ description: "Feature flag for region of interest API"
+ bug: "299191092"
+}
+
+flag {
+ name: "region_of_interest_support"
+ namespace: "codec_fwk"
+ description: "Feature flag for region of interest support"
+ bug: "325549730"
+}
+
+flag {
+ name: "teamfood"
+ namespace: "codec_fwk"
+ description: "Feature flag to track teamfood population"
+ bug: "328770262"
}
diff --git a/media/aconfig/mediacodec_flags.aconfig b/media/aconfig/mediacodec_flags.aconfig
index be0fc5c..3cc9a1a 100644
--- a/media/aconfig/mediacodec_flags.aconfig
+++ b/media/aconfig/mediacodec_flags.aconfig
@@ -1,4 +1,5 @@
package: "com.android.media.codec.flags"
+container: "system"
# ******************************************************************
# !!! DO NOT ADD FURTHER FLAGS TO THIS FILE !!!
@@ -6,22 +7,23 @@
# ******************************************************************
flag {
- name: "large_audio_frame"
+ name: "aidl_hal"
namespace: "codec_fwk"
- description: "Feature flags for large audio frame support"
- bug: "297219557"
+ description: "Feature flags for enabling AIDL HAL handling"
+ bug: "251850069"
}
flag {
name: "codec_importance"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flags for media codec importance"
bug: "297929011"
}
flag {
- name: "aidl_hal"
+ name: "large_audio_frame"
namespace: "codec_fwk"
- description: "Feature flags for enabling AIDL HAL handling"
- bug: "251850069"
+ description: "Feature flags for large audio frame support"
+ bug: "297219557"
}
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index af97dac..6d21e97 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -8,18 +8,21 @@
aconfig_declarations {
name: "com.android.media.audioserver-aconfig",
package: "com.android.media.audioserver",
+ container: "system",
srcs: ["audioserver.aconfig"],
}
aconfig_declarations {
name: "com.android.media.audio-aconfig",
package: "com.android.media.audio",
+ container: "system",
srcs: ["audio.aconfig"],
}
aconfig_declarations {
name: "com.android.media.aaudio-aconfig",
package: "com.android.media.aaudio",
+ container: "system",
srcs: ["aaudio.aconfig"],
}
@@ -81,6 +84,7 @@
aconfig_declarations {
name: "android.media.audio-aconfig",
package: "android.media.audio",
+ container: "system",
srcs: ["audio_framework.aconfig"],
visibility: ["//frameworks/base/api"],
}
@@ -88,6 +92,7 @@
aconfig_declarations {
name: "android.media.audiopolicy-aconfig",
package: "android.media.audiopolicy",
+ container: "system",
srcs: ["audiopolicy_framework.aconfig"],
visibility: ["//frameworks/base/api"],
}
@@ -95,6 +100,7 @@
aconfig_declarations {
name: "android.media.midi-aconfig",
package: "android.media.midi",
+ container: "system",
srcs: ["midi_flags.aconfig"],
visibility: ["//frameworks/base/api"],
}
@@ -122,11 +128,17 @@
defaults: ["framework-minus-apex-aconfig-java-defaults"],
}
-filegroup {
+cc_aconfig_library {
+ name: "android.media.audiopolicy-aconfig-cc",
+ aconfig_declarations: "android.media.audiopolicy-aconfig",
+ defaults: ["audio-aconfig-cc-defaults"],
+}
+
+aconfig_declarations_group {
name: "audio-framework-aconfig",
- srcs: [
- ":android.media.audio-aconfig-java{.generated_srcjars}",
- ":android.media.audiopolicy-aconfig-java{.generated_srcjars}",
- ":android.media.midi-aconfig-java{.generated_srcjars}",
+ java_aconfig_libraries: [
+ "android.media.audio-aconfig-java",
+ "android.media.audiopolicy-aconfig-java",
+ "android.media.midi-aconfig-java",
],
}
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index 7196525..c160109 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -3,6 +3,7 @@
# Please add flags in alphabetical order.
package: "com.android.media.aaudio"
+container: "system"
flag {
name: "sample_rate_conversion"
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 0b2a5c4..1df5727 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -3,6 +3,7 @@
# Please add flags in alphabetical order.
package: "com.android.media.audio"
+container: "system"
flag {
name: "alarm_min_volume_zero"
@@ -12,6 +13,13 @@
}
flag {
+ name: "as_device_connection_failure"
+ namespace: "media_audio"
+ description: "AudioService handles device connection failures."
+ bug: "326597760"
+}
+
+flag {
name: "bluetooth_mac_address_anonymization"
namespace: "media_audio"
description:
@@ -52,8 +60,22 @@
}
flag {
+ name: "spatializer_upmix"
+ namespace: "media_audio"
+ description: "Enable spatializer upmix"
+ bug: "323985367"
+}
+
+flag {
name: "stereo_spatialization"
namespace: "media_audio"
description: "Enable stereo channel mask for spatialization."
bug: "303920722"
}
+
+flag {
+ name: "volume_refactoring"
+ namespace: "media_audio"
+ description: "Refactor the audio volume internal architecture logic"
+ bug: "324152869"
+}
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 34c026a..4018efc 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -4,6 +4,7 @@
# Please add flags in alphabetical order.
package: "android.media.audio"
+container: "system"
flag {
name: "auto_public_volume_api_hardening"
@@ -22,7 +23,16 @@
}
flag {
+ name: "feature_spatial_audio_headtracking_low_latency"
+ is_exported: true
+ namespace: "media_audio"
+ description: "Define feature for low latency headtracking for SA"
+ bug: "324291076"
+}
+
+flag {
name: "focus_exclusive_with_recording"
+ is_exported: true
namespace: "media_audio"
description:
"Audio focus GAIN_TRANSIENT_EXCLUSIVE only mutes"
@@ -30,9 +40,20 @@
bug: "316414750"
}
+flag {
+ name: "foreground_audio_control"
+ is_exported: true
+ namespace: "media_audio"
+ description:
+ "Audio focus gain requires FGS or delegation to "
+ "take effect"
+ bug: "296232417"
+}
+
# TODO remove
flag {
name: "focus_freeze_test_api"
+ is_exported: true
namespace: "media_audio"
description: "\
AudioManager audio focus test APIs:\
@@ -46,10 +67,44 @@
flag {
name: "loudness_configurator_api"
+ is_exported: true
namespace: "media_audio"
description: "\
Enable the API for providing loudness metadata and CTA-2075 \
support."
bug: "298463873"
}
+flag {
+ name: "mute_background_audio"
+ namespace: "media_audio"
+ description: "mute audio playing in background"
+ bug: "296232417"
+}
+
+flag {
+ name: "sco_managed_by_audio"
+ is_exported: true
+ namespace: "media_audio"
+ description: "\
+Enable new implementation of headset profile device connection and\
+SCO audio activation."
+ bug: "265057196"
+}
+
+flag {
+ name: "supported_device_types_api"
+ is_exported: true
+ namespace: "media_audio"
+ description: "Surface new API method AudioManager.getSupportedDeviceTypes()"
+ bug: "307537538"
+}
+
+flag {
+ name: "volume_ringer_api_hardening"
+ namespace: "media_audio"
+ description: "Limit access to volume and ringer SDK APIs in AudioManager"
+ bug: "296232417"
+ is_fixed_read_only: true
+}
diff --git a/media/audio/aconfig/audiopolicy_framework.aconfig b/media/audio/aconfig/audiopolicy_framework.aconfig
index b41c1c3..28b6c7f 100644
--- a/media/audio/aconfig/audiopolicy_framework.aconfig
+++ b/media/audio/aconfig/audiopolicy_framework.aconfig
@@ -4,9 +4,36 @@
# Please add flags in alphabetical order.
package: "android.media.audiopolicy"
+container: "system"
+
+flag {
+ name: "audio_mix_ownership"
+ namespace: "media_audio"
+ description: "Improves ownership model of AudioMixes and the relationship between AudioPolicy and AudioMix."
+ bug: "309080867"
+ is_fixed_read_only: true
+}
+
+flag {
+ name: "audio_mix_policy_ordering"
+ namespace: "media_audio"
+ description: "Orders AudioMixes per registered AudioPolicy."
+ bug: "309080867"
+ is_fixed_read_only: true
+}
+
+flag {
+ name: "audio_mix_test_api"
+ is_exported: true
+ namespace: "media_audio"
+ description: "Enable new Test APIs that provide access to registered AudioMixes on system server and native side."
+ bug: "309080867"
+ is_fixed_read_only: true
+}
flag {
name: "audio_policy_update_mixing_rules_api"
+ is_exported: true
namespace: "media_audio"
description: "Enable AudioPolicy.updateMixingRules API for hot-swapping audio mixing rules."
bug: "293874525"
@@ -14,6 +41,7 @@
flag {
name: "enable_fade_manager_configuration"
+ is_exported: true
namespace: "media_audio"
description: "Enable Fade Manager Configuration support to determine fade properties"
bug: "307354764"
@@ -25,3 +53,11 @@
description: "Enable multi-zone audio support in audio product strategies."
bug: "316643994"
}
+
+flag {
+ name: "record_audio_device_aware_permission"
+ namespace: "media_audio"
+ description: "Enable device-aware permission handling for RECORD_AUDIO permission"
+ bug: "291737188"
+ is_fixed_read_only: true
+}
\ No newline at end of file
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 21ea1a2..5c6504f 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -3,6 +3,7 @@
# Please add flags in alphabetical order.
package: "com.android.media.audioserver"
+container: "system"
flag {
name: "direct_track_reprioritization"
diff --git a/media/audio/aconfig/midi_flags.aconfig b/media/audio/aconfig/midi_flags.aconfig
index ff9238a..1620e1b 100644
--- a/media/audio/aconfig/midi_flags.aconfig
+++ b/media/audio/aconfig/midi_flags.aconfig
@@ -4,9 +4,11 @@
# Please add flags in alphabetical order.
package: "android.media.midi"
+container: "system"
flag {
name: "virtual_ump"
+ is_exported: true
namespace: "media_audio"
description: "Enable virtual UMP MIDI."
bug: "291115176"
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 1a6c7f1..77418eb 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -904,7 +904,7 @@
case Tag::voiceMask:
return convert(aidl, mVoice, __func__, "voice");
}
- ALOGE("%s: unexpected tag value %d", __func__, aidl.getTag());
+ ALOGE("%s: unexpected tag value %d", __func__, static_cast<int>(aidl.getTag()));
return unexpected(BAD_VALUE);
}
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index 2030dc7..479e13a 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -71,7 +72,6 @@
"frameworks/av/services/medialog",
"frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-
],
init_rc: ["audioserver.rc"],
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index c7a1bfd..55847f4 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -169,6 +169,11 @@
"%s: AudioSystem already has an AudioFlinger instance!", __func__);
const auto aps = sp<AudioPolicyService>::make();
ALOGD("%s: AudioPolicy created", __func__);
+ ALOGW_IF(AudioSystem::setLocalAudioPolicyService(aps) != OK,
+ "%s: AudioSystem already has an AudioPolicyService instance!", __func__);
+
+ // Start initialization of internally managed audio objects such as Device Effects.
+ aps->onAudioSystemReady();
// Add AudioFlinger and AudioPolicy to ServiceManager.
sp<IServiceManager> sm = defaultServiceManager();
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index d1b08bd..c770d0c 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -35,14 +35,14 @@
#define FILEREAD_MAX_LAYERS 2
-#define DRC_DEFAULT_MOBILE_REF_LEVEL -16.0 /* 64*-0.25dB = -16 dB below full scale for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_CUT 1.0 /* maximum compression of dynamic range for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_BOOST 1.0 /* maximum compression of dynamic range for mobile conf */
-#define DRC_DEFAULT_MOBILE_DRC_HEAVY C2Config::DRC_COMPRESSION_HEAVY /* switch for heavy compression for mobile conf */
+#define DRC_DEFAULT_MOBILE_REF_LEVEL 64 /* 64*-0.25dB = -16 dB below full scale for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_CUT 127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1 /* switch for heavy compression for mobile conf */
#define DRC_DEFAULT_MOBILE_DRC_EFFECT 3 /* MPEG-D DRC effect type; 3 => Limited playback range */
#define DRC_DEFAULT_MOBILE_DRC_ALBUM 0 /* MPEG-D DRC album mode; 0 => album mode is disabled, 1 => album mode is enabled */
#define DRC_DEFAULT_MOBILE_OUTPUT_LOUDNESS (0.25) /* decoder output loudness; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
-#define DRC_DEFAULT_MOBILE_ENC_LEVEL (0.25) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
+#define DRC_DEFAULT_MOBILE_ENC_LEVEL (-1) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */
// names of properties that can be used to override the default DRC settings
#define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level"
@@ -145,9 +145,13 @@
.withSetter(ProfileLevelSetter)
.build());
+ C2Config::drc_compression_mode_t defaultCompressionMode =
+ property_get_int32(PROP_DRC_OVERRIDE_HEAVY, DRC_DEFAULT_MOBILE_DRC_HEAVY) == 1
+ ? C2Config::DRC_COMPRESSION_HEAVY
+ : C2Config::DRC_COMPRESSION_LIGHT;
addParameter(
DefineParam(mDrcCompressMode, C2_PARAMKEY_DRC_COMPRESSION_MODE)
- .withDefault(new C2StreamDrcCompressionModeTuning::input(0u, C2Config::DRC_COMPRESSION_HEAVY))
+ .withDefault(new C2StreamDrcCompressionModeTuning::input(0u, defaultCompressionMode))
.withFields({
C2F(mDrcCompressMode, value).oneOf({
C2Config::DRC_COMPRESSION_ODM_DEFAULT,
@@ -158,37 +162,48 @@
.withSetter(Setter<decltype(*mDrcCompressMode)>::StrictValueWithNoDeps)
.build());
+
+ float defaultRefLevel = -0.25 * property_get_int32(PROP_DRC_OVERRIDE_REF_LEVEL,
+ DRC_DEFAULT_MOBILE_REF_LEVEL);
addParameter(
DefineParam(mDrcTargetRefLevel, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL)
- .withDefault(new C2StreamDrcTargetReferenceLevelTuning::input(0u, DRC_DEFAULT_MOBILE_REF_LEVEL))
+ .withDefault(new C2StreamDrcTargetReferenceLevelTuning::input(0u, defaultRefLevel))
.withFields({C2F(mDrcTargetRefLevel, value).inRange(-31.75, 0.25)})
.withSetter(Setter<decltype(*mDrcTargetRefLevel)>::StrictValueWithNoDeps)
.build());
+ float defaultEncLevel = -0.25 * property_get_int32(PROP_DRC_OVERRIDE_ENC_LEVEL,
+ DRC_DEFAULT_MOBILE_ENC_LEVEL);
addParameter(
DefineParam(mDrcEncTargetLevel, C2_PARAMKEY_DRC_ENCODED_TARGET_LEVEL)
- .withDefault(new C2StreamDrcEncodedTargetLevelTuning::input(0u, DRC_DEFAULT_MOBILE_ENC_LEVEL))
+ .withDefault(new C2StreamDrcEncodedTargetLevelTuning::input(0u, defaultEncLevel))
.withFields({C2F(mDrcEncTargetLevel, value).inRange(-31.75, 0.25)})
.withSetter(Setter<decltype(*mDrcEncTargetLevel)>::StrictValueWithNoDeps)
.build());
+ float defaultDrcBoost =
+ property_get_int32(PROP_DRC_OVERRIDE_BOOST, DRC_DEFAULT_MOBILE_DRC_BOOST) / 127.;
addParameter(
DefineParam(mDrcBoostFactor, C2_PARAMKEY_DRC_BOOST_FACTOR)
- .withDefault(new C2StreamDrcBoostFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_BOOST))
+ .withDefault(new C2StreamDrcBoostFactorTuning::input(0u, defaultDrcBoost))
.withFields({C2F(mDrcBoostFactor, value).inRange(0, 1.)})
.withSetter(Setter<decltype(*mDrcBoostFactor)>::StrictValueWithNoDeps)
.build());
+ float defaultDrcCut =
+ property_get_int32(PROP_DRC_OVERRIDE_CUT, DRC_DEFAULT_MOBILE_DRC_CUT) / 127.;
addParameter(
DefineParam(mDrcAttenuationFactor, C2_PARAMKEY_DRC_ATTENUATION_FACTOR)
- .withDefault(new C2StreamDrcAttenuationFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_CUT))
+ .withDefault(new C2StreamDrcAttenuationFactorTuning::input(0u, defaultDrcCut))
.withFields({C2F(mDrcAttenuationFactor, value).inRange(0, 1.)})
.withSetter(Setter<decltype(*mDrcAttenuationFactor)>::StrictValueWithNoDeps)
.build());
+ C2Config::drc_effect_type_t defaultDrcEffectType = (C2Config::drc_effect_type_t)
+ property_get_int32(PROP_DRC_OVERRIDE_EFFECT, DRC_DEFAULT_MOBILE_DRC_EFFECT);
addParameter(
DefineParam(mDrcEffectType, C2_PARAMKEY_DRC_EFFECT_TYPE)
- .withDefault(new C2StreamDrcEffectTypeTuning::input(0u, C2Config::DRC_EFFECT_LIMITED_PLAYBACK_RANGE))
+ .withDefault(new C2StreamDrcEffectTypeTuning::input(0u, defaultDrcEffectType))
.withFields({
C2F(mDrcEffectType, value).oneOf({
C2Config::DRC_EFFECT_ODM_DEFAULT,
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 7c9d3e8..128e16e 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -605,7 +605,7 @@
mCodecConfiguration->g_timebase.den = 1000000;
// rc_target_bitrate is in kbps, mBitrate in bps
mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
- mCodecConfiguration->rc_end_usage = mBitrateControlMode == AOM_Q ? AOM_Q : AOM_CBR;
+ mCodecConfiguration->rc_end_usage = mBitrateControlMode;
// Disable frame drop - not allowed in MediaCodec now.
mCodecConfiguration->rc_dropframe_thresh = 0;
// Disable lagged encoding.
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 96a4c4a..3385b95 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -416,6 +416,7 @@
ivdext_create_op_t s_create_op = {};
s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+ s_create_ip.u4_keep_threads_active = 1;
s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index e424860..80a5e67 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -152,6 +152,17 @@
.build());
addParameter(
+ DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+ .withDefault(new C2StreamBitrateModeTuning::output(0u, C2Config::BITRATE_VARIABLE))
+ .withFields({C2F(mBitrateMode, value).oneOf({
+ C2Config::BITRATE_CONST,
+ C2Config::BITRATE_VARIABLE,
+ C2Config::BITRATE_IGNORE})
+ })
+ .withSetter(Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
.withDefault(new C2StreamBitrateInfo::output(0u, 64000))
.withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
@@ -536,6 +547,9 @@
std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const { return mIntraRefresh; }
std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+ std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const {
+ return mBitrateMode;
+ }
std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
@@ -552,6 +566,7 @@
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
std::shared_ptr<C2StreamGopTuning::output> mGop;
@@ -1154,6 +1169,7 @@
{
IntfImpl::Lock lock = mIntf->lock();
mSize = mIntf->getSize_l();
+ mBitrateMode = mIntf->getBitrateMode_l();
mBitrate = mIntf->getBitrate_l();
mFrameRate = mIntf->getFrameRate_l();
mIntraRefresh = mIntf->getIntraRefresh_l();
@@ -1326,8 +1342,23 @@
} else {
ps_init_ip->u4_enable_recon = 0;
}
+
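+ // Map the configured C2 bitrate mode to the corresponding ih264e rate-control mode.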
+ switch (mBitrateMode->value) {
+ case C2Config::BITRATE_IGNORE:
+ ps_init_ip->e_rc_mode = IVE_RC_NONE;
+ break;
+ case C2Config::BITRATE_CONST:
+ ps_init_ip->e_rc_mode = IVE_RC_CBR_NON_LOW_DELAY;
+ break;
+ case C2Config::BITRATE_VARIABLE:
+ ps_init_ip->e_rc_mode = IVE_RC_STORAGE;
+ break;
+ default:
+ ps_init_ip->e_rc_mode = DEFAULT_RC_MODE;
+ break;
+ }
ps_init_ip->e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
- ps_init_ip->e_rc_mode = DEFAULT_RC_MODE;
ps_init_ip->u4_max_framerate = DEFAULT_MAX_FRAMERATE;
ps_init_ip->u4_max_bitrate = DEFAULT_MAX_BITRATE;
ps_init_ip->u4_num_bframes = mBframes;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index cde6604..33d166f 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -191,6 +191,7 @@
std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 15d6dcd..81db2a1 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -407,6 +407,7 @@
ivdext_create_op_t s_create_op = {};
s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+ s_create_ip.u4_keep_threads_active = 1;
s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorformat;
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 439323c..491098d 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -433,6 +433,7 @@
s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
s_fill_mem_ip.u4_share_disp_buf = 0;
+ s_fill_mem_ip.u4_keep_threads_active = 1;
s_fill_mem_ip.e_output_format = mIvColorformat;
s_fill_mem_ip.u4_deinterlace = 1;
s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
@@ -474,6 +475,7 @@
s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
s_init_ip.u4_share_disp_buf = 0;
s_init_ip.u4_deinterlace = 1;
+ s_init_ip.u4_keep_threads_active = 1;
s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 76e74ec..0384e2e 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -29,6 +29,12 @@
#define INT32_MAX 2147483647
#endif
+/* Quantization param values defined by the spec */
+#define VPX_QP_MIN 0
+#define VPX_QP_MAX 63
+#define VPX_QP_DEFAULT_MIN VPX_QP_MIN
+#define VPX_QP_DEFAULT_MAX VPX_QP_MAX
+
namespace android {
C2SoftVpxEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
@@ -197,6 +203,20 @@
})
.withSetter(CodedColorAspectsSetter, mColorAspects)
.build());
+
+ addParameter(
+ DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+ .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+ 0 /* flexCount */, 0u /* stream */))
+ .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+ {C2Config::I_FRAME, C2Config::P_FRAME}),
+ C2F(mPictureQuantization, m.values[0].min).inRange(
+ VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX),
+ C2F(mPictureQuantization, m.values[0].max).inRange(
+ VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX)})
+ .withSetter(PictureQuantizationSetter)
+ .build());
+
}
C2R C2SoftVpxEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
@@ -330,6 +350,58 @@
double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}
+
+C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(bool mayBlock,
+ C2P<C2StreamPictureQuantizationTuning::output>
+ &me) {
+ (void)mayBlock;
+ // these are the ones we're going to set, so we want them to default
+ // to the DEFAULT values for the codec
+ int32_t iMin = VPX_QP_DEFAULT_MIN, pMin = VPX_QP_DEFAULT_MIN;
+ int32_t iMax = VPX_QP_DEFAULT_MAX, pMax = VPX_QP_DEFAULT_MAX;
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+ // layerMin is clamped to [VPX_QP_MIN, layerMax] to avoid error
+ // cases where layer.min > layer.max
+ int32_t layerMax = std::clamp(layer.max, VPX_QP_MIN, VPX_QP_MAX);
+ int32_t layerMin = std::clamp(layer.min, VPX_QP_MIN, layerMax);
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ iMax = layerMax;
+ iMin = layerMin;
+ ALOGV("iMin %d iMax %d", iMin, iMax);
+ } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ pMax = layerMax;
+ pMin = layerMin;
+ ALOGV("pMin %d pMax %d", pMin, pMax);
+ }
+ }
+ ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d",
+ iMin, iMax, pMin, pMax);
+
+ // vpx library takes same range for I/P picture type
+ int32_t maxFrameQP = std::min({iMax, pMax});
+ int32_t minFrameQP = std::max({iMin, pMin});
+ if (minFrameQP > maxFrameQP) {
+ minFrameQP = maxFrameQP;
+ }
+ // put them back into the structure
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ me.set().m.values[i].max = maxFrameQP;
+ me.set().m.values[i].min = minFrameQP;
+ }
+ else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ me.set().m.values[i].max = maxFrameQP;
+ me.set().m.values[i].min = minFrameQP;
+ }
+ }
+ ALOGV("PictureQuantizationSetter(exit): minFrameQP = %d maxFrameQP = %d",
+ minFrameQP, maxFrameQP);
+ return C2R::Ok();
+}
+
C2R C2SoftVpxEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
C2P<C2StreamColorAspectsInfo::input>& me) {
(void)mayBlock;
@@ -453,6 +525,7 @@
mRequestSync = mIntf->getRequestSync_l();
mLayering = mIntf->getTemporalLayers_l();
mTemporalLayers = mLayering->m.layerCount;
+ mQpBounds = mIntf->getPictureQuantization_l();
}
switch (mBitrateMode->value) {
@@ -466,6 +539,18 @@
break;
}
+ if (mQpBounds->flexCount() > 0) {
+ // read min max qp for sequence
+ for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ mMaxQuantizer = layer.max;
+ mMinQuantizer = layer.min;
+ break;
+ }
+ }
+ }
+
setCodecSpecificInterface();
if (!mCodecInterface) goto CleanUp;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index bfb4444..980de04 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -219,6 +219,7 @@
std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
std::shared_ptr<C2StreamTemporalLayeringTuning::output> mLayering;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
C2_DO_NOT_COPY(C2SoftVpxEnc);
};
@@ -250,6 +251,9 @@
static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me);
+ static C2R PictureQuantizationSetter(bool mayBlock,
+ C2P<C2StreamPictureQuantizationTuning::output> &me);
+
// unsafe getters
std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const {
@@ -269,6 +273,9 @@
std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
return mCodedColorAspects;
}
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+ return mPictureQuantization;
+ }
uint32_t getSyncFramePeriod() const;
static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me);
static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
@@ -287,6 +294,7 @@
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
};
} // namespace android
diff --git a/media/codec2/core/include/C2Param.h b/media/codec2/core/include/C2Param.h
index e938f96..387d2b8 100644
--- a/media/codec2/core/include/C2Param.h
+++ b/media/codec2/core/include/C2Param.h
@@ -427,7 +427,9 @@
inline bool operator==(const C2Param &o) const {
return equals(o) && memcmp(this, &o, _mSize) == 0;
}
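+ // C++20 synthesizes operator!= from operator==, so keep the explicit definition only for older standards.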
+#if __cplusplus < 202002
inline bool operator!=(const C2Param &o) const { return !operator==(o); }
+#endif
/// safe(r) type cast from pointer and size
inline static C2Param* From(void *addr, size_t len) {
diff --git a/media/codec2/core/include/C2ParamDef.h b/media/codec2/core/include/C2ParamDef.h
index 86dfe65..1805464 100644
--- a/media/codec2/core/include/C2ParamDef.h
+++ b/media/codec2/core/include/C2ParamDef.h
@@ -212,6 +212,26 @@
}
};
+/// Define equality (and inequality) operators for params.
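+/// From C++20 onward operator!= is synthesized from operator==, so it is only
+/// defined explicitly when building with an older standard.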
+#if __cplusplus < 202002
+
+#define DEFINE_EQUALITY_OPERATORS(_Type, T) \
+ inline bool operator==(const _Type &o) const { \
+ return this->T::operator==(o); \
+ } \
+ inline bool operator!=(const _Type &o) const { \
+ return !operator==(o); \
+ }
+
+#else
+
+#define DEFINE_EQUALITY_OPERATORS(_Type, T) \
+ inline bool operator==(const _Type &o) const { \
+ return this->T::operator==(o); \
+ }
+
+#endif
+
/// Define From() cast operators for params.
#define DEFINE_CAST_OPERATORS(_Type) \
inline static _Type* From(C2Param *other) { \
@@ -404,12 +424,12 @@
/// Specialization for an input port parameter.
struct input : public T, public S,
public _C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_INPUT> {
- using T::operator!=;
_C2_CORE_INDEX_OVERRIDE(ParamIndex)
/// Wrapper around base structure's constructor.
template<typename ...Args>
inline input(const Args(&... args)) : T(sizeof(_Type), input::PARAM_TYPE), S(args...) { }
+ DEFINE_EQUALITY_OPERATORS(input, T)
DEFINE_CAST_OPERATORS(input)
};
@@ -417,12 +437,12 @@
/// Specialization for an output port parameter.
struct output : public T, public S,
public _C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_OUTPUT> {
- using T::operator!=;
_C2_CORE_INDEX_OVERRIDE(ParamIndex)
/// Wrapper around base structure's constructor.
template<typename ...Args>
inline output(const Args(&... args)) : T(sizeof(_Type), output::PARAM_TYPE), S(args...) { }
+ DEFINE_EQUALITY_OPERATORS(output, T)
DEFINE_CAST_OPERATORS(output)
};
};
@@ -472,7 +492,6 @@
/// Specialization for an input port parameter.
struct input : public T,
public _C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_INPUT> {
- using T::operator!=;
private:
/// Wrapper around base structure's constructor while also specifying port/direction.
template<typename ...Args>
@@ -482,6 +501,7 @@
public:
S m; ///< wrapped flexible structure
+ DEFINE_EQUALITY_OPERATORS(input, T)
DEFINE_FLEXIBLE_METHODS(input, S)
DEFINE_CAST_OPERATORS(input)
};
@@ -489,7 +509,6 @@
/// Specialization for an output port parameter.
struct output : public T,
public _C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_OUTPUT> {
- using T::operator!=;
private:
/// Wrapper around base structure's constructor while also specifying port/direction.
template<typename ...Args>
@@ -499,6 +518,7 @@
public:
S m; ///< wrapped flexible structure
+ DEFINE_EQUALITY_OPERATORS(output, T)
DEFINE_FLEXIBLE_METHODS(output, S)
DEFINE_CAST_OPERATORS(output)
};
@@ -553,7 +573,6 @@
struct input : public T, public S,
public _C2StructCheck<S, ParamIndex,
T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_INPUT> {
- using T::operator!=;
_C2_CORE_INDEX_OVERRIDE(ParamIndex)
/// Default constructor. Stream-ID is undefined.
@@ -565,6 +584,7 @@
/// Set stream-id. \retval true if the stream-id was successfully set.
inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
+ DEFINE_EQUALITY_OPERATORS(input, T)
DEFINE_CAST_OPERATORS(input)
};
@@ -572,7 +592,6 @@
struct output : public T, public S,
public _C2StructCheck<S, ParamIndex,
T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_OUTPUT> {
- using T::operator!=;
_C2_CORE_INDEX_OVERRIDE(ParamIndex)
/// Default constructor. Stream-ID is undefined.
@@ -584,6 +603,7 @@
/// Set stream-id. \retval true if the stream-id was successfully set.
inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
+ DEFINE_EQUALITY_OPERATORS(output, T)
DEFINE_CAST_OPERATORS(output)
};
};
@@ -640,7 +660,6 @@
struct input : public T,
public _C2FlexStructCheck<S, ParamIndex,
T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_INPUT> {
- using T::operator!=;
private:
/// Default constructor. Stream-ID is undefined.
inline input(size_t flexCount) : T(_Type::CalcSize(flexCount), input::PARAM_TYPE) { }
@@ -655,6 +674,7 @@
/// Set stream-id. \retval true if the stream-id was successfully set.
inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
+ DEFINE_EQUALITY_OPERATORS(input, T)
DEFINE_FLEXIBLE_METHODS(input, S)
DEFINE_CAST_OPERATORS(input)
};
@@ -663,7 +683,6 @@
struct output : public T,
public _C2FlexStructCheck<S, ParamIndex,
T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_OUTPUT> {
- using T::operator!=;
private:
/// Default constructor. Stream-ID is undefined.
inline output(size_t flexCount) : T(_Type::CalcSize(flexCount), output::PARAM_TYPE) { }
@@ -678,6 +697,7 @@
/// Set stream-id. \retval true if the stream-id was successfully set.
inline bool setStream(unsigned stream) { return C2Param::setStream(stream); }
+ DEFINE_EQUALITY_OPERATORS(output, T)
DEFINE_FLEXIBLE_METHODS(output, S)
DEFINE_CAST_OPERATORS(output)
};
diff --git a/media/codec2/hal/aidl/Component.cpp b/media/codec2/hal/aidl/Component.cpp
index 8da9861..87c9d87 100644
--- a/media/codec2/hal/aidl/Component.cpp
+++ b/media/codec2/hal/aidl/Component.cpp
@@ -46,6 +46,8 @@
using ::aidl::android::hardware::common::NativeHandle;
using ::aidl::android::hardware::media::bufferpool2::IClientManager;
using ::ndk::ScopedAStatus;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
// ComponentListener wrapper
struct Component::Listener : public C2Component::Listener {
@@ -139,6 +141,52 @@
std::weak_ptr<IComponentListener> mListener;
};
+// Component listener for handling multiple access-units: gathers per-access-unit
+// output work back into large frames before forwarding it to the framework listener.
+struct MultiAccessUnitListener : public Component::Listener {
+ MultiAccessUnitListener(const std::shared_ptr<Component>& component,
+ const std::shared_ptr<MultiAccessUnitHelper> &helper):
+ Listener(component), mHelper(helper) {
+ }
+
+ virtual void onError_nb(
+ std::weak_ptr<C2Component> c2component,
+ uint32_t errorCode) override {
+ if (mHelper) {
+ std::list<std::unique_ptr<C2Work>> worklist;
+ mHelper->error(&worklist);
+ if (!worklist.empty()) {
+ Listener::onWorkDone_nb(c2component, std::move(worklist));
+ }
+ }
+ Listener::onError_nb(c2component, errorCode);
+ }
+
+ virtual void onTripped_nb(
+ std::weak_ptr<C2Component> c2component,
+ std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+ ) override {
+ Listener::onTripped_nb(c2component,
+ c2settingResult);
+ }
+
+ virtual void onWorkDone_nb(
+ std::weak_ptr<C2Component> c2component,
+ std::list<std::unique_ptr<C2Work>> c2workItems) override {
+ if (mHelper) {
+ std::list<std::unique_ptr<C2Work>> processedWork;
+ mHelper->gather(c2workItems, &processedWork);
+ if (!processedWork.empty()) {
+ Listener::onWorkDone_nb(c2component, std::move(processedWork));
+ }
+ } else {
+ Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+ }
+ }
+
+ protected:
+ std::shared_ptr<MultiAccessUnitHelper> mHelper;
+};
+
// Component::DeathContext
struct Component::DeathContext {
std::weak_ptr<Component> mWeakComp;
@@ -151,14 +199,15 @@
const std::shared_ptr<ComponentStore>& store,
const std::shared_ptr<IClientManager>& clientPoolManager)
: mComponent{component},
- mInterface{SharedRefBase::make<ComponentInterface>(
- component->intf(), store->getParameterCache())},
mListener{listener},
mStore{store},
mBufferPoolSender{clientPoolManager},
mDeathContext(nullptr) {
// Retrieve supported parameters from store
// TODO: We could cache this per component/interface type
+ mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+ mInterface = SharedRefBase::make<ComponentInterface>(
+ component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
mInit = mInterface->status();
}
@@ -181,8 +230,21 @@
registerFrameData(mListener, work->input);
}
}
+ c2_status_t err = C2_OK;
+ if (mMultiAccessUnitHelper) {
+ std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+ mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+ for (auto &c2worklist : c2worklists) {
+ err = mComponent->queue_nb(&c2worklist);
+ if (err != C2_OK) {
+ LOG(ERROR) << "Error queuing to component.";
+ return ScopedAStatus::fromServiceSpecificError(err);
+ }
+ }
+ return ScopedAStatus::ok();
+ }
- c2_status_t err = mComponent->queue_nb(&c2works);
+ err = mComponent->queue_nb(&c2works);
if (err == C2_OK) {
return ScopedAStatus::ok();
}
@@ -194,7 +256,9 @@
c2_status_t c2res = mComponent->flush_sm(
C2Component::FLUSH_COMPONENT,
&c2flushedWorks);
-
+ if (mMultiAccessUnitHelper) {
+ c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+ }
// Unregister input buffers.
for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
if (work) {
@@ -364,6 +428,9 @@
std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
mBlockPools.clear();
}
+ if (mMultiAccessUnitHelper) {
+ mMultiAccessUnitHelper->reset();
+ }
InputBufferManager::unregisterFrameData(mListener);
if (status == C2_OK) {
return ScopedAStatus::ok();
@@ -377,6 +444,9 @@
std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
mBlockPools.clear();
}
+ if (mMultiAccessUnitHelper) {
+ mMultiAccessUnitHelper->reset();
+ }
InputBufferManager::unregisterFrameData(mListener);
if (status == C2_OK) {
return ScopedAStatus::ok();
@@ -415,7 +485,24 @@
void Component::initListener(const std::shared_ptr<Component>& self) {
if (__builtin_available(android __ANDROID_API_T__, *)) {
- std::shared_ptr<C2Component::Listener> c2listener =
+ std::shared_ptr<C2Component::Listener> c2listener;
+ if (mMultiAccessUnitIntf) {
+ std::shared_ptr<C2Allocator> allocator;
+ std::shared_ptr<C2BlockPool> linearPool;
+ std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+ if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+ ::android::C2PlatformAllocatorDesc desc;
+ desc.allocatorId = allocator->getId();
+ if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+ if (linearPool) {
+ mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+ mMultiAccessUnitIntf, linearPool);
+ }
+ }
+ }
+ }
+ c2listener = mMultiAccessUnitHelper ?
+ std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
std::make_shared<Listener>(self);
c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
if (res != C2_OK) {
diff --git a/media/codec2/hal/aidl/ComponentInterface.cpp b/media/codec2/hal/aidl/ComponentInterface.cpp
index 2d812c9..8ae9fa8 100644
--- a/media/codec2/hal/aidl/ComponentInterface.cpp
+++ b/media/codec2/hal/aidl/ComponentInterface.cpp
@@ -24,6 +24,8 @@
#include <utils/Timers.h>
+#include <codec2/common/MultiAccessUnitHelper.h>
+
#include <C2Debug.h>
#include <C2PlatformSupport.h>
@@ -43,9 +45,10 @@
// Implementation of ConfigurableC2Intf based on C2ComponentInterface
struct CompIntf : public ConfigurableC2Intf {
- CompIntf(const std::shared_ptr<C2ComponentInterface>& intf) :
+ CompIntf(const std::shared_ptr<C2ComponentInterface>& intf,
+ const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf):
ConfigurableC2Intf{intf->getName(), intf->getId()},
- mIntf{intf} {
+ mIntf{intf}, mMultiAccessUnitIntf{multiAccessUnitIntf} {
}
virtual c2_status_t config(
@@ -53,7 +56,34 @@
c2_blocking_t mayBlock,
std::vector<std::unique_ptr<C2SettingResult>>* const failures
) override {
- return mIntf->config_vb(params, mayBlock, failures);
+ std::vector<C2Param*> paramsToIntf;
+ std::vector<C2Param*> paramsToLargeFrameIntf;
+ c2_status_t err = C2_OK;
+ if (mMultiAccessUnitIntf == nullptr) {
+ err = mIntf->config_vb(params, mayBlock, failures);
+ return err;
+ }
+ for (auto &p : params) {
+ if (mMultiAccessUnitIntf->isParamSupported(p->index())) {
+ paramsToLargeFrameIntf.push_back(p);
+ } else {
+ paramsToIntf.push_back(p);
+ }
+ }
+ c2_status_t err1 = C2_OK;
+ if (paramsToIntf.size() > 0) {
+ err1 = mIntf->config_vb(paramsToIntf, mayBlock, failures);
+ }
+ if (err1 != C2_OK) {
+ LOG(ERROR) << "config_vb failed on the component interface";
+ }
+ c2_status_t err2 = C2_OK;
+ if (paramsToLargeFrameIntf.size() > 0) {
+ err2 = mMultiAccessUnitIntf->config(
+ paramsToLargeFrameIntf, mayBlock, failures);
+ }
+ // TODO: correct failure vector
+ return err1 != C2_OK ? err1 : err2;
}
virtual c2_status_t query(
@@ -61,23 +91,82 @@
c2_blocking_t mayBlock,
std::vector<std::unique_ptr<C2Param>>* const params
) const override {
- return mIntf->query_vb({}, indices, mayBlock, params);
+ c2_status_t err = C2_OK;
+ if (mMultiAccessUnitIntf == nullptr) {
+ err = mIntf->query_vb({}, indices, mayBlock, params);
+ return err;
+ }
+ std::vector<C2Param::Index> paramsToIntf;
+ std::vector<C2Param::Index> paramsToLargeFrameIntf;
+ for (auto &i : indices) {
+ if (mMultiAccessUnitIntf->isParamSupported(i)) {
+ paramsToLargeFrameIntf.push_back(i);
+ } else {
+ paramsToIntf.push_back(i);
+ }
+ }
+ c2_status_t err1 = C2_OK;
+ if (paramsToIntf.size() > 0) {
+ err1 = mIntf->query_vb({}, paramsToIntf, mayBlock, params);
+ }
+ c2_status_t err2 = C2_OK;
+ if (paramsToLargeFrameIntf.size() > 0) {
+ err2 = mMultiAccessUnitIntf->query(
+ {}, paramsToLargeFrameIntf, mayBlock, params);
+ }
+ // TODO: correct failure vector
+ return err1 != C2_OK ? err1 : err2;
}
virtual c2_status_t querySupportedParams(
std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
) const override {
- return mIntf->querySupportedParams_nb(params);
+ c2_status_t err = mIntf->querySupportedParams_nb(params);
+ if (mMultiAccessUnitIntf != nullptr) {
+ err = mMultiAccessUnitIntf->querySupportedParams(params);
+ }
+ return err;
}
virtual c2_status_t querySupportedValues(
std::vector<C2FieldSupportedValuesQuery>& fields,
c2_blocking_t mayBlock) const override {
- return mIntf->querySupportedValues_vb(fields, mayBlock);
+ if (mMultiAccessUnitIntf == nullptr) {
+ return mIntf->querySupportedValues_vb(fields, mayBlock);
+ }
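+ // Split the queries: fields owned by the large-frame interface are routed to it,
+ // everything else goes to the component interface; results are merged back into
+ // the caller's vector in the original order.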
+ std::vector<C2FieldSupportedValuesQuery> dup = fields;
+ std::vector<C2FieldSupportedValuesQuery> queryArray[2];
+ std::map<C2ParamField, std::pair<uint32_t, size_t>> queryMap;
+ c2_status_t err = C2_OK;
+ for (int i = 0 ; i < fields.size(); i++) {
+ const C2ParamField &field = fields[i].field();
+ uint32_t queryArrayIdx = 1;
+ if (mMultiAccessUnitIntf->isValidField(fields[i].field())) {
+ queryArrayIdx = 0;
+ }
+ queryMap[field] = std::make_pair(
+ queryArrayIdx, queryArray[queryArrayIdx].size());
+ queryArray[queryArrayIdx].push_back(fields[i]);
+ }
+ if (queryArray[0].size() > 0) {
+ err = mMultiAccessUnitIntf->querySupportedValues(queryArray[0], mayBlock);
+ }
+ if (queryArray[1].size() > 0) {
+ err = mIntf->querySupportedValues_vb(queryArray[1], mayBlock);
+ }
+ for (int i = 0 ; i < dup.size(); i++) {
+ auto it = queryMap.find(dup[i].field());
+ if (it != queryMap.end()) {
+ std::pair<uint32_t, size_t> queryid = it->second;
+ fields[i] = queryArray[queryid.first][queryid.second];
+ }
+ }
+ return err;
}
protected:
std::shared_ptr<C2ComponentInterface> mIntf;
+ std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
};
} // unnamed namespace
@@ -85,10 +174,16 @@
// ComponentInterface
ComponentInterface::ComponentInterface(
const std::shared_ptr<C2ComponentInterface>& intf,
+ const std::shared_ptr<ParameterCache>& cache):ComponentInterface(intf, nullptr, cache) {
+}
+
+ComponentInterface::ComponentInterface(
+ const std::shared_ptr<C2ComponentInterface>& intf,
+ const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf,
const std::shared_ptr<ParameterCache>& cache)
: mInterface{intf},
mConfigurable{SharedRefBase::make<CachedConfigurable>(
- std::make_unique<CompIntf>(intf))} {
+ std::make_unique<CompIntf>(intf, multiAccessUnitIntf))} {
mInit = mConfigurable->init(cache);
}
diff --git a/media/codec2/hal/aidl/ComponentStore.cpp b/media/codec2/hal/aidl/ComponentStore.cpp
index 3f687b5..b95c09e 100644
--- a/media/codec2/hal/aidl/ComponentStore.cpp
+++ b/media/codec2/hal/aidl/ComponentStore.cpp
@@ -199,6 +199,36 @@
}
#endif
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface) {
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+ if (c2interface == nullptr) {
+ return nullptr;
+ }
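+ // Only wrap audio components that do not already expose C2_PARAMKEY_OUTPUT_LARGE_FRAME;
+ // those get a MultiAccessUnitInterface so the framework can use large audio frames.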
+ if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+ c2_status_t err = C2_OK;
+ C2ComponentDomainSetting domain;
+ std::vector<std::unique_ptr<C2Param>> heapParams;
+ err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+ if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+ std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+ bool isComponentSupportsLargeAudioFrame = false;
+ c2interface->querySupportedParams_nb(&params);
+ for (const auto ¶mDesc : params) {
+ if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+ isComponentSupportsLargeAudioFrame = true;
+ break;
+ }
+ }
+ if (!isComponentSupportsLargeAudioFrame) {
+ multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+ c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+ }
+ }
+ }
+ return multiAccessUnitIntf;
+}
+
// Methods from ::aidl::android::hardware::media::c2::IComponentStore
ScopedAStatus ComponentStore::createComponent(
const std::string& name,
@@ -206,12 +236,19 @@
const std::shared_ptr<IClientManager>& pool,
std::shared_ptr<IComponent> *component) {
+ if (!listener) {
+ ALOGE("createComponent(): listener is null");
+ return ScopedAStatus::fromServiceSpecificError(Status::BAD_VALUE);
+ }
+ if (!pool) {
+ ALOGE("createComponent(): pool is null");
+ return ScopedAStatus::fromServiceSpecificError(Status::BAD_VALUE);
+ }
+
std::shared_ptr<C2Component> c2component;
c2_status_t status =
mStore->createComponent(name, &c2component);
- ALOGD("createComponent(): listener(%d)", bool(listener));
-
if (status == C2_OK) {
#ifndef __ANDROID_APEX__
c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
@@ -251,7 +288,10 @@
c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
#endif
onInterfaceLoaded(c2interface);
- *intf = SharedRefBase::make<ComponentInterface>(c2interface, mParameterCache);
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+ tryCreateMultiAccessUnitInterface(c2interface);
+ *intf = SharedRefBase::make<ComponentInterface>(
+ c2interface, multiAccessUnitIntf, mParameterCache);
return ScopedAStatus::ok();
}
return ScopedAStatus::fromServiceSpecificError(res);
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/Component.h b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
index 94b760f..9725bcf 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/Component.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/Component.h
@@ -31,6 +31,8 @@
#include <aidl/android/hardware/media/c2/IInputSurface.h>
#include <aidl/android/hardware/media/c2/IInputSurfaceConnection.h>
+#include <codec2/common/MultiAccessUnitHelper.h>
+
#include <C2Component.h>
#include <C2Buffer.h>
#include <C2.h>
@@ -46,6 +48,8 @@
namespace c2 {
namespace utils {
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
struct ComponentStore;
@@ -85,6 +89,8 @@
std::shared_ptr<C2Component> mComponent;
std::shared_ptr<ComponentInterface> mInterface;
std::shared_ptr<IComponentListener> mListener;
+ std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+ std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
std::shared_ptr<ComponentStore> mStore;
DefaultBufferPoolSender mBufferPoolSender;
@@ -102,6 +108,8 @@
struct Listener;
+ friend struct MultiAccessUnitListener;
+
::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
static void OnBinderDied(void *cookie);
static void OnBinderUnlinked(void *cookie);
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h
index 7723bee..bd19cd6 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentInterface.h
@@ -22,11 +22,14 @@
#include <aidl/android/hardware/media/c2/BnComponentInterface.h>
+#include <codec2/common/MultiAccessUnitHelper.h>
+
#include <C2Component.h>
#include <C2Buffer.h>
#include <C2.h>
#include <memory>
+#include <set>
namespace aidl {
namespace android {
@@ -35,12 +38,16 @@
namespace c2 {
namespace utils {
-struct ComponentStore;
+using ::android::MultiAccessUnitInterface;
struct ComponentInterface : public BnComponentInterface {
ComponentInterface(
const std::shared_ptr<C2ComponentInterface>& interface,
const std::shared_ptr<ParameterCache>& cache);
+ ComponentInterface(
+ const std::shared_ptr<C2ComponentInterface>& interface,
+ const std::shared_ptr<MultiAccessUnitInterface>& largeBufferIntf,
+ const std::shared_ptr<ParameterCache>& cache);
c2_status_t status() const;
::ndk::ScopedAStatus getConfigurable(
std::shared_ptr<IConfigurable> *intf) override;
@@ -51,7 +58,6 @@
c2_status_t mInit;
};
-
} // namespace utils
} // namespace c2
} // namespace media
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
index 0698b0f..746e1bf 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
@@ -75,6 +75,9 @@
static std::shared_ptr<::android::FilterWrapper> GetFilterWrapper();
+ std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface);
+
// Methods from ::aidl::android::hardware::media::c2::IComponentStore.
virtual ::ndk::ScopedAStatus createComponent(
const std::string& name,
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 9ed9458..b3ae514 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -649,7 +649,7 @@
return C2_CORRUPTED;
}
size_t i = 0;
- size_t numUpdatedStackParams = 0;
+ size_t numQueried = 0;
for (auto it = paramPointers.begin(); it != paramPointers.end(); ) {
C2Param* paramPointer = *it;
if (numStackIndices > 0) {
@@ -678,7 +678,7 @@
continue;
}
if (stackParams[i++]->updateFrom(*paramPointer)) {
- ++numUpdatedStackParams;
+ ++numQueried;
} else {
LOG(WARNING) << "query -- param update failed: "
"index = "
@@ -695,14 +695,11 @@
"unexpected extra stack param.";
} else {
heapParams->emplace_back(C2Param::Copy(*paramPointer));
+ ++numQueried;
}
}
++it;
}
- size_t numQueried = numUpdatedStackParams;
- if (heapParams) {
- numQueried += heapParams->size();
- }
if (status == C2_OK && indices.size() != numQueried) {
status = C2_BAD_INDEX;
}
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index 2aedd8b..7d7b285 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -11,6 +11,7 @@
srcs: [
"BufferTypes.cpp",
+ "MultiAccessUnitHelper.cpp",
],
export_include_dirs: ["include/"],
@@ -26,7 +27,10 @@
"libcodec2_vndk",
"liblog",
"libstagefright_foundation",
+ "server_configurable_flags",
],
+
+ static_libs: ["aconfig_mediacodec_flags_c_lib"],
}
cc_library_static {
diff --git a/media/codec2/hal/common/HalSelection.cpp b/media/codec2/hal/common/HalSelection.cpp
index 761a409..d3ea181 100644
--- a/media/codec2/hal/common/HalSelection.cpp
+++ b/media/codec2/hal/common/HalSelection.cpp
@@ -28,7 +28,12 @@
namespace android {
bool IsCodec2AidlHalSelected() {
- if (!com::android::media::codec::flags::provider_->aidl_hal()) {
+ // For new devices with vendor software targeting 202404, we always want to
+ // use AIDL if it exists
+ constexpr int kAndroidApi202404 = 202404;
+ int vendorVersion = ::android::base::GetIntProperty("ro.vendor.api_level", -1);
+ if (!com::android::media::codec::flags::provider_->aidl_hal() &&
+ vendorVersion < kAndroidApi202404) {
// Cannot select AIDL if not enabled
return false;
}
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
new file mode 100644
index 0000000..8086ef2
--- /dev/null
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -0,0 +1,742 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-MultiAccessUnitHelper"
+#include <android-base/logging.h>
+
+#include <com_android_media_codec_flags.h>
+
+#include <codec2/common/MultiAccessUnitHelper.h>
+#include <android-base/properties.h>
+
+#include <C2BufferPriv.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+
+namespace android {
+
+static C2R MultiAccessUnitParamsSetter(
+ bool mayBlock, C2InterfaceHelper::C2P<C2LargeFrame::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
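+ // Keep the pair consistent: maxSize is raised to at least thresholdSize and a zero
+ // thresholdSize defaults to maxSize; unsupported values reset both fields to 0.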
+ if (!me.F(me.v.maxSize).supportsAtAll(me.v.maxSize)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.maxSize)));
+ } else if (!me.F(me.v.thresholdSize).supportsAtAll(me.v.thresholdSize)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.thresholdSize)));
+ } else if (me.v.maxSize < me.v.thresholdSize) {
+ me.set().maxSize = me.v.thresholdSize;
+ } else if (me.v.thresholdSize == 0 && me.v.maxSize > 0) {
+ me.set().thresholdSize = me.v.maxSize;
+ }
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ res.retrieveFailures(&failures);
+ if (!failures.empty()) {
+ me.set().maxSize = 0;
+ me.set().thresholdSize = 0;
+ }
+ return res;
+}
+
+MultiAccessUnitInterface::MultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface>& interface,
+ std::shared_ptr<C2ReflectorHelper> helper)
+ : C2InterfaceHelper(helper), mC2ComponentIntf(interface) {
+ setDerivedInstance(this);
+ addParameter(
+ DefineParam(mLargeFrameParams, C2_PARAMKEY_OUTPUT_LARGE_FRAME)
+ .withDefault(new C2LargeFrame::output(0u, 0, 0))
+ .withFields({
+ C2F(mLargeFrameParams, maxSize).inRange(
+ 0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u)),
+ C2F(mLargeFrameParams, thresholdSize).inRange(
+ 0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u))
+ })
+ .withSetter(MultiAccessUnitParamsSetter)
+ .build());
+ std::vector<std::shared_ptr<C2ParamDescriptor>> supportedParams;
+ querySupportedParams(&supportedParams);
+ // Adding to set to do intf separation in query/config
+ for (std::shared_ptr<C2ParamDescriptor> &desc : supportedParams) {
+ mSupportedParamIndexSet.insert(desc->index());
+ }
+ mParamFields.emplace_back(mLargeFrameParams.get(), &(mLargeFrameParams.get()->maxSize));
+ mParamFields.emplace_back(mLargeFrameParams.get(), &(mLargeFrameParams.get()->thresholdSize));
+
+ if (mC2ComponentIntf) {
+ c2_status_t err = mC2ComponentIntf->query_vb({&mKind}, {}, C2_MAY_BLOCK, nullptr);
+ }
+}
+
+bool MultiAccessUnitInterface::isValidField(const C2ParamField &field) const {
+ return (std::find(mParamFields.begin(), mParamFields.end(), field) != mParamFields.end());
+}
+bool MultiAccessUnitInterface::isParamSupported(C2Param::Index index) {
+ return (mSupportedParamIndexSet.count(index) != 0);
+}
+
+C2LargeFrame::output MultiAccessUnitInterface::getLargeFrameParam() const {
+ return *mLargeFrameParams;
+}
+
+C2Component::kind_t MultiAccessUnitInterface::kind() const {
+ return (C2Component::kind_t)(mKind.value);
+}
+
+bool MultiAccessUnitInterface::getDecoderSampleRateAndChannelCount(
+ uint32_t * const sampleRate_, uint32_t * const channelCount_) const {
+ if (sampleRate_ == nullptr || channelCount_ == nullptr) {
+ return false;
+ }
+ if (mC2ComponentIntf) {
+ C2StreamSampleRateInfo::output sampleRate;
+ C2StreamChannelCountInfo::output channelCount;
+ c2_status_t res = mC2ComponentIntf->query_vb(
+ {&sampleRate, &channelCount}, {}, C2_MAY_BLOCK, nullptr);
+ if (res == C2_OK && sampleRate.value > 0 && channelCount.value > 0) {
+ *sampleRate_ = sampleRate.value;
+ *channelCount_ = channelCount.value;
+ return true;
+ }
+ }
+ return false;
+}
+
+//C2MultiAccessUnitBuffer
+class C2MultiAccessUnitBuffer : public C2Buffer {
+ public:
+ explicit C2MultiAccessUnitBuffer(
+ const std::vector<C2ConstLinearBlock> &blocks):
+ C2Buffer(blocks) {
+ }
+};
+
+//MultiAccessUnitHelper
+MultiAccessUnitHelper::MultiAccessUnitHelper(
+ const std::shared_ptr<MultiAccessUnitInterface>& intf,
+ std::shared_ptr<C2BlockPool>& linearPool):
+ mInit(false),
+ mInterface(intf),
+ mLinearPool(linearPool) {
+ if (mLinearPool) {
+ mInit = true;
+ }
+}
+
+MultiAccessUnitHelper::~MultiAccessUnitHelper() {
+ std::unique_lock<std::mutex> l(mLock);
+ mFrameHolder.clear();
+}
+
+bool MultiAccessUnitHelper::isEnabledOnPlatform() {
+ bool result = com::android::media::codec::flags::provider_->large_audio_frame();
+ if (!result) {
+ return false;
+ }
+ //TODO: remove this before launch
+ result = ::android::base::GetBoolProperty("debug.media.c2.large.audio.frame", true);
+ LOG(DEBUG) << "MultiAccessUnitHelper " << (result ? "enabled" : "disabled");
+ return result;
+}
+
+std::shared_ptr<MultiAccessUnitInterface> MultiAccessUnitHelper::getInterface() {
+ return mInterface;
+}
+
+bool MultiAccessUnitHelper::getStatus() {
+ return mInit;
+}
+
+void MultiAccessUnitHelper::reset() {
+ std::lock_guard<std::mutex> l(mLock);
+ mFrameHolder.clear();
+}
+
+c2_status_t MultiAccessUnitHelper::error(
+ std::list<std::unique_ptr<C2Work>> * const worklist) {
+ if (worklist == nullptr) {
+ LOG(ERROR) << "Provided null worklist for error()";
+ mFrameHolder.clear();
+ return C2_OK;
+ }
+ std::unique_lock<std::mutex> l(mLock);
+ for (auto frame = mFrameHolder.begin(); frame != mFrameHolder.end(); frame++) {
+ if (frame->mLargeWork) {
+ finalizeWork(*frame, 0, true);
+ worklist->push_back(std::move(frame->mLargeWork));
+ frame->reset();
+ }
+ }
+ mFrameHolder.clear();
+ return C2_OK;
+}
+
+c2_status_t MultiAccessUnitHelper::flush(
+ std::list<std::unique_ptr<C2Work>>* const c2flushedWorks) {
+ c2_status_t c2res = C2_OK;
+ std::lock_guard<std::mutex> l(mLock);
+ for (auto iterWork = c2flushedWorks->begin() ; iterWork != c2flushedWorks->end(); ) {
+ bool foundFlushedFrame = false;
+ std::list<MultiAccessUnitInfo>::iterator frame =
+ mFrameHolder.begin();
+ while (frame != mFrameHolder.end() && !foundFlushedFrame) {
+ auto it = frame->mComponentFrameIds.find(
+ (*iterWork)->input.ordinal.frameIndex.peekull());
+ if (it != frame->mComponentFrameIds.end()) {
+ LOG(DEBUG) << "Multi access-unit flush "
+ << (*iterWork)->input.ordinal.frameIndex.peekull()
+ << " with " << frame->inOrdinal.frameIndex.peekull();
+ (*iterWork)->input.ordinal.frameIndex = frame->inOrdinal.frameIndex;
+ frame = mFrameHolder.erase(frame);
+ foundFlushedFrame = true;
+ } else {
+ ++frame;
+ }
+ }
+ if (!foundFlushedFrame) {
+ iterWork = c2flushedWorks->erase(iterWork);
+ } else {
+ ++iterWork;
+ }
+ }
+ return c2res;
+}
+
+c2_status_t MultiAccessUnitHelper::scatter(
+ std::list<std::unique_ptr<C2Work>> &largeWork,
+ std::list<std::list<std::unique_ptr<C2Work>>>* const processedWork) {
+ LOG(DEBUG) << "Multiple access-unit: scatter";
+ if (processedWork == nullptr) {
+ LOG(ERROR) << "MultiAccessUnitHelper provided with no work list";
+ return C2_CORRUPTED;
+ }
+ for (std::unique_ptr<C2Work>& w : largeWork) {
+ std::list<std::unique_ptr<C2Work>> sliceWork;
+ C2WorkOrdinalStruct inputOrdinal = w->input.ordinal;
+ // To hold correspondence and processing bits b/w input and output
+ MultiAccessUnitInfo frameInfo(inputOrdinal);
+ std::set<uint64_t>& frameSet = frameInfo.mComponentFrameIds;
+ uint64_t newFrameIdx = mFrameIndex++;
+ // TODO: Do not split buffers if the component inherently supports multiple frames;
+ // if that is the case, only replace the frame index.
+ auto cloneInputWork = [&newFrameIdx](std::unique_ptr<C2Work>& inWork, uint32_t flags) {
+ std::unique_ptr<C2Work> newWork(new C2Work);
+ newWork->input.flags = (C2FrameData::flags_t)flags;
+ newWork->input.ordinal = inWork->input.ordinal;
+ newWork->input.ordinal.frameIndex = newFrameIdx;
+ if (!inWork->input.configUpdate.empty()) {
+ for (std::unique_ptr<C2Param>& param : inWork->input.configUpdate) {
+ newWork->input.configUpdate.push_back(
+ std::move(C2Param::Copy(*(param.get()))));
+ }
+ }
+ newWork->input.infoBuffers = (inWork->input.infoBuffers);
+ if (!inWork->worklets.empty() && inWork->worklets.front() != nullptr) {
+ newWork->worklets.emplace_back(new C2Worklet);
+ newWork->worklets.front()->component = inWork->worklets.front()->component;
+ std::vector<std::unique_ptr<C2Tuning>> tunings;
+ for (std::unique_ptr<C2Tuning>& tuning : inWork->worklets.front()->tunings) {
+ tunings.push_back(std::move(
+ std::unique_ptr<C2Tuning>(
+ static_cast<C2Tuning*>(
+ C2Param::Copy(*(tuning.get())).release()))));
+ }
+ newWork->worklets.front()->tunings = std::move(tunings);
+ }
+ return newWork;
+ };
+ if (w->input.buffers.empty()
+ || (w->input.buffers.front() == nullptr)
+ || (!w->input.buffers.front()->hasInfo(
+ C2AccessUnitInfos::input::PARAM_TYPE))) {
+ LOG(DEBUG) << "Empty or MultiAU info buffer: scatter frames with frameIndex "
+ << inputOrdinal.frameIndex.peekull()
+ << " -> newFrameIndex " << newFrameIdx
+ << " : input ts " << inputOrdinal.timestamp.peekull();
+ sliceWork.push_back(std::move(cloneInputWork(w, w->input.flags)));
+ if (!w->input.buffers.empty() && w->input.buffers.front() != nullptr) {
+ sliceWork.back()->input.buffers = std::move(w->input.buffers);
+ }
+ frameSet.insert(newFrameIdx);
+ processedWork->push_back(std::move(sliceWork));
+ } else {
+ const std::vector<std::shared_ptr<C2Buffer>>& inBuffers = w->input.buffers;
+ if (inBuffers.front()->data().linearBlocks().size() == 0) {
+ LOG(ERROR) << "ERROR: Work has Large frame info but has no linear blocks.";
+ return C2_CORRUPTED;
+ }
+ frameInfo.mInputC2Ref = inBuffers;
+ const std::vector<C2ConstLinearBlock>& multiAU =
+ inBuffers.front()->data().linearBlocks();
+ std::shared_ptr<const C2AccessUnitInfos::input> auInfo =
+ std::static_pointer_cast<const C2AccessUnitInfos::input>(
+ w->input.buffers.front()->getInfo(C2AccessUnitInfos::input::PARAM_TYPE));
+ uint32_t offset = 0; uint32_t multiAUSize = multiAU.front().size();
+ bool sendEos = false;
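+ // Each C2AccessUnitInfosStruct entry becomes its own C2Work with a fresh frame
+ // index, carved out of the large input block as a sub-block.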
+ for (int idx = 0; idx < auInfo->flexCount(); ++idx) {
+ std::vector<C2ConstLinearBlock> au;
+ const C2AccessUnitInfosStruct &info = auInfo->m.values[idx];
+ sendEos |= (info.flags & C2FrameData::FLAG_END_OF_STREAM);
+ std::unique_ptr<C2Work> newWork = cloneInputWork(w, info.flags);
+ frameSet.insert(newFrameIdx);
+ newFrameIdx = mFrameIndex++;
+ newWork->input.ordinal.timestamp = info.timestamp;
+ au.push_back(multiAU.front().subBlock(offset, info.size));
+ if ((offset + info.size) > multiAUSize) {
+ LOG(ERROR) << "ERROR: access-unit offset > buffer size"
+ << " current offset " << (offset + info.size)
+ << " buffer size " << multiAUSize;
+ return C2_CORRUPTED;
+ }
+ newWork->input.buffers.push_back(
+ std::shared_ptr<C2Buffer>(new C2MultiAccessUnitBuffer(au)));
+ LOG(DEBUG) << "Frame scatter queuing frames WITH info in ordinal "
+ << inputOrdinal.frameIndex.peekull()
+ << " info.size " << info.size
+ << " : TS " << newWork->input.ordinal.timestamp.peekull()
+ << " with index " << newFrameIdx - 1;
+ // add to worklist
+ sliceWork.push_back(std::move(newWork));
+ processedWork->push_back(std::move(sliceWork));
+ offset += info.size;
+ }
+ mFrameIndex--;
+ if (!sendEos && (w->input.flags & C2FrameData::FLAG_END_OF_STREAM)) {
+ if (!processedWork->empty()) {
+ std::list<std::unique_ptr<C2Work>> &sliceWork = processedWork->back();
+ if (!sliceWork.empty()) {
+ std::unique_ptr<C2Work> &work = sliceWork.back();
+ if (work) {
+ work->input.flags = C2FrameData::FLAG_END_OF_STREAM;
+ }
+ }
+ }
+ }
+ }
+ if (!processedWork->empty()) {
+ C2LargeFrame::output multiAccessParams = mInterface->getLargeFrameParam();
+ frameInfo.mLargeFrameTuning = multiAccessParams;
+ std::lock_guard<std::mutex> l(mLock);
+ mFrameHolder.push_back(std::move(frameInfo));
+ }
+ }
+ return C2_OK;
+}
+
+c2_status_t MultiAccessUnitHelper::gather(
+ std::list<std::unique_ptr<C2Work>> &c2workItems,
+ std::list<std::unique_ptr<C2Work>>* const processedWork) {
+ LOG(DEBUG) << "Multi access-unit gather process";
+ if (processedWork == nullptr) {
+ LOG(ERROR) << "Nothing provided for processed work";
+ return C2_CORRUPTED;
+ }
+ auto addOutWork = [&processedWork](std::unique_ptr<C2Work>& work) {
+ processedWork->push_back(std::move(work));
+ };
+ {
+ std::lock_guard<std::mutex> l(mLock);
+ for (auto& work : c2workItems) {
+ LOG(DEBUG) << "FrameHolder Size: " << mFrameHolder.size();
+ uint64_t thisFrameIndex = work->input.ordinal.frameIndex.peekull();
+ bool removeEntry = work->worklets.empty()
+ || !work->worklets.front()
+ || (work->worklets.front()->output.flags
+ & C2FrameData::FLAG_INCOMPLETE) == 0;
+ bool foundFrame = false;
+ std::list<MultiAccessUnitInfo>::iterator frame =
+ mFrameHolder.begin();
+ while (!foundFrame && frame != mFrameHolder.end()) {
+ auto it = frame->mComponentFrameIds.find(thisFrameIndex);
+ if (it != frame->mComponentFrameIds.end()) {
+ foundFrame = true;
+ LOG(DEBUG) << "onWorkDone (frameIndex " << thisFrameIndex
+ << " workletsSize " << work->worklets.size()
+ << ") -> frameIndex " << frame->inOrdinal.frameIndex.peekull();
+ if (work->result != C2_OK
+ || work->worklets.empty()
+ || !work->worklets.front()
+ || (frame->mLargeFrameTuning.thresholdSize == 0
+ || frame->mLargeFrameTuning.maxSize == 0)) {
+ if (removeEntry) {
+ frame->mComponentFrameIds.erase(it);
+ removeEntry = false;
+ }
+ if (frame->mLargeWork) {
+ finalizeWork(*frame);
+ addOutWork(frame->mLargeWork);
+ frame->reset();
+ }
+ c2_status_t workResult = work->result;
+ frame->mLargeWork = std::move(work);
+ frame->mLargeWork->input.ordinal.frameIndex =
+ frame->inOrdinal.frameIndex;
+ finalizeWork(*frame);
+ addOutWork(frame->mLargeWork);
+ frame->reset();
+ if (workResult != C2_OK) {
+ frame->mAccessUnitInfos.clear();
+ }
+ } else if (C2_OK != processWorklets(*frame, work, addOutWork)) {
+ LOG(DEBUG) << "Error while processing work";
+ }
+ if (removeEntry) {
+ LOG(DEBUG) << "Removing entry: " << thisFrameIndex
+ << " -> " << frame->inOrdinal.frameIndex.peekull();
+ frame->mComponentFrameIds.erase(it);
+ }
+ // This is to take care of the last bytes and to decide to send with
+ // FLAG_INCOMPLETE or not.
+ if ((frame->mWview
+ && (frame->mWview->offset() > frame->mLargeFrameTuning.thresholdSize))
+ || frame->mComponentFrameIds.empty()) {
+ if (frame->mLargeWork) {
+ finalizeWork(*frame);
+ addOutWork(frame->mLargeWork);
+ frame->reset();
+ }
+ }
+ if (frame->mComponentFrameIds.empty()) {
+ LOG(DEBUG) << "This frame is finished ID " << thisFrameIndex;
+ frame = mFrameHolder.erase(frame);
+ continue;
+ }
+ } else {
+ LOG(DEBUG) << "Received an out-of-order output " << thisFrameIndex
+ << " expected: " << mFrameHolder.front().inOrdinal.frameIndex.peekull();
+ }
+ frame++;
+ }
+ if (!foundFrame) {
+ LOG(ERROR) << "Error: Frame Holder reports no frame " << thisFrameIndex;
+ }
+ }
+ }
+ return C2_OK;
+}
+
+c2_status_t MultiAccessUnitHelper::createLinearBlock(MultiAccessUnitInfo &frame) {
+ if (!mInit) {
+ LOG(ERROR) << "Large buffer allocator failed";
+ return C2_NO_MEMORY;
+ }
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ uint32_t maxOutSize = frame.mLargeFrameTuning.maxSize;
+ c2_status_t err = mLinearPool->fetchLinearBlock(maxOutSize, usage, &frame.mBlock);
+ if (err != C2_OK) {
+ LOG(ERROR) << "Error allocating Multi access-unit Buffer";
+ return err;
+ }
+ LOG(DEBUG) << "Allocated block with offset : " << frame.mBlock->offset()
+ << " size " << frame.mBlock->size() << " Capacity " << frame.mBlock->capacity();
+ frame.mWview = std::make_shared<C2WriteView>(frame.mBlock->map().get());
+ LOG(DEBUG) << "Allocated buffer : requested size : " <<
+ frame.mLargeFrameTuning.maxSize
+ << " alloc size " << frame.mWview->size();
+ return C2_OK;
+}
+
+/*
+ * For every work from the component, we try to do aggregation of work here.
+*/
+c2_status_t MultiAccessUnitHelper::processWorklets(MultiAccessUnitInfo &frame,
+ std::unique_ptr<C2Work>& work,
+ const std::function <void(std::unique_ptr<C2Work>&)>& addWork) {
+ // This will allocate work, worklet, c2Block
+ auto allocateWork = [&](MultiAccessUnitInfo &frame,
+ bool allocateWorket = false,
+ bool allocateBuffer = false) {
+ c2_status_t ret = C2_OK;
+ if (frame.mLargeWork == nullptr) {
+ frame.mLargeWork.reset(new C2Work);
+ frame.mLargeWork->input.ordinal = frame.inOrdinal;
+ frame.mLargeWork->input.ordinal.frameIndex = frame.inOrdinal.frameIndex;
+ }
+ if (allocateWorket) {
+ if (frame.mLargeWork->worklets.size() == 0) {
+ frame.mLargeWork->worklets.emplace_back(new C2Worklet);
+ }
+ }
+ if (allocateBuffer) {
+ if (frame.mWview == nullptr) {
+ ret = createLinearBlock(frame);
+ }
+ }
+ return ret;
+ };
+ // we will only have one worklet.
+ bool foundEndOfStream = false;
+ for (auto worklet = work->worklets.begin();
+ worklet != work->worklets.end() && (*worklet) != nullptr; ++worklet) {
+ uint32_t flagsForNoCopy = C2FrameData::FLAG_DROP_FRAME
+ | C2FrameData::FLAG_DISCARD_FRAME
+ | C2FrameData::FLAG_CORRUPT;
+ if ((*worklet)->output.flags & flagsForNoCopy) {
+ if (frame.mLargeWork) {
+ finalizeWork(frame);
+ addWork(frame.mLargeWork);
+ frame.reset();
+ }
+ frame.mLargeWork = std::move(work);
+ frame.mLargeWork->input.ordinal.frameIndex = frame.inOrdinal.frameIndex;
+ finalizeWork(frame, (*worklet)->output.flags, true);
+ addWork(frame.mLargeWork);
+ frame.reset();
+ return C2_OK;
+ }
+ int64_t sampleTimeUs = 0;
+ uint32_t frameSize = 0;
+ uint32_t sampleRate = 0;
+ uint32_t channelCount = 0;
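+ // For decoders the output is assumed to be 16-bit PCM: frameSize is the size of one
+ // PCM frame (channelCount * 2 bytes) and sampleTimeUs the duration of one output byte.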
+ if (mInterface->getDecoderSampleRateAndChannelCount(&sampleRate, &channelCount)) {
+ sampleTimeUs = (1000000u) / (sampleRate * channelCount * 2);
+ frameSize = channelCount * 2;
+ if (mInterface->kind() == C2Component::KIND_DECODER) {
+ frame.mLargeFrameTuning.maxSize =
+ (frame.mLargeFrameTuning.maxSize / frameSize) * frameSize;
+ frame.mLargeFrameTuning.thresholdSize =
+ (frame.mLargeFrameTuning.thresholdSize / frameSize) * frameSize;
+ }
+ }
+ c2_status_t c2ret = allocateWork(frame, true);
+ if (c2ret != C2_OK) {
+ return c2ret;
+ }
+ C2FrameData& outputFramedata = frame.mLargeWork->worklets.front()->output;
+ if (!(*worklet)->output.configUpdate.empty()) {
+ for (auto& configUpdate : (*worklet)->output.configUpdate) {
+ outputFramedata.configUpdate.push_back(std::move(configUpdate));
+ }
+ (*worklet)->output.configUpdate.clear();
+ }
+ outputFramedata.infoBuffers.insert(outputFramedata.infoBuffers.begin(),
+ (*worklet)->output.infoBuffers.begin(),
+ (*worklet)->output.infoBuffers.end());
+
+ LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
+ << " threshold " << frame.mLargeFrameTuning.thresholdSize;
+ if ((*worklet)->output.buffers.size() > 0) {
+ allocateWork(frame, true, true);
+ }
+ LOG(DEBUG) << "This worklet has " << (*worklet)->output.buffers.size() << " buffers"
+ << " ts: " << (*worklet)->output.ordinal.timestamp.peekull();
+ int64_t workletTimestamp = (*worklet)->output.ordinal.timestamp.peekull();
+ int64_t timestamp = workletTimestamp;
+ uint32_t flagsForCopy = ((*worklet)->output.flags) & C2FrameData::FLAG_CODEC_CONFIG;
+ for (int bufIdx = 0; bufIdx < (*worklet)->output.buffers.size(); ++bufIdx) {
+ std::shared_ptr<C2Buffer>& buffer = (*worklet)->output.buffers[bufIdx];
+ if (!buffer || buffer->data().linearBlocks().empty()) {
+ continue;
+ }
+ const std::vector<C2ConstLinearBlock>& blocks = buffer->data().linearBlocks();
+ if (blocks.size() > 0) {
+ uint32_t inputOffset = 0;
+ uint32_t inputSize = blocks.front().size();
+ frame.mInfos.insert(frame.mInfos.end(),
+ buffer->info().begin(), buffer->info().end());
+ if (frameSize != 0 && (mInterface->kind() == C2Component::KIND_DECODER)) {
+ // For decoders, only copy whole PCM frames (channelCount * 2 bytes per 16-bit frame).
+ inputSize -= (inputSize % frameSize);
+ }
+ while (inputOffset < inputSize) {
+ if (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize) {
+ frame.mLargeWork->result = C2_OK;
+ finalizeWork(frame, flagsForCopy);
+ addWork(frame.mLargeWork);
+ frame.reset();
+ allocateWork(frame, true, true);
+ }
+ if (mInterface->kind() == C2Component::KIND_ENCODER) {
+ if (inputSize > frame.mLargeFrameTuning.maxSize) {
+ LOG(ERROR) << "Enc: Output buffer too small for AU, configured with "
+ << frame.mLargeFrameTuning.maxSize
+ << " block size: " << blocks.front().size()
+ << "alloc size " << frame.mWview->size();
+ if (frame.mLargeWork
+ && frame.mWview && frame.mWview->offset() > 0) {
+ finalizeWork(frame, flagsForCopy);
+ addWork(frame.mLargeWork);
+ frame.reset();
+ allocateWork(frame, true, false);
+ }
+ frame.mLargeWork->result = C2_NO_MEMORY;
+ finalizeWork(frame, 0, true);
+ addWork(frame.mLargeWork);
+ frame.reset();
+ return C2_NO_MEMORY;
+ } else if (inputSize > frame.mWview->size()) {
+ LOG(DEBUG) << "Enc: Large frame hitting bufer limit, current size "
+ << frame.mWview->offset();
+ if (frame.mLargeWork
+ && frame.mWview && frame.mWview->offset() > 0) {
+ finalizeWork(frame, flagsForCopy);
+ addWork(frame.mLargeWork);
+ frame.reset();
+ allocateWork(frame, true, true);
+ }
+ }
+ }
+ C2ReadView rView = blocks.front().map().get();
+ if (rView.error()) {
+ LOG(ERROR) << "Buffer read view error";
+ frame.mLargeWork->result = rView.error();
+ frame.mLargeWork->worklets.clear();
+ finalizeWork(frame, 0, true);
+ addWork(frame.mLargeWork);
+ frame.reset();
+ return C2_NO_MEMORY;
+ }
+ uint32_t toCopy = 0;
+ if (mInterface->kind() == C2Component::KIND_ENCODER) {
+ toCopy = inputSize;
+ } else {
+ toCopy = c2_min(frame.mWview->size(), (inputSize - inputOffset));
+ timestamp = workletTimestamp + inputOffset * sampleTimeUs;
+ LOG(DEBUG) << "ts " << timestamp
+ << " copiedOutput " << inputOffset
+ << " sampleTimeUs " << sampleTimeUs;
+ }
+ LOG(DEBUG) << " Copy size " << toCopy
+ << " ts " << timestamp;
+ memcpy(frame.mWview->data(), rView.data() + inputOffset, toCopy);
+ frame.mWview->setOffset(frame.mWview->offset() + toCopy);
+ inputOffset += toCopy;
+ mergeAccessUnitInfo(frame, flagsForCopy, toCopy, timestamp);
+ }
+ } else {
+ frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(buffer));
+ LOG(DEBUG) << "Copying worklets without linear buffer";
+ }
+ }
+ uint32_t flagsForCsdOrEnd = (*worklet)->output.flags
+ & (C2FrameData::FLAG_END_OF_STREAM | C2FrameData::FLAG_CODEC_CONFIG);
+ if (flagsForCsdOrEnd) {
+ LOG(DEBUG) << "Output worklet has CSD/EOS data";
+ frame.mLargeWork->result = C2_OK;
+ // Assign the timestamp here; finalizeWork() re-evaluates it before the work is sent out.
+ frame.mLargeWork->worklets.front()->output.ordinal.timestamp = timestamp;
+ finalizeWork(frame, flagsForCsdOrEnd, true);
+ addWork(frame.mLargeWork);
+ frame.reset();
+ }
+ }
+ return C2_OK;
+}
+
+c2_status_t MultiAccessUnitHelper::finalizeWork(
+ MultiAccessUnitInfo& frame, uint32_t inFlags, bool forceComplete) {
+ if (frame.mLargeWork == nullptr) {
+ return C2_OK;
+ }
+ // Prepare the input ordinal.
+ frame.mLargeWork->input.ordinal = frame.inOrdinal;
+ // Default to the input timestamp; overridden below when access-unit info or a worklet is available.
+ int64_t timeStampUs = frame.inOrdinal.timestamp.peekull();
+ if (!frame.mAccessUnitInfos.empty()) {
+ timeStampUs = frame.mAccessUnitInfos.front().timestamp;
+ } else if (!frame.mLargeWork->worklets.empty()) {
+ std::unique_ptr<C2Worklet> &worklet = frame.mLargeWork->worklets.front();
+ if (worklet) {
+ timeStampUs = worklet->output.ordinal.timestamp.peekull();
+ }
+ }
+ LOG(DEBUG) << "Finalizing work with input Idx "
+ << frame.mLargeWork->input.ordinal.frameIndex.peekull()
+ << " timestamp " << timeStampUs;
+ uint32_t finalFlags = 0;
+ if ((!forceComplete)
+ && (frame.mLargeWork->result == C2_OK)
+ && (!frame.mComponentFrameIds.empty())) {
+ finalFlags |= C2FrameData::FLAG_INCOMPLETE;
+ }
+ if (frame.mLargeWork->result == C2_OK) {
+ finalFlags |= inFlags;
+ }
+ // update worklet if present
+ if (!frame.mLargeWork->worklets.empty() &&
+ frame.mLargeWork->worklets.front() != nullptr) {
+ frame.mLargeWork->workletsProcessed = 1;
+ C2FrameData& outFrameData = frame.mLargeWork->worklets.front()->output;
+ outFrameData.ordinal.frameIndex = frame.inOrdinal.frameIndex.peekull();
+ outFrameData.ordinal.timestamp = timeStampUs;
+ finalFlags |= frame.mLargeWork->worklets.front()->output.flags;
+ outFrameData.flags = (C2FrameData::flags_t)finalFlags;
+ // update buffers
+ if (frame.mBlock && (frame.mWview->offset() > 0)) {
+ size_t size = frame.mWview->offset();
+ LOG(DEBUG) << "Finalize : Block: Large frame size set as " << size
+ << " timestamp as " << timeStampUs
+ << "frameIndex " << outFrameData.ordinal.frameIndex.peekull();
+ frame.mWview->setOffset(0);
+ std::shared_ptr<C2Buffer> c2Buffer = C2Buffer::CreateLinearBuffer(
+ frame.mBlock->share(0, size, ::C2Fence()));
+ if (frame.mAccessUnitInfos.size() > 0) {
+ if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
+ frame.mAccessUnitInfos.back().flags |=
+ C2FrameData::FLAG_END_OF_STREAM;
+ }
+ std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
+ C2AccessUnitInfos::output::AllocShared(
+ frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
+ frame.mInfos.push_back(largeFrame);
+ frame.mAccessUnitInfos.clear();
+ }
+ for (auto &info : frame.mInfos) {
+ c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
+ }
+ frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(c2Buffer));
+ frame.mInfos.clear();
+ frame.mBlock.reset();
+ frame.mWview.reset();
+ }
+ }
+ LOG(DEBUG) << "Multi access-unitflag setting as " << finalFlags;
+ return C2_OK;
+}
+
+void MultiAccessUnitHelper::mergeAccessUnitInfo(
+ MultiAccessUnitInfo &frame,
+ uint32_t flags_,
+ uint32_t size,
+ int64_t timestamp) {
+ // Remove flags that are not part of Access unit info
+ uint32_t flags = flags_ & ~(C2FrameData::FLAG_INCOMPLETE
+ | C2FrameData::FLAG_DISCARD_FRAME
+ | C2FrameData::FLAG_CORRUPT
+ | C2FrameData::FLAG_CORRECTED);
+ if (frame.mAccessUnitInfos.empty()) {
+ frame.mAccessUnitInfos.emplace_back(flags, size, timestamp);
+ return;
+ }
+ if ((mInterface->kind() == C2Component::KIND_DECODER) &&
+ (frame.mAccessUnitInfos.back().flags == flags)) {
+ // merge access units here
+ C2AccessUnitInfosStruct &s = frame.mAccessUnitInfos.back();
+ s.size += size; // don't have to update timestamp
+ } else {
+ frame.mAccessUnitInfos.emplace_back(flags, size, timestamp);
+ }
+}
+
+void MultiAccessUnitHelper::MultiAccessUnitInfo::reset() {
+ mBlock.reset();
+ mWview.reset();
+ mInfos.clear();
+ mAccessUnitInfos.clear();
+ mLargeWork.reset();
+}
+
+} // namespace android
\ No newline at end of file
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
new file mode 100644
index 0000000..bb4464c
--- /dev/null
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -0,0 +1,217 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_COMMON_MULTI_ACCESSUNIT_HELPER_H
+#define CODEC2_COMMON_MULTI_ACCESSUNIT_HELPER_H
+
+#include <hidl/Status.h>
+#include <hwbinder/IBinder.h>
+
+#include <C2Config.h>
+#include <util/C2InterfaceHelper.h>
+#include <C2Buffer.h>
+#include <C2.h>
+
+#include <set>
+#include <memory>
+#include <mutex>
+
+namespace android {
+
+struct MultiAccessUnitHelper;
+
+struct MultiAccessUnitInterface : public C2InterfaceHelper {
+ explicit MultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface>& interface,
+ std::shared_ptr<C2ReflectorHelper> helper);
+
+ bool isParamSupported(C2Param::Index index);
+ C2LargeFrame::output getLargeFrameParam() const;
+ C2Component::kind_t kind() const;
+ bool isValidField(const C2ParamField &field) const;
+
+protected:
+ bool getDecoderSampleRateAndChannelCount(
+ uint32_t * const sampleRate_, uint32_t * const channelCount_) const;
+ const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
+ std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
+ C2ComponentKindSetting mKind;
+ std::set<C2Param::Index> mSupportedParamIndexSet;
+ std::vector<C2ParamField> mParamFields;
+
+ friend struct MultiAccessUnitHelper;
+};
+
+struct MultiAccessUnitHelper {
+public:
+ MultiAccessUnitHelper(
+ const std::shared_ptr<MultiAccessUnitInterface>& intf,
+ std::shared_ptr<C2BlockPool> &linearPool);
+
+ virtual ~MultiAccessUnitHelper();
+
+ static bool isEnabledOnPlatform();
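+ /*
+ * Construction sketch (mirrors Component::initListener in this change); the names
+ * below are illustrative only:
+ *
+ * std::shared_ptr<C2BlockPool> linearPool; // DEFAULT_LINEAR pool created for the component
+ * auto helper = std::make_shared<MultiAccessUnitHelper>(multiAccessUnitIntf, linearPool);
+ */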
+
+ /*
+ * Scatters the incoming linear buffer into access-unit sized buffers
+ * based on the access-unit info.
+ */
+ c2_status_t scatter(
+ std::list<std::unique_ptr<C2Work>> &c2workItems,
+ std::list<std::list<std::unique_ptr<C2Work>>> * const processedWork);
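+ /*
+ * Usage sketch (mirrors Component::queue in this change); `helper` and `component`
+ * are illustrative names:
+ *
+ * std::list<std::unique_ptr<C2Work>> inWork; // one large multi access-unit input work
+ * std::list<std::list<std::unique_ptr<C2Work>>> scattered;
+ * if (helper->scatter(inWork, &scattered) == C2_OK) {
+ * for (auto &workList : scattered) {
+ * component->queue_nb(&workList);
+ * }
+ * }
+ */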
+
+ /*
+ * Gathers different access-units into a single buffer based on the scatter list
+ * and the configured max and threshold sizes. This also generates the associated
+ * access-unit information and attaches it to the final result.
+ */
+ c2_status_t gather(
+ std::list<std::unique_ptr<C2Work>> &c2workItems,
+ std::list<std::unique_ptr<C2Work>> * const processedWork);
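+ /*
+ * Output-path sketch (mirrors MultiAccessUnitListener::onWorkDone_nb in this change):
+ *
+ * std::list<std::unique_ptr<C2Work>> processed;
+ * helper->gather(returnedWorkItems, &processed);
+ * if (!processed.empty()) {
+ * listener->onWorkDone_nb(component, std::move(processed));
+ * }
+ */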
+
+ /*
+ * Flushes the codec and generates the list of flushed buffers.
+ */
+ c2_status_t flush(
+ std::list<std::unique_ptr<C2Work>> * const c2flushedWorks);
+
+ /*
+ * Returns, in c2workItems, all pending buffers that are still being generated.
+ */
+ c2_status_t error(std::list<std::unique_ptr<C2Work>> * const c2workItems);
+
+ /*
+ * Gets the interface object of this handler.
+ */
+ std::shared_ptr<MultiAccessUnitInterface> getInterface();
+
+ /*
+ * Gets the status of the object. This primarily verifies that all the
+ * allocators within the handler are configured properly.
+ */
+ bool getStatus();
+
+ /*
+ * Resets the structures inside the handler.
+ */
+ void reset();
+
+protected:
+
+ struct MultiAccessUnitInfo {
+ /*
+ * From the input
+ * Ordinal of the input frame
+ */
+ C2WorkOrdinalStruct inOrdinal;
+
+ /*
+ * Frame indexes of the scattered buffers
+ */
+ std::set<uint64_t> mComponentFrameIds;
+
+ /*
+ * For the output
+ * Current output block.
+ */
+ std::shared_ptr<C2LinearBlock> mBlock;
+
+ /*
+ * Write view of current block
+ */
+ std::shared_ptr<C2WriteView> mWview;
+
+ /*
+ * C2Info related to the current mBlock
+ */
+ std::vector<std::shared_ptr<const C2Info>> mInfos;
+
+ /*
+ * C2AccessUnitInfos for the current buffer
+ */
+ std::vector<C2AccessUnitInfosStruct> mAccessUnitInfos;
+
+ /*
+ * Current tuning used to process this input work
+ */
+ C2LargeFrame::output mLargeFrameTuning;
+
+ /*
+ * Current output C2Work being processed
+ */
+ std::unique_ptr<C2Work> mLargeWork;
+
+ /*
+ * For holding a reference to the incoming buffer
+ */
+ std::vector<std::shared_ptr<C2Buffer>> mInputC2Ref;
+
+ MultiAccessUnitInfo(C2WorkOrdinalStruct ordinal) : inOrdinal(ordinal) {
+ }
+
+ /*
+ * Resets this frame
+ */
+ void reset();
+ };
+
+ /*
+ * Creates a linear block to be used with work
+ */
+ c2_status_t createLinearBlock(MultiAccessUnitInfo &frame);
+
+ /*
+ * Processes worklets from the component
+ */
+ c2_status_t processWorklets(MultiAccessUnitInfo &frame,
+ std::unique_ptr<C2Work> &work,
+ const std::function <void(std::unique_ptr<C2Work>&)> &addWork);
+
+ /*
+ * Finalizes the work to be sent out.
+ */
+ c2_status_t finalizeWork(MultiAccessUnitInfo &frame,
+ uint32_t flags = 0, bool forceComplete = false);
+
+ /*
+ * Merges different access unit infos if possible
+ */
+ void mergeAccessUnitInfo(MultiAccessUnitInfo &frame,
+ uint32_t flags,
+ uint32_t size,
+ int64_t timestamp);
+
+ bool mInit;
+
+ // Interface of this module
+ std::shared_ptr<MultiAccessUnitInterface> mInterface;
+ // Local pool id used for output buffer allocation
+ C2BlockPool::local_id_t mBlockPoolId;
+ // C2Blockpool for output buffer allocation
+ std::shared_ptr<C2BlockPool> mLinearPool;
+ // FrameIndex for the current outgoing work
+ std::atomic_uint64_t mFrameIndex;
+ // Mutex to protect mFrameHolder
+ std::mutex mLock;
+ // List of Infos that contains the input and
+ // output work and buffer objects
+ std::list<MultiAccessUnitInfo> mFrameHolder;
+};
+
+} // namespace android
+
+#endif // CODEC2_COMMON_MULTI_ACCESSUNIT_HELPER_H
diff --git a/media/codec2/hal/hidl/1.0/utils/Android.bp b/media/codec2/hal/hidl/1.0/utils/Android.bp
index 2f2ecd1..9646a0b 100644
--- a/media/codec2/hal/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hal/hidl/1.0/utils/Android.bp
@@ -52,7 +52,6 @@
],
}
-
// DO NOT DEPEND ON THIS DIRECTLY
// use libcodec2-hidl-defaults instead
cc_library {
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index 0aeed08..0259d90 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -135,6 +135,52 @@
wp<IComponentListener> mListener;
};
+// Component listener for handling multiple access-units
+struct MultiAccessUnitListener : public Component::Listener {
+ MultiAccessUnitListener(const sp<Component> &component,
+ const std::shared_ptr<MultiAccessUnitHelper> &handler):
+ Listener(component), mHandler(handler) {
+ }
+
+ virtual void onError_nb(
+ std::weak_ptr<C2Component> c2component,
+ uint32_t errorCode) override {
+ if (mHandler) {
+ std::list<std::unique_ptr<C2Work>> worklist;
+ mHandler->error(&worklist);
+ if (!worklist.empty()) {
+ Listener::onWorkDone_nb(c2component, std::move(worklist));
+ }
+ }
+ Listener::onError_nb(c2component, errorCode);
+ }
+
+ virtual void onTripped_nb(
+ std::weak_ptr<C2Component> c2component,
+ std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+ ) override {
+ Listener::onTripped_nb(c2component,
+ c2settingResult);
+ }
+
+ virtual void onWorkDone_nb(
+ std::weak_ptr<C2Component> c2component,
+ std::list<std::unique_ptr<C2Work>> c2workItems) override {
+ if (mHandler) {
+ std::list<std::unique_ptr<C2Work>> processedWork;
+ mHandler->gather(c2workItems, &processedWork);
+ if (!processedWork.empty()) {
+ Listener::onWorkDone_nb(c2component, std::move(processedWork));
+ }
+ } else {
+ Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+ }
+ }
+
+ protected:
+ std::shared_ptr<MultiAccessUnitHelper> mHandler;
+};
+
// Component::Sink
struct Component::Sink : public IInputSink {
std::shared_ptr<Component> mComponent;
@@ -208,13 +254,14 @@
const sp<::android::hardware::media::bufferpool::V2_0::
IClientManager>& clientPoolManager)
: mComponent{component},
- mInterface{new ComponentInterface(component->intf(),
- store->getParameterCache())},
mListener{listener},
mStore{store},
mBufferPoolSender{clientPoolManager} {
// Retrieve supported parameters from store
// TODO: We could cache this per component/interface type
+ mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+ mInterface = new ComponentInterface(
+ component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
mInit = mInterface->status();
}
@@ -240,7 +287,6 @@
// Methods from ::android::hardware::media::c2::V1_0::IComponent
Return<Status> Component::queue(const WorkBundle& workBundle) {
std::list<std::unique_ptr<C2Work>> c2works;
-
if (!objcpy(&c2works, workBundle)) {
return Status::CORRUPTED;
}
@@ -252,7 +298,19 @@
registerFrameData(mListener, work->input);
}
}
-
+ c2_status_t err = C2_OK;
+ if (mMultiAccessUnitHelper) {
+ std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+ mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+ for (auto &c2worklist : c2worklists) {
+ err = mComponent->queue_nb(&c2worklist);
+ if (err != C2_OK) {
+ LOG(ERROR) << "Error Queuing to component.";
+ break;
+ }
+ }
+ return static_cast<Status>(err);
+ }
return static_cast<Status>(mComponent->queue_nb(&c2works));
}
@@ -261,6 +319,9 @@
c2_status_t c2res = mComponent->flush_sm(
C2Component::FLUSH_COMPONENT,
&c2flushedWorks);
+ if (mMultiAccessUnitHelper) {
+ c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+ }
// Unregister input buffers.
for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
@@ -469,6 +530,9 @@
std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
mBlockPools.clear();
}
+ if (mMultiAccessUnitHelper) {
+ mMultiAccessUnitHelper->reset();
+ }
InputBufferManager::unregisterFrameData(mListener);
return status;
}
@@ -479,6 +543,9 @@
std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
mBlockPools.clear();
}
+ if (mMultiAccessUnitHelper) {
+ mMultiAccessUnitHelper->reset();
+ }
InputBufferManager::unregisterFrameData(mListener);
return status;
}
@@ -501,8 +568,26 @@
}
void Component::initListener(const sp<Component>& self) {
- std::shared_ptr<C2Component::Listener> c2listener =
+ std::shared_ptr<C2Component::Listener> c2listener;
+ if (mMultiAccessUnitIntf) {
+ std::shared_ptr<C2Allocator> allocator;
+ std::shared_ptr<C2BlockPool> linearPool;
+ std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+ if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+ ::android::C2PlatformAllocatorDesc desc;
+ desc.allocatorId = allocator->getId();
+ if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+ if (linearPool) {
+ mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+ mMultiAccessUnitIntf, linearPool);
+ }
+ }
+ }
+ }
+ c2listener = mMultiAccessUnitHelper ?
+ std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
std::make_shared<Listener>(self);
+
c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
if (res != C2_OK) {
mInit = res;
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
index 12078e0..41a8904 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
@@ -25,7 +25,6 @@
#include <hidl/HidlBinderSupport.h>
#include <utils/Timers.h>
-#include <C2BqBufferPriv.h>
#include <C2Debug.h>
#include <C2PlatformSupport.h>
@@ -45,9 +44,10 @@
// Implementation of ConfigurableC2Intf based on C2ComponentInterface
struct CompIntf : public ConfigurableC2Intf {
- CompIntf(const std::shared_ptr<C2ComponentInterface>& intf) :
+ CompIntf(const std::shared_ptr<C2ComponentInterface>& intf,
+ const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf):
ConfigurableC2Intf{intf->getName(), intf->getId()},
- mIntf{intf} {
+ mIntf{intf}, mMultiAccessUnitIntf{multiAccessUnitIntf} {
}
virtual c2_status_t config(
@@ -55,7 +55,34 @@
c2_blocking_t mayBlock,
std::vector<std::unique_ptr<C2SettingResult>>* const failures
) override {
- return mIntf->config_vb(params, mayBlock, failures);
+ std::vector<C2Param*> paramsToIntf;
+ std::vector<C2Param*> paramsToLargeFrameIntf;
+ c2_status_t err = C2_OK;
+ if (mMultiAccessUnitIntf == nullptr) {
+ err = mIntf->config_vb(params, mayBlock, failures);
+ return err;
+ }
+ for (auto &p : params) {
+ if (mMultiAccessUnitIntf->isParamSupported(p->index())) {
+ paramsToLargeFrameIntf.push_back(p);
+ } else {
+ paramsToIntf.push_back(p);
+ }
+ }
+ c2_status_t err1 = C2_OK;
+ if (paramsToIntf.size() > 0) {
+ err1 = mIntf->config_vb(paramsToIntf, mayBlock, failures);
+ }
+ if (err1 != C2_OK) {
+ LOG(ERROR) << "We have a failed config";
+ }
+ c2_status_t err2 = C2_OK;
+ if (paramsToLargeFrameIntf.size() > 0) {
+ err2 = mMultiAccessUnitIntf->config(
+ paramsToLargeFrameIntf, mayBlock, failures);
+ }
+ // TODO: correct failure vector
+ return err1 != C2_OK ? err1 : err2;
}
virtual c2_status_t query(
@@ -63,33 +90,100 @@
c2_blocking_t mayBlock,
std::vector<std::unique_ptr<C2Param>>* const params
) const override {
- return mIntf->query_vb({}, indices, mayBlock, params);
+ c2_status_t err = C2_OK;
+ if (mMultiAccessUnitIntf == nullptr) {
+ err = mIntf->query_vb({}, indices, mayBlock, params);
+ return err;
+ }
+ std::vector<C2Param::Index> paramsToIntf;
+ std::vector<C2Param::Index> paramsToLargeFrameIntf;
+ for (auto &i : indices) {
+ if (mMultiAccessUnitIntf->isParamSupported(i)) {
+ paramsToLargeFrameIntf.push_back(i);
+ } else {
+ paramsToIntf.push_back(i);
+ }
+ }
+ c2_status_t err1 = C2_OK;
+ if (paramsToIntf.size() > 0) {
+ err1 = mIntf->query_vb({}, paramsToIntf, mayBlock, params);
+ }
+ c2_status_t err2 = C2_OK;
+ if (paramsToLargeFrameIntf.size() > 0) {
+ err2 = mMultiAccessUnitIntf->query(
+ {}, paramsToLargeFrameIntf, mayBlock, params);
+ }
+ // TODO: correct failure vector
+ return err1 != C2_OK ? err1 : err2;
}
virtual c2_status_t querySupportedParams(
std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
) const override {
- return mIntf->querySupportedParams_nb(params);
+ c2_status_t err = mIntf->querySupportedParams_nb(params);
+ if (mMultiAccessUnitIntf != nullptr) {
+ err = mMultiAccessUnitIntf->querySupportedParams(params);
+ }
+ return err;
}
virtual c2_status_t querySupportedValues(
std::vector<C2FieldSupportedValuesQuery>& fields,
c2_blocking_t mayBlock) const override {
- return mIntf->querySupportedValues_vb(fields, mayBlock);
+ if (mMultiAccessUnitIntf == nullptr) {
+ return mIntf->querySupportedValues_vb(fields, mayBlock);
+ }
+ std::vector<C2FieldSupportedValuesQuery> dup = fields;
+ std::vector<C2FieldSupportedValuesQuery> queryArray[2];
+ std::map<C2ParamField, std::pair<uint32_t, size_t>> queryMap;
+ c2_status_t err = C2_OK;
+ for (int i = 0; i < fields.size(); i++) {
+ const C2ParamField &field = fields[i].field();
+ uint32_t queryArrayIdx = 1;
+ if (mMultiAccessUnitIntf->isValidField(field)) {
+ queryArrayIdx = 0;
+ }
+ queryMap[field] = std::make_pair(
+ queryArrayIdx, queryArray[queryArrayIdx].size());
+ queryArray[queryArrayIdx].push_back(fields[i]);
+ }
+ if (queryArray[0].size() > 0) {
+ err = mMultiAccessUnitIntf->querySupportedValues(queryArray[0], mayBlock);
+ }
+ if (queryArray[1].size() > 0) {
+ err = mIntf->querySupportedValues_vb(queryArray[1], mayBlock);
+ }
+ for (int i = 0; i < dup.size(); i++) {
+ auto it = queryMap.find(dup[i].field());
+ if (it != queryMap.end()) {
+ std::pair<uint32_t, size_t> queryid = it->second;
+ fields[i] = queryArray[queryid.first][queryid.second];
+ }
+ }
+ return err;
}
protected:
std::shared_ptr<C2ComponentInterface> mIntf;
+ std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
};
} // unnamed namespace
+
// ComponentInterface
ComponentInterface::ComponentInterface(
const std::shared_ptr<C2ComponentInterface>& intf,
+ const std::shared_ptr<ParameterCache>& cache):ComponentInterface(intf, nullptr, cache) {
+}
+
+ComponentInterface::ComponentInterface(
+ const std::shared_ptr<C2ComponentInterface>& intf,
+ const std::shared_ptr<MultiAccessUnitInterface>& multiAccessUnitIntf,
const std::shared_ptr<ParameterCache>& cache)
: mInterface{intf},
- mConfigurable{new CachedConfigurable(std::make_unique<CompIntf>(intf))} {
+ mConfigurable{new CachedConfigurable(
+ std::make_unique<CompIntf>(intf, multiAccessUnitIntf))} {
mInit = mConfigurable->init(cache);
}
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
index 1c0d5b0..988ab6f 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
@@ -194,6 +194,36 @@
}
#endif
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface) {
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+ if (c2interface == nullptr) {
+ return nullptr;
+ }
+ if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+ c2_status_t err = C2_OK;
+ C2ComponentDomainSetting domain;
+ std::vector<std::unique_ptr<C2Param>> heapParams;
+ err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+ if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+ std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+ bool isComponentSupportsLargeAudioFrame = false;
+ c2interface->querySupportedParams_nb(¶ms);
+ for (const auto ¶mDesc : params) {
+ if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+ isComponentSupportsLargeAudioFrame = true;
+ break;
+ }
+ }
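+ // Wrap only audio components that do not already advertise
+ // C2_PARAMKEY_OUTPUT_LARGE_FRAME; components with native large-frame support
+ // handle the aggregation themselves.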
+ if (!isComponentSupportsLargeAudioFrame) {
+ multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+ c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+ }
+ }
+ }
+ return multiAccessUnitIntf;
+}
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore
Return<void> ComponentStore::createComponent(
const hidl_string& name,
@@ -242,7 +272,9 @@
c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
#endif
onInterfaceLoaded(c2interface);
- interface = new ComponentInterface(c2interface, mParameterCache);
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+ tryCreateMultiAccessUnitInterface(c2interface);
+ interface = new ComponentInterface(c2interface, multiAccessUnitIntf, mParameterCache);
}
_hidl_cb(static_cast<Status>(res), interface);
return Void();
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
index 3f55618..aed94ec 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
@@ -30,6 +30,8 @@
#include <hidl/Status.h>
#include <hwbinder/IBinder.h>
+#include <codec2/common/MultiAccessUnitHelper.h>
+
#include <C2Component.h>
#include <C2Buffer.h>
#include <C2.h>
@@ -54,6 +56,8 @@
using ::android::hardware::IBinder;
using ::android::sp;
using ::android::wp;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
struct ComponentStore;
@@ -113,6 +117,8 @@
std::shared_ptr<C2Component> mComponent;
sp<ComponentInterface> mInterface;
sp<IComponentListener> mListener;
+ std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+ std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
sp<ComponentStore> mStore;
::android::hardware::media::c2::V1_0::utils::DefaultBufferPoolSender
mBufferPoolSender;
@@ -135,6 +141,8 @@
struct Listener;
+ friend struct MultiAccessUnitListener;
+
using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
sp<HwDeathRecipient> mDeathRecipient;
bool mClientDied{false};
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
index 9102f92..2995faf 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentInterface.h
@@ -23,10 +23,15 @@
#include <android/hardware/media/c2/1.0/IComponentInterface.h>
#include <hidl/Status.h>
+#include <codec2/common/MultiAccessUnitHelper.h>
+
#include <C2Component.h>
#include <C2Buffer.h>
+#include <C2Config.h>
+#include <util/C2InterfaceHelper.h>
#include <C2.h>
+#include <set>
#include <memory>
namespace android {
@@ -39,6 +44,7 @@
using ::android::hardware::Return;
using ::android::hardware::Void;
using ::android::sp;
+using ::android::MultiAccessUnitInterface;
struct ComponentStore;
@@ -46,6 +52,11 @@
ComponentInterface(
const std::shared_ptr<C2ComponentInterface>& interface,
const std::shared_ptr<ParameterCache>& cache);
+
+ ComponentInterface(
+ const std::shared_ptr<C2ComponentInterface>& interface,
+ const std::shared_ptr<MultiAccessUnitInterface>& largeBufferIntf,
+ const std::shared_ptr<ParameterCache>& cache);
c2_status_t status() const;
virtual Return<sp<IConfigurable>> getConfigurable() override;
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
index 27e2a05..b5d85da 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
@@ -78,6 +78,9 @@
static std::shared_ptr<FilterWrapper> GetFilterWrapper();
+ std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface);
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
virtual Return<void> createComponent(
const hidl_string& name,
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
index 2054fe6..ccdde5e 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
@@ -15,6 +15,7 @@
//
package {
+ default_team: "trendy_team_media_codec_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
index 624aad2..2b1bca0 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
@@ -15,6 +15,7 @@
*/
package {
+ default_team: "trendy_team_media_codec_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
index 0f07077..564de47 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_codec_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index 92b0bf5..2da6501 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -17,7 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "media_c2_hidl_test_common"
#include <stdio.h>
-
+#include <numeric>
#include "media_c2_hidl_test_common.h"
#include <android/hardware/media/c2/1.0/IComponentStore.h>
@@ -221,6 +221,32 @@
return parameters;
}
+constexpr static std::initializer_list<std::pair<uint32_t, uint32_t>> flagList{
+ {(1 << VTS_BIT_FLAG_SYNC_FRAME), 0},
+ {(1 << VTS_BIT_FLAG_CSD_FRAME), C2FrameData::FLAG_CODEC_CONFIG},
+};
+
+/*
+ * This is a conversion function that can be used to convert
+ * VTS flags to C2 flags and vice-versa based on the initializer list.
+ * @param flags can be a C2 flag or a VTS flag
+ * @param toC2 if true, converts flags to a C2 flag
+ * if false, converts flags to a VTS flag
+ */
+static uint32_t convertFlags(uint32_t flags, bool toC2) {
+ return std::transform_reduce(
+ flagList.begin(), flagList.end(),
+ 0u,
+ std::bit_or{},
+ [flags, toC2](const std::pair<uint32_t, uint32_t> &entry) {
+ if (toC2) {
+ return (flags & entry.first) ? entry.second : 0;
+ } else {
+ return (flags & entry.second) ? entry.first : 0;
+ }
+ });
+}
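+// Example with the flagList above: convertFlags(1 << VTS_BIT_FLAG_CSD_FRAME, true)
+// yields C2FrameData::FLAG_CODEC_CONFIG, and convertFlags(C2FrameData::FLAG_CODEC_CONFIG,
+// false) maps it back; a VTS_BIT_FLAG_SYNC_FRAME bit maps to 0 as no C2 flag is listed
+// for it here.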
+
// Populate Info vector and return number of CSDs
int32_t populateInfoVector(std::string info, android::Vector<FrameInfo>* frameInfo,
bool timestampDevTest, std::list<uint64_t>* timestampUslist) {
@@ -258,9 +284,9 @@
eleInfo >> bytesCount;
eleInfo >> flags;
eleInfo >> timestamp;
- vtsFlags = mapInfoFlagstoVtsFlags(flags);
+ uint32_t c2Flags = convertFlags(flags, true);
frameInfo->editItemAt(frameInfo->size() - 1).largeFrameInfo.push_back(
- {vtsFlags, static_cast<uint32_t>(bytesCount), timestamp});
+ {c2Flags, static_cast<uint32_t>(bytesCount), timestamp});
}
}
}
@@ -298,5 +324,8 @@
else if (infoFlags == 0x10) return (1 << VTS_BIT_FLAG_NO_SHOW_FRAME);
else if (infoFlags == 0x20) return (1 << VTS_BIT_FLAG_CSD_FRAME);
else if (infoFlags == 0x40) return (1 << VTS_BIT_FLAG_LARGE_AUDIO_FRAME);
+ else if (infoFlags == 0x80) {
+ return (1 << VTS_BIT_FLAG_LARGE_AUDIO_FRAME) | (1 << VTS_BIT_FLAG_SYNC_FRAME);
+ }
return 0xFF;
}
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
index cc019da..0640f02 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
@@ -15,6 +15,7 @@
*/
package {
+ default_team: "trendy_team_media_codec_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index 275a721..ab47b7c 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "codec2_hidl_hal_component_test"
#include <android-base/logging.h>
+#include <android/binder_process.h>
#include <gtest/gtest.h>
#include <hidl/GtestPrinter.h>
@@ -382,5 +383,6 @@
}
::testing::InitGoogleTest(&argc, argv);
+ ABinderProcess_startThreadPool();
return RUN_ALL_TESTS();
}
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
index 40f5201..5e52fde 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
@@ -15,6 +15,7 @@
*/
package {
+ default_team: "trendy_team_media_codec_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
index 47ceed5..d1f0fb5 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "codec2_hidl_hal_master_test"
#include <android-base/logging.h>
+#include <android-base/properties.h>
#include <gtest/gtest.h>
#include <hidl/GtestPrinter.h>
#include <hidl/ServiceManagement.h>
@@ -82,6 +83,17 @@
}
}
+TEST_P(Codec2MasterHalTest, MustUseAidlBeyond202404) {
+ static int sVendorApiLevel = android::base::GetIntProperty("ro.vendor.api_level", 0);
+ if (sVendorApiLevel < 202404) {
+ GTEST_SKIP() << "vendor api level less than 202404: " << sVendorApiLevel;
+ }
+ ALOGV("MustUseAidlBeyond202404 Test");
+
+ EXPECT_NE(mClient->getAidlBase(), nullptr) << "android.hardware.media.c2 MUST use AIDL "
+ << "for chipsets launching at 202404 or above";
+}
+
} // anonymous namespace
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2MasterHalTest,
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le_largeframe.info b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le_largeframe.info
index 291e323..ee59a8e 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le_largeframe.info
+++ b/media/codec2/hal/hidl/1.0/vts/functional/res/bbb_raw_1ch_8khz_s32le_largeframe.info
@@ -1,5 +1,5 @@
-16384 64 0 1 16384 1 0
-49152 64 1024000 3 16384 1 1024000 16384 1 2048000 16384 1 3072000
-32768 64 4096000 2 16384 1 4096000 16384 1 5120000
-49152 64 6144000 3 16384 1 6144000 16384 1 7168000 16384 1 8192000
-10924 64 9216000 1 10924 1 9216000
+16384 128 0 1 16384 1 0
+49152 128 1024000 3 16384 1 1024000 16384 1 2048000 16384 1 3072000
+32768 128 4096000 2 16384 1 4096000 16384 1 5120000
+49152 128 6144000 3 16384 1 6144000 16384 1 7168000 16384 1 8192000
+10924 128 9216000 1 10924 1 9216000
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
index ecc4f9d..d04c2f6 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
@@ -15,6 +15,7 @@
*/
package {
+ default_team: "trendy_team_media_codec_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.1/utils/Android.bp b/media/codec2/hal/hidl/1.1/utils/Android.bp
index 4f86511..d8b5db5 100644
--- a/media/codec2/hal/hidl/1.1/utils/Android.bp
+++ b/media/codec2/hal/hidl/1.1/utils/Android.bp
@@ -54,7 +54,6 @@
],
}
-
// DO NOT DEPEND ON THIS DIRECTLY
// use libcodec2-hidl-defaults instead
cc_library {
@@ -66,7 +65,6 @@
"com.android.media.swcodec",
],
-
defaults: ["hidl_defaults"],
srcs: [
@@ -97,6 +95,7 @@
"android.hardware.media.omx@1.0",
"libbase",
"libcodec2",
+ "libcodec2_hal_common",
"libcodec2_hidl@1.0",
"libcodec2_hidl_plugin_stub",
"libcodec2_vndk",
@@ -173,4 +172,3 @@
"libhidlbase",
],
}
-
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index d0f4f19..d34d84e 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -29,6 +29,8 @@
#include <hidl/HidlBinderSupport.h>
#include <utils/Timers.h>
+#include <codec2/common/MultiAccessUnitHelper.h>
+
#include <C2BqBufferPriv.h>
#include <C2Debug.h>
#include <C2PlatformSupport.h>
@@ -44,6 +46,8 @@
namespace utils {
using namespace ::android;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
// ComponentListener wrapper
struct Component::Listener : public C2Component::Listener {
@@ -135,6 +139,52 @@
wp<IComponentListener> mListener;
};
+// Component listener for handling multiple access-units
+struct MultiAccessUnitListener : public Component::Listener {
+ MultiAccessUnitListener(const sp<Component> &component,
+ const std::shared_ptr<MultiAccessUnitHelper> &helper):
+ Listener(component), mHelper(helper) {
+ }
+
+ virtual void onError_nb(
+ std::weak_ptr<C2Component> c2component,
+ uint32_t errorCode) override {
+ if (mHelper) {
+ std::list<std::unique_ptr<C2Work>> worklist;
+ mHelper->error(&worklist);
+ if (!worklist.empty()) {
+ Listener::onWorkDone_nb(c2component, std::move(worklist));
+ }
+ }
+ Listener::onError_nb(c2component, errorCode);
+ }
+
+ virtual void onTripped_nb(
+ std::weak_ptr<C2Component> c2component,
+ std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+ ) override {
+ Listener::onTripped_nb(c2component,
+ c2settingResult);
+ }
+
+ virtual void onWorkDone_nb(
+ std::weak_ptr<C2Component> c2component,
+ std::list<std::unique_ptr<C2Work>> c2workItems) override {
+ if (mHelper) {
+ std::list<std::unique_ptr<C2Work>> processedWork;
+ mHelper->gather(c2workItems, &processedWork);
+ if (!processedWork.empty()) {
+ Listener::onWorkDone_nb(c2component, std::move(processedWork));
+ }
+ } else {
+ Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+ }
+ }
+
+ protected:
+ std::shared_ptr<MultiAccessUnitHelper> mHelper;
+};
+
// Component::Sink
struct Component::Sink : public IInputSink {
std::shared_ptr<Component> mComponent;
@@ -208,13 +258,14 @@
const sp<::android::hardware::media::bufferpool::V2_0::
IClientManager>& clientPoolManager)
: mComponent{component},
- mInterface{new ComponentInterface(component->intf(),
- store->getParameterCache())},
mListener{listener},
mStore{store},
mBufferPoolSender{clientPoolManager} {
// Retrieve supported parameters from store
// TODO: We could cache this per component/interface type
+ mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+ mInterface = new ComponentInterface(
+ component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
mInit = mInterface->status();
}
@@ -252,6 +303,19 @@
registerFrameData(mListener, work->input);
}
}
+ c2_status_t err = C2_OK;
+ if (mMultiAccessUnitHelper) {
+ std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+ mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+ for (auto &c2worklist : c2worklists) {
+ err = mComponent->queue_nb(&c2worklist);
+ if (err != C2_OK) {
+ LOG(ERROR) << "Error Queuing to component.";
+ break;
+ }
+ }
+ return static_cast<Status>(err);
+ }
return static_cast<Status>(mComponent->queue_nb(&c2works));
}
@@ -261,6 +325,9 @@
c2_status_t c2res = mComponent->flush_sm(
C2Component::FLUSH_COMPONENT,
&c2flushedWorks);
+ if (mMultiAccessUnitHelper) {
+ c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+ }
// Unregister input buffers.
for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
@@ -469,6 +536,9 @@
std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
mBlockPools.clear();
}
+ if (mMultiAccessUnitHelper) {
+ mMultiAccessUnitHelper->reset();
+ }
InputBufferManager::unregisterFrameData(mListener);
return status;
}
@@ -479,6 +549,9 @@
std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
mBlockPools.clear();
}
+ if (mMultiAccessUnitHelper) {
+ mMultiAccessUnitHelper->reset();
+ }
InputBufferManager::unregisterFrameData(mListener);
return status;
}
@@ -508,7 +581,24 @@
}
void Component::initListener(const sp<Component>& self) {
- std::shared_ptr<C2Component::Listener> c2listener =
+ std::shared_ptr<C2Component::Listener> c2listener;
+ if (mMultiAccessUnitIntf) {
+ std::shared_ptr<C2Allocator> allocator;
+ std::shared_ptr<C2BlockPool> linearPool;
+ std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+ if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+ ::android::C2PlatformAllocatorDesc desc;
+ desc.allocatorId = allocator->getId();
+ if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+ if (linearPool) {
+ mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+ mMultiAccessUnitIntf, linearPool);
+ }
+ }
+ }
+ }
+ c2listener = mMultiAccessUnitHelper ?
+ std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
std::make_shared<Listener>(self);
c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
if (res != C2_OK) {
diff --git a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
index d47abdd..46af809 100644
--- a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
@@ -194,6 +194,37 @@
}
#endif
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface) {
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+ if (c2interface == nullptr) {
+ return nullptr;
+ }
+ if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+ c2_status_t err = C2_OK;
+ C2ComponentDomainSetting domain;
+ std::vector<std::unique_ptr<C2Param>> heapParams;
+ err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+ if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+ std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+ bool isComponentSupportsLargeAudioFrame = false;
+ c2interface->querySupportedParams_nb(¶ms);
+ for (const auto ¶mDesc : params) {
+ if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+ isComponentSupportsLargeAudioFrame = true;
+ break;
+ }
+ }
+
+ if (!isComponentSupportsLargeAudioFrame) {
+ multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+ c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+ }
+ }
+ }
+ return multiAccessUnitIntf;
+}
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore
Return<void> ComponentStore::createComponent(
const hidl_string& name,
@@ -241,7 +272,10 @@
c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
#endif
onInterfaceLoaded(c2interface);
- interface = new ComponentInterface(c2interface, mParameterCache);
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+ tryCreateMultiAccessUnitInterface(c2interface);
+ interface = new ComponentInterface(
+ c2interface, multiAccessUnitIntf, mParameterCache);
}
_hidl_cb(static_cast<Status>(res), interface);
return Void();
diff --git a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
index f16de24..8f0478f 100644
--- a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
+++ b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
@@ -29,6 +29,8 @@
#include <hidl/Status.h>
#include <hwbinder/IBinder.h>
+#include <codec2/common/MultiAccessUnitHelper.h>
+
#include <C2Component.h>
#include <C2Buffer.h>
#include <C2.h>
@@ -57,6 +59,8 @@
using ::android::hardware::IBinder;
using ::android::sp;
using ::android::wp;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
struct ComponentStore;
@@ -118,6 +122,8 @@
std::shared_ptr<C2Component> mComponent;
sp<ComponentInterface> mInterface;
sp<IComponentListener> mListener;
+ std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+ std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
sp<ComponentStore> mStore;
::android::hardware::media::c2::V1_1::utils::DefaultBufferPoolSender
mBufferPoolSender;
@@ -140,6 +146,8 @@
struct Listener;
+ friend struct MultiAccessUnitListener;
+
using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
sp<HwDeathRecipient> mDeathRecipient;
bool mClientDied{false};
diff --git a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
index f6daee7..85862a9 100644
--- a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
@@ -79,6 +79,9 @@
static std::shared_ptr<FilterWrapper> GetFilterWrapper();
+ std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface);
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
virtual Return<void> createComponent(
const hidl_string& name,
diff --git a/media/codec2/hal/hidl/1.2/utils/Android.bp b/media/codec2/hal/hidl/1.2/utils/Android.bp
index b92dc07..a339946 100644
--- a/media/codec2/hal/hidl/1.2/utils/Android.bp
+++ b/media/codec2/hal/hidl/1.2/utils/Android.bp
@@ -58,7 +58,6 @@
],
}
-
// DO NOT DEPEND ON THIS DIRECTLY
// use libcodec2-hidl-defaults instead
cc_library {
@@ -102,6 +101,7 @@
"android.hardware.media.omx@1.0",
"libbase",
"libcodec2",
+ "libcodec2_hal_common",
"libcodec2_hidl@1.0",
"libcodec2_hidl@1.1",
"libcodec2_hidl_plugin_stub",
@@ -197,4 +197,3 @@
name: "libcodec2-hidl-client-defaults",
defaults: ["libcodec2-hidl-client-defaults@1.2"],
}
-
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index 036c900..f78e827 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -44,6 +44,8 @@
namespace utils {
using namespace ::android;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
// ComponentListener wrapper
struct Component::Listener : public C2Component::Listener {
@@ -135,6 +137,52 @@
wp<IComponentListener> mListener;
};
+// Component listener for handling multiple access-units
+struct MultiAccessUnitListener : public Component::Listener {
+ MultiAccessUnitListener(const sp<Component> &component,
+ const std::shared_ptr<MultiAccessUnitHelper> &helper):
+ Listener(component), mHelper(helper) {
+ }
+
+ virtual void onError_nb(
+ std::weak_ptr<C2Component> c2component,
+ uint32_t errorCode) override {
+ if (mHelper) {
+ std::list<std::unique_ptr<C2Work>> worklist;
+ mHelper->error(&worklist);
+ if (!worklist.empty()) {
+ Listener::onWorkDone_nb(c2component, std::move(worklist));
+ }
+ }
+ Listener::onError_nb(c2component, errorCode);
+ }
+
+ virtual void onTripped_nb(
+ std::weak_ptr<C2Component> c2component,
+ std::vector<std::shared_ptr<C2SettingResult>> c2settingResult
+ ) override {
+ Listener::onTripped_nb(c2component,
+ c2settingResult);
+ }
+
+ virtual void onWorkDone_nb(
+ std::weak_ptr<C2Component> c2component,
+ std::list<std::unique_ptr<C2Work>> c2workItems) override {
+ if (mHelper) {
+ std::list<std::unique_ptr<C2Work>> processedWork;
+ mHelper->gather(c2workItems, &processedWork);
+ if (!processedWork.empty()) {
+ Listener::onWorkDone_nb(c2component, std::move(processedWork));
+ }
+ } else {
+ Listener::onWorkDone_nb(c2component, std::move(c2workItems));
+ }
+ }
+
+ protected:
+ std::shared_ptr<MultiAccessUnitHelper> mHelper;
+};
+
// Component::Sink
struct Component::Sink : public IInputSink {
std::shared_ptr<Component> mComponent;
@@ -208,13 +256,14 @@
const sp<::android::hardware::media::bufferpool::V2_0::
IClientManager>& clientPoolManager)
: mComponent{component},
- mInterface{new ComponentInterface(component->intf(),
- store->getParameterCache())},
mListener{listener},
mStore{store},
mBufferPoolSender{clientPoolManager} {
// Retrieve supported parameters from store
// TODO: We could cache this per component/interface type
+ mMultiAccessUnitIntf = store->tryCreateMultiAccessUnitInterface(component->intf());
+ mInterface = new ComponentInterface(
+ component->intf(), mMultiAccessUnitIntf, store->getParameterCache());
mInit = mInterface->status();
}
@@ -252,7 +301,19 @@
registerFrameData(mListener, work->input);
}
}
-
+ c2_status_t err = C2_OK;
+ if (mMultiAccessUnitHelper) {
+ std::list<std::list<std::unique_ptr<C2Work>>> c2worklists;
+ mMultiAccessUnitHelper->scatter(c2works, &c2worklists);
+ for (auto &c2worklist : c2worklists) {
+ err = mComponent->queue_nb(&c2worklist);
+ if (err != C2_OK) {
+ LOG(ERROR) << "Error Queuing to component.";
+ break;
+ }
+ }
+ return static_cast<Status>(err);
+ }
return static_cast<Status>(mComponent->queue_nb(&c2works));
}
@@ -261,7 +322,9 @@
c2_status_t c2res = mComponent->flush_sm(
C2Component::FLUSH_COMPONENT,
&c2flushedWorks);
-
+ if (mMultiAccessUnitHelper) {
+ c2res = mMultiAccessUnitHelper->flush(&c2flushedWorks);
+ }
// Unregister input buffers.
for (const std::unique_ptr<C2Work>& work : c2flushedWorks) {
if (work) {
@@ -469,6 +532,9 @@
std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
mBlockPools.clear();
}
+ if (mMultiAccessUnitHelper) {
+ mMultiAccessUnitHelper->reset();
+ }
InputBufferManager::unregisterFrameData(mListener);
return status;
}
@@ -479,6 +545,9 @@
std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
mBlockPools.clear();
}
+ if (mMultiAccessUnitHelper) {
+ mMultiAccessUnitHelper->reset();
+ }
InputBufferManager::unregisterFrameData(mListener);
return status;
}
@@ -539,7 +608,24 @@
}
void Component::initListener(const sp<Component>& self) {
- std::shared_ptr<C2Component::Listener> c2listener =
+ std::shared_ptr<C2Component::Listener> c2listener;
+ if (mMultiAccessUnitIntf) {
+ std::shared_ptr<C2Allocator> allocator;
+ std::shared_ptr<C2BlockPool> linearPool;
+ std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+ if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+ ::android::C2PlatformAllocatorDesc desc;
+ desc.allocatorId = allocator->getId();
+ if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+ if (linearPool) {
+ mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+ mMultiAccessUnitIntf, linearPool);
+ }
+ }
+ }
+ }
+ c2listener = mMultiAccessUnitHelper ?
+ std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
std::make_shared<Listener>(self);
c2_status_t res = mComponent->setListener_vb(c2listener, C2_DONT_BLOCK);
if (res != C2_OK) {
diff --git a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
index 9fac5d5..f89c835 100644
--- a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
@@ -194,6 +194,36 @@
}
#endif
+std::shared_ptr<MultiAccessUnitInterface> ComponentStore::tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface) {
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf = nullptr;
+ if (c2interface == nullptr) {
+ return nullptr;
+ }
+ if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+ c2_status_t err = C2_OK;
+ C2ComponentDomainSetting domain;
+ std::vector<std::unique_ptr<C2Param>> heapParams;
+ err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
+ if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+ std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+ bool isComponentSupportsLargeAudioFrame = false;
+ c2interface->querySupportedParams_nb(¶ms);
+ for (const auto ¶mDesc : params) {
+ if (paramDesc->name().compare(C2_PARAMKEY_OUTPUT_LARGE_FRAME) == 0) {
+ isComponentSupportsLargeAudioFrame = true;
+ break;
+ }
+ }
+ if (!isComponentSupportsLargeAudioFrame) {
+ multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
+ c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+ }
+ }
+ }
+ return multiAccessUnitIntf;
+}
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore
Return<void> ComponentStore::createComponent(
const hidl_string& name,
@@ -241,7 +271,9 @@
c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
#endif
onInterfaceLoaded(c2interface);
- interface = new ComponentInterface(c2interface, mParameterCache);
+ std::shared_ptr<MultiAccessUnitInterface> multiAccessUnitIntf =
+ tryCreateMultiAccessUnitInterface(c2interface);
+ interface = new ComponentInterface(c2interface, multiAccessUnitIntf, mParameterCache);
}
_hidl_cb(static_cast<Status>(res), interface);
return Void();
diff --git a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
index 6a73392..f2b77bc 100644
--- a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
+++ b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -29,6 +29,8 @@
#include <hidl/Status.h>
#include <hwbinder/IBinder.h>
+#include <codec2/common/MultiAccessUnitHelper.h>
+
#include <C2Component.h>
#include <C2Buffer.h>
#include <C2.h>
@@ -57,6 +59,8 @@
using ::android::hardware::IBinder;
using ::android::sp;
using ::android::wp;
+using ::android::MultiAccessUnitInterface;
+using ::android::MultiAccessUnitHelper;
struct ComponentStore;
@@ -123,6 +127,8 @@
std::shared_ptr<C2Component> mComponent;
sp<ComponentInterface> mInterface;
sp<IComponentListener> mListener;
+ std::shared_ptr<MultiAccessUnitInterface> mMultiAccessUnitIntf;
+ std::shared_ptr<MultiAccessUnitHelper> mMultiAccessUnitHelper;
sp<ComponentStore> mStore;
::android::hardware::media::c2::V1_2::utils::DefaultBufferPoolSender
mBufferPoolSender;
@@ -145,6 +151,8 @@
struct Listener;
+ friend struct MultiAccessUnitListener;
+
using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
sp<HwDeathRecipient> mDeathRecipient;
bool mClientDied{false};
diff --git a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
index e95a651..c08fce4 100644
--- a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
@@ -79,6 +79,9 @@
static std::shared_ptr<FilterWrapper> GetFilterWrapper();
+ std::shared_ptr<MultiAccessUnitInterface> tryCreateMultiAccessUnitInterface(
+ const std::shared_ptr<C2ComponentInterface> &c2interface);
+
// Methods from ::android::hardware::media::c2::V1_0::IComponentStore.
virtual Return<void> createComponent(
const hidl_string& name,
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index d867eb1..4de2347 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -45,6 +45,8 @@
static_libs: [
"libSurfaceFlingerProperties",
+ "aconfig_mediacodec_flags_c_lib",
+ "android.media.codec-aconfig-cc",
],
shared_libs: [
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 6e6d3f7..f58dc65 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -90,6 +90,28 @@
return v == "true";
}
+// Flags can come with individual BufferInfos
+// when used with large frame audio
+constexpr static std::initializer_list<std::pair<uint32_t, uint32_t>> flagList = {
+ {BUFFER_FLAG_CODEC_CONFIG, C2FrameData::FLAG_CODEC_CONFIG},
+ {BUFFER_FLAG_END_OF_STREAM, C2FrameData::FLAG_END_OF_STREAM},
+ {BUFFER_FLAG_DECODE_ONLY, C2FrameData::FLAG_DROP_FRAME}
+};
+
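+// convertFlags(flags, true) maps MediaCodec BUFFER_FLAG_* bits to C2FrameData flags;
+// convertFlags(flags, false) maps in the opposite direction. Bits with no mapping are dropped.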
+static uint32_t convertFlags(uint32_t flags, bool toC2) {
+ return std::transform_reduce(
+ flagList.begin(), flagList.end(),
+ 0u,
+ std::bit_or{},
+ [flags, toC2](const std::pair<uint32_t, uint32_t> &entry) {
+ if (toC2) {
+ return (flags & entry.first) ? entry.second : 0;
+ } else {
+ return (flags & entry.second) ? entry.first : 0;
+ }
+ });
+}
+
} // namespace
CCodecBufferChannel::QueueGuard::QueueGuard(
@@ -245,7 +267,8 @@
if (buffer->meta()->findInt32("decode-only", &tmp) && tmp) {
flags |= C2FrameData::FLAG_DROP_FRAME;
}
- ALOGV("[%s] queueInputBuffer: buffer->size() = %zu", mName, buffer->size());
+ ALOGV("[%s] queueInputBuffer: buffer->size() = %zu time: %lld",
+ mName, buffer->size(), (long long)timeUs);
std::list<std::unique_ptr<C2Work>> items;
std::unique_ptr<C2Work> work(new C2Work);
work->input.ordinal.timestamp = timeUs;
@@ -296,6 +319,34 @@
uint64_t frameIndex = work->input.ordinal.frameIndex.peeku();
output->rotation[frameIndex] = rotation;
}
+ sp<RefBase> obj;
+ if (buffer->meta()->findObject("accessUnitInfo", &obj)) {
+ ALOGV("Filling C2Info from multiple access units");
+ sp<WrapperObject<std::vector<AccessUnitInfo>>> infos{
+ (decltype(infos.get()))obj.get()};
+ std::vector<AccessUnitInfo> &accessUnitInfoVec = infos->value;
+ std::vector<C2AccessUnitInfosStruct> multipleAccessUnitInfos;
+ uint32_t outFlags = 0;
+ for (int i = 0; i < accessUnitInfoVec.size(); i++) {
+ outFlags = 0;
+ outFlags = convertFlags(accessUnitInfoVec[i].mFlags, true);
+ if (eos && (outFlags & C2FrameData::FLAG_END_OF_STREAM)) {
+ outFlags &= (~C2FrameData::FLAG_END_OF_STREAM);
+ }
+ multipleAccessUnitInfos.emplace_back(
+ outFlags,
+ accessUnitInfoVec[i].mSize,
+ accessUnitInfoVec[i].mTimestamp);
+ ALOGV("%d) flags: %d, size: %d, time: %llu",
+ i, outFlags, accessUnitInfoVec[i].mSize,
+ (long long)accessUnitInfoVec[i].mTimestamp);
+
+ }
+ const std::shared_ptr<C2AccessUnitInfos::input> c2AccessUnitInfos =
+ C2AccessUnitInfos::input::AllocShared(
+ multipleAccessUnitInfos.size(), 0u, multipleAccessUnitInfos);
+ c2buffer->setInfo(c2AccessUnitInfos);
+ }
work->input.buffers.push_back(c2buffer);
if (encryptedBlock) {
work->input.infoBuffers.emplace_back(C2InfoBuffer::CreateLinearBuffer(
@@ -432,6 +483,131 @@
return heapSeqNum;
}
+typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+typedef WrapperObject<std::vector<std::unique_ptr<CodecCryptoInfo>>> CryptoInfosWrapper;
+status_t CCodecBufferChannel::attachEncryptedBuffers(
+ const sp<hardware::HidlMemory> &memory,
+ size_t offset,
+ const sp<MediaCodecBuffer> &buffer,
+ bool secure,
+ AString* errorDetailMsg) {
+ static const C2MemoryUsage kDefaultReadWriteUsage{
+ C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ if (!hasCryptoOrDescrambler()) {
+ ALOGE("attachEncryptedBuffers requires Crypto/descrambler object");
+ return -ENOSYS;
+ }
+ size_t size = 0;
+ CHECK(buffer->meta()->findSize("ssize", &size));
+ if (size == 0) {
+ buffer->setRange(0, 0);
+ return OK;
+ }
+ sp<RefBase> obj;
+ CHECK(buffer->meta()->findObject("cryptoInfos", &obj));
+ sp<CryptoInfosWrapper> cryptoInfos{(CryptoInfosWrapper *)obj.get()};
+ CHECK(buffer->meta()->findObject("accessUnitInfo", &obj));
+ sp<BufferInfosWrapper> bufferInfos{(BufferInfosWrapper *)obj.get()};
+ if (secure || (mCrypto == nullptr)) {
+ if (cryptoInfos->value.size() != 1) {
+ ALOGE("Cannot decrypt multiple access units");
+ return -ENOSYS;
+ }
+ // we are dealing with just one cryptoInfo or descrambler.
+ std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[0]);
+ if (info == nullptr) {
+ ALOGE("Cannot decrypt, CryptoInfos are null.");
+ return -ENOSYS;
+ }
+ return attachEncryptedBuffer(
+ memory,
+ secure,
+ info->mKey,
+ info->mIv,
+ info->mMode,
+ info->mPattern,
+ offset,
+ info->mSubSamples,
+ info->mNumSubSamples,
+ buffer,
+ errorDetailMsg);
+ }
+ std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
+ std::shared_ptr<C2LinearBlock> block;
+ c2_status_t err = pool->fetchLinearBlock(
+ size,
+ kDefaultReadWriteUsage,
+ &block);
+ if (err != C2_OK) {
+ ALOGI("[%s] attachEncryptedBuffers: fetchLinearBlock failed: size = %zu (%s) err = %d",
+ mName, size, secure ? "secure" : "non-secure", err);
+ return NO_MEMORY;
+ }
+ ensureDecryptDestination(size);
+ C2WriteView wView = block->map().get();
+ if (wView.error() != C2_OK) {
+ ALOGI("[%s] attachEncryptedBuffers: block map error: %d (non-secure)",
+ mName, wView.error());
+ return UNKNOWN_ERROR;
+ }
+
+ ssize_t result = -1;
+ size_t srcOffset = offset;
+ size_t outBufferSize = 0;
+ uint32_t cryptoInfoIdx = 0;
+ int32_t heapSeqNum = getHeapSeqNum(memory);
+ hardware::drm::V1_0::SharedBuffer src{(uint32_t)heapSeqNum, offset, size};
+ hardware::drm::V1_0::DestinationBuffer dst;
+ dst.type = DrmBufferType::SHARED_MEMORY;
+ IMemoryToSharedBuffer(
+ mDecryptDestination, mHeapSeqNum, &dst.nonsecureMemory);
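+ // Decrypt each access unit into the shared scratch destination, then append the
+ // plaintext into the output linear block at increasing offsets.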
+ for (int i = 0; i < bufferInfos->value.size(); i++) {
+ if (bufferInfos->value[i].mSize > 0) {
+ std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[cryptoInfoIdx++]);
+ src.offset = srcOffset;
+ src.size = bufferInfos->value[i].mSize;
+ result = mCrypto->decrypt(
+ (uint8_t*)info->mKey,
+ (uint8_t*)info->mIv,
+ info->mMode,
+ info->mPattern,
+ src,
+ 0,
+ info->mSubSamples,
+ info->mNumSubSamples,
+ dst,
+ errorDetailMsg);
+ srcOffset += bufferInfos->value[i].mSize;
+ if (result < 0) {
+ ALOGI("[%s] attachEncryptedBuffers: decrypt failed: result = %zd",
+ mName, result);
+ return result;
+ }
+ if (wView.error() == C2_OK) {
+ if (wView.size() < result) {
+ ALOGI("[%s] attachEncryptedBuffers: block size too small:"
+ "size=%u result=%zd (non-secure)", mName, wView.size(), result);
+ return UNKNOWN_ERROR;
+ }
+ memcpy(wView.data(), mDecryptDestination->unsecurePointer(), result);
+ bufferInfos->value[i].mSize = result;
+ wView.setOffset(wView.offset() + result);
+ }
+ outBufferSize += result;
+ }
+ }
+ if (wView.error() == C2_OK) {
+ wView.setOffset(0);
+ }
+ std::shared_ptr<C2Buffer> c2Buffer{C2Buffer::CreateLinearBuffer(
+ block->share(0, outBufferSize, C2Fence{}))};
+ if (!buffer->copy(c2Buffer)) {
+ ALOGI("[%s] attachEncryptedBuffers: buffer copy failed", mName);
+ return -ENOSYS;
+ }
+ return OK;
+}
+
status_t CCodecBufferChannel::attachEncryptedBuffer(
const sp<hardware::HidlMemory> &memory,
bool secure,
@@ -726,6 +902,140 @@
return queueInputBufferInternal(buffer, block, bufferSize);
}
+status_t CCodecBufferChannel::queueSecureInputBuffers(
+ const sp<MediaCodecBuffer> &buffer,
+ bool secure,
+ AString *errorDetailMsg) {
+ QueueGuard guard(mSync);
+ if (!guard.isRunning()) {
+ ALOGD("[%s] No more buffers should be queued at current state.", mName);
+ return -ENOSYS;
+ }
+
+ if (!hasCryptoOrDescrambler()) {
+ ALOGE("queueSecureInputBuffers requires a Crypto/descrambler Object");
+ return -ENOSYS;
+ }
+ sp<RefBase> obj;
+ CHECK(buffer->meta()->findObject("cryptoInfos", &obj));
+ sp<CryptoInfosWrapper> cryptoInfos{(CryptoInfosWrapper *)obj.get()};
+ CHECK(buffer->meta()->findObject("accessUnitInfo", &obj));
+ sp<BufferInfosWrapper> bufferInfos{(BufferInfosWrapper *)obj.get()};
+ if (secure || mCrypto == nullptr) {
+ if (cryptoInfos->value.size() != 1) {
+ ALOGE("Cannot decrypt multiple access units on native handles");
+ return -ENOSYS;
+ }
+ std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[0]);
+ if (info == nullptr) {
+ ALOGE("Cannot decrypt, CryptoInfos are null");
+ return -ENOSYS;
+ }
+ return queueSecureInputBuffer(
+ buffer,
+ secure,
+ info->mKey,
+ info->mIv,
+ info->mMode,
+ info->mPattern,
+ info->mSubSamples,
+ info->mNumSubSamples,
+ errorDetailMsg);
+ }
+ sp<EncryptedLinearBlockBuffer> encryptedBuffer((EncryptedLinearBlockBuffer *)buffer.get());
+
+ std::shared_ptr<C2LinearBlock> block;
+ size_t allocSize = buffer->size();
+ size_t bufferSize = 0;
+ c2_status_t blockRes = C2_OK;
+ bool copied = false;
+ ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+ "CCodecBufferChannel::decrypt(%s)", mName).c_str());
+ if (mSendEncryptedInfoBuffer) {
+ static const C2MemoryUsage kDefaultReadWriteUsage{
+ C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ constexpr int kAllocGranule0 = 1024 * 64;
+ constexpr int kAllocGranule1 = 1024 * 1024;
+ std::shared_ptr<C2BlockPool> pool = mBlockPools.lock()->inputPool;
+ // round up encrypted sizes to limit fragmentation and encourage buffer reuse
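+ // For example, a 70 KiB buffer is rounded up to 128 KiB (64 KiB granules) and a
+ // 1.5 MiB buffer is rounded up to 2 MiB (1 MiB granules).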
+ if (allocSize <= kAllocGranule1) {
+ bufferSize = align(allocSize, kAllocGranule0);
+ } else {
+ bufferSize = align(allocSize, kAllocGranule1);
+ }
+ blockRes = pool->fetchLinearBlock(
+ bufferSize, kDefaultReadWriteUsage, &block);
+
+ if (blockRes == C2_OK) {
+ C2WriteView view = block->map().get();
+ if (view.error() == C2_OK && view.size() == bufferSize) {
+ copied = true;
+ // TODO: only copy clear sections
+ memcpy(view.data(), buffer->data(), allocSize);
+ }
+ }
+ }
+
+ if (!copied) {
+ block.reset();
+ }
+ // size of cryptoInfo and accessUnitInfo should be the same?
+ ssize_t result = -1;
+ size_t srcOffset = 0;
+ size_t outBufferSize = 0;
+ uint32_t cryptoInfoIdx = 0;
+ {
+ // scoped this block to enable destruction of mappedBlock
+ std::unique_ptr<EncryptedLinearBlockBuffer::MappedBlock> mappedBlock = nullptr;
+ hardware::drm::V1_0::DestinationBuffer destination;
+ destination.type = DrmBufferType::SHARED_MEMORY;
+ IMemoryToSharedBuffer(
+ mDecryptDestination, mHeapSeqNum, &destination.nonsecureMemory);
+ encryptedBuffer->getMappedBlock(&mappedBlock);
+ hardware::drm::V1_0::SharedBuffer source;
+ encryptedBuffer->fillSourceBuffer(&source);
+ srcOffset = source.offset;
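+ // Decrypt each access unit into the shared destination buffer and copy the
+ // plaintext back into the mapped block, tracking the decrypted size per unit.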
+ for (int i = 0 ; i < bufferInfos->value.size(); i++) {
+ if (bufferInfos->value[i].mSize > 0) {
+ std::unique_ptr<CodecCryptoInfo> info =
+ std::move(cryptoInfos->value[cryptoInfoIdx++]);
+ if (info->mNumSubSamples == 1
+ && info->mSubSamples[0].mNumBytesOfClearData == 0
+ && info->mSubSamples[0].mNumBytesOfEncryptedData == 0) {
+ // no data so we only populate the bufferInfo
+ result = 0;
+ } else {
+ source.offset = srcOffset;
+ source.size = bufferInfos->value[i].mSize;
+ result = mCrypto->decrypt(
+ (uint8_t*)info->mKey,
+ (uint8_t*)info->mIv,
+ info->mMode,
+ info->mPattern,
+ source,
+ buffer->offset(),
+ info->mSubSamples,
+ info->mNumSubSamples,
+ destination,
+ errorDetailMsg);
+ srcOffset += bufferInfos->value[i].mSize;
+ if (result < 0) {
+ ALOGI("[%s] decrypt failed: result=%zd", mName, result);
+ return result;
+ }
+ if (destination.type == DrmBufferType::SHARED_MEMORY && mappedBlock) {
+ mappedBlock->copyDecryptedContent(mDecryptDestination, result);
+ }
+ bufferInfos->value[i].mSize = result;
+ outBufferSize += result;
+ }
+ }
+ }
+ buffer->setRange(0, outBufferSize);
+ }
+ return queueInputBufferInternal(buffer, block, bufferSize);
+}
+
void CCodecBufferChannel::feedInputBufferIfAvailable() {
QueueGuard guard(mSync);
if (!guard.isRunning()) {
@@ -2265,12 +2575,34 @@
case OutputBuffers::DISCARD:
break;
case OutputBuffers::NOTIFY_CLIENT:
+ {
// TRICKY: we want popped buffers reported in order, so sending
// the callback while holding the lock here. This assumes that
// onOutputBufferAvailable() does not block. onOutputBufferAvailable()
// callbacks are always sent with the Output lock held.
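+ // Attach per-access-unit metadata (flags, size, timestamp) from the C2 buffer
+ // to the client buffer's "accessUnitInfo" entry before notifying the client.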
+ if (c2Buffer) {
+ std::shared_ptr<const C2AccessUnitInfos::output> bufferMetadata =
+ std::static_pointer_cast<const C2AccessUnitInfos::output>(
+ c2Buffer->getInfo(C2AccessUnitInfos::output::PARAM_TYPE));
+ if (bufferMetadata && bufferMetadata->flexCount() > 0) {
+ uint32_t flag = 0;
+ std::vector<AccessUnitInfo> accessUnitInfos;
+ for (int nMeta = 0; nMeta < bufferMetadata->flexCount(); nMeta++) {
+ const C2AccessUnitInfosStruct &bufferMetadataStruct =
+ bufferMetadata->m.values[nMeta];
+ flag = convertFlags(bufferMetadataStruct.flags, false);
+ accessUnitInfos.emplace_back(flag,
+ static_cast<size_t>(bufferMetadataStruct.size),
+ static_cast<size_t>(bufferMetadataStruct.timestamp));
+ }
+ sp<WrapperObject<std::vector<AccessUnitInfo>>> obj{
+ new WrapperObject<std::vector<AccessUnitInfo>>{accessUnitInfos}};
+ outBuffer->meta()->setObject("accessUnitInfo", obj);
+ }
+ }
mCallback->onOutputBufferAvailable(index, outBuffer);
break;
+ }
case OutputBuffers::REALLOCATE:
if (++reallocTryNum > kMaxReallocTry) {
output.unlock();
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 8dc9fb6..b470655 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -73,6 +73,10 @@
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
AString *errorDetailMsg) override;
+ status_t queueSecureInputBuffers(
+ const sp<MediaCodecBuffer> &buffer,
+ bool secure,
+ AString *errorDetailMsg) override;
status_t attachBuffer(
const std::shared_ptr<C2Buffer> &c2Buffer,
const sp<MediaCodecBuffer> &buffer) override;
@@ -88,6 +92,12 @@
size_t numSubSamples,
const sp<MediaCodecBuffer> &buffer,
AString* errorDetailMsg) override;
+ status_t attachEncryptedBuffers(
+ const sp<hardware::HidlMemory> &memory,
+ size_t offset,
+ const sp<MediaCodecBuffer> &buffer,
+ bool secure,
+ AString* errorDetailMsg) override;
status_t renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
void pollForRenderedBuffers() override;
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 670923b..d313f33 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -18,11 +18,14 @@
#define LOG_TAG "CCodecBuffers"
#include <utils/Log.h>
+#include <numeric>
+
#include <C2AllocatorGralloc.h>
#include <C2PlatformSupport.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/CodecBase.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/SkipCutBuffer.h>
#include <mediadrm/ICrypto.h>
@@ -147,6 +150,171 @@
return copy;
}
+// MultiAccessUnitSkipCutBuffer for buffer and bufferInfos
+
+class MultiAccessUnitSkipCutBuffer : public SkipCutBuffer {
+
+public:
+ explicit MultiAccessUnitSkipCutBuffer(
+ int32_t skip, int32_t cut, size_t num16BitChannels):
+ SkipCutBuffer(skip, cut, num16BitChannels),
+ mFrontPaddingDelay(0), mSize(0) {
+ }
+ void clearAll() {
+ mInfos.clear();
+ mFrontPaddingDelay = 0;
+ mSize = 0;
+ SkipCutBuffer::clear();
+ }
+
+ virtual ~MultiAccessUnitSkipCutBuffer() {
+
+ }
+
+ void submitMultiAccessUnits(
+ const sp<MediaCodecBuffer>& buffer,
+ int32_t sampleRate, size_t num16BitChannels,
+ std::shared_ptr<const C2AccessUnitInfos::output> &infos) {
+ if (infos == nullptr) {
+ // there is nothing to do more.
+ SkipCutBuffer::submit(buffer);
+ return;
+ }
+ typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+ CHECK_EQ(mSize, SkipCutBuffer::size());
+ sp<BufferInfosWrapper> bufferInfos{new BufferInfosWrapper(decltype(bufferInfos->value)())};
+ uint32_t availableSize = buffer->size() + SkipCutBuffer::size();
+ uint32_t frontPadding = mFrontPadding;
+ int32_t lastEmptyAccessUnitIndex = -1;
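+ // byteInUs: microseconds of audio carried by one byte of 16-bit PCM
+ // (roughly 5 us per byte for 48 kHz stereo, for example).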
+ int64_t byteInUs = 0;
+ if (sampleRate > 0 && num16BitChannels > 0) {
+ byteInUs = (1000000u / (sampleRate * num16BitChannels * 2));
+ }
+ if (frontPadding > 0) {
+ mInfos.clear();
+ mSize = 0;
+ }
+ for (int i = 0 ; i < infos->flexCount() && frontPadding > 0; i++) {
+ uint32_t flagsInPadding = 0;
+ int64_t timeInPadding = 0;
+ if (infos->m.values[i].size <= frontPadding) {
+ // we have more front padding so this buffer is not going to be used.
+ int32_t consumed = infos->m.values[i].size;
+ frontPadding -= consumed;
+ mFrontPaddingDelay += byteInUs * (consumed);
+ availableSize -= consumed;
+ flagsInPadding |= toMediaCodecFlags(infos->m.values[i].flags);
+ timeInPadding = infos->m.values[i].timestamp;
+ } else {
+ C2AccessUnitInfosStruct info = infos->m.values[i];
+ mFrontPaddingDelay += byteInUs * (frontPadding);
+ info.size -= frontPadding;
+ info.timestamp -= mFrontPaddingDelay;
+ availableSize -= frontPadding;
+ flagsInPadding |= toMediaCodecFlags(infos->m.values[i].flags);
+ timeInPadding = infos->m.values[i].timestamp;
+ frontPadding = 0;
+ mInfos.push_back(info);
+ mSize += info.size;
+ }
+ if (flagsInPadding != 0) {
+ bufferInfos->value.emplace_back(
+ flagsInPadding, 0, timeInPadding);
+ }
+ lastEmptyAccessUnitIndex = i;
+ }
+ if (frontPadding <= 0) {
+ // process what's already in the buffer first
+ auto it = mInfos.begin();
+ while (it != mInfos.end() && availableSize > mBackPadding) {
+ // we have samples to send out.
+ if ((availableSize - it->size) >= mBackPadding) {
+ // this is totally used here.
+ int32_t consumed = it->size;
+ bufferInfos->value.emplace_back(
+ toMediaCodecFlags(it->flags), consumed, it->timestamp);
+ availableSize -= consumed;
+ mSize -= consumed;
+ it = mInfos.erase(it);
+ } else {
+ int32_t consumed = availableSize - mBackPadding;
+ bufferInfos->value.emplace_back(
+ toMediaCodecFlags(it->flags),
+ consumed,
+ it->timestamp);
+ it->size -= consumed;
+ it->timestamp += consumed * byteInUs;
+ availableSize -= consumed;
+ mSize -= consumed;
+ it++;
+ }
+ }
+ // If the incoming buffer has more data, process all of it and keep the remaining info.
+ for (int i = (lastEmptyAccessUnitIndex + 1) ; i < infos->flexCount() ; i++) {
+ // update bufferInfos and mInfos
+ if (availableSize > mBackPadding) {
+ // we have to take data from the new buffer.
+ if (availableSize - infos->m.values[i].size >= mBackPadding) {
+ // we are using this info
+ int32_t consumed = infos->m.values[i].size;
+ bufferInfos->value.emplace_back(
+ toMediaCodecFlags(infos->m.values[i].flags),
+ consumed,
+ infos->m.values[i].timestamp - mFrontPaddingDelay);
+ availableSize -= consumed;
+ } else {
+ // if we need to update the size
+ C2AccessUnitInfosStruct info = infos->m.values[i];
+ int32_t consumed = availableSize - mBackPadding;
+ bufferInfos->value.emplace_back(
+ toMediaCodecFlags(infos->m.values[i].flags),
+ consumed,
+ infos->m.values[i].timestamp - mFrontPaddingDelay);
+ info.size -= consumed;
+ info.timestamp = info.timestamp - mFrontPaddingDelay +
+ consumed * byteInUs;
+ mInfos.push_back(info);
+ availableSize -= consumed;
+ mSize += info.size;
+ }
+ } else {
+ // we have to maintain infos
+ C2AccessUnitInfosStruct info = infos->m.values[i];
+ info.timestamp -= mFrontPaddingDelay;
+ mInfos.push_back(info);
+ mSize += info.size;
+ }
+ }
+ }
+ SkipCutBuffer::submit(buffer);
+ infos = nullptr;
+ if (!bufferInfos->value.empty()) {
+ buffer->meta()->setObject("accessUnitInfo", bufferInfos);
+ }
+ }
+protected:
+ // Flags can come with individual BufferInfos
+ // when used with large frame audio
+ constexpr static std::initializer_list<std::pair<uint32_t, uint32_t>> flagList = {
+ {BUFFER_FLAG_CODEC_CONFIG, C2FrameData::FLAG_CODEC_CONFIG},
+ {BUFFER_FLAG_END_OF_STREAM, C2FrameData::FLAG_END_OF_STREAM},
+ {BUFFER_FLAG_DECODE_ONLY, C2FrameData::FLAG_DROP_FRAME}
+ };
+
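+ // Maps C2FrameData flags back to MediaCodec BUFFER_FLAG_* bits; unmapped bits are dropped.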
+ static uint32_t toMediaCodecFlags(uint32_t flags) {
+ return std::transform_reduce(
+ flagList.begin(), flagList.end(),
+ 0u,
+ std::bit_or{},
+ [flags](const std::pair<uint32_t, uint32_t> &entry) {
+ return (flags & entry.second) ? entry.first : 0;
+ });
+ }
+ std::list<C2AccessUnitInfosStruct> mInfos;
+ int64_t mFrontPaddingDelay;
+ size_t mSize;
+};
+
// OutputBuffers
OutputBuffers::OutputBuffers(const char *componentName, const char *name)
@@ -201,6 +369,15 @@
}
}
+bool OutputBuffers::submit(const sp<MediaCodecBuffer> &buffer, int32_t sampleRate,
+ int32_t channelCount, std::shared_ptr<const C2AccessUnitInfos::output> &infos) {
+ if (mSkipCutBuffer == nullptr) {
+ return false;
+ }
+ mSkipCutBuffer->submitMultiAccessUnits(buffer, sampleRate, channelCount, infos);
+ return true;
+}
+
void OutputBuffers::setSkipCutBuffer(int32_t skip, int32_t cut) {
if (mSkipCutBuffer != nullptr) {
size_t prevSize = mSkipCutBuffer->size();
@@ -208,7 +385,7 @@
ALOGD("[%s] Replacing SkipCutBuffer holding %zu bytes", mName, prevSize);
}
}
- mSkipCutBuffer = new SkipCutBuffer(skip, cut, mChannelCount);
+ mSkipCutBuffer = new MultiAccessUnitSkipCutBuffer(skip, cut, mChannelCount);
}
bool OutputBuffers::convert(
@@ -1160,7 +1337,16 @@
ALOGD("[%s] copy buffer failed", mName);
return WOULD_BLOCK;
}
- submit(c2Buffer);
+ if (buffer && buffer->hasInfo(C2AccessUnitInfos::output::PARAM_TYPE)) {
+ std::shared_ptr<const C2AccessUnitInfos::output> bufferMetadata =
+ std::static_pointer_cast<const C2AccessUnitInfos::output>(
+ buffer->getInfo(C2AccessUnitInfos::output::PARAM_TYPE));
+ if (submit(c2Buffer, mSampleRate, mChannelCount, bufferMetadata)) {
+ buffer->removeInfo(C2AccessUnitInfos::output::PARAM_TYPE);
+ }
+ } else {
+ submit(c2Buffer);
+ }
handleImageData(c2Buffer);
*clientBuffer = c2Buffer;
ALOGV("[%s] grabbed buffer %zu", mName, *index);
@@ -1198,7 +1384,7 @@
(void)flushedWork;
mImpl.flush();
if (mSkipCutBuffer != nullptr) {
- mSkipCutBuffer->clear();
+ mSkipCutBuffer->clearAll();
}
}
@@ -1356,7 +1542,7 @@
void LinearOutputBuffers::flush(
const std::list<std::unique_ptr<C2Work>> &flushedWork) {
if (mSkipCutBuffer != nullptr) {
- mSkipCutBuffer->clear();
+ mSkipCutBuffer->clearAll();
}
FlexOutputBuffers::flush(flushedWork);
}
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index cbef644..f0936bc 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -20,6 +20,7 @@
#include <optional>
#include <string>
+#include <vector>
#include <C2Config.h>
#include <DataConverter.h>
@@ -33,6 +34,8 @@
struct ICrypto;
class MemoryDealer;
class SkipCutBuffer;
+class MultiAccessUnitSkipCutBuffer;
+struct AccessUnitInfo;
constexpr size_t kLinearBufferSize = 1048576;
// This can fit an 8K frame.
@@ -382,13 +385,17 @@
sp<MediaCodecBuffer>* outBuffer);
protected:
- sp<SkipCutBuffer> mSkipCutBuffer;
+
+ sp<MultiAccessUnitSkipCutBuffer> mSkipCutBuffer;
/**
* Update the SkipCutBuffer object. No-op if it's never initialized.
*/
void updateSkipCutBuffer(int32_t sampleRate, int32_t channelCount);
+ bool submit(const sp<MediaCodecBuffer> &buffer, int32_t sampleRate,
+ int32_t channelCount, std::shared_ptr<const C2AccessUnitInfos::output> &infos);
+
/**
* Submit buffer to SkipCutBuffer object, if initialized.
*/
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 6d49fa8..c22deca 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -402,10 +402,19 @@
add(ConfigMapper(KEY_MAX_INPUT_SIZE, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE, "value")
.limitTo(D::INPUT));
+
// remove when codecs switch to PARAMKEY
deprecated(ConfigMapper(KEY_MAX_INPUT_SIZE, "coded.max-frame-size", "value")
.limitTo(D::INPUT));
+ // large frame params
+ add(ConfigMapper(KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE,
+ C2_PARAMKEY_OUTPUT_LARGE_FRAME, "max-size")
+ .limitTo(D::AUDIO & D::OUTPUT));
+ add(ConfigMapper(KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
+ C2_PARAMKEY_OUTPUT_LARGE_FRAME, "threshold-size")
+ .limitTo(D::AUDIO & D::OUTPUT));
+
// Rotation
// Note: SDK rotation is clock-wise, while C2 rotation is counter-clock-wise
add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_VUI_ROTATION, "value")
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 5c1755e..9c514f2 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -1036,6 +1036,37 @@
return const_cast<native_handle_t *>(mBlock->handle());
}
+void EncryptedLinearBlockBuffer::getMappedBlock(
+ std::unique_ptr<MappedBlock> * const mappedBlock) const {
+ if (mappedBlock) {
+ mappedBlock->reset(new EncryptedLinearBlockBuffer::MappedBlock(mBlock));
+ }
+ return;
+}
+
+EncryptedLinearBlockBuffer::MappedBlock::MappedBlock(
+ const std::shared_ptr<C2LinearBlock> &block) : mView(block->map().get()) {
+}
+
+bool EncryptedLinearBlockBuffer::MappedBlock::copyDecryptedContent(
+ const sp<IMemory> &decrypted, size_t length) {
+ if (mView.error() != C2_OK) {
+ return false;
+ }
+ if (mView.size() < length) {
+ ALOGE("View size(%d) less than decrypted length(%zu)",
+ mView.size(), length);
+ return false;
+ }
+ memcpy(mView.data(), decrypted->unsecurePointer(), length);
+ mView.setOffset(mView.offset() + length);
+ return true;
+}
+
+EncryptedLinearBlockBuffer::MappedBlock::~MappedBlock() {
+ mView.setOffset(0);
+}
+
using ::aidl::android::hardware::graphics::common::Cta861_3;
using ::aidl::android::hardware::graphics::common::Smpte2086;
@@ -1049,11 +1080,8 @@
// Unwrap raw buffer handle from the C2Handle
native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
if (!nh) {
- nh = UnwrapNativeCodec2AhwbHandle(handle);
- if (!nh) {
- ALOGE("handle is not compatible to neither C2HandleGralloc nor C2HandleAhwb");
- return;
- }
+ ALOGE("handle is not compatible to any gralloc C2Handle types");
+ return;
}
// Import the raw handle so IMapper can use the buffer. The imported
// handle must be freed when the client is done with the buffer.
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index b73acab..5e96921 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -384,6 +384,17 @@
*/
native_handle_t *handle() const;
+ class MappedBlock {
+ public:
+ explicit MappedBlock(const std::shared_ptr<C2LinearBlock> &block);
+ virtual ~MappedBlock();
+ bool copyDecryptedContent(const sp<IMemory> &decrypted, size_t length);
+ private:
+ C2WriteView mView;
+ };
+
+ void getMappedBlock(std::unique_ptr<MappedBlock> * const mappedBlock) const;
+
private:
std::shared_ptr<C2LinearBlock> mBlock;
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 453a0d2..37a7a4f 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -20,6 +20,9 @@
#include <strings.h>
+#include <com_android_media_codec_flags.h>
+#include <android_media_codec.h>
+
#include <C2Component.h>
#include <C2Config.h>
#include <C2Debug.h>
@@ -752,6 +755,25 @@
}
addSupportedColorFormats(
intf, caps.get(), trait, mediaType, it->second);
+
+ if (com::android::media::codec::flags::provider_->large_audio_frame()
+ && android::media::codec::provider_->large_audio_frame_finish()) {
+ // Adding feature-multiple-frames when C2LargeFrame param is present
+ if (trait.domain == C2Component::DOMAIN_AUDIO) {
+ std::vector<std::shared_ptr<C2ParamDescriptor>> params;
+ c2_status_t err = intf->querySupportedParams(&params);
+ if (err == C2_OK) {
+ for (const auto &paramDesc : params) {
+ if (C2LargeFrame::output::PARAM_TYPE == paramDesc->index()) {
+ std::string featureMultipleFrames =
+ std::string(KEY_FEATURE_) + FEATURE_MultipleFrames;
+ caps->addDetail(featureMultipleFrames.c_str(), 0);
+ break;
+ }
+ }
+ }
+ }
+ }
}
}
}
diff --git a/media/codec2/tests/C2ComponentInterface_test.cpp b/media/codec2/tests/C2ComponentInterface_test.cpp
index 67f733d..d1844b0 100644
--- a/media/codec2/tests/C2ComponentInterface_test.cpp
+++ b/media/codec2/tests/C2ComponentInterface_test.cpp
@@ -235,7 +235,7 @@
// |*heapParams[0]| is a parameter value. The size of |heapParams| has to be one.
ASSERT_EQ(1u, heapParams.size());
EXPECT_TRUE(heapParams[0]);
- EXPECT_EQ(*heapParams[0], expected);
+ EXPECT_EQ(*heapParams[0], (C2Param &)(expected));
}
template <typename T> void C2CompIntfTest::querySupportedParam() {
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index 60b5b29..971b5a5 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -318,6 +318,14 @@
return reinterpret_cast<C2HandleAhwb *>(res);
}
+ static uint32_t getPixelFormat(const C2Handle *const handle) {
+ if (handle == nullptr) {
+ return 0;
+ }
+ const ExtraData *xd = GetExtraData(handle);
+ return xd->format;
+ }
+
static C2HandleAhwb* WrapNativeHandle(
const native_handle_t *const handle,
uint32_t width, uint32_t height, uint32_t format, uint64_t usage,
@@ -899,7 +907,17 @@
native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
- return C2HandleGralloc::UnwrapNativeHandle(handle);
+ if (handle == nullptr) {
+ return nullptr;
+ }
+ if (C2AllocatorGralloc::CheckHandle(handle)) {
+ return C2HandleGralloc::UnwrapNativeHandle(handle);
+ }
+ if (C2AllocatorAhwb::CheckHandle(handle)) {
+ return C2HandleAhwb::UnwrapNativeHandle(handle);
+ }
+ ALOGE("tried to unwrap non c2 compatible handle");
+ return nullptr;
}
C2Handle *WrapNativeCodec2GrallocHandle(
@@ -911,7 +929,38 @@
}
uint32_t ExtractFormatFromCodec2GrallocHandle(const C2Handle *const handle) {
- return C2HandleGralloc::getPixelFormat(handle);
+ if (C2AllocatorGralloc::CheckHandle(handle)) {
+ return C2HandleGralloc::getPixelFormat(handle);
+ }
+ if (C2AllocatorAhwb::CheckHandle(handle)) {
+ return C2HandleAhwb::getPixelFormat(handle);
+ }
+ ALOGE("tried to extract pixelformat from non c2 compatible handle");
+ return 0;
+}
+
+bool ExtractMetadataFromCodec2GrallocHandle(
+ const C2Handle *const handle,
+ uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage, uint32_t *stride) {
+ if (handle == nullptr) {
+ ALOGE("ExtractMetadata from nullptr");
+ return false;
+ }
+ if (C2AllocatorGralloc::CheckHandle(handle)) {
+ uint32_t generation;
+ uint64_t igbp_id;
+ uint32_t igbp_slot;
+ (void)C2HandleGralloc::Import(handle, width, height, format, usage, stride,
+ &generation, &igbp_id, &igbp_slot);
+ return true;
+ }
+ if (C2AllocatorAhwb::CheckHandle(handle)) {
+ uint64_t origId;
+ (void)C2HandleAhwb::Import(handle, width, height, format, usage, stride, &origId);
+ return true;
+ }
+ ALOGE("ExtractMetadata from non compatible handle");
+ return false;
}
bool MigrateNativeCodec2GrallocHandle(
@@ -1137,8 +1186,17 @@
const C2Handle *const handle,
uint32_t *width, uint32_t *height, uint32_t *format,uint64_t *usage, uint32_t *stride,
uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot) {
- (void)C2HandleGralloc::Import(handle, width, height, format, usage, stride,
- generation, igbp_id, igbp_slot);
+ if (C2AllocatorGralloc::CheckHandle(handle)) {
+ (void)C2HandleGralloc::Import(handle, width, height, format, usage, stride,
+ generation, igbp_id, igbp_slot);
+ return;
+ }
+ if (C2AllocatorAhwb::CheckHandle(handle)) {
+ uint64_t origId;
+ (void)C2HandleAhwb::Import(handle, width, height, format, usage, stride, &origId);
+ return;
+ }
+ ALOGE("Tried to extract metadata from non c2 compatible handle");
}
C2AllocatorGralloc::Impl::Impl(id_t id, bool bufferQueue)
@@ -1250,10 +1308,6 @@
}
-native_handle_t *UnwrapNativeCodec2AhwbHandle(const C2Handle *const handle) {
- return C2HandleAhwb::UnwrapNativeHandle(handle);
-}
-
C2Handle *WrapNativeCodec2AhwbHandle(
const native_handle_t *const handle,
uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride,
@@ -1477,13 +1531,6 @@
c2_status_t mInit;
};
-void _UnwrapNativeCodec2AhwbMetadata(
- const C2Handle *const handle,
- uint32_t *width, uint32_t *height, uint32_t *format,uint64_t *usage, uint32_t *stride,
- uint64_t *origId) {
- (void)C2HandleAhwb::Import(handle, width, height, format, usage, stride, origId);
-}
-
C2AllocatorAhwb::Impl::Impl(id_t id)
: mInit(C2_OK) {
// TODO: get this from allocator
diff --git a/media/codec2/vndk/include/C2AllocatorGralloc.h b/media/codec2/vndk/include/C2AllocatorGralloc.h
index 1a34c30..53b6262 100644
--- a/media/codec2/vndk/include/C2AllocatorGralloc.h
+++ b/media/codec2/vndk/include/C2AllocatorGralloc.h
@@ -22,8 +22,25 @@
#include <C2Buffer.h>
namespace android {
+// VNDK
+/**
+ * Extract pixel format from the extra data of gralloc handle.
+ *
+ * @return 0 when no valid pixel format exists.
+ */
+uint32_t ExtractFormatFromCodec2GrallocHandle(const C2Handle *const handle);
/**
+ * Extract metadata from the extra data of gralloc handle.
+ *
+ * @return {@code false} if extraction failed, {@code true} otherwise.
+ */
+bool ExtractMetadataFromCodec2GrallocHandle(
+ const C2Handle *const handle,
+ uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage, uint32_t *stride);
+
+// Not for VNDK (system partition and inside vndk only)
+/**
* Unwrap the native handle from a Codec2 handle allocated by C2AllocatorGralloc.
*
* @param handle a handle allocated by C2AllocatorGralloc. This includes handles returned for a
@@ -46,13 +63,6 @@
uint32_t generation = 0, uint64_t igbp_id = 0, uint32_t igbp_slot = 0);
/**
- * Extract pixel format from the extra data of gralloc handle.
- *
- * @return 0 when no valid pixel format exists.
- */
-uint32_t ExtractFormatFromCodec2GrallocHandle(const C2Handle *const handle);
-
-/**
* When the gralloc handle is migrated to another bufferqueue, update
* bufferqueue information.
*
@@ -71,16 +81,6 @@
uint32_t *generation, uint64_t *igbp_id, uint32_t *igbp_slot);
/**
- * Unwrap the native handle from a Codec2 handle allocated by C2AllocatorAhwb.
- *
- * @param handle a handle allocated by C2AllocatorAhwb. This includes handles returned for a
- * graphic block allocation handle based on an AHardwareBuffer.
- *
- * @return a new NON-OWNING native handle that must be deleted using native_handle_delete.
- */
-native_handle_t *UnwrapNativeCodec2AhwbHandle(const C2Handle *const handle);
-
-/**
* Wrap the gralloc handle and metadata based on AHardwareBuffer into Codec2 handle
* recognized by C2AllocatorAhwb.
*
@@ -92,14 +92,6 @@
uint32_t width, uint32_t height, uint32_t format, uint64_t usage, uint32_t stride,
uint64_t origId);
-/**
- * \todo Get this from the buffer
- */
-void _UnwrapNativeCodec2AhwbMetadata(
- const C2Handle *const handle,
- uint32_t *width, uint32_t *height, uint32_t *format, uint64_t *usage, uint32_t *stride,
- uint64_t *origId);
-
class C2AllocatorGralloc : public C2Allocator {
public:
virtual id_t getId() const override;
diff --git a/media/codec2/vndk/platform/C2IgbaBuffer.cpp b/media/codec2/vndk/platform/C2IgbaBuffer.cpp
index eafdb22..3622d5e 100644
--- a/media/codec2/vndk/platform/C2IgbaBuffer.cpp
+++ b/media/codec2/vndk/platform/C2IgbaBuffer.cpp
@@ -192,28 +192,25 @@
c2_status_t res = _fetchGraphicBlock(
width, height, format, usage, kBlockingFetchTimeoutNs, &origId, block, &fence);
- if (res == C2_BLOCKING) {
+ if (res == C2_TIMED_OUT) {
+ // The SyncFence wait timed out.
+ // The HAL usually treats C2_TIMED_OUT as an unrecoverable error,
+ // but we want the HAL to retry, so return C2_BLOCKING instead.
return C2_BLOCKING;
}
- if (res != C2_OK) {
- return res;
- }
- // TODO: bundle the fence to the block. Are API changes required?
- res = fence.wait(kSyncFenceWaitNs);
- if (res != C2_OK) {
- bool aidlRet = true;
- ::ndk::ScopedAStatus status = mIgba->deallocate(origId, &aidlRet);
- ALOGE("Waiting a sync fence failed %d aidl(%d: %d)",
- res, status.isOk(), aidlRet);
- }
- return C2_OK;
+ return res;
}
c2_status_t C2IgbaBlockPool::fetchGraphicBlock(
uint32_t width, uint32_t height, uint32_t format, C2MemoryUsage usage,
std::shared_ptr<C2GraphicBlock> *block, C2Fence *fence) {
uint64_t origId;
- return _fetchGraphicBlock(width, height, format, usage, 0LL, &origId, block, fence);
+ c2_status_t res = _fetchGraphicBlock(width, height, format, usage, 0LL, &origId, block, fence);
+ if (res == C2_TIMED_OUT) {
+ *fence = C2Fence();
+ return C2_BLOCKING;
+ }
+ return res;
}
c2_status_t C2IgbaBlockPool::_fetchGraphicBlock(
@@ -263,10 +260,27 @@
}
}
- *fence = _C2FenceFactory::CreateSyncFence(allocation.fence.release());
+ C2Fence syncFence = _C2FenceFactory::CreateSyncFence(allocation.fence.release());
AHardwareBuffer *ahwb = allocation.buffer.release(); // This is acquired.
CHECK(AHardwareBuffer_getId(ahwb, origId) == ::android::OK);
- c2_status_t res = CreateGraphicBlockFromAhwb(ahwb, mAllocator, mIgba, block);
+
+ // We are waiting for SyncFence here for backward compatibility.
+ // H/W based Sync Fence could be returned to improve pipeline latency.
+ //
+ // TODO: Add a component configuration for returning sync fence
+ // from fetchGraphicBlock() as the C2Fence output param(b/322283520).
+ // In that case, C2_OK and the GraphicBlock must be returned together.
+ c2_status_t res = syncFence.wait(kSyncFenceWaitNs);
+ if (res != C2_OK) {
+ AHardwareBuffer_release(ahwb);
+ bool aidlRet = true;
+ ::ndk::ScopedAStatus status = mIgba->deallocate(*origId, &aidlRet);
+ ALOGE("Waiting a sync fence failed %d aidl(%d: %d)",
+ res, status.isOk(), aidlRet);
+ return C2_TIMED_OUT;
+ }
+
+ res = CreateGraphicBlockFromAhwb(ahwb, mAllocator, mIgba, block);
AHardwareBuffer_release(ahwb);
if (res != C2_OK) {
bool aidlRet = true;
diff --git a/media/libaaudio/examples/Android.bp b/media/libaaudio/examples/Android.bp
index e2c1878..aa3ae5e 100644
--- a/media/libaaudio/examples/Android.bp
+++ b/media/libaaudio/examples/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/examples/input_monitor/Android.bp b/media/libaaudio/examples/input_monitor/Android.bp
index 72adfd7..52a5914 100644
--- a/media/libaaudio/examples/input_monitor/Android.bp
+++ b/media/libaaudio/examples/input_monitor/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -11,7 +12,10 @@
name: "input_monitor",
gtest: false,
srcs: ["src/input_monitor.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
@@ -20,7 +24,10 @@
name: "input_monitor_callback",
gtest: false,
srcs: ["src/input_monitor_callback.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index b18aeec..6552113 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -11,13 +12,16 @@
name: "aaudio_loopback",
gtest: false,
srcs: ["src/loopback.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
static_libs: ["libsndfile"],
include_dirs: ["external/oboe/apps/OboeTester/app/src/main/cpp"],
shared_libs: [
"libaaudio",
"libaudioutils",
- "liblog"
- ],
+ "liblog",
+ ],
header_libs: ["libaaudio_example_utils"],
}
diff --git a/media/libaaudio/examples/write_sine/Android.bp b/media/libaaudio/examples/write_sine/Android.bp
index 1c7e0f1..fe78112 100644
--- a/media/libaaudio/examples/write_sine/Android.bp
+++ b/media/libaaudio/examples/write_sine/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -10,7 +11,10 @@
cc_test {
name: "write_sine",
srcs: ["src/write_sine.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
@@ -18,7 +22,10 @@
cc_test {
name: "write_sine_callback",
srcs: ["src/write_sine_callback.cpp"],
- cflags: ["-Wall", "-Werror"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: ["libaaudio"],
header_libs: ["libaaudio_example_utils"],
}
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index 46c4148..6d94f38 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -15,6 +15,7 @@
*/
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/include/aaudio/AAudioTesting.h b/media/libaaudio/include/aaudio/AAudioTesting.h
index 01d97b6..d67ec70 100644
--- a/media/libaaudio/include/aaudio/AAudioTesting.h
+++ b/media/libaaudio/include/aaudio/AAudioTesting.h
@@ -49,12 +49,6 @@
};
typedef int32_t aaudio_policy_t;
-// Internal error codes. Only used by the framework.
-enum {
- AAUDIO_INTERNAL_ERROR_BASE = -1000,
- AAUDIO_ERROR_STANDBY,
-};
-
/**
* Control whether AAudioStreamBuilder_openStream() will use the new MMAP data path
* or the older "Legacy" data path.
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index fcb376c..d2cb265 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -128,7 +129,7 @@
tidy_checks_as_errors: tidy_errors,
tidy_flags: [
"-format-style=file",
- ]
+ ],
}
cc_library {
@@ -250,7 +251,7 @@
tidy_checks_as_errors: tidy_errors,
tidy_flags: [
"-format-style=file",
- ]
+ ],
}
aidl_interface {
@@ -274,8 +275,7 @@
"shared-file-region-aidl",
"framework-permission-aidl",
],
- backend:
- {
+ backend: {
java: {
sdk_version: "module_current",
},
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index e780f4f..cd7679c 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -278,3 +278,9 @@
mDataQueue->eraseMemory();
}
}
+
+void AudioEndpoint::eraseEmptyDataMemory(int32_t numFrames) {
+ if (mDataQueue != nullptr) {
+ mDataQueue->eraseEmptyMemory(numFrames);
+ }
+}
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index b117572..7e97c6a 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -107,6 +107,8 @@
*/
void eraseDataMemory();
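+ /**
+ * Zero out up to numFrames of the empty region after the write pointer
+ * so a free-running reader does not replay stale data.
+ */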
+ void eraseEmptyDataMemory(int32_t numFrames);
+
void freeDataQueue() { mDataQueue.reset(); }
void dump() const;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 7648e25..b2e93f0 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -575,10 +575,20 @@
return AAUDIO_ERROR_INVALID_STATE;
}
+ // For playback, sleep until all the audio data has played.
+ // Then clear the buffer to prevent noise.
+ prepareBuffersForStop();
+
mClockModel.stop(AudioClock::getNanoseconds());
setState(AAUDIO_STREAM_STATE_STOPPING);
mAtomicInternalTimestamp.clear();
+#if 0
+ // Simulate very slow CPU, force race condition where the
+ // DSP keeps playing after we stop writing.
+ AudioClock::sleepForNanos(800 * AAUDIO_NANOS_PER_MILLISECOND);
+#endif
+
result = mServiceInterface.stopStream(mServiceStreamHandleInfo);
if (result == AAUDIO_ERROR_INVALID_HANDLE) {
ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index a5981b1..20d55f9 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -123,6 +123,8 @@
virtual void prepareBuffersForStart() {}
+ virtual void prepareBuffersForStop() {}
+
virtual void advanceClientToMatchServerPosition(int32_t serverMargin) = 0;
virtual void onFlushFromServer() {}
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 5d4c3d4..0427777 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -19,6 +19,8 @@
#define ATRACE_TAG ATRACE_TAG_AUDIO
+#include <algorithm>
+
#include <media/MediaMetricsItem.h>
#include <utils/Trace.h>
@@ -108,6 +110,61 @@
mAudioEndpoint->eraseDataMemory();
}
+void AudioStreamInternalPlay::prepareBuffersForStop() {
+ // If this is a shared stream and the FIFO is being read by the mixer then
+ // we don't have to worry about the DSP reading past the valid data. We can skip all this.
+ if (!mAudioEndpoint->isFreeRunning()) {
+ return;
+ }
+ // Sleep until the DSP has read all of the data written.
+ int64_t validFramesInBuffer = getFramesWritten() - getFramesRead();
+ if (validFramesInBuffer >= 0) {
+ int64_t emptyFramesInBuffer = ((int64_t) getBufferCapacity()) - validFramesInBuffer;
+
+ // Prevent stale data from being played if the DSP is still running.
+ // Erase some of the FIFO memory in front of the DSP read cursor.
+ // Subtract one burst so we do not accidentally erase data that the DSP might be using.
+ int64_t framesToErase = std::max((int64_t) 0,
+ emptyFramesInBuffer - getFramesPerBurst());
+ mAudioEndpoint->eraseEmptyDataMemory(framesToErase);
+
+ // Sleep until we are confident the DSP has consumed all of the valid data.
+ // Sleep for one extra burst as a safety margin because the IsochronousClockModel
+ // is not perfectly accurate.
+ int64_t positionInEmptyMemory = getFramesWritten() + getFramesPerBurst();
+ int64_t timeAllConsumed = mClockModel.convertPositionToTime(positionInEmptyMemory);
+ int64_t durationAllConsumed = timeAllConsumed - AudioClock::getNanoseconds();
+ // Prevent sleeping for too long.
+ durationAllConsumed = std::min(200 * AAUDIO_NANOS_PER_MILLISECOND, durationAllConsumed);
+ AudioClock::sleepForNanos(durationAllConsumed);
+ }
+
+ // Erase all of the memory in case the DSP keeps going and wraps around.
+ mAudioEndpoint->eraseDataMemory();
+
+ // Wait for the last buffer to reach the DAC.
+ // This is because the expected behavior of stop() is that all data written to the stream
+ // should be played before the hardware actually shuts down.
+ // This is different than pause(), where we just end as soon as possible.
+ // This can be important when, for example, playing car navigation and
+ // you want the user to hear the complete instruction.
+ if (mAtomicInternalTimestamp.isValid()) {
+ // Use timestamps to calculate the latency between the DSP reading
+ // a frame and when it reaches the DAC.
+ // This code assumes that timestamps are accurate.
+ Timestamp timestamp = mAtomicInternalTimestamp.read();
+ int64_t dacPosition = timestamp.getPosition();
+ int64_t hardwareReadTime = mClockModel.convertPositionToTime(dacPosition);
+ int64_t hardwareLatencyNanos = timestamp.getNanoseconds() - hardwareReadTime;
+ ALOGD("%s() hardwareLatencyNanos = %lld", __func__,
+ (long long) hardwareLatencyNanos);
+ // Prevent sleeping for too long.
+ hardwareLatencyNanos = std::min(30 * AAUDIO_NANOS_PER_MILLISECOND,
+ hardwareLatencyNanos);
+ AudioClock::sleepForNanos(hardwareLatencyNanos);
+ }
+}
+
void AudioStreamInternalPlay::advanceClientToMatchServerPosition(int32_t serverMargin) {
int64_t readCounter = mAudioEndpoint->getDataReadCounter() + serverMargin;
int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
@@ -353,20 +410,26 @@
// Call application using the AAudio callback interface.
callbackResult = maybeCallDataCallback(mCallbackBuffer.get(), mCallbackFrames);
- if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
- // Write audio data to stream. This is a BLOCKING WRITE!
- result = write(mCallbackBuffer.get(), mCallbackFrames, timeoutNanos);
- if ((result != mCallbackFrames)) {
- if (result >= 0) {
- // Only wrote some of the frames requested. The stream can be disconnected
- // or timed out.
- processCommands();
- result = isDisconnected() ? AAUDIO_ERROR_DISCONNECTED : AAUDIO_ERROR_TIMEOUT;
- }
- maybeCallErrorCallback(result);
- break;
+ // Write audio data to stream. This is a BLOCKING WRITE!
+ // Write data regardless of the callbackResult because we assume the data
+ // is valid even when the callback returns AAUDIO_CALLBACK_RESULT_STOP.
+ // Imagine a callback that is playing a large sound in memory.
+ // When it gets to the end of the sound it can partially fill
+ // the last buffer with the end of the sound, then zero pad the buffer, then return STOP.
+ // If the callback has no valid data then it should zero-fill the entire buffer.
+ result = write(mCallbackBuffer.get(), mCallbackFrames, timeoutNanos);
+ if ((result != mCallbackFrames)) {
+ if (result >= 0) {
+ // Only wrote some of the frames requested. The stream can be disconnected
+ // or timed out.
+ processCommands();
+ result = isDisconnected() ? AAUDIO_ERROR_DISCONNECTED : AAUDIO_ERROR_TIMEOUT;
}
- } else if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
+ maybeCallErrorCallback(result);
+ break;
+ }
+
+ if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
ALOGD("%s(): callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
result = systemStopInternal();
break;
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index b51b5d0..4e14f18 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -66,6 +66,8 @@
void prepareBuffersForStart() override;
+ void prepareBuffersForStop() override;
+
void advanceClientToMatchServerPosition(int32_t serverMargin) override;
void onFlushFromServer() override;
diff --git a/media/libaaudio/src/client/IsochronousClockModel.cpp b/media/libaaudio/src/client/IsochronousClockModel.cpp
index a39e90e..430ba83 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.cpp
+++ b/media/libaaudio/src/client/IsochronousClockModel.cpp
@@ -18,7 +18,6 @@
//#define LOG_NDEBUG 0
#include <log/log.h>
-#define __STDC_FORMAT_MACROS
#include <inttypes.h>
#include <stdint.h>
#include <algorithm>
diff --git a/media/libaaudio/src/core/AudioGlobal.h b/media/libaaudio/src/core/AudioGlobal.h
index 6c22744..8af49b4 100644
--- a/media/libaaudio/src/core/AudioGlobal.h
+++ b/media/libaaudio/src/core/AudioGlobal.h
@@ -22,6 +22,14 @@
namespace aaudio {
+// Internal error codes. Only used by the framework.
+enum {
+ AAUDIO_INTERNAL_ERROR_BASE = -1000,
+ AAUDIO_ERROR_STANDBY,
+ AAUDIO_ERROR_ALREADY_CLOSED,
+
+};
+
aaudio_policy_t AudioGlobal_getMMapPolicy();
aaudio_result_t AudioGlobal_setMMapPolicy(aaudio_policy_t policy);
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index 5c11882..f3e3bbd 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -150,7 +150,7 @@
getEmptyRoomAvailable(&wrappingBuffer);
- // Read data in one or two parts.
+ // Write data in one or two parts.
int partIndex = 0;
while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
fifo_frames_t framesToWrite = framesLeft;
@@ -192,3 +192,29 @@
memset(getStorage(), 0, (size_t) numBytes);
}
}
+
+fifo_frames_t FifoBuffer::eraseEmptyMemory(fifo_frames_t numFrames) {
+ WrappingBuffer wrappingBuffer;
+ fifo_frames_t framesLeft = numFrames;
+
+ getEmptyRoomAvailable(&wrappingBuffer);
+
+ // Erase data in one or two parts.
+ int partIndex = 0;
+ while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
+ fifo_frames_t framesToWrite = framesLeft;
+ fifo_frames_t framesAvailable = wrappingBuffer.numFrames[partIndex];
+ if (framesAvailable > 0) {
+ if (framesToWrite > framesAvailable) {
+ framesToWrite = framesAvailable;
+ }
+ int32_t numBytes = convertFramesToBytes(framesToWrite);
+ memset(wrappingBuffer.data[partIndex], 0, numBytes);
+ framesLeft -= framesToWrite;
+ } else {
+ break;
+ }
+ partIndex++;
+ }
+ return numFrames - framesLeft; // number erased
+}
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index 7b0aca1..860ccad 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -115,6 +115,13 @@
*/
void eraseMemory();
+ /**
+ * Clear some memory after the write pointer.
+ * This can be used to prevent the reader from accidentally reading stale data
+ * in case it is reading asynchronously.
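+ * @param numFrames maximum number of frames to clear
+ * @return the number of frames actually cleared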
+ */
+ fifo_frames_t eraseEmptyMemory(fifo_frames_t numFrames);
+
protected:
virtual uint8_t *getStorage() const = 0;
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index d59afef..5ec8276 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -183,9 +184,9 @@
defaults: ["libaaudio_tests_defaults"],
srcs: ["test_full_queue.cpp"],
shared_libs: [
- "libaaudio",
- "liblog"
- ],
+ "libaaudio",
+ "liblog",
+ ],
}
cc_test {
@@ -248,3 +249,30 @@
srcs: ["test_idle_disconnected_shared_stream.cpp"],
shared_libs: ["libaaudio"],
}
+
+cc_test {
+ name: "test_multiple_close_simultaneously",
+ defaults: [
+ "latest_android_media_audio_common_types_cpp_shared",
+ "libaaudio_tests_defaults",
+ ],
+ srcs: ["test_multiple_close_simultaneously.cpp"],
+ shared_libs: [
+ "aaudio-aidl-cpp",
+ "framework-permission-aidl-cpp",
+ "libaaudio",
+ "libbinder",
+ "liblog",
+ "libutils",
+ ],
+ // This test runs for 1 minute to ensure that no crash occurs.
+ // Set the timeout to 2 minutes to allow the test to complete.
+ test_options: {
+ test_runner_options: [
+ {
+ name: "native-test-timeout",
+ value: "2m",
+ },
+ ],
+ },
+}
diff --git a/media/libaaudio/tests/test_multiple_close_simultaneously.cpp b/media/libaaudio/tests/test_multiple_close_simultaneously.cpp
new file mode 100644
index 0000000..f6351b6
--- /dev/null
+++ b/media/libaaudio/tests/test_multiple_close_simultaneously.cpp
@@ -0,0 +1,153 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "test_multiple_close_simultaneously"
+
+#include <chrono>
+#include <condition_variable>
+#include <shared_mutex>
+#include <string>
+#include <thread>
+
+#include <gtest/gtest.h>
+
+#include <binder/IBinder.h>
+#include <binder/IServiceManager.h>
+#include <utils/Log.h>
+
+#include <aaudio/AAudio.h>
+#include <aaudio/IAAudioService.h>
+#include <aaudio/StreamRequest.h>
+#include <aaudio/StreamParameters.h>
+
+using namespace android;
+using namespace aaudio;
+
+#define AAUDIO_SERVICE_NAME "media.aaudio"
+
+static constexpr int THREAD_NUM = 2;
+static constexpr auto TEST_DURATION = std::chrono::minutes(1);
+
+static std::string sError;
+static bool sTestPassed = true;
+
+struct Signal {
+ std::atomic_int value{0};
+ std::shared_mutex lock;
+ std::condition_variable_any cv;
+};
+
+class AAudioServiceDeathRecipient : public IBinder::DeathRecipient {
+public:
+ void binderDied(const wp<IBinder>& who __unused) override {
+ sError = "AAudioService is dead";
+ ALOGE("%s", sError.c_str());
+ sTestPassed = false;
+ }
+};
+
+sp<IAAudioService> getAAudioService(const sp<IBinder::DeathRecipient>& recipient) {
+ auto sm = defaultServiceManager();
+ if (sm == nullptr) {
+ sError = "Cannot get service manager";
+ ALOGE("%s", sError.c_str());
+ return nullptr;
+ }
+ sp<IBinder> binder = sm->waitForService(String16(AAUDIO_SERVICE_NAME));
+ if (binder == nullptr) {
+ sError = "Cannot get aaudio service";
+ ALOGE("%s", sError.c_str());
+ return nullptr;
+ }
+ if (binder->linkToDeath(recipient) != NO_ERROR) {
+ sError = "Cannot link to binder death";
+ ALOGE("%s", sError.c_str());
+ return nullptr;
+ }
+ return interface_cast<IAAudioService>(binder);
+}
+
+void openAndMultipleClose(const sp<IAAudioService>& aaudioService) {
+ auto start = std::chrono::system_clock::now();
+ bool hasFailedOpening = false;
+ while (sTestPassed && std::chrono::system_clock::now() - start < TEST_DURATION) {
+ StreamRequest inRequest;
+ StreamParameters outParams;
+ int32_t handle = 0;
+ inRequest.attributionSource.uid = getuid();
+ inRequest.attributionSource.pid = getpid();
+ inRequest.attributionSource.token = sp<BBinder>::make();
+ auto status = aaudioService->openStream(inRequest, &outParams, &handle);
+ if (!status.isOk()) {
+            sError = "Cannot open stream; this may be caused by service death";
+ ALOGE("%s", sError.c_str());
+ sTestPassed = false;
+ break;
+ }
+ if (handle <= 0) {
+            sError = "Cannot get stream handle after open, returned handle "
+ + std::to_string(handle);
+ ALOGE("%s", sError.c_str());
+ sTestPassed = false;
+ break;
+ }
+ hasFailedOpening = false;
+
+ Signal isReady;
+ Signal startWork;
+ Signal isCompleted;
+ std::unique_lock readyLock(isReady.lock);
+ std::unique_lock completedLock(isCompleted.lock);
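+        // Spawn THREAD_NUM detached threads that all wait for startWork, then
+        // race to close the same stream handle at (nearly) the same time.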
+ for (int i = 0; i < THREAD_NUM; ++i) {
+ std::thread closeStream([aaudioService, handle, &isReady, &startWork, &isCompleted] {
+ isReady.value++;
+ isReady.cv.notify_one();
+ {
+ std::shared_lock<std::shared_mutex> _l(startWork.lock);
+ startWork.cv.wait(_l, [&startWork] { return startWork.value.load() == 1; });
+ }
+ int32_t result;
+ aaudioService->closeStream(handle, &result);
+ isCompleted.value++;
+ isCompleted.cv.notify_one();
+ });
+ closeStream.detach();
+ }
+ isReady.cv.wait(readyLock, [&isReady] { return isReady.value == THREAD_NUM; });
+ {
+ std::unique_lock startWorkLock(startWork.lock);
+ startWork.value.store(1);
+ }
+ startWork.cv.notify_all();
+ isCompleted.cv.wait_for(completedLock,
+ std::chrono::milliseconds(1000),
+ [&isCompleted] { return isCompleted.value == THREAD_NUM; });
+ if (isCompleted.value != THREAD_NUM) {
+            sError = "Close did not complete within 1 second";
+ ALOGE("%s", sError.c_str());
+ sTestPassed = false;
+ break;
+ }
+ }
+}
+
+TEST(test_multiple_close_simultaneously, open_multiple_close) {
+ const auto recipient = sp<AAudioServiceDeathRecipient>::make();
+ auto aaudioService = getAAudioService(recipient);
+ ASSERT_NE(nullptr, aaudioService) << sError;
+ openAndMultipleClose(aaudioService);
+ ASSERT_TRUE(sTestPassed) << sError;
+}
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 51a679b..c23d7d3 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -52,7 +53,7 @@
"AudioPolicy.cpp",
"AudioProductStrategy.cpp",
"AudioVolumeGroup.cpp",
- "PolicyAidlConversion.cpp"
+ "PolicyAidlConversion.cpp",
],
defaults: [
"latest_android_media_audio_common_types_cpp_export_shared",
@@ -63,6 +64,7 @@
"audiopolicy-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
+ "framework-permission-aidl-cpp",
"libaudiofoundation",
"libaudioclient_aidl_conversion",
"libaudioutils",
@@ -121,6 +123,7 @@
"latest_android_media_audio_common_types_cpp_shared",
],
shared_libs: [
+ "android.media.audiopolicy-aconfig-cc",
"audioclient-types-aidl-cpp",
"audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
@@ -185,6 +188,7 @@
"-Wall",
"-Werror",
"-Wno-error=deprecated-declarations",
+ "-Wthread-safety",
],
sanitize: {
misc_undefined: [
@@ -331,6 +335,7 @@
},
},
}
+
aidl_interface {
name: "audiopolicy-types-aidl",
unstable: true,
@@ -363,6 +368,7 @@
],
imports: [
"audioclient-types-aidl",
+ "framework-permission-aidl",
],
backend: {
cpp: {
diff --git a/media/libaudioclient/AudioProductStrategy.cpp b/media/libaudioclient/AudioProductStrategy.cpp
index d9fd58c..1417182 100644
--- a/media/libaudioclient/AudioProductStrategy.cpp
+++ b/media/libaudioclient/AudioProductStrategy.cpp
@@ -60,9 +60,13 @@
}
// Keep in sync with android/media/audiopolicy/AudioProductStrategy#attributeMatches
-int AudioProductStrategy::attributesMatchesScore(const audio_attributes_t refAttributes,
- const audio_attributes_t clientAttritubes)
+int AudioProductStrategy::attributesMatchesScore(audio_attributes_t refAttributes,
+ audio_attributes_t clientAttritubes)
{
+ refAttributes.flags = static_cast<audio_flags_mask_t>(
+ refAttributes.flags & AUDIO_FLAGS_AFFECT_STRATEGY_SELECTION);
+ clientAttritubes.flags = static_cast<audio_flags_mask_t>(
+ clientAttritubes.flags & AUDIO_FLAGS_AFFECT_STRATEGY_SELECTION);
if (refAttributes == clientAttritubes) {
return MATCH_EQUALS;
}
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 5bfdd5f..d1b1849 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -22,6 +22,7 @@
#include <android/media/IAudioPolicyService.h>
#include <android/media/AudioMixUpdate.h>
#include <android/media/BnCaptureStateListener.h>
+#include <android_media_audiopolicy.h>
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <binder/IPCThreadState.h>
@@ -44,6 +45,8 @@
// ----------------------------------------------------------------------------
+namespace audio_flags = android::media::audiopolicy;
+
namespace android {
using aidl_utils::statusTFromBinderStatus;
using binder::Status;
@@ -62,115 +65,184 @@
using media::audio::common::AudioUsage;
using media::audio::common::Int;
-// client singleton for AudioFlinger binder interface
-Mutex AudioSystem::gLock;
-Mutex AudioSystem::gLockErrorCallbacks;
-Mutex AudioSystem::gLockAPS;
-sp<IAudioFlinger> AudioSystem::gAudioFlinger;
-sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
-std::set<audio_error_callback> AudioSystem::gAudioErrorCallbacks;
+std::mutex AudioSystem::gMutex;
dynamic_policy_callback AudioSystem::gDynPolicyCallback = NULL;
record_config_callback AudioSystem::gRecordConfigCallback = NULL;
routing_callback AudioSystem::gRoutingCallback = NULL;
vol_range_init_req_callback AudioSystem::gVolRangeInitReqCallback = NULL;
-// Required to be held while calling into gSoundTriggerCaptureStateListener.
-class CaptureStateListenerImpl;
+std::mutex AudioSystem::gApsCallbackMutex;
+std::mutex AudioSystem::gErrorCallbacksMutex;
+std::set<audio_error_callback> AudioSystem::gAudioErrorCallbacks;
-Mutex gSoundTriggerCaptureStateListenerLock;
-sp<CaptureStateListenerImpl> gSoundTriggerCaptureStateListener = nullptr;
+std::mutex AudioSystem::gSoundTriggerMutex;
+sp<CaptureStateListenerImpl> AudioSystem::gSoundTriggerCaptureStateListener;
-// Binder for the AudioFlinger service that's passed to this client process from the system server.
+// Sets the Binder for the AudioFlinger service, passed to this client process
+// from the system server.
// This allows specific isolated processes to access the audio system. Currently used only for the
// HotwordDetectionService.
-static sp<IBinder> gAudioFlingerBinder = nullptr;
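+// Generic, lazily-initialized holder for an audio service connection
+// (AudioFlinger or AudioPolicy). It caches the service interface, links the
+// client as a death recipient, and supports local (in-process) and injected
+// binder overrides via ServiceTraits and the setters below.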
+template <typename ServiceInterface, typename Client, typename AidlInterface,
+ typename ServiceTraits>
+class ServiceHandler {
+public:
+ sp<ServiceInterface> getService(bool canStartThreadPool = true)
+ EXCLUDES(mMutex) NO_THREAD_SAFETY_ANALYSIS { // std::unique_ptr
+ sp<ServiceInterface> service;
+ sp<Client> client;
-void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
- if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
- ALOGE("setAudioFlingerBinder: received a binder of type %s",
- String8(audioFlinger->getInterfaceDescriptor()).c_str());
- return;
- }
- Mutex::Autolock _l(gLock);
- if (gAudioFlinger != nullptr) {
- ALOGW("setAudioFlingerBinder: ignoring; AudioFlinger connection already established.");
- return;
- }
- gAudioFlingerBinder = audioFlinger;
-}
-
-static sp<IAudioFlinger> gLocalAudioFlinger; // set if we are local.
-
-status_t AudioSystem::setLocalAudioFlinger(const sp<IAudioFlinger>& af) {
- Mutex::Autolock _l(gLock);
- if (gAudioFlinger != nullptr) return INVALID_OPERATION;
- gLocalAudioFlinger = af;
- return OK;
-}
-
-// establish binder interface to AudioFlinger service
-const sp<IAudioFlinger> AudioSystem::getAudioFlingerImpl(bool canStartThreadPool = true) {
- sp<IAudioFlinger> af;
- sp<AudioFlingerClient> afc;
- bool reportNoError = false;
- {
- Mutex::Autolock _l(gLock);
- if (gAudioFlinger != nullptr) {
- return gAudioFlinger;
+ bool reportNoError = false;
+ {
+ std::lock_guard _l(mMutex);
+ if (mService != nullptr) {
+ return mService;
+ }
}
- if (gAudioFlingerClient == nullptr) {
- gAudioFlingerClient = sp<AudioFlingerClient>::make();
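+        // Serialize service lookup: only one caller at a time performs the
+        // potentially blocking wait below. mMutex is released while waiting on
+        // mCvGetter, so setBinder()/setLocalService() can run and wake us.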
+ std::unique_lock ul_only1thread(mSingleGetter);
+ std::unique_lock ul(mMutex);
+ if (mService != nullptr) {
+ return mService;
+ }
+ if (mClient == nullptr) {
+ mClient = sp<Client>::make();
} else {
reportNoError = true;
}
+ while (true) {
+ mService = mLocalService;
+ if (mService != nullptr) break;
- if (gLocalAudioFlinger != nullptr) {
- gAudioFlinger = gLocalAudioFlinger;
- } else {
- sp<IBinder> binder;
- if (gAudioFlingerBinder != nullptr) {
- binder = gAudioFlingerBinder;
- } else {
- sp<IServiceManager> sm = defaultServiceManager();
- binder = sm->waitForService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+ sp<IBinder> binder = mBinder;
+ if (binder == nullptr) {
+                sp<IServiceManager> sm = defaultServiceManager();
+ binder = sm->checkService(String16(ServiceTraits::SERVICE_NAME));
if (binder == nullptr) {
- return nullptr;
+ ALOGD("%s: waiting for %s", __func__, ServiceTraits::SERVICE_NAME);
+
+                    // Once the condition variable is present, setLocalService() and
+                    // setBinder() are allowed to use it to notify us.
+ if (mCvGetter == nullptr) {
+ mCvGetter = std::make_shared<std::condition_variable>();
+ }
+ mCvGetter->wait_for(ul, std::chrono::seconds(1));
+ continue;
}
}
- binder->linkToDeath(gAudioFlingerClient);
- const auto afs = interface_cast<media::IAudioFlingerService>(binder);
- LOG_ALWAYS_FATAL_IF(afs == nullptr);
- gAudioFlinger = sp<AudioFlingerClientAdapter>::make(afs);
+ binder->linkToDeath(mClient);
+ auto aidlInterface = interface_cast<AidlInterface>(binder);
+ LOG_ALWAYS_FATAL_IF(aidlInterface == nullptr);
+ if constexpr (std::is_same_v<ServiceInterface, AidlInterface>) {
+ mService = std::move(aidlInterface);
+ } else /* constexpr */ {
+ mService = ServiceTraits::createServiceAdapter(aidlInterface);
+ }
+ break;
}
- afc = gAudioFlingerClient;
- af = gAudioFlinger;
- // Make sure callbacks can be received by gAudioFlingerClient
- if(canStartThreadPool) {
+ if (mCvGetter) mCvGetter.reset(); // remove condition variable.
+ client = mClient;
+ service = mService;
+ // Make sure callbacks can be received by the client
+ if (canStartThreadPool) {
ProcessState::self()->startThreadPool();
}
+ ul.unlock();
+ ul_only1thread.unlock();
+ ServiceTraits::onServiceCreate(service, client);
+ if (reportNoError) AudioSystem::reportError(NO_ERROR);
+ return service;
}
- const int64_t token = IPCThreadState::self()->clearCallingIdentity();
- af->registerClient(afc);
- IPCThreadState::self()->restoreCallingIdentity(token);
- if (reportNoError) reportError(NO_ERROR);
- return af;
+
+ status_t setLocalService(const sp<ServiceInterface>& service) EXCLUDES(mMutex) {
+ std::lock_guard _l(mMutex);
+ // we allow clearing once set, but not a double non-null set.
+ if (mService != nullptr && service != nullptr) return INVALID_OPERATION;
+ mLocalService = service;
+ if (mCvGetter) mCvGetter->notify_one();
+ return OK;
+ }
+
+ sp<Client> getClient() EXCLUDES(mMutex) {
+ const auto service = getService();
+ if (service == nullptr) return nullptr;
+ std::lock_guard _l(mMutex);
+ return mClient;
+ }
+
+ void setBinder(const sp<IBinder>& binder) EXCLUDES(mMutex) {
+ std::lock_guard _l(mMutex);
+ if (mService != nullptr) {
+ ALOGW("%s: ignoring; %s connection already established.",
+ __func__, ServiceTraits::SERVICE_NAME);
+ return;
+ }
+ mBinder = binder;
+ if (mCvGetter) mCvGetter->notify_one();
+ }
+
+ void clearService() EXCLUDES(mMutex) {
+ std::lock_guard _l(mMutex);
+ mService.clear();
+ if (mClient) ServiceTraits::onClearService(mClient);
+ }
+
+private:
+ std::mutex mSingleGetter;
+ std::mutex mMutex;
+ std::shared_ptr<std::condition_variable> mCvGetter GUARDED_BY(mMutex);
+ sp<IBinder> mBinder GUARDED_BY(mMutex);
+ sp<ServiceInterface> mLocalService GUARDED_BY(mMutex);
+ sp<ServiceInterface> mService GUARDED_BY(mMutex);
+ sp<Client> mClient GUARDED_BY(mMutex);
+};
+
+struct AudioFlingerTraits {
+ static void onServiceCreate(
+ const sp<IAudioFlinger>& af, const sp<AudioSystem::AudioFlingerClient>& afc) {
+ const int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ af->registerClient(afc);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ }
+
+ static sp<IAudioFlinger> createServiceAdapter(
+ const sp<media::IAudioFlingerService>& aidlInterface) {
+ return sp<AudioFlingerClientAdapter>::make(aidlInterface);
+ }
+
+ static void onClearService(const sp<AudioSystem::AudioFlingerClient>& afc) {
+ afc->clearIoCache();
+ }
+
+ static constexpr const char* SERVICE_NAME = IAudioFlinger::DEFAULT_SERVICE_NAME;
+};
+
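+// constinit + no_destroy: the handler is constant-initialized and never
+// destroyed, so it can be used safely during process start-up and shutdown.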
+[[clang::no_destroy]] static constinit ServiceHandler<IAudioFlinger,
+ AudioSystem::AudioFlingerClient, media::IAudioFlingerService,
+ AudioFlingerTraits> gAudioFlingerServiceHandler;
+
+sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
+ return gAudioFlingerServiceHandler.getService();
}
-const sp<IAudioFlinger> AudioSystem:: get_audio_flinger() {
- return getAudioFlingerImpl();
+sp<IAudioFlinger> AudioSystem::get_audio_flinger_for_fuzzer() {
+ return gAudioFlingerServiceHandler.getService(false /* canStartThreadPool */);
}
-const sp<IAudioFlinger> AudioSystem:: get_audio_flinger_for_fuzzer() {
- return getAudioFlingerImpl(false);
+sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
+ return gAudioFlingerServiceHandler.getClient();
}
-const sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
- // calling get_audio_flinger() will initialize gAudioFlingerClient if needed
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return 0;
- Mutex::Autolock _l(gLock);
- return gAudioFlingerClient;
+void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
+ if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
+ ALOGE("%s: received a binder of type %s",
+ __func__, String8(audioFlinger->getInterfaceDescriptor()).c_str());
+ return;
+ }
+ gAudioFlingerServiceHandler.setBinder(audioFlinger);
+}
+
+status_t AudioSystem::setLocalAudioFlinger(const sp<IAudioFlinger>& af) {
+ return gAudioFlingerServiceHandler.setLocalService(af);
}
sp<AudioIoDescriptor> AudioSystem::getIoDescriptor(audio_io_handle_t ioHandle) {
@@ -192,41 +264,41 @@
// FIXME Declare in binder opcode order, similarly to IAudioFlinger.h and IAudioFlinger.cpp
status_t AudioSystem::muteMicrophone(bool state) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->setMicMute(state);
}
status_t AudioSystem::isMicrophoneMuted(bool* state) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
*state = af->getMicMute();
return NO_ERROR;
}
status_t AudioSystem::setMasterVolume(float value) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
af->setMasterVolume(value);
return NO_ERROR;
}
status_t AudioSystem::setMasterMute(bool mute) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
af->setMasterMute(mute);
return NO_ERROR;
}
status_t AudioSystem::getMasterVolume(float* volume) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
*volume = af->masterVolume();
return NO_ERROR;
}
status_t AudioSystem::getMasterMute(bool* mute) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
*mute = af->masterMute();
return NO_ERROR;
@@ -235,7 +307,7 @@
status_t AudioSystem::setStreamVolume(audio_stream_type_t stream, float value,
audio_io_handle_t output) {
if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
af->setStreamVolume(stream, value, output);
return NO_ERROR;
@@ -243,7 +315,7 @@
status_t AudioSystem::setStreamMute(audio_stream_type_t stream, bool mute) {
if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
af->setStreamMute(stream, mute);
return NO_ERROR;
@@ -252,7 +324,7 @@
status_t AudioSystem::getStreamVolume(audio_stream_type_t stream, float* volume,
audio_io_handle_t output) {
if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
*volume = af->streamVolume(stream, output);
return NO_ERROR;
@@ -260,7 +332,7 @@
status_t AudioSystem::getStreamMute(audio_stream_type_t stream, bool* mute) {
if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
*mute = af->streamMute(stream);
return NO_ERROR;
@@ -268,25 +340,25 @@
status_t AudioSystem::setMode(audio_mode_t mode) {
if (uint32_t(mode) >= AUDIO_MODE_CNT) return BAD_VALUE;
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->setMode(mode);
}
status_t AudioSystem::setSimulateDeviceConnections(bool enabled) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->setSimulateDeviceConnections(enabled);
}
status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->setParameters(ioHandle, keyValuePairs);
}
String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
String8 result = String8("");
if (af == 0) return result;
@@ -305,23 +377,23 @@
// convert volume steps to natural log scale
// change this value to change volume scaling
-static const float dBPerStep = 0.5f;
+constexpr float kdbPerStep = 0.5f;
// shouldn't need to touch these
-static const float dBConvert = -dBPerStep * 2.302585093f / 20.0f;
-static const float dBConvertInverse = 1.0f / dBConvert;
+constexpr float kdBConvert = -kdbPerStep * 2.302585093f / 20.0f;
+constexpr float kdBConvertInverse = 1.0f / kdBConvert;
float AudioSystem::linearToLog(int volume) {
- // float v = volume ? exp(float(100 - volume) * dBConvert) : 0;
+ // float v = volume ? exp(float(100 - volume) * kdBConvert) : 0;
// ALOGD("linearToLog(%d)=%f", volume, v);
// return v;
- return volume ? exp(float(100 - volume) * dBConvert) : 0;
+ return volume ? exp(float(100 - volume) * kdBConvert) : 0;
}
int AudioSystem::logToLinear(float volume) {
- // int v = volume ? 100 - int(dBConvertInverse * log(volume) + 0.5) : 0;
+ // int v = volume ? 100 - int(kdBConvertInverse * log(volume) + 0.5) : 0;
// ALOGD("logTolinear(%d)=%f", v, volume);
// return v;
- return volume ? 100 - int(dBConvertInverse * log(volume) + 0.5) : 0;
+ return volume ? 100 - int(kdBConvertInverse * log(volume) + 0.5) : 0;
}
/* static */ size_t AudioSystem::calculateMinFrameCount(
@@ -366,7 +438,7 @@
status_t AudioSystem::getSamplingRate(audio_io_handle_t ioHandle,
uint32_t* samplingRate) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
if (desc == 0) {
@@ -401,7 +473,7 @@
status_t AudioSystem::getFrameCount(audio_io_handle_t ioHandle,
size_t* frameCount) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
if (desc == 0) {
@@ -436,7 +508,7 @@
status_t AudioSystem::getLatency(audio_io_handle_t output,
uint32_t* latency) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output);
if (outputDesc == 0) {
@@ -460,21 +532,21 @@
}
status_t AudioSystem::setVoiceVolume(float value) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->setVoiceVolume(value);
}
status_t AudioSystem::getRenderPosition(audio_io_handle_t output, uint32_t* halFrames,
uint32_t* dspFrames) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->getRenderPosition(halFrames, dspFrames, output);
}
uint32_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
uint32_t result = 0;
if (af == 0) return result;
if (ioHandle == AUDIO_IO_HANDLE_NONE) return result;
@@ -485,46 +557,46 @@
audio_unique_id_t AudioSystem::newAudioUniqueId(audio_unique_id_use_t use) {
// Must not use AF as IDs will re-roll on audioserver restart, b/130369529.
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return AUDIO_UNIQUE_ID_ALLOCATE;
return af->newAudioUniqueId(use);
}
void AudioSystem::acquireAudioSessionId(audio_session_t audioSession, pid_t pid, uid_t uid) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af != 0) {
af->acquireAudioSessionId(audioSession, pid, uid);
}
}
void AudioSystem::releaseAudioSessionId(audio_session_t audioSession, pid_t pid) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af != 0) {
af->releaseAudioSessionId(audioSession, pid);
}
}
audio_hw_sync_t AudioSystem::getAudioHwSyncForSession(audio_session_t sessionId) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return AUDIO_HW_SYNC_INVALID;
return af->getAudioHwSyncForSession(sessionId);
}
status_t AudioSystem::systemReady() {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return NO_INIT;
return af->systemReady();
}
status_t AudioSystem::audioPolicyReady() {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return NO_INIT;
return af->audioPolicyReady();
}
status_t AudioSystem::getFrameCountHAL(audio_io_handle_t ioHandle,
size_t* frameCount) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
if (desc == 0) {
@@ -546,7 +618,7 @@
void AudioSystem::AudioFlingerClient::clearIoCache() {
- Mutex::Autolock _l(mLock);
+ std::lock_guard _l(mMutex);
mIoDescriptors.clear();
mInBuffSize = 0;
mInSamplingRate = 0;
@@ -555,14 +627,7 @@
}
void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused) {
- {
- Mutex::Autolock _l(AudioSystem::gLock);
- AudioSystem::gAudioFlinger.clear();
- }
-
- // clear output handles and stream to output map caches
- clearIoCache();
-
+ gAudioFlingerServiceHandler.clearService();
reportError(DEAD_OBJECT);
ALOGW("AudioFlinger server died!");
@@ -584,7 +649,7 @@
audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
std::vector<sp<AudioDeviceCallback>> callbacksToCall;
{
- Mutex::Autolock _l(mLock);
+ std::lock_guard _l(mMutex);
auto callbacks = std::map<audio_port_handle_t, wp<AudioDeviceCallback>>();
switch (event) {
@@ -592,13 +657,10 @@
case AUDIO_OUTPUT_REGISTERED:
case AUDIO_INPUT_OPENED:
case AUDIO_INPUT_REGISTERED: {
- sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->getIoHandle());
- if (oldDesc == 0) {
- mIoDescriptors.add(ioDesc->getIoHandle(), ioDesc);
- } else {
+ if (sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->getIoHandle())) {
deviceId = oldDesc->getDeviceId();
- mIoDescriptors.replaceValueFor(ioDesc->getIoHandle(), ioDesc);
}
+ mIoDescriptors[ioDesc->getIoHandle()] = ioDesc;
if (ioDesc->getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
deviceId = ioDesc->getDeviceId();
@@ -627,7 +689,7 @@
ALOGV("ioConfigChanged() %s %d closed",
event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->getIoHandle());
- mIoDescriptors.removeItem(ioDesc->getIoHandle());
+ mIoDescriptors.erase(ioDesc->getIoHandle());
mAudioDeviceCallbacks.erase(ioDesc->getIoHandle());
}
break;
@@ -643,7 +705,7 @@
}
deviceId = oldDesc->getDeviceId();
- mIoDescriptors.replaceValueFor(ioDesc->getIoHandle(), ioDesc);
+ mIoDescriptors[ioDesc->getIoHandle()] = ioDesc;
if (deviceId != ioDesc->getDeviceId()) {
deviceId = ioDesc->getDeviceId();
@@ -689,8 +751,8 @@
}
}
- // Callbacks must be called without mLock held. May lead to dead lock if calling for
- // example getRoutedDevice that updates the device and tries to acquire mLock.
+    // Callbacks must be invoked without mMutex held; calling, for example,
+    // getRoutedDevice(), which updates the device and tries to acquire mMutex, could deadlock.
for (auto cb : callbacksToCall) {
// If callbacksToCall is not empty, it implies ioDesc->getIoHandle() and deviceId are valid
cb->onAudioDeviceUpdate(ioDesc->getIoHandle(), deviceId);
@@ -709,7 +771,7 @@
std::vector<sp<SupportedLatencyModesCallback>> callbacks;
{
- Mutex::Autolock _l(mLock);
+ std::lock_guard _l(mMutex);
for (auto callback : mSupportedLatencyModesCallbacks) {
if (auto ref = callback.promote(); ref != nullptr) {
callbacks.push_back(ref);
@@ -726,11 +788,11 @@
status_t AudioSystem::AudioFlingerClient::getInputBufferSize(
uint32_t sampleRate, audio_format_t format,
audio_channel_mask_t channelMask, size_t* buffSize) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) {
return PERMISSION_DENIED;
}
- Mutex::Autolock _l(mLock);
+ std::lock_guard _l(mMutex);
// Do we have a stale mInBuffSize or are we requesting the input buffer size for new values
if ((mInBuffSize == 0) || (sampleRate != mInSamplingRate) || (format != mInFormat)
|| (channelMask != mInChannelMask)) {
@@ -756,16 +818,15 @@
sp<AudioIoDescriptor>
AudioSystem::AudioFlingerClient::getIoDescriptor_l(audio_io_handle_t ioHandle) {
- sp<AudioIoDescriptor> desc;
- ssize_t index = mIoDescriptors.indexOfKey(ioHandle);
- if (index >= 0) {
- desc = mIoDescriptors.valueAt(index);
+ if (const auto it = mIoDescriptors.find(ioHandle);
+ it != mIoDescriptors.end()) {
+ return it->second;
}
- return desc;
+ return {};
}
sp<AudioIoDescriptor> AudioSystem::AudioFlingerClient::getIoDescriptor(audio_io_handle_t ioHandle) {
- Mutex::Autolock _l(mLock);
+ std::lock_guard _l(mMutex);
return getIoDescriptor_l(ioHandle);
}
@@ -773,7 +834,7 @@
const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo,
audio_port_handle_t portId) {
ALOGV("%s audioIo %d portId %d", __func__, audioIo, portId);
- Mutex::Autolock _l(mLock);
+ std::lock_guard _l(mMutex);
auto& callbacks = mAudioDeviceCallbacks.emplace(
audioIo,
std::map<audio_port_handle_t, wp<AudioDeviceCallback>>()).first->second;
@@ -788,7 +849,7 @@
const wp<AudioDeviceCallback>& callback __unused, audio_io_handle_t audioIo,
audio_port_handle_t portId) {
ALOGV("%s audioIo %d portId %d", __func__, audioIo, portId);
- Mutex::Autolock _l(mLock);
+ std::lock_guard _l(mMutex);
auto it = mAudioDeviceCallbacks.find(audioIo);
if (it == mAudioDeviceCallbacks.end()) {
return INVALID_OPERATION;
@@ -804,7 +865,7 @@
status_t AudioSystem::AudioFlingerClient::addSupportedLatencyModesCallback(
const sp<SupportedLatencyModesCallback>& callback) {
- Mutex::Autolock _l(mLock);
+ std::lock_guard _l(mMutex);
if (std::find(mSupportedLatencyModesCallbacks.begin(),
mSupportedLatencyModesCallbacks.end(),
callback) != mSupportedLatencyModesCallbacks.end()) {
@@ -816,7 +877,7 @@
status_t AudioSystem::AudioFlingerClient::removeSupportedLatencyModesCallback(
const sp<SupportedLatencyModesCallback>& callback) {
- Mutex::Autolock _l(mLock);
+ std::lock_guard _l(mMutex);
auto it = std::find(mSupportedLatencyModesCallbacks.begin(),
mSupportedLatencyModesCallbacks.end(),
callback);
@@ -828,93 +889,78 @@
}
/* static */ uintptr_t AudioSystem::addErrorCallback(audio_error_callback cb) {
- Mutex::Autolock _l(gLockErrorCallbacks);
+ std::lock_guard _l(gErrorCallbacksMutex);
gAudioErrorCallbacks.insert(cb);
return reinterpret_cast<uintptr_t>(cb);
}
/* static */ void AudioSystem::removeErrorCallback(uintptr_t cb) {
- Mutex::Autolock _l(gLockErrorCallbacks);
+ std::lock_guard _l(gErrorCallbacksMutex);
gAudioErrorCallbacks.erase(reinterpret_cast<audio_error_callback>(cb));
}
/* static */ void AudioSystem::reportError(status_t err) {
- Mutex::Autolock _l(gLockErrorCallbacks);
+ std::lock_guard _l(gErrorCallbacksMutex);
for (auto callback : gAudioErrorCallbacks) {
callback(err);
}
}
/*static*/ void AudioSystem::setDynPolicyCallback(dynamic_policy_callback cb) {
- Mutex::Autolock _l(gLock);
+ std::lock_guard _l(gMutex);
gDynPolicyCallback = cb;
}
/*static*/ void AudioSystem::setRecordConfigCallback(record_config_callback cb) {
- Mutex::Autolock _l(gLock);
+ std::lock_guard _l(gMutex);
gRecordConfigCallback = cb;
}
/*static*/ void AudioSystem::setRoutingCallback(routing_callback cb) {
- Mutex::Autolock _l(gLock);
+ std::lock_guard _l(gMutex);
gRoutingCallback = cb;
}
/*static*/ void AudioSystem::setVolInitReqCallback(vol_range_init_req_callback cb) {
- Mutex::Autolock _l(gLock);
+ std::lock_guard _l(gMutex);
gVolRangeInitReqCallback = cb;
}
-// client singleton for AudioPolicyService binder interface
-// protected by gLockAPS
-sp<IAudioPolicyService> AudioSystem::gAudioPolicyService;
-sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::gAudioPolicyServiceClient;
-
-
-// establish binder interface to AudioPolicy service
-const sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() {
- sp<IAudioPolicyService> ap;
- sp<AudioPolicyServiceClient> apc;
- {
- Mutex::Autolock _l(gLockAPS);
- if (gAudioPolicyService == 0) {
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder = sm->waitForService(String16("media.audio_policy"));
- if (binder == nullptr) {
- return nullptr;
- }
- if (gAudioPolicyServiceClient == NULL) {
- gAudioPolicyServiceClient = new AudioPolicyServiceClient();
- }
- binder->linkToDeath(gAudioPolicyServiceClient);
- gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
- LOG_ALWAYS_FATAL_IF(gAudioPolicyService == 0);
- apc = gAudioPolicyServiceClient;
- // Make sure callbacks can be received by gAudioPolicyServiceClient
- ProcessState::self()->startThreadPool();
- }
- ap = gAudioPolicyService;
- }
- if (apc != 0) {
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
+struct AudioPolicyTraits {
+ static void onServiceCreate(const sp<IAudioPolicyService>& ap,
+ const sp<AudioSystem::AudioPolicyServiceClient>& apc) {
+ const int64_t token = IPCThreadState::self()->clearCallingIdentity();
ap->registerClient(apc);
ap->setAudioPortCallbacksEnabled(apc->isAudioPortCbEnabled());
ap->setAudioVolumeGroupCallbacksEnabled(apc->isAudioVolumeGroupCbEnabled());
IPCThreadState::self()->restoreCallingIdentity(token);
}
- return ap;
+ static void onClearService(const sp<AudioSystem::AudioPolicyServiceClient>&) {}
+
+ static constexpr const char *SERVICE_NAME = "media.audio_policy";
+};
+
+[[clang::no_destroy]] static constinit ServiceHandler<IAudioPolicyService,
+ AudioSystem::AudioPolicyServiceClient, IAudioPolicyService,
+ AudioPolicyTraits> gAudioPolicyServiceHandler;
+
+status_t AudioSystem::setLocalAudioPolicyService(const sp<IAudioPolicyService>& aps) {
+ return gAudioPolicyServiceHandler.setLocalService(aps);
+}
+
+sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() {
+ return gAudioPolicyServiceHandler.getService();
}
void AudioSystem::clearAudioPolicyService() {
- Mutex::Autolock _l(gLockAPS);
- gAudioPolicyService.clear();
+ gAudioPolicyServiceHandler.clearService();
}
// ---------------------------------------------------------------------------
void AudioSystem::onNewAudioModulesAvailable() {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return;
aps->onNewAudioModulesAvailable();
}
@@ -922,7 +968,7 @@
status_t AudioSystem::setDeviceConnectionState(audio_policy_dev_state_t state,
const android::media::audio::common::AudioPort& port,
audio_format_t encodedFormat) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
@@ -937,7 +983,7 @@
audio_policy_dev_state_t AudioSystem::getDeviceConnectionState(audio_devices_t device,
const char* device_address) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
auto result = [&]() -> ConversionResult<audio_policy_dev_state_t> {
@@ -957,7 +1003,7 @@
const char* device_address,
const char* device_name,
audio_format_t encodedFormat) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
const char* address = "";
const char* name = "";
@@ -980,7 +1026,7 @@
status_t AudioSystem::setPhoneState(audio_mode_t state, uid_t uid) {
if (uint32_t(state) >= AUDIO_MODE_CNT) return BAD_VALUE;
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
return statusTFromBinderStatus(aps->setPhoneState(
@@ -990,7 +1036,7 @@
status_t
AudioSystem::setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
return statusTFromBinderStatus(
@@ -1003,7 +1049,7 @@
}
audio_policy_forced_cfg_t AudioSystem::getForceUse(audio_policy_force_use_t usage) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return AUDIO_POLICY_FORCE_NONE;
auto result = [&]() -> ConversionResult<audio_policy_forced_cfg_t> {
@@ -1020,7 +1066,7 @@
audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return AUDIO_IO_HANDLE_NONE;
auto result = [&]() -> ConversionResult<audio_io_handle_t> {
@@ -1068,7 +1114,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return NO_INIT;
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1117,7 +1163,7 @@
}
status_t AudioSystem::startOutput(audio_port_handle_t portId) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1125,7 +1171,7 @@
}
status_t AudioSystem::stopOutput(audio_port_handle_t portId) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1133,7 +1179,7 @@
}
void AudioSystem::releaseOutput(audio_port_handle_t portId) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return;
auto status = [&]() -> status_t {
@@ -1173,7 +1219,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return NO_INIT;
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1207,7 +1253,7 @@
}
status_t AudioSystem::startInput(audio_port_handle_t portId) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1215,7 +1261,7 @@
}
status_t AudioSystem::stopInput(audio_port_handle_t portId) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1223,7 +1269,7 @@
}
void AudioSystem::releaseInput(audio_port_handle_t portId) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return;
auto status = [&]() -> status_t {
@@ -1240,7 +1286,7 @@
status_t AudioSystem::initStreamVolume(audio_stream_type_t stream,
int indexMin,
int indexMax) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1261,7 +1307,7 @@
status_t AudioSystem::setStreamVolumeIndex(audio_stream_type_t stream,
int index,
audio_devices_t device) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1276,7 +1322,7 @@
status_t AudioSystem::getStreamVolumeIndex(audio_stream_type_t stream,
int* index,
audio_devices_t device) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1295,7 +1341,7 @@
status_t AudioSystem::setVolumeIndexForAttributes(const audio_attributes_t& attr,
int index,
audio_devices_t device) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1310,7 +1356,7 @@
status_t AudioSystem::getVolumeIndexForAttributes(const audio_attributes_t& attr,
int& index,
audio_devices_t device) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1325,7 +1371,7 @@
}
status_t AudioSystem::getMaxVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1338,7 +1384,7 @@
}
status_t AudioSystem::getMinVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
@@ -1351,7 +1397,7 @@
}
product_strategy_t AudioSystem::getStrategyForStream(audio_stream_type_t stream) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PRODUCT_STRATEGY_NONE;
auto result = [&]() -> ConversionResult<product_strategy_t> {
@@ -1371,7 +1417,7 @@
if (devices == nullptr) {
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
@@ -1387,7 +1433,7 @@
}
audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t* desc) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
// FIXME change return type to status_t, and return PERMISSION_DENIED here
if (aps == 0) return AUDIO_IO_HANDLE_NONE;
@@ -1408,7 +1454,7 @@
product_strategy_t strategy,
audio_session_t session,
int id) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::EffectDescriptor descAidl = VALUE_OR_RETURN_STATUS(
@@ -1422,7 +1468,7 @@
}
status_t AudioSystem::unregisterEffect(int id) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
@@ -1431,7 +1477,7 @@
}
status_t AudioSystem::setEffectEnabled(int id, bool enabled) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
@@ -1440,7 +1486,7 @@
}
status_t AudioSystem::moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
std::vector<int32_t> idsAidl = VALUE_OR_RETURN_STATUS(
@@ -1450,7 +1496,7 @@
}
status_t AudioSystem::isStreamActive(audio_stream_type_t stream, bool* state, uint32_t inPastMs) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
if (state == NULL) return BAD_VALUE;
@@ -1464,7 +1510,7 @@
status_t AudioSystem::isStreamActiveRemotely(audio_stream_type_t stream, bool* state,
uint32_t inPastMs) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
if (state == NULL) return BAD_VALUE;
@@ -1477,7 +1523,7 @@
}
status_t AudioSystem::isSourceActive(audio_source_t stream, bool* state) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
if (state == NULL) return BAD_VALUE;
@@ -1489,19 +1535,19 @@
}
uint32_t AudioSystem::getPrimaryOutputSamplingRate() {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return 0;
return af->getPrimaryOutputSamplingRate();
}
size_t AudioSystem::getPrimaryOutputFrameCount() {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return 0;
return af->getPrimaryOutputFrameCount();
}
status_t AudioSystem::setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->setLowRamDevice(isLowRamDevice, totalMemory);
}
@@ -1509,18 +1555,12 @@
void AudioSystem::clearAudioConfigCache() {
// called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances
ALOGV("clearAudioConfigCache()");
- {
- Mutex::Autolock _l(gLock);
- if (gAudioFlingerClient != 0) {
- gAudioFlingerClient->clearIoCache();
- }
- gAudioFlinger.clear();
- }
+ gAudioFlingerServiceHandler.clearService();
clearAudioPolicyService();
}
status_t AudioSystem::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == nullptr) return PERMISSION_DENIED;
std::vector<AudioUsage> systemUsagesAidl = VALUE_OR_RETURN_STATUS(
@@ -1530,7 +1570,7 @@
}
status_t AudioSystem::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == nullptr) return PERMISSION_DENIED;
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -1541,7 +1581,7 @@
audio_offload_mode_t AudioSystem::getOffloadSupport(const audio_offload_info_t& info) {
ALOGV("%s", __func__);
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return AUDIO_OFFLOAD_NOT_SUPPORTED;
auto result = [&]() -> ConversionResult<audio_offload_mode_t> {
@@ -1566,7 +1606,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::AudioPortRole roleAidl = VALUE_OR_RETURN_STATUS(
@@ -1590,7 +1630,7 @@
status_t AudioSystem::listDeclaredDevicePorts(media::AudioPortRole role,
std::vector<media::AudioPortFw>* result) {
if (result == nullptr) return BAD_VALUE;
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aps->listDeclaredDevicePorts(role, result)));
return OK;
@@ -1600,7 +1640,7 @@
if (port == nullptr) {
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::AudioPortFw portAidl;
@@ -1616,7 +1656,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::AudioPatchFw patchAidl = VALUE_OR_RETURN_STATUS(
@@ -1629,7 +1669,7 @@
}
status_t AudioSystem::releaseAudioPatch(audio_patch_handle_t handle) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(handle));
@@ -1644,7 +1684,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
@@ -1667,7 +1707,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::AudioPortConfigFw configAidl = VALUE_OR_RETURN_STATUS(
@@ -1676,14 +1716,13 @@
}
status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callback) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
+ const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (apc == nullptr) return NO_INIT;
- Mutex::Autolock _l(gLockAPS);
- if (gAudioPolicyServiceClient == 0) {
- return NO_INIT;
- }
- int ret = gAudioPolicyServiceClient->addAudioPortCallback(callback);
+ std::lock_guard _l(gApsCallbackMutex);
+ const int ret = apc->addAudioPortCallback(callback);
if (ret == 1) {
aps->setAudioPortCallbacksEnabled(true);
}
@@ -1692,14 +1731,13 @@
/*static*/
status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callback) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
+ const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (apc == nullptr) return NO_INIT;
- Mutex::Autolock _l(gLockAPS);
- if (gAudioPolicyServiceClient == 0) {
- return NO_INIT;
- }
- int ret = gAudioPolicyServiceClient->removeAudioPortCallback(callback);
+ std::lock_guard _l(gApsCallbackMutex);
+ const int ret = apc->removeAudioPortCallback(callback);
if (ret == 0) {
aps->setAudioPortCallbacksEnabled(false);
}
@@ -1707,14 +1745,13 @@
}
status_t AudioSystem::addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
+ const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (apc == nullptr) return NO_INIT;
- Mutex::Autolock _l(gLockAPS);
- if (gAudioPolicyServiceClient == 0) {
- return NO_INIT;
- }
- int ret = gAudioPolicyServiceClient->addAudioVolumeGroupCallback(callback);
+ std::lock_guard _l(gApsCallbackMutex);
+ const int ret = apc->addAudioVolumeGroupCallback(callback);
if (ret == 1) {
aps->setAudioVolumeGroupCallbacksEnabled(true);
}
@@ -1722,14 +1759,13 @@
}
status_t AudioSystem::removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
+ const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (apc == nullptr) return NO_INIT;
- Mutex::Autolock _l(gLockAPS);
- if (gAudioPolicyServiceClient == 0) {
- return NO_INIT;
- }
- int ret = gAudioPolicyServiceClient->removeAudioVolumeGroupCallback(callback);
+ std::lock_guard _l(gApsCallbackMutex);
+ const int ret = apc->removeAudioVolumeGroupCallback(callback);
if (ret == 0) {
aps->setAudioVolumeGroupCallbacksEnabled(false);
}
@@ -1745,7 +1781,7 @@
}
status_t status = afc->addAudioDeviceCallback(callback, audioIo, portId);
if (status == NO_ERROR) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af != 0) {
af->registerClient(afc);
}
@@ -1782,7 +1818,7 @@
}
audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
const sp<AudioIoDescriptor> desc = getIoDescriptor(audioIo);
if (desc == 0) {
@@ -1797,7 +1833,7 @@
if (session == nullptr || ioHandle == nullptr || device == nullptr) {
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::SoundTriggerSession retAidl;
@@ -1811,7 +1847,7 @@
}
status_t AudioSystem::releaseSoundTriggerSession(audio_session_t session) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
@@ -1819,7 +1855,7 @@
}
audio_mode_t AudioSystem::getPhoneState() {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return AUDIO_MODE_INVALID;
auto result = [&]() -> ConversionResult<audio_mode_t> {
@@ -1832,7 +1868,7 @@
}
status_t AudioSystem::registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
size_t mixesSize = std::min(mixes.size(), size_t{MAX_MIXES_PER_POLICY});
@@ -1843,10 +1879,29 @@
return statusTFromBinderStatus(aps->registerPolicyMixes(mixesAidl, registration));
}
+status_t AudioSystem::getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) {
+ if (!audio_flags::audio_mix_test_api()) {
+ return INVALID_OPERATION;
+ }
+
+ const sp<IAudioPolicyService> aps = AudioSystem::get_audio_policy_service();
+ if (aps == nullptr) return PERMISSION_DENIED;
+
+ std::vector<::android::media::AudioMix> aidlMixes;
+ Status status = aps->getRegisteredPolicyMixes(&aidlMixes);
+
+ for (const auto& aidlMix : aidlMixes) {
+ AudioMix mix = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioMix(aidlMix));
+ mixes.push_back(mix);
+ }
+
+ return statusTFromBinderStatus(status);
+}
+
status_t AudioSystem::updatePolicyMixes(
const std::vector<std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>&
mixesWithUpdates) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
std::vector<media::AudioMixUpdate> updatesAidl;
@@ -1865,7 +1920,7 @@
}
status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -1876,7 +1931,7 @@
}
status_t AudioSystem::removeUidDeviceAffinities(uid_t uid) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -1885,7 +1940,7 @@
status_t AudioSystem::setUserIdDeviceAffinities(int userId,
const AudioDeviceTypeAddrVector& devices) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
@@ -1897,7 +1952,7 @@
}
status_t AudioSystem::removeUserIdDeviceAffinities(int userId) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
return statusTFromBinderStatus(aps->removeUserIdDeviceAffinities(userIdAidl));
@@ -1909,7 +1964,7 @@
if (source == nullptr || attributes == nullptr || portId == nullptr) {
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::AudioPortConfigFw sourceAidl = VALUE_OR_RETURN_STATUS(
@@ -1924,7 +1979,7 @@
}
status_t AudioSystem::stopAudioSource(audio_port_handle_t portId) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
@@ -1932,7 +1987,7 @@
}
status_t AudioSystem::setMasterMono(bool mono) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
return statusTFromBinderStatus(aps->setMasterMono(mono));
}
@@ -1941,26 +1996,26 @@
if (mono == nullptr) {
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
return statusTFromBinderStatus(aps->getMasterMono(mono));
}
status_t AudioSystem::setMasterBalance(float balance) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->setMasterBalance(balance);
}
status_t AudioSystem::getMasterBalance(float* balance) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->getMasterBalance(balance);
}
float
AudioSystem::getStreamVolumeDB(audio_stream_type_t stream, int index, audio_devices_t device) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return NAN;
auto result = [&]() -> ConversionResult<float> {
@@ -1978,13 +2033,13 @@
}
status_t AudioSystem::getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->getMicrophones(microphones);
}
status_t AudioSystem::setAudioHalPids(const std::vector<pid_t>& pids) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) return PERMISSION_DENIED;
return af->setAudioHalPids(pids);
}
@@ -1998,7 +2053,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
Int numSurroundFormatsAidl;
numSurroundFormatsAidl.value =
@@ -2025,7 +2080,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
Int numSurroundFormatsAidl;
numSurroundFormatsAidl.value =
@@ -2043,7 +2098,7 @@
}
status_t AudioSystem::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
AudioFormatDescription audioFormatAidl = VALUE_OR_RETURN_STATUS(
@@ -2053,7 +2108,7 @@
}
status_t AudioSystem::setAssistantServicesUids(const std::vector<uid_t>& uids) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
@@ -2062,7 +2117,7 @@
}
status_t AudioSystem::setActiveAssistantServicesUids(const std::vector<uid_t>& activeUids) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
std::vector<int32_t> activeUidsAidl = VALUE_OR_RETURN_STATUS(
@@ -2071,7 +2126,7 @@
}
status_t AudioSystem::setA11yServicesUids(const std::vector<uid_t>& uids) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
@@ -2080,7 +2135,7 @@
}
status_t AudioSystem::setCurrentImeUid(uid_t uid) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
@@ -2088,7 +2143,7 @@
}
bool AudioSystem::isHapticPlaybackSupported() {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return false;
auto result = [&]() -> ConversionResult<bool> {
@@ -2101,7 +2156,7 @@
}
bool AudioSystem::isUltrasoundSupported() {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return false;
auto result = [&]() -> ConversionResult<bool> {
@@ -2119,7 +2174,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
std::vector<AudioFormatDescription> formatsAidl;
@@ -2135,7 +2190,7 @@
}
status_t AudioSystem::listAudioProductStrategies(AudioProductStrategyVector& strategies) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
std::vector<media::AudioProductStrategy> strategiesAidl;
@@ -2197,7 +2252,7 @@
status_t AudioSystem::getProductStrategyFromAudioAttributes(const audio_attributes_t& aa,
product_strategy_t& productStrategy,
bool fallbackOnDefault) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
@@ -2213,7 +2268,7 @@
}
status_t AudioSystem::listAudioVolumeGroups(AudioVolumeGroupVector& groups) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
std::vector<media::AudioVolumeGroup> groupsAidl;
@@ -2227,7 +2282,7 @@
status_t AudioSystem::getVolumeGroupFromAudioAttributes(const audio_attributes_t &aa,
volume_group_t& volumeGroup,
bool fallbackOnDefault) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
@@ -2240,13 +2295,13 @@
}
status_t AudioSystem::setRttEnabled(bool enabled) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
return statusTFromBinderStatus(aps->setRttEnabled(enabled));
}
bool AudioSystem::isCallScreenModeSupported() {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) return false;
auto result = [&]() -> ConversionResult<bool> {
@@ -2261,7 +2316,7 @@
status_t AudioSystem::setDevicesRoleForStrategy(product_strategy_t strategy,
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
@@ -2278,7 +2333,7 @@
status_t AudioSystem::removeDevicesRoleForStrategy(product_strategy_t strategy,
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
@@ -2294,7 +2349,7 @@
status_t
AudioSystem::clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
@@ -2307,7 +2362,7 @@
status_t AudioSystem::getDevicesForRoleAndStrategy(product_strategy_t strategy,
device_role_t role,
AudioDeviceTypeAddrVector& devices) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
@@ -2325,7 +2380,7 @@
status_t AudioSystem::setDevicesRoleForCapturePreset(audio_source_t audioSource,
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
@@ -2343,7 +2398,7 @@
status_t AudioSystem::addDevicesRoleForCapturePreset(audio_source_t audioSource,
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
@@ -2359,7 +2414,7 @@
status_t AudioSystem::removeDevicesRoleForCapturePreset(
audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
@@ -2375,7 +2430,7 @@
status_t AudioSystem::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
device_role_t role) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
@@ -2389,7 +2444,7 @@
status_t AudioSystem::getDevicesForRoleAndCapturePreset(audio_source_t audioSource,
device_role_t role,
AudioDeviceTypeAddrVector& devices) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
@@ -2407,7 +2462,7 @@
status_t AudioSystem::getSpatializer(const sp<media::INativeSpatializerCallback>& callback,
sp<media::ISpatializer>* spatializer) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (spatializer == nullptr) {
return BAD_VALUE;
}
@@ -2426,7 +2481,7 @@
const audio_config_t *config,
const AudioDeviceTypeAddrVector &devices,
bool *canBeSpatialized) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (canBeSpatialized == nullptr) {
return BAD_VALUE;
}
@@ -2450,7 +2505,7 @@
status_t AudioSystem::getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
sp<media::ISoundDose>* soundDose) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) {
return PERMISSION_DENIED;
}
@@ -2469,7 +2524,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
@@ -2493,7 +2548,7 @@
return BAD_VALUE;
}
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
@@ -2512,7 +2567,7 @@
status_t AudioSystem::setRequestedLatencyMode(
audio_io_handle_t output, audio_latency_mode_t mode) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) {
return PERMISSION_DENIED;
}
@@ -2521,7 +2576,7 @@
status_t AudioSystem::getSupportedLatencyModes(audio_io_handle_t output,
std::vector<audio_latency_mode_t>* modes) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) {
return PERMISSION_DENIED;
}
@@ -2529,7 +2584,7 @@
}
status_t AudioSystem::setBluetoothVariableLatencyEnabled(bool enabled) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) {
return PERMISSION_DENIED;
}
@@ -2538,7 +2593,7 @@
status_t AudioSystem::isBluetoothVariableLatencyEnabled(
bool *enabled) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) {
return PERMISSION_DENIED;
}
@@ -2547,7 +2602,7 @@
status_t AudioSystem::supportsBluetoothVariableLatency(
bool *support) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) {
return PERMISSION_DENIED;
}
@@ -2555,7 +2610,7 @@
}
status_t AudioSystem::getAudioPolicyConfig(media::AudioPolicyConfig *config) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) {
return PERMISSION_DENIED;
}
@@ -2583,15 +2638,15 @@
}
binder::Status setCaptureState(bool active) override {
- Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
+ std::lock_guard _l(AudioSystem::gSoundTriggerMutex);
mListener->onStateChanged(active);
return binder::Status::ok();
}
void binderDied(const wp<IBinder>&) override {
- Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
+ std::lock_guard _l(AudioSystem::gSoundTriggerMutex);
mListener->onServiceDied();
- gSoundTriggerCaptureStateListener = nullptr;
+ AudioSystem::gSoundTriggerCaptureStateListener = nullptr;
}
private:
@@ -2604,13 +2659,12 @@
const sp<CaptureStateListener>& listener) {
LOG_ALWAYS_FATAL_IF(listener == nullptr);
- const sp<IAudioPolicyService>& aps =
- AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == 0) {
return PERMISSION_DENIED;
}
- Mutex::Autolock _l(gSoundTriggerCaptureStateListenerLock);
+ std::lock_guard _l(AudioSystem::gSoundTriggerMutex);
gSoundTriggerCaptureStateListener = new CaptureStateListenerImpl(aps, listener);
gSoundTriggerCaptureStateListener->init();
@@ -2619,7 +2673,7 @@
status_t AudioSystem::setVibratorInfos(
const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) {
return PERMISSION_DENIED;
}
@@ -2628,7 +2682,7 @@
status_t AudioSystem::getMmapPolicyInfo(
AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *policyInfos) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) {
return PERMISSION_DENIED;
}
@@ -2636,7 +2690,7 @@
}
int32_t AudioSystem::getAAudioMixerBurstCount() {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) {
return PERMISSION_DENIED;
}
@@ -2644,7 +2698,7 @@
}
int32_t AudioSystem::getAAudioHardwareBurstMinUsec() {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ const sp<IAudioFlinger> af = get_audio_flinger();
if (af == nullptr) {
return PERMISSION_DENIED;
}
@@ -2653,7 +2707,7 @@
status_t AudioSystem::getSupportedMixerAttributes(
audio_port_handle_t portId, std::vector<audio_mixer_attributes_t> *mixerAttrs) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == nullptr) {
return PERMISSION_DENIED;
}
@@ -2673,7 +2727,7 @@
audio_port_handle_t portId,
uid_t uid,
const audio_mixer_attributes_t *mixerAttr) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == nullptr) {
return PERMISSION_DENIED;
}
@@ -2693,7 +2747,7 @@
const audio_attributes_t *attr,
audio_port_handle_t portId,
std::optional<audio_mixer_attributes_t> *mixerAttr) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == nullptr) {
return PERMISSION_DENIED;
}
@@ -2716,7 +2770,7 @@
status_t AudioSystem::clearPreferredMixerAttributes(const audio_attributes_t *attr,
audio_port_handle_t portId,
uid_t uid) {
- const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
if (aps == nullptr) {
return PERMISSION_DENIED;
}
@@ -2733,45 +2787,28 @@
int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
const sp<AudioPortCallback>& callback) {
- Mutex::Autolock _l(mLock);
- for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
- if (mAudioPortCallbacks[i] == callback) {
- return -1;
- }
- }
- mAudioPortCallbacks.add(callback);
- return mAudioPortCallbacks.size();
+ std::lock_guard _l(mMutex);
+ return mAudioPortCallbacks.insert(callback).second ? mAudioPortCallbacks.size() : -1;
}
int AudioSystem::AudioPolicyServiceClient::removeAudioPortCallback(
const sp<AudioPortCallback>& callback) {
- Mutex::Autolock _l(mLock);
- size_t i;
- for (i = 0; i < mAudioPortCallbacks.size(); i++) {
- if (mAudioPortCallbacks[i] == callback) {
- break;
- }
- }
- if (i == mAudioPortCallbacks.size()) {
- return -1;
- }
- mAudioPortCallbacks.removeAt(i);
- return mAudioPortCallbacks.size();
+ std::lock_guard _l(mMutex);
+ return mAudioPortCallbacks.erase(callback) > 0 ? mAudioPortCallbacks.size() : -1;
}
-
Status AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate() {
- Mutex::Autolock _l(mLock);
- for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
- mAudioPortCallbacks[i]->onAudioPortListUpdate();
+ std::lock_guard _l(mMutex);
+ for (const auto& callback : mAudioPortCallbacks) {
+ callback->onAudioPortListUpdate();
}
return Status::ok();
}
Status AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate() {
- Mutex::Autolock _l(mLock);
- for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
- mAudioPortCallbacks[i]->onAudioPatchListUpdate();
+ std::lock_guard _l(mMutex);
+ for (const auto& callback : mAudioPortCallbacks) {
+ callback->onAudioPatchListUpdate();
}
return Status::ok();
}
@@ -2779,30 +2816,16 @@
// ----------------------------------------------------------------------------
int AudioSystem::AudioPolicyServiceClient::addAudioVolumeGroupCallback(
const sp<AudioVolumeGroupCallback>& callback) {
- Mutex::Autolock _l(mLock);
- for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
- if (mAudioVolumeGroupCallback[i] == callback) {
- return -1;
- }
- }
- mAudioVolumeGroupCallback.add(callback);
- return mAudioVolumeGroupCallback.size();
+ std::lock_guard _l(mMutex);
+ return mAudioVolumeGroupCallbacks.insert(callback).second
+ ? mAudioVolumeGroupCallbacks.size() : -1;
}
int AudioSystem::AudioPolicyServiceClient::removeAudioVolumeGroupCallback(
const sp<AudioVolumeGroupCallback>& callback) {
- Mutex::Autolock _l(mLock);
- size_t i;
- for (i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
- if (mAudioVolumeGroupCallback[i] == callback) {
- break;
- }
- }
- if (i == mAudioVolumeGroupCallback.size()) {
- return -1;
- }
- mAudioVolumeGroupCallback.removeAt(i);
- return mAudioVolumeGroupCallback.size();
+ std::lock_guard _l(mMutex);
+ return mAudioVolumeGroupCallbacks.erase(callback) > 0
+ ? mAudioVolumeGroupCallbacks.size() : -1;
}
Status AudioSystem::AudioPolicyServiceClient::onAudioVolumeGroupChanged(int32_t group,
@@ -2811,9 +2834,9 @@
aidl2legacy_int32_t_volume_group_t(group));
int flagsLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(flags));
- Mutex::Autolock _l(mLock);
- for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
- mAudioVolumeGroupCallback[i]->onAudioVolumeGroupChanged(groupLegacy, flagsLegacy);
+ std::lock_guard _l(mMutex);
+ for (const auto& callback : mAudioVolumeGroupCallbacks) {
+ callback->onAudioVolumeGroupChanged(groupLegacy, flagsLegacy);
}
return Status::ok();
}
@@ -2827,7 +2850,7 @@
int stateLegacy = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(state));
dynamic_policy_callback cb = NULL;
{
- Mutex::Autolock _l(AudioSystem::gLock);
+ std::lock_guard _l(AudioSystem::gMutex);
cb = gDynPolicyCallback;
}
@@ -2848,7 +2871,7 @@
AudioSource source) {
record_config_callback cb = NULL;
{
- Mutex::Autolock _l(AudioSystem::gLock);
+ std::lock_guard _l(AudioSystem::gMutex);
cb = gRecordConfigCallback;
}
@@ -2881,7 +2904,7 @@
Status AudioSystem::AudioPolicyServiceClient::onRoutingUpdated() {
routing_callback cb = NULL;
{
- Mutex::Autolock _l(AudioSystem::gLock);
+ std::lock_guard _l(AudioSystem::gMutex);
cb = gRoutingCallback;
}
@@ -2894,7 +2917,7 @@
Status AudioSystem::AudioPolicyServiceClient::onVolumeRangeInitRequest() {
vol_range_init_req_callback cb = NULL;
{
- Mutex::Autolock _l(AudioSystem::gLock);
+ std::lock_guard _l(AudioSystem::gMutex);
cb = gVolRangeInitReqCallback;
}
@@ -2906,12 +2929,12 @@
void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused) {
{
- Mutex::Autolock _l(mLock);
- for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) {
- mAudioPortCallbacks[i]->onServiceDied();
+ std::lock_guard _l(mMutex);
+ for (const auto& callback : mAudioPortCallbacks) {
+ callback->onServiceDied();
}
- for (size_t i = 0; i < mAudioVolumeGroupCallback.size(); i++) {
- mAudioVolumeGroupCallback[i]->onServiceDied();
+ for (const auto& callback : mAudioVolumeGroupCallbacks) {
+ callback->onServiceDied();
}
}
AudioSystem::clearAudioPolicyService();
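Note: the AudioSystem.cpp hunks above consistently switch the AudioFlinger/AudioPolicyService accessors from const sp<...>& bindings to by-value sp<...> locals and move the callback bookkeeping from Mutex::Autolock/Vector to std::lock_guard/std::set. The motivation is not spelled out in the diff; the sketch below only illustrates the by-value idiom with placeholder names, not AudioSystem APIs:

    #include <utils/RefBase.h>
    #include <utils/StrongPointer.h>

    using android::sp;

    struct FakeService : public android::RefBase {
        void ping() {}
    };

    // Stand-in for an accessor such as get_audio_flinger(); returns by value.
    static sp<FakeService> getFakeService() {
        static sp<FakeService> sInstance = sp<FakeService>::make();
        return sInstance;
    }

    void useService() {
        // Holding the proxy by value takes an independent strong reference for
        // the duration of the call, matching the pattern adopted above.
        const sp<FakeService> service = getFakeService();
        if (service != nullptr) {
            service->ping();
        }
    }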
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 565427b..98a1fde 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1699,29 +1699,42 @@
}
status_t AudioTrack::setOutputDevice(audio_port_handle_t deviceId) {
+ status_t result = NO_ERROR;
AutoMutex lock(mLock);
- ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d mRoutedDeviceId %d",
- __func__, mPortId, deviceId, mSelectedDeviceId, mRoutedDeviceId);
+ ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
+ __func__, mPortId, deviceId, mSelectedDeviceId);
if (mSelectedDeviceId != deviceId) {
mSelectedDeviceId = deviceId;
if (mStatus == NO_ERROR) {
- // allow track invalidation when track is not playing to propagate
- // the updated mSelectedDeviceId
- if (isPlaying_l()) {
- if (mSelectedDeviceId != mRoutedDeviceId) {
- android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
- mProxy->interrupt();
+ if (isOffloadedOrDirect_l()) {
+ if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
+ ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
+ result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
+ } else {
+ ALOGW("%s(%d). Offloaded or Direct track is not STOPPED or FLUSHED. "
+ "State: %s.",
+ __func__, mPortId, stateToString(mState));
+ result = INVALID_OPERATION;
}
} else {
- // if the track is idle, try to restore now and
- // defer to next start if not possible
- if (restoreTrack_l("setOutputDevice") != OK) {
- android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+ // allow track invalidation when track is not playing to propagate
+ // the updated mSelectedDeviceId
+ if (isPlaying_l()) {
+ if (mSelectedDeviceId != mRoutedDeviceId) {
+ android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+ mProxy->interrupt();
+ }
+ } else {
+ // if the track is idle, try to restore now and
+ // defer to next start if not possible
+ if (restoreTrack_l("setOutputDevice") != OK) {
+ android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+ }
}
}
}
}
- return NO_ERROR;
+ return result;
}
audio_port_handle_t AudioTrack::getOutputDevice() {
@@ -2836,7 +2849,7 @@
return 0;
}
-status_t AudioTrack::restoreTrack_l(const char *from)
+status_t AudioTrack::restoreTrack_l(const char *from, bool forceRestore)
{
status_t result = NO_ERROR; // logged: make sure to set this before returning.
const int64_t beginNs = systemTime();
@@ -2857,7 +2870,8 @@
// output parameters and new IAudioFlinger in createTrack_l()
AudioSystem::clearAudioConfigCache();
- if (isOffloadedOrDirect_l() || mDoNotReconnect) {
+ if (!forceRestore &&
+ (isOffloadedOrDirect_l() || mDoNotReconnect)) {
// FIXME re-creation of offloaded and direct tracks is not yet implemented;
// Disabled since (1) timestamp correction is not implemented for non-PCM and
// (2) We pre-empt existing direct tracks on resource constraint, so these tracks
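Note: with the AudioTrack.cpp change above, setOutputDevice() on an offloaded or direct track recreates the track only when it is STOPPED or FLUSHED and otherwise returns INVALID_OPERATION (previously it always returned NO_ERROR). A hedged caller-side sketch; the track and device id are assumed to come from elsewhere:

    #include <media/AudioTrack.h>

    using namespace android;

    status_t rerouteOffloadedTrack(const sp<AudioTrack>& track,
                                   audio_port_handle_t deviceId) {
        status_t status = track->setOutputDevice(deviceId);
        if (status == INVALID_OPERATION) {
            // Track is still playing or paused: stop it first so the routing
            // change can force a track re-creation, then retry.
            track->stop();
            status = track->setOutputDevice(deviceId);
        }
        return status;
    }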
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 60b08fa..441e329 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -242,6 +242,8 @@
legacy.mCbFlags = VALUE_OR_RETURN(aidl2legacy_AudioMixCallbackFlag_uint32_t_mask(aidl.cbFlags));
legacy.mAllowPrivilegedMediaPlaybackCapture = aidl.allowPrivilegedMediaPlaybackCapture;
legacy.mVoiceCommunicationCaptureAllowed = aidl.voiceCommunicationCaptureAllowed;
+ legacy.mToken = aidl.mToken;
+ legacy.mVirtualDeviceId = aidl.mVirtualDeviceId;
return legacy;
}
@@ -265,6 +267,8 @@
aidl.cbFlags = VALUE_OR_RETURN(legacy2aidl_uint32_t_AudioMixCallbackFlag_mask(legacy.mCbFlags));
aidl.allowPrivilegedMediaPlaybackCapture = legacy.mAllowPrivilegedMediaPlaybackCapture;
aidl.voiceCommunicationCaptureAllowed = legacy.mVoiceCommunicationCaptureAllowed;
+ aidl.mToken = legacy.mToken;
+ aidl.mVirtualDeviceId = legacy.mVirtualDeviceId;
return aidl;
}
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index 234e858..68dba34 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -43,10 +43,9 @@
}
],
"postsubmit": [
- // TODO(b/302036943): Enable once we make it pass with AIDL HAL on CF.
- // {
- // "name": "audioeffect_analysis"
- // },
+ {
+ "name": "audioeffect_analysis"
+ },
{
"name": "CtsVirtualDevicesTestCases",
"options" : [
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index 9c4ccb8..e213f08 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -872,6 +872,18 @@
{ .duration = 0 , .waveFreq = { 0 }, 0, 0}},
.repeatCnt = 3,
.repeatSegment = 0 }, // TONE_NZ_CALL_WAITING
+ { .segments = { { .duration = 500, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 250, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_MY_CONGESTION
+ { .segments = { { .duration = 400, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 400, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0},
+ { .duration = 0, .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 } // TONE_MY_RINGTONE
};
// Used by ToneGenerator::getToneForRegion() to convert user specified supervisory tone type
@@ -976,6 +988,16 @@
TONE_SUP_ERROR, // TONE_SUP_ERROR
TONE_NZ_CALL_WAITING, // TONE_SUP_CALL_WAITING
TONE_GB_RINGTONE // TONE_SUP_RINGTONE
+ },
+ { // MALAYSIA
+ TONE_SUP_DIAL, // TONE_SUP_DIAL
+ TONE_SUP_BUSY, // TONE_SUP_BUSY
+ TONE_MY_CONGESTION, // TONE_SUP_CONGESTION
+ TONE_SUP_RADIO_ACK, // TONE_SUP_RADIO_ACK
+ TONE_SUP_RADIO_NOTAVAIL, // TONE_SUP_RADIO_NOTAVAIL
+ TONE_SUP_ERROR, // TONE_SUP_ERROR
+ TONE_SUP_CALL_WAITING, // TONE_SUP_CALL_WAITING
+ TONE_MY_RINGTONE // TONE_SUP_RINGTONE
}
};
@@ -1055,6 +1077,8 @@
mRegion = TAIWAN;
} else if (strstr(value, "nz") != NULL) {
mRegion = NZ;
+ } else if (strstr(value, "my") != NULL) {
+ mRegion = MY;
} else {
mRegion = CEPT;
}
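Note: the ToneGenerator.cpp hunks above add a MALAYSIA region table, so devices whose region property contains "my" map TONE_SUP_CONGESTION and TONE_SUP_RINGTONE to the new TONE_MY_CONGESTION and TONE_MY_RINGTONE patterns. A minimal playback sketch using the existing ToneGenerator API (stream type and duration are arbitrary choices):

    #include <media/ToneGenerator.h>
    #include <unistd.h>

    using android::ToneGenerator;

    int main() {
        // On a device configured for the Malaysian region, TONE_SUP_RINGTONE
        // resolves to TONE_MY_RINGTONE via getToneForRegion().
        ToneGenerator toneGen(AUDIO_STREAM_VOICE_CALL, 0.8f /* volume */);
        toneGen.startTone(ToneGenerator::TONE_SUP_RINGTONE, 3000 /* ms */);
        sleep(3);
        toneGen.stopTone();
        return 0;
    }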
diff --git a/media/libaudioclient/aidl/android/media/AudioMix.aidl b/media/libaudioclient/aidl/android/media/AudioMix.aidl
index 88b0450..bb8537d 100644
--- a/media/libaudioclient/aidl/android/media/AudioMix.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioMix.aidl
@@ -39,4 +39,8 @@
boolean allowPrivilegedMediaPlaybackCapture;
/** Indicates if the caller can capture voice communication output */
boolean voiceCommunicationCaptureAllowed;
+ /** Identifies the owner of the AudioPolicy that this AudioMix belongs to */
+ IBinder mToken;
+ /** Indicates the Id of the VirtualDevice this AudioMix was registered for */
+ int mVirtualDeviceId;
}
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 52c8da0..633493c 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -263,6 +263,8 @@
void registerPolicyMixes(in AudioMix[] mixes, boolean registration);
+ List<AudioMix> getRegisteredPolicyMixes();
+
void updatePolicyMixes(in AudioMixUpdate[] updates);
void setUidDeviceAffinities(int /* uid_t */ uid, in AudioDevice[] devices);
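Note: the new getRegisteredPolicyMixes() entry point above is the binder side of the AudioSystem::getRegisteredPolicyMixes() helper added earlier in this change, and that helper bails out with INVALID_OPERATION unless the audio_mix_test_api flag is enabled. A hedged native usage sketch:

    #include <vector>

    #include <media/AudioPolicy.h>
    #include <media/AudioSystem.h>
    #include <utils/Log.h>

    using namespace android;

    void dumpRegisteredMixes() {
        std::vector<AudioMix> mixes;
        const status_t status = AudioSystem::getRegisteredPolicyMixes(mixes);
        if (status != NO_ERROR) {
            // Also hit when the audio_mix_test_api flag is disabled.
            ALOGW("getRegisteredPolicyMixes failed: %d", status);
            return;
        }
        ALOGI("found %zu registered policy mixes", mixes.size());
    }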
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 6093933..a28a677 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -1,4 +1,8 @@
+package {
+ default_team: "trendy_team_media_framework_audio",
+}
+
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,6 +23,7 @@
static_libs: [
"android.hardware.audio.common@7.0-enums",
"effect-aidl-cpp",
+ "libaudiomockhal",
"libcgrouprc",
"libcgrouprc_format",
"libfakeservicemanager",
@@ -71,7 +76,7 @@
"libbinder_headers",
"libmedia_headers",
],
- fuzz_config: {
+ fuzz_config: {
cc: [
"android-media-fuzzing-reports@google.com",
],
@@ -90,6 +95,9 @@
srcs: ["audioflinger_aidl_fuzzer.cpp"],
defaults: [
"libaudioclient_aidl_fuzzer_defaults",
- "service_fuzzer_defaults"
+ "latest_android_hardware_audio_core_ndk_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_hardware_audio_effect_ndk_shared",
+ "service_fuzzer_defaults",
],
}
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
index f99cc3b..5e4f9a1 100644
--- a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -17,8 +17,12 @@
#include <AudioFlinger.h>
#include <android-base/logging.h>
#include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <android/media/IAudioPolicyService.h>
+#include <core-mock/ConfigMock.h>
+#include <core-mock/ModuleMock.h>
+#include <effect-mock/FactoryMock.h>
#include <fakeservicemanager/FakeServiceManager.h>
#include <fuzzbinder/libbinder_driver.h>
#include <fuzzbinder/random_binder.h>
@@ -53,14 +57,26 @@
});
gFakeServiceManager->clear();
- for (const char* service :
- {"activity", "sensor_privacy", "permission", "scheduling_policy",
- "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+ for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+ "batterystats", "media.metrics"}) {
if (!addService(String16(service), gFakeServiceManager, fdp)) {
return 0;
}
}
+ auto configService = ndk::SharedRefBase::make<ConfigMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
+ "android.hardware.audio.core.IConfig/default"));
+
+ auto factoryService = ndk::SharedRefBase::make<FactoryMock>();
+ CHECK_EQ(NO_ERROR,
+ AServiceManager_addService(factoryService.get()->asBinder().get(),
+ "android.hardware.audio.effect.IFactory/default"));
+
+ auto moduleService = ndk::SharedRefBase::make<ModuleMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(moduleService.get()->asBinder().get(),
+ "android.hardware.audio.core.IModule/default"));
+
const auto audioFlinger = sp<AudioFlinger>::make();
const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
@@ -77,12 +93,7 @@
false /* allowIsolated */,
IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
- sp<IBinder> audioFlingerServiceBinder =
- gFakeServiceManager->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
- sp<media::IAudioFlingerService> audioFlingerService =
- interface_cast<media::IAudioFlingerService>(audioFlingerServiceBinder);
-
- fuzzService(media::IAudioFlingerService::asBinder(audioFlingerService), std::move(fdp));
+ fuzzService(media::IAudioFlingerService::asBinder(afAdapter), std::move(fdp));
return 0;
}
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp
new file mode 100644
index 0000000..c4afffb
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_library {
+ name: "libaudiomockhal",
+
+ defaults: [
+ "latest_android_hardware_audio_core_ndk_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_hardware_audio_effect_ndk_shared",
+ ],
+ header_libs: [
+ "libbinder_headers",
+ ],
+ static_libs: [
+ "libbinder_random_parcel",
+ ],
+ shared_libs: [
+ "libbinder_ndk",
+ ],
+
+ host_supported: true,
+ srcs: [
+ "FactoryMock.cpp",
+ "ModuleMock.cpp",
+ "StreamInMock.cpp",
+ "StreamOutMock.cpp",
+ ],
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+ export_include_dirs: ["include"],
+}
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp
new file mode 100644
index 0000000..ea07afc
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "effect-mock/FactoryMock.h"
+#include "effect-mock/EffectMock.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+ndk::ScopedAStatus FactoryMock::createEffect(const AudioUuid&,
+ std::shared_ptr<IEffect>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<EffectMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp
new file mode 100644
index 0000000..711924f
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/ModuleMock.h"
+#include "core-mock/BluetoothA2dpMock.h"
+#include "core-mock/BluetoothLeMock.h"
+#include "core-mock/BluetoothMock.h"
+#include "core-mock/StreamInMock.h"
+#include "core-mock/StreamOutMock.h"
+#include "core-mock/TelephonyMock.h"
+#include "sounddose-mock/SoundDoseMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ModuleMock::ModuleMock() {
+ // Device ports
+ auto outDevice = createPort(/* PortId */ 0, /* Name */ "Default",
+ /* Flags */ 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE,
+ /* isInput */ false,
+ createDeviceExt(
+ /* DeviceType */ AudioDeviceType::OUT_DEFAULT,
+ /* Flags */ AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
+ mPorts.push_back(outDevice);
+ auto inDevice = createPort(/* PortId */ 1, /* Name */ "Default",
+ /* Flags */ 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE,
+ /* isInput */ true,
+ createDeviceExt(
+ /* DeviceType */ AudioDeviceType::IN_DEFAULT,
+ /* Flags */ 0));
+ mPorts.push_back(inDevice);
+}
+
+ndk::ScopedAStatus ModuleMock::getTelephony(std::shared_ptr<ITelephony>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<TelephonyMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetooth(std::shared_ptr<IBluetooth>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<BluetoothMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetoothA2dp(std::shared_ptr<IBluetoothA2dp>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<BluetoothA2dpMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetoothLe(std::shared_ptr<IBluetoothLe>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<BluetoothLeMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::openInputStream(const OpenInputStreamArguments&,
+ OpenInputStreamReturn* _aidl_return) {
+ _aidl_return->stream = ndk::SharedRefBase::make<StreamInMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::openOutputStream(const OpenOutputStreamArguments&,
+ OpenOutputStreamReturn* _aidl_return) {
+ _aidl_return->stream = ndk::SharedRefBase::make<StreamOutMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMasterMute(bool* _aidl_return) {
+ *_aidl_return = mMasterMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMasterMute(bool masterMute) {
+ mMasterMute = masterMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMasterVolume(float* _aidl_return) {
+ *_aidl_return = mMasterVolume;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMasterVolume(float masterVolume) {
+ mMasterVolume = masterVolume;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMicMute(bool* _aidl_return) {
+ *_aidl_return = mMicMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMicMute(bool micMute) {
+ mMicMute = micMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getSoundDose(std::shared_ptr<ISoundDose>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<SoundDoseMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMmapPolicyInfos(AudioMMapPolicyType,
+ std::vector<AudioMMapPolicyInfo>* _aidl_return) {
+ AudioMMapPolicyInfo never;
+ never.mmapPolicy = AudioMMapPolicy::NEVER;
+ _aidl_return->push_back(never);
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::supportsVariableLatency(bool* _aidl_return) {
+ *_aidl_return = false;
+ return ndk::ScopedAStatus::ok();
+}
+
+AudioPortExt ModuleMock::createDeviceExt(AudioDeviceType devType, int32_t flags) {
+ AudioPortDeviceExt deviceExt;
+ deviceExt.device.type.type = devType;
+ deviceExt.flags = flags;
+ return AudioPortExt::make<AudioPortExt::Tag::device>(deviceExt);
+}
+
+AudioPort ModuleMock::createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
+ const AudioPortExt& ext) {
+ AudioPort port;
+ port.id = id;
+ port.name = name;
+ port.flags = isInput ? AudioIoFlags::make<AudioIoFlags::Tag::input>(flags)
+ : AudioIoFlags::make<AudioIoFlags::Tag::output>(flags);
+ port.ext = ext;
+ return port;
+}
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp
new file mode 100644
index 0000000..093a979
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/StreamInMock.h"
+#include "core-mock/StreamCommonMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ndk::ScopedAStatus StreamInMock::getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) {
+ if (!mStreamCommon) {
+ mStreamCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+ }
+ *_aidl_return = mStreamCommon;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getMicrophoneDirection(
+ IStreamIn::MicrophoneDirection* _aidl_return) {
+ *_aidl_return = mMicrophoneDirection;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setMicrophoneDirection(
+ IStreamIn::MicrophoneDirection in_direction) {
+ mMicrophoneDirection = in_direction;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getMicrophoneFieldDimension(float* _aidl_return) {
+ *_aidl_return = mMicrophoneFieldDimension;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setMicrophoneFieldDimension(float in_zoom) {
+ mMicrophoneFieldDimension = in_zoom;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getHwGain(std::vector<float>* _aidl_return) {
+ *_aidl_return = mHwGains;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setHwGain(const std::vector<float>& in_channelGains) {
+ mHwGains = in_channelGains;
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp
new file mode 100644
index 0000000..a71f954
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/StreamOutMock.h"
+#include "core-mock/StreamCommonMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ndk::ScopedAStatus StreamOutMock::getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) {
+ if (!mStreamCommon) {
+ mStreamCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+ }
+ *_aidl_return = mStreamCommon;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamOutMock::getHwVolume(std::vector<float>* _aidl_return) {
+ *_aidl_return = mHwVolume;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setHwVolume(const std::vector<float>& in_channelVolumes) {
+ mHwVolume = in_channelVolumes;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getAudioDescriptionMixLevel(float* _aidl_return) {
+ *_aidl_return = mAudioDescriptionMixLeveldB;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setAudioDescriptionMixLevel(float in_leveldB) {
+ mAudioDescriptionMixLeveldB = in_leveldB;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getDualMonoMode(AudioDualMonoMode* _aidl_return) {
+ *_aidl_return = mDualMonoMode;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setDualMonoMode(AudioDualMonoMode in_mode) {
+ mDualMonoMode = in_mode;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getPlaybackRateParameters(AudioPlaybackRate* _aidl_return) {
+ *_aidl_return = mPlaybackRateParameters;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setPlaybackRateParameters(
+ const AudioPlaybackRate& in_playbackRate) {
+ mPlaybackRateParameters = in_playbackRate;
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h
new file mode 100644
index 0000000..c4dd0d9
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetoothA2dp.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothA2dpMock : public BnBluetoothA2dp {
+ public:
+ ndk::ScopedAStatus isEnabled(bool* _aidl_return) override {
+ *_aidl_return = mEnabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setEnabled(bool enabled) override {
+ mEnabled = enabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override {
+ *_aidl_return = kSupportsOffloadReconfiguration;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus reconfigureOffload(const std::vector<VendorParameter>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+ static constexpr bool kSupportsOffloadReconfiguration = true;
+ bool mEnabled = false;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h
new file mode 100644
index 0000000..d58695a
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetoothLe.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothLeMock : public BnBluetoothLe {
+ public:
+ ndk::ScopedAStatus isEnabled(bool* _aidl_return) override {
+ *_aidl_return = mEnabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setEnabled(bool enabled) override {
+ mEnabled = enabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override {
+ *_aidl_return = kSupportsOffloadReconfiguration;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus reconfigureOffload(const std::vector<VendorParameter>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+ static constexpr bool kSupportsOffloadReconfiguration = true;
+ bool mEnabled = false;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h
new file mode 100644
index 0000000..e805840
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetooth.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothMock : public BnBluetooth {
+ public:
+ ndk::ScopedAStatus setScoConfig(const IBluetooth::ScoConfig&, IBluetooth::ScoConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setHfpConfig(const IBluetooth::HfpConfig&, IBluetooth::HfpConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h
new file mode 100644
index 0000000..f4031b5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnConfig.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class ConfigMock : public BnConfig {
+ private:
+ ndk::ScopedAStatus getSurroundSoundConfig(SurroundSoundConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getEngineConfig(AudioHalEngineConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h
new file mode 100644
index 0000000..d49203d
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnModule.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::hardware::audio::core::sounddose;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::core {
+
+class ModuleMock : public BnModule {
+ public:
+ ModuleMock();
+
+ private:
+ ndk::ScopedAStatus getTelephony(std::shared_ptr<ITelephony>*) override;
+ ndk::ScopedAStatus getBluetooth(std::shared_ptr<IBluetooth>*) override;
+ ndk::ScopedAStatus getBluetoothA2dp(std::shared_ptr<IBluetoothA2dp>*) override;
+ ndk::ScopedAStatus getBluetoothLe(std::shared_ptr<IBluetoothLe>*) override;
+ ndk::ScopedAStatus openInputStream(const OpenInputStreamArguments&,
+ OpenInputStreamReturn*) override;
+ ndk::ScopedAStatus openOutputStream(const OpenOutputStreamArguments&,
+ OpenOutputStreamReturn*) override;
+ ndk::ScopedAStatus getMasterMute(bool*) override;
+ ndk::ScopedAStatus setMasterMute(bool) override;
+ ndk::ScopedAStatus getMasterVolume(float*) override;
+ ndk::ScopedAStatus setMasterVolume(float) override;
+ ndk::ScopedAStatus getMicMute(bool*) override;
+ ndk::ScopedAStatus setMicMute(bool) override;
+ ndk::ScopedAStatus getSoundDose(std::shared_ptr<ISoundDose>*) override;
+ ndk::ScopedAStatus getMmapPolicyInfos(AudioMMapPolicyType,
+ std::vector<AudioMMapPolicyInfo>*) override;
+ ndk::ScopedAStatus supportsVariableLatency(bool*) override;
+
+ ndk::ScopedAStatus setModuleDebug(const ModuleDebug&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus connectExternalDevice(const AudioPort&, AudioPort*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus disconnectExternalDevice(int32_t) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPatches(std::vector<AudioPatch>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPort(int32_t, AudioPort*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPortConfigs(std::vector<AudioPortConfig>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPorts(std::vector<AudioPort>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioRoutes(std::vector<AudioRoute>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioRoutesForAudioPort(int32_t, std::vector<AudioRoute>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setAudioPatch(const AudioPatch&, AudioPatch*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setAudioPortConfig(const AudioPortConfig&, AudioPortConfig*,
+ bool*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus resetAudioPatch(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus resetAudioPortConfig(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getMicrophones(std::vector<MicrophoneInfo>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateAudioMode(AudioMode) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus updateScreenRotation(ScreenRotation) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateScreenState(bool) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus generateHwAvSyncId(int32_t*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>&,
+ std::vector<VendorParameter>*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>&, bool) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus addDeviceEffect(int32_t, const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus removeDeviceEffect(int32_t, const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAAudioMixerBurstCount(int32_t*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus getAAudioHardwareBurstMinUsec(int32_t*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus prepareToDisconnectExternalDevice(int32_t) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ AudioPortExt createDeviceExt(AudioDeviceType devType, int32_t flags);
+ AudioPort createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
+ const AudioPortExt& ext);
+
+ bool mMasterMute;
+ float mMasterVolume;
+ bool mMicMute;
+ std::vector<AudioPort> mPorts;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h
new file mode 100644
index 0000000..25d53f8
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamCommon.h>
+
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamCommonMock : public BnStreamCommon {
+ ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>&,
+ std::vector<VendorParameter>*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>&, bool) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus addEffect(const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus removeEffect(const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h
new file mode 100644
index 0000000..5deab5b
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamIn.h>
+
+using namespace aidl::android::hardware::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamInMock : public BnStreamIn {
+ ndk::ScopedAStatus getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) override;
+ ndk::ScopedAStatus getMicrophoneDirection(
+ IStreamIn::MicrophoneDirection* _aidl_return) override;
+ ndk::ScopedAStatus setMicrophoneDirection(IStreamIn::MicrophoneDirection in_direction) override;
+ ndk::ScopedAStatus getMicrophoneFieldDimension(float* _aidl_return) override;
+ ndk::ScopedAStatus setMicrophoneFieldDimension(float in_zoom) override;
+ ndk::ScopedAStatus getHwGain(std::vector<float>* _aidl_return) override;
+ ndk::ScopedAStatus setHwGain(const std::vector<float>& in_channelGains) override;
+
+ ndk::ScopedAStatus getActiveMicrophones(std::vector<MicrophoneDynamicInfo>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateMetadata(const SinkMetadata&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+ IStreamIn::MicrophoneDirection mMicrophoneDirection;
+ float mMicrophoneFieldDimension;
+ std::vector<float> mHwGains;
+ std::shared_ptr<IStreamCommon> mStreamCommon;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h
new file mode 100644
index 0000000..4d12815
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamOut.h>
+
+using namespace aidl::android::hardware::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamOutMock : public BnStreamOut {
+ ndk::ScopedAStatus getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) override;
+ ndk::ScopedAStatus getHwVolume(std::vector<float>* _aidl_return) override;
+ ndk::ScopedAStatus setHwVolume(const std::vector<float>& in_channelVolumes) override;
+ ndk::ScopedAStatus getAudioDescriptionMixLevel(float* _aidl_return) override;
+ ndk::ScopedAStatus setAudioDescriptionMixLevel(float in_leveldB) override;
+ ndk::ScopedAStatus getDualMonoMode(AudioDualMonoMode* _aidl_return) override;
+ ndk::ScopedAStatus setDualMonoMode(AudioDualMonoMode in_mode) override;
+ ndk::ScopedAStatus getPlaybackRateParameters(AudioPlaybackRate* _aidl_return) override;
+ ndk::ScopedAStatus setPlaybackRateParameters(const AudioPlaybackRate& in_playbackRate) override;
+
+ ndk::ScopedAStatus updateMetadata(const SourceMetadata&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateOffloadMetadata(const AudioOffloadMetadata&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getRecommendedLatencyModes(std::vector<AudioLatencyMode>*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus setLatencyMode(AudioLatencyMode) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus selectPresentation(int32_t, int32_t) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+
+ private:
+ AudioPlaybackRate mPlaybackRateParameters;
+ AudioDualMonoMode mDualMonoMode;
+ float mAudioDescriptionMixLeveldB;
+ std::vector<float> mHwVolume;
+ std::shared_ptr<IStreamCommon> mStreamCommon;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h
new file mode 100644
index 0000000..d56dee6
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnTelephony.h>
+
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class TelephonyMock : public BnTelephony {
+ public:
+ ndk::ScopedAStatus getSupportedAudioModes(std::vector<AudioMode>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus switchAudioMode(AudioMode) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus setTelecomConfig(const ITelephony::TelecomConfig&,
+ ITelephony::TelecomConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h
new file mode 100644
index 0000000..db20cd8
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::effect {
+
+class EffectMock : public BnEffect {
+ public:
+ ndk::ScopedAStatus open(const Parameter::Common&, const std::optional<Parameter::Specific>&,
+ IEffect::OpenEffectReturn*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus command(CommandId) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getState(State*) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getDescriptor(Descriptor*) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus reopen(IEffect::OpenEffectReturn*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setParameter(const Parameter&) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getParameter(const Parameter::Id&, Parameter*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h
new file mode 100644
index 0000000..57d58d5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/effect/BnFactory.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::effect {
+
+class FactoryMock : public BnFactory {
+ ndk::ScopedAStatus queryEffects(const std::optional<AudioUuid>&,
+ const std::optional<AudioUuid>&,
+ const std::optional<AudioUuid>&,
+ std::vector<Descriptor>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus queryProcessing(const std::optional<Processing::Type>&,
+ std::vector<Processing>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus destroyEffect(const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ ndk::ScopedAStatus createEffect(const AudioUuid&, std::shared_ptr<IEffect>*) override;
+};
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h
new file mode 100644
index 0000000..5557b10
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/sounddose/BnSoundDose.h>
+
+using namespace aidl::android::hardware::audio::core::sounddose;
+
+namespace aidl::android::hardware::audio::core::sounddose {
+
+class SoundDoseMock : public BnSoundDose {
+ ndk::ScopedAStatus setOutputRs2UpperBound(float in_rs2ValueDbA) override {
+ mOutputRs2UpperBound = in_rs2ValueDbA;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getOutputRs2UpperBound(float* _aidl_return) override {
+ *_aidl_return = mOutputRs2UpperBound;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus registerSoundDoseCallback(
+ const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+    float mOutputRs2UpperBound = 0.f;  // initialized so the getter never returns an indeterminate value
+};
+
+} // namespace aidl::android::hardware::audio::core::sounddose
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index fd3b0a8..f2ad91c 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -15,6 +15,7 @@
*/
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index ec35e93..b190fba 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -18,6 +18,7 @@
#ifndef ANDROID_AUDIO_POLICY_H
#define ANDROID_AUDIO_POLICY_H
+#include <binder/IBinder.h>
#include <binder/Parcel.h>
#include <media/AudioDeviceTypeAddr.h>
#include <system/audio.h>
@@ -127,6 +128,8 @@
audio_devices_t mDeviceType;
String8 mDeviceAddress;
uint32_t mCbFlags; // flags indicating which callbacks to use, see kCbFlag*
+ sp<IBinder> mToken;
+ uint32_t mVirtualDeviceId;
/** Ignore the AUDIO_FLAG_NO_MEDIA_PROJECTION */
bool mAllowPrivilegedMediaPlaybackCapture = false;
/** Indicates if the caller can capture voice communication output */
diff --git a/media/libaudioclient/include/media/AudioProductStrategy.h b/media/libaudioclient/include/media/AudioProductStrategy.h
index fcbb019..2505b11 100644
--- a/media/libaudioclient/include/media/AudioProductStrategy.h
+++ b/media/libaudioclient/include/media/AudioProductStrategy.h
@@ -58,11 +58,11 @@
* @return {@code INVALID_SCORE} if not matching, {@code MATCH_ON_DEFAULT_SCORE} if matching
* to default strategy, non zero positive score if matching a strategy.
*/
- static int attributesMatchesScore(const audio_attributes_t refAttributes,
- const audio_attributes_t clientAttritubes);
+ static int attributesMatchesScore(audio_attributes_t refAttributes,
+ audio_attributes_t clientAttritubes);
- static bool attributesMatches(const audio_attributes_t refAttributes,
- const audio_attributes_t clientAttritubes) {
+ static bool attributesMatches(audio_attributes_t refAttributes,
+ audio_attributes_t clientAttritubes) {
return attributesMatchesScore(refAttributes, clientAttritubes) > 0;
}
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index a1f7941..338534d 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -19,6 +19,7 @@
#include <sys/types.h>
+#include <mutex>
#include <set>
#include <vector>
@@ -86,6 +87,7 @@
typedef void (*routing_callback)();
typedef void (*vol_range_init_req_callback)();
+class CaptureStateListenerImpl;
class IAudioFlinger;
class String8;
@@ -95,6 +97,13 @@
class AudioSystem
{
+ friend class AudioFlingerClient;
+ friend class AudioPolicyServiceClient;
+ friend class CaptureStateListenerImpl;
+ template <typename ServiceInterface, typename Client, typename AidlInterface,
+ typename ServiceTraits>
+ friend class ServiceHandler;
+
public:
// FIXME Declare in binder opcode order, similarly to IAudioFlinger.h and IAudioFlinger.cpp
@@ -177,8 +186,8 @@
static status_t setLocalAudioFlinger(const sp<IAudioFlinger>& af);
// helper function to obtain AudioFlinger service handle
- static const sp<IAudioFlinger> get_audio_flinger();
- static const sp<IAudioFlinger> get_audio_flinger_for_fuzzer();
+ static sp<IAudioFlinger> get_audio_flinger();
+ static sp<IAudioFlinger> get_audio_flinger_for_fuzzer();
static float linearToLog(int volume);
static int logToLinear(float volume);
@@ -402,7 +411,12 @@
// and output configuration cache (gOutputs)
static void clearAudioConfigCache();
- static const sp<media::IAudioPolicyService> get_audio_policy_service();
+ // Sets a local AudioPolicyService interface to be used by AudioSystem.
+ // This is used by audioserver main() to allow client object initialization
+ // before exposing any interfaces to ServiceManager.
+ static status_t setLocalAudioPolicyService(const sp<media::IAudioPolicyService>& aps);
+
+ static sp<media::IAudioPolicyService> get_audio_policy_service();
static void clearAudioPolicyService();
// helpers for android.media.AudioManager.getProperty(), see description there for meaning
@@ -462,6 +476,8 @@
static status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
+ static status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes);
+
static status_t updatePolicyMixes(
const std::vector<
std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>& mixesWithUpdates);
@@ -774,23 +790,18 @@
static int32_t getAAudioHardwareBurstMinUsec();
-private:
-
class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
{
public:
- AudioFlingerClient() :
- mInBuffSize(0), mInSamplingRate(0),
- mInFormat(AUDIO_FORMAT_DEFAULT), mInChannelMask(AUDIO_CHANNEL_NONE) {
- }
+ AudioFlingerClient() = default;
- void clearIoCache();
+ void clearIoCache() EXCLUDES(mMutex);
status_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
- audio_channel_mask_t channelMask, size_t* buffSize);
- sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
+ audio_channel_mask_t channelMask, size_t* buffSize) EXCLUDES(mMutex);
+ sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle) EXCLUDES(mMutex);
// DeathRecipient
- virtual void binderDied(const wp<IBinder>& who);
+ void binderDied(const wp<IBinder>& who) final;
// IAudioFlingerClient
@@ -798,61 +809,71 @@
// values for output/input parameters up-to-date in client process
binder::Status ioConfigChanged(
media::AudioIoConfigEvent event,
- const media::AudioIoDescriptor& ioDesc) override;
+ const media::AudioIoDescriptor& ioDesc) final EXCLUDES(mMutex);
binder::Status onSupportedLatencyModesChanged(
int output,
- const std::vector<media::audio::common::AudioLatencyMode>& latencyModes) override;
+ const std::vector<media::audio::common::AudioLatencyMode>& latencyModes)
+ final EXCLUDES(mMutex);
status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
- audio_io_handle_t audioIo,
- audio_port_handle_t portId);
+ audio_io_handle_t audioIo, audio_port_handle_t portId) EXCLUDES(mMutex);
status_t removeAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
- audio_io_handle_t audioIo,
- audio_port_handle_t portId);
+ audio_io_handle_t audioIo, audio_port_handle_t portId) EXCLUDES(mMutex);
status_t addSupportedLatencyModesCallback(
- const sp<SupportedLatencyModesCallback>& callback);
+ const sp<SupportedLatencyModesCallback>& callback) EXCLUDES(mMutex);
status_t removeSupportedLatencyModesCallback(
- const sp<SupportedLatencyModesCallback>& callback);
+ const sp<SupportedLatencyModesCallback>& callback) EXCLUDES(mMutex);
- audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
+ audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo) EXCLUDES(mMutex);
private:
- Mutex mLock;
- DefaultKeyedVector<audio_io_handle_t, sp<AudioIoDescriptor> > mIoDescriptors;
+ mutable std::mutex mMutex;
+ std::map<audio_io_handle_t, sp<AudioIoDescriptor>> mIoDescriptors GUARDED_BY(mMutex);
std::map<audio_io_handle_t, std::map<audio_port_handle_t, wp<AudioDeviceCallback>>>
- mAudioDeviceCallbacks;
+ mAudioDeviceCallbacks GUARDED_BY(mMutex);
std::vector<wp<SupportedLatencyModesCallback>>
- mSupportedLatencyModesCallbacks GUARDED_BY(mLock);
+ mSupportedLatencyModesCallbacks GUARDED_BY(mMutex);
// cached values for recording getInputBufferSize() queries
- size_t mInBuffSize; // zero indicates cache is invalid
- uint32_t mInSamplingRate;
- audio_format_t mInFormat;
- audio_channel_mask_t mInChannelMask;
- sp<AudioIoDescriptor> getIoDescriptor_l(audio_io_handle_t ioHandle);
+ size_t mInBuffSize GUARDED_BY(mMutex) = 0; // zero indicates cache is invalid
+ uint32_t mInSamplingRate GUARDED_BY(mMutex) = 0;
+ audio_format_t mInFormat GUARDED_BY(mMutex) = AUDIO_FORMAT_DEFAULT;
+ audio_channel_mask_t mInChannelMask GUARDED_BY(mMutex) = AUDIO_CHANNEL_NONE;
+
+ sp<AudioIoDescriptor> getIoDescriptor_l(audio_io_handle_t ioHandle) REQUIRES(mMutex);
};
class AudioPolicyServiceClient: public IBinder::DeathRecipient,
- public media::BnAudioPolicyServiceClient
- {
+ public media::BnAudioPolicyServiceClient {
public:
- AudioPolicyServiceClient() {
+ AudioPolicyServiceClient() = default;
+
+ int addAudioPortCallback(const sp<AudioPortCallback>& callback) EXCLUDES(mMutex);
+
+ int removeAudioPortCallback(const sp<AudioPortCallback>& callback) EXCLUDES(mMutex);
+
+ bool isAudioPortCbEnabled() const EXCLUDES(mMutex) {
+ std::lock_guard _l(mMutex);
+ return !mAudioPortCallbacks.empty();
}
- int addAudioPortCallback(const sp<AudioPortCallback>& callback);
- int removeAudioPortCallback(const sp<AudioPortCallback>& callback);
- bool isAudioPortCbEnabled() const { return (mAudioPortCallbacks.size() != 0); }
+ int addAudioVolumeGroupCallback(
+ const sp<AudioVolumeGroupCallback>& callback) EXCLUDES(mMutex);
- int addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback);
- int removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback);
- bool isAudioVolumeGroupCbEnabled() const { return (mAudioVolumeGroupCallback.size() != 0); }
+ int removeAudioVolumeGroupCallback(
+ const sp<AudioVolumeGroupCallback>& callback) EXCLUDES(mMutex);
+
+ bool isAudioVolumeGroupCbEnabled() const EXCLUDES(mMutex) {
+ std::lock_guard _l(mMutex);
+ return !mAudioVolumeGroupCallbacks.empty();
+ }
// DeathRecipient
- virtual void binderDied(const wp<IBinder>& who);
+ void binderDied(const wp<IBinder>& who) final;
// IAudioPolicyServiceClient
binder::Status onAudioVolumeGroupChanged(int32_t group, int32_t flags) override;
@@ -873,43 +894,36 @@
binder::Status onVolumeRangeInitRequest();
private:
- Mutex mLock;
- Vector <sp <AudioPortCallback> > mAudioPortCallbacks;
- Vector <sp <AudioVolumeGroupCallback> > mAudioVolumeGroupCallback;
+ mutable std::mutex mMutex;
+ std::set<sp<AudioPortCallback>> mAudioPortCallbacks GUARDED_BY(mMutex);
+ std::set<sp<AudioVolumeGroupCallback>> mAudioVolumeGroupCallbacks GUARDED_BY(mMutex);
};
+ private:
+
static audio_io_handle_t getOutput(audio_stream_type_t stream);
- static const sp<AudioFlingerClient> getAudioFlingerClient();
+ static sp<AudioFlingerClient> getAudioFlingerClient();
static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
- static const sp<IAudioFlinger> getAudioFlingerImpl(bool canStartThreadPool);
// Invokes all registered error callbacks with the given error code.
static void reportError(status_t err);
- static sp<AudioFlingerClient> gAudioFlingerClient;
- static sp<AudioPolicyServiceClient> gAudioPolicyServiceClient;
- friend class AudioFlingerClient;
- friend class AudioPolicyServiceClient;
+ [[clang::no_destroy]] static std::mutex gMutex;
+ static dynamic_policy_callback gDynPolicyCallback GUARDED_BY(gMutex);
+ static record_config_callback gRecordConfigCallback GUARDED_BY(gMutex);
+ static routing_callback gRoutingCallback GUARDED_BY(gMutex);
+ static vol_range_init_req_callback gVolRangeInitReqCallback GUARDED_BY(gMutex);
- static Mutex gLock; // protects gAudioFlinger
- static Mutex gLockErrorCallbacks; // protects gAudioErrorCallbacks
- static Mutex gLockAPS; // protects gAudioPolicyService and gAudioPolicyServiceClient
- static sp<IAudioFlinger> gAudioFlinger;
- static std::set<audio_error_callback> gAudioErrorCallbacks;
- static dynamic_policy_callback gDynPolicyCallback;
- static record_config_callback gRecordConfigCallback;
- static routing_callback gRoutingCallback;
- static vol_range_init_req_callback gVolRangeInitReqCallback;
+ [[clang::no_destroy]] static std::mutex gApsCallbackMutex;
+ [[clang::no_destroy]] static std::mutex gErrorCallbacksMutex;
+ [[clang::no_destroy]] static std::set<audio_error_callback> gAudioErrorCallbacks
+ GUARDED_BY(gErrorCallbacksMutex);
- static size_t gInBuffSize;
- // previous parameters for recording buffer size queries
- static uint32_t gPrevInSamplingRate;
- static audio_format_t gPrevInFormat;
- static audio_channel_mask_t gPrevInChannelMask;
-
- static sp<media::IAudioPolicyService> gAudioPolicyService;
+ [[clang::no_destroy]] static std::mutex gSoundTriggerMutex;
+ [[clang::no_destroy]] static sp<CaptureStateListenerImpl> gSoundTriggerCaptureStateListener
+ GUARDED_BY(gSoundTriggerMutex);
};
-}; // namespace android
+} // namespace android
#endif /*ANDROID_AUDIOSYSTEM_H_*/
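The pattern adopted throughout this header is clang thread-safety annotation: data members carry GUARDED_BY(mutex), public accessors carry EXCLUDES(mutex), and the _l helpers carry REQUIRES(mutex), so -Wthread-safety can flag missing or recursive locking at compile time. A minimal sketch of the idiom, assuming the android-base thread_annotations.h macros and a toolchain with the analysis enabled; the class and members below are illustrative only:

    #include <map>
    #include <mutex>

    #include <android-base/thread_annotations.h>  // GUARDED_BY, EXCLUDES, REQUIRES

    class IoCache {
      public:
        void clear() EXCLUDES(mMutex) {
            std::lock_guard _l(mMutex);
            mDescriptors.clear();
        }
        int deviceIdForIo(int ioHandle) EXCLUDES(mMutex) {
            std::lock_guard _l(mMutex);
            return getDescriptor_l(ioHandle);
        }
      private:
        // May only be called with mMutex held; the analysis enforces this.
        int getDescriptor_l(int ioHandle) REQUIRES(mMutex) {
            const auto it = mDescriptors.find(ioHandle);
            return it != mDescriptors.end() ? it->second : -1;
        }
        mutable std::mutex mMutex;
        std::map<int, int> mDescriptors GUARDED_BY(mMutex);
    };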
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 523383f..19780ae 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1220,7 +1220,7 @@
void setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount);
// FIXME enum is faster than strcmp() for parameter 'from'
- status_t restoreTrack_l(const char *from);
+ status_t restoreTrack_l(const char *from, bool forceRestore = false);
uint32_t getUnderrunCount_l() const;
diff --git a/media/libaudioclient/include/media/EffectClientAsyncProxy.h b/media/libaudioclient/include/media/EffectClientAsyncProxy.h
new file mode 100644
index 0000000..e7d6d80
--- /dev/null
+++ b/media/libaudioclient/include/media/EffectClientAsyncProxy.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/media/BnEffectClient.h>
+#include <audio_utils/CommandThread.h>
+
+namespace android::media {
+
+class EffectClientAsyncProxy : public IEffectClient {
+public:
+
+ /**
+     * Call this factory method to interpose a worker thread when the binder
+     * callback interface is local (in-proc); remote interfaces are returned unchanged.
+ */
+ static sp<IEffectClient> makeIfNeeded(const sp<IEffectClient>& effectClient) {
+ if (isLocalBinder(effectClient)) {
+ return sp<EffectClientAsyncProxy>::make(effectClient);
+ }
+ return effectClient;
+ }
+
+ explicit EffectClientAsyncProxy(const sp<IEffectClient>& effectClient)
+ : mEffectClient(effectClient) {}
+
+    ::android::IBinder* onAsBinder() override {
+        return nullptr;  // local-only adapter; it is never sent across binder
+    }
+
+ ::android::binder::Status controlStatusChanged(bool controlGranted) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->controlStatusChanged(controlGranted);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ ::android::binder::Status enableStatusChanged(bool enabled) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->enableStatusChanged(enabled);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ ::android::binder::Status commandExecuted(
+ int32_t cmdCode, const ::std::vector<uint8_t>& cmdData,
+ const ::std::vector<uint8_t>& replyData) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->commandExecuted(cmdCode, cmdData, replyData);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ ::android::binder::Status framesProcessed(int32_t frames) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->framesProcessed(frames);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ /**
+ * Returns true if the binder interface is local (in-proc).
+ *
+ * Move to a binder helper class?
+ */
+ static bool isLocalBinder(const sp<IInterface>& interface) {
+ const auto b = IInterface::asBinder(interface);
+ return b && b->localBinder();
+ }
+
+private:
+ const sp<IEffectClient> mEffectClient;
+
+ /**
+ * Returns the per-interface-descriptor CommandThread for in-proc binder transactions.
+ *
+ * Note: Remote RPC transactions to a given binder (kernel) node enter that node's
+ * async_todo list, which serializes all async operations to that binder node.
+ * Each transaction on the async_todo list must complete before the next one
+ * starts, even though there may be available threads in the process threadpool.
+ *
+     * For local transactions, we obtain the same ordering by queueing all async
+     * requests onto the CommandThread. We do not maintain a threadpool, though a
+     * future implementation could use a shared ThreadPool.
+ *
+ * By using a static here, all in-proc binder interfaces made async with
+ * EffectClientAsyncProxy will get the same CommandThread.
+ *
+ * @return CommandThread to use.
+ */
+ static audio_utils::CommandThread& getThread() {
+ [[clang::no_destroy]] static audio_utils::CommandThread commandThread;
+ return commandThread;
+ }
+}; // class EffectClientAsyncProxy
+
+} // namespace android::media
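The class comments above explain the design: wrap only local (in-proc) callback interfaces so their notifications are queued onto one shared CommandThread, mirroring the ordering that remote oneway calls already get from the kernel's async_todo list. A hedged usage sketch — the real call sites are not part of this patch, so the surrounding function and header path are assumptions:

    #include <media/EffectClientAsyncProxy.h>

    using ::android::sp;
    using ::android::media::EffectClientAsyncProxy;
    using ::android::media::IEffectClient;

    void registerEffectClient(const sp<IEffectClient>& client) {
        // A local client comes back wrapped in EffectClientAsyncProxy; a remote
        // client is returned unchanged.
        sp<IEffectClient> cb = EffectClientAsyncProxy::makeIfNeeded(client);

        // The call returns immediately; if 'cb' is the proxy, the notification is
        // delivered later from the shared CommandThread.
        cb->enableStatusChanged(true);
    }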
diff --git a/media/libaudioclient/include/media/ToneGenerator.h b/media/libaudioclient/include/media/ToneGenerator.h
index 46e9501..3e515fc 100644
--- a/media/libaudioclient/include/media/ToneGenerator.h
+++ b/media/libaudioclient/include/media/ToneGenerator.h
@@ -225,11 +225,14 @@
TONE_INDIA_CONGESTION, // Congestion tone: 400 Hz, 250ms ON, 250ms OFF...
TONE_INDIA_CALL_WAITING, // Call waiting tone: 400 Hz, tone repeated in a 0.2s on, 0.1s off, 0.2s on, 7.5s off pattern.
TONE_INDIA_RINGTONE, // Ring tone: 400 Hz tone modulated with 25Hz, 0.4 on 0.2 off 0.4 on 2..0 off
- // TAIWAN supervisory tones
+ // TAIWAN supervisory tones
TONE_TW_RINGTONE, // Ring Tone: 440 Hz + 480 Hz repeated with pattern 1s on, 3s off.
- // NEW ZEALAND supervisory tones
+ // NEW ZEALAND supervisory tones
TONE_NZ_CALL_WAITING, // Call waiting tone: 400 Hz, 0.2s ON, 3s OFF,
// 0.2s ON, 3s OFF, 0.2s ON, 3s OFF, 0.2s ON
+ // MALAYSIA supervisory tones
+ TONE_MY_CONGESTION, // Congestion tone: 425 Hz, 500ms ON, 250ms OFF...
+ TONE_MY_RINGTONE, // Ring tone: 425 Hz, 400ms ON 200ms OFF 400ms ON 2s OFF..
NUM_ALTERNATE_TONES
};
@@ -244,6 +247,7 @@
INDIA,
TAIWAN,
NZ,
+ MY,
CEPT,
NUM_REGIONS
};
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index f72ac89..b667c8d 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -133,6 +134,10 @@
"libaudiopolicy",
],
data: ["bbb*.raw"],
+ srcs: [
+ "audio_test_utils.cpp",
+ "test_execution_tracer.cpp",
+ ],
test_config_template: "audio_test_template.xml",
}
@@ -141,7 +146,6 @@
defaults: ["libaudioclient_gtests_defaults"],
srcs: [
"audiorecord_tests.cpp",
- "audio_test_utils.cpp",
],
}
@@ -150,7 +154,6 @@
defaults: ["libaudioclient_gtests_defaults"],
srcs: [
"audiotrack_tests.cpp",
- "audio_test_utils.cpp",
],
}
@@ -159,7 +162,6 @@
defaults: ["libaudioclient_gtests_defaults"],
srcs: [
"audioeffect_tests.cpp",
- "audio_test_utils.cpp",
],
}
@@ -172,7 +174,6 @@
],
srcs: [
"audioeffect_analyser.cpp",
- "audio_test_utils.cpp",
],
static_libs: [
"libpffft",
@@ -184,7 +185,6 @@
defaults: ["libaudioclient_gtests_defaults"],
srcs: [
"audiorouting_tests.cpp",
- "audio_test_utils.cpp",
],
}
@@ -193,14 +193,15 @@
defaults: ["libaudioclient_gtests_defaults"],
srcs: [
"audioclient_serialization_tests.cpp",
- "audio_test_utils.cpp",
],
}
cc_test {
name: "trackplayerbase_tests",
defaults: ["libaudioclient_gtests_defaults"],
- srcs: ["trackplayerbase_tests.cpp"],
+ srcs: [
+ "trackplayerbase_tests.cpp",
+ ],
}
cc_test {
@@ -208,6 +209,5 @@
defaults: ["libaudioclient_gtests_defaults"],
srcs: [
"audiosystem_tests.cpp",
- "audio_test_utils.cpp",
],
}
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index ee5489b..9a202cc3 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -565,12 +565,14 @@
int attempts = 5;
status_t status;
unsigned int generation1, generation;
- unsigned int numPorts = 0;
+ unsigned int numPorts;
do {
if (attempts-- < 0) {
status = TIMED_OUT;
break;
}
+ // query for number of ports.
+ numPorts = 0;
status = AudioSystem::listAudioPorts(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_NONE, &numPorts,
nullptr, &generation1);
if (status != NO_ERROR) {
@@ -622,12 +624,14 @@
int attempts = 5;
status_t status;
unsigned int generation1, generation;
- unsigned int numPatches = 0;
+ unsigned int numPatches;
do {
if (attempts-- < 0) {
status = TIMED_OUT;
break;
}
+ // query for number of patches.
+ numPatches = 0;
status = AudioSystem::listAudioPatches(&numPatches, nullptr, &generation1);
if (status != NO_ERROR) {
ALOGE("AudioSystem::listAudioPatches returned error %d", status);
diff --git a/media/libaudioclient/tests/audioclient_serialization_tests.cpp b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
index 707b9b3..5debabc 100644
--- a/media/libaudioclient/tests/audioclient_serialization_tests.cpp
+++ b/media/libaudioclient/tests/audioclient_serialization_tests.cpp
@@ -15,18 +15,23 @@
*/
//#define LOG_NDEBUG 0
-#define LOG_TAG "AudioClientSerializationUnitTests"
+#define LOG_TAG "AudioClientSerializationTests"
#include <cstdint>
#include <cstdlib>
#include <ctime>
-
-#include <gtest/gtest.h>
+#include <vector>
#include <android_audio_policy_configuration_V7_0-enums.h>
+#include <gtest/gtest.h>
+#include <media/AudioPolicy.h>
+#include <media/AudioProductStrategy.h>
+#include <media/AudioVolumeGroup.h>
+#include <media/VolumeGroupAttributes.h>
+#include <system/audio.h>
#include <xsdc/XsdcSupport.h>
-#include "audio_test_utils.h"
+#include "test_execution_tracer.h"
using namespace android;
namespace xsd {
@@ -310,3 +315,9 @@
// audioStream
INSTANTIATE_TEST_SUITE_P(SerializationParameterizedTests, AudioAttributesParameterizedTest,
::testing::Combine(testing::ValuesIn(kStreamtypes)));
+
+int main(int argc, char** argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+ return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/audioeffect_analyser.cpp b/media/libaudioclient/tests/audioeffect_analyser.cpp
index 94accae..f4d37bc 100644
--- a/media/libaudioclient/tests/audioeffect_analyser.cpp
+++ b/media/libaudioclient/tests/audioeffect_analyser.cpp
@@ -14,23 +14,26 @@
* limitations under the License.
*/
-// #define LOG_NDEBUG 0
-#define LOG_TAG "AudioEffectAnalyser"
-
-#include <android-base/file.h>
-#include <android-base/stringprintf.h>
-#include <gtest/gtest.h>
-#include <media/AudioEffect.h>
-#include <system/audio_effects/effect_bassboost.h>
-#include <system/audio_effects/effect_equalizer.h>
#include <fstream>
#include <iostream>
#include <string>
#include <tuple>
#include <vector>
+// #define LOG_NDEBUG 0
+#define LOG_TAG "AudioEffectAnalyser"
+
+#include <android-base/file.h>
+#include <android-base/stringprintf.h>
+#include <binder/ProcessState.h>
+#include <gtest/gtest.h>
+#include <media/AudioEffect.h>
+#include <system/audio_effects/effect_bassboost.h>
+#include <system/audio_effects/effect_equalizer.h>
+
#include "audio_test_utils.h"
#include "pffft.hpp"
+#include "test_execution_tracer.h"
#define CHECK_OK(expr, msg) \
mStatus = (expr); \
@@ -417,3 +420,10 @@
prevGain = diffB;
}
}
+
+int main(int argc, char** argv) {
+ android::ProcessState::self()->startThreadPool();
+ ::testing::InitGoogleTest(&argc, argv);
+ ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+ return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/audioeffect_tests.cpp b/media/libaudioclient/tests/audioeffect_tests.cpp
index e12ae23..59d0c6a 100644
--- a/media/libaudioclient/tests/audioeffect_tests.cpp
+++ b/media/libaudioclient/tests/audioeffect_tests.cpp
@@ -15,8 +15,9 @@
*/
//#define LOG_NDEBUG 0
-#define LOG_TAG "AudioEffectUnitTests"
+#define LOG_TAG "AudioEffectTests"
+#include <binder/ProcessState.h>
#include <gtest/gtest.h>
#include <media/AudioEffect.h>
#include <system/audio_effects/effect_hapticgenerator.h>
@@ -24,6 +25,7 @@
#include <system/audio_effects/effect_visualizer.h>
#include "audio_test_utils.h"
+#include "test_execution_tracer.h"
using namespace android;
@@ -563,3 +565,10 @@
EXPECT_TRUE(cb->receivedFramesProcessed)
<< "AudioEffect frames processed callback not received";
}
+
+int main(int argc, char** argv) {
+ android::ProcessState::self()->startThreadPool();
+ ::testing::InitGoogleTest(&argc, argv);
+ ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+ return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index 61edd4d..0bf2e82 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -24,6 +24,7 @@
#include <gtest/gtest.h>
#include "audio_test_utils.h"
+#include "test_execution_tracer.h"
using namespace android;
@@ -261,26 +262,6 @@
AUDIO_SOURCE_UNPROCESSED)),
GetRecordTestName);
-namespace {
-
-class TestExecutionTracer : public ::testing::EmptyTestEventListener {
- public:
- void OnTestStart(const ::testing::TestInfo& test_info) override {
- TraceTestState("Started", test_info);
- }
- void OnTestEnd(const ::testing::TestInfo& test_info) override {
- TraceTestState("Finished", test_info);
- }
- void OnTestPartResult(const ::testing::TestPartResult& result) override { LOG(INFO) << result; }
-
- private:
- static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info) {
- LOG(INFO) << state << " " << test_info.test_suite_name() << "::" << test_info.name();
- }
-};
-
-} // namespace
-
int main(int argc, char** argv) {
::testing::InitGoogleTest(&argc, argv);
::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index c101f00..3b2285e 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -19,11 +19,13 @@
#include <string.h>
+#include <binder/Binder.h>
#include <binder/ProcessState.h>
#include <cutils/properties.h>
#include <gtest/gtest.h>
#include "audio_test_utils.h"
+#include "test_execution_tracer.h"
using namespace android;
@@ -150,6 +152,7 @@
config.sample_rate = 48000;
AudioMix mix(criteria, mixType, config, mixFlag, String8{mAddress.c_str()}, 0);
mix.mDeviceType = deviceType;
+ mix.mToken = sp<BBinder>::make();
mMixes.push(mix);
if (OK == AudioSystem::registerPolicyMixes(mMixes, true)) {
mPolicyMixRegistered = true;
@@ -267,21 +270,6 @@
playback->stop();
}
-class TestExecutionTracer : public ::testing::EmptyTestEventListener {
- public:
- void OnTestStart(const ::testing::TestInfo& test_info) override {
- TraceTestState("Started", test_info);
- }
- void OnTestEnd(const ::testing::TestInfo& test_info) override {
- TraceTestState("Completed", test_info);
- }
-
- private:
- static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info) {
- ALOGI("%s %s::%s", state.c_str(), test_info.test_suite_name(), test_info.name());
- }
-};
-
int main(int argc, char** argv) {
android::ProcessState::self()->startThreadPool();
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index d9789f1..03c15f4 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -14,18 +14,19 @@
* limitations under the License.
*/
-#define LOG_TAG "AudioSystemTest"
-
#include <string.h>
#include <set>
+#define LOG_TAG "AudioSystemTest"
+
#include <gtest/gtest.h>
#include <log/log.h>
#include <media/AidlConversionCppNdk.h>
#include <media/IAudioFlinger.h>
#include "audio_test_utils.h"
+#include "test_execution_tracer.h"
using android::media::audio::common::AudioDeviceAddress;
using android::media::audio::common::AudioDeviceDescription;
@@ -706,21 +707,6 @@
}
}
-class TestExecutionTracer : public ::testing::EmptyTestEventListener {
- public:
- void OnTestStart(const ::testing::TestInfo& test_info) override {
- TraceTestState("Started", test_info);
- }
- void OnTestEnd(const ::testing::TestInfo& test_info) override {
- TraceTestState("Completed", test_info);
- }
-
- private:
- static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info) {
- ALOGI("%s %s::%s", state.c_str(), test_info.test_suite_name(), test_info.name());
- }
-};
-
int main(int argc, char** argv) {
::testing::InitGoogleTest(&argc, argv);
::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
index 2b68225..0282bd7 100644
--- a/media/libaudioclient/tests/audiotrack_tests.cpp
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -15,10 +15,13 @@
*/
//#define LOG_NDEBUG 0
+#define LOG_TAG "AudioTrackTests"
+#include <binder/ProcessState.h>
#include <gtest/gtest.h>
#include "audio_test_utils.h"
+#include "test_execution_tracer.h"
using namespace android;
@@ -209,3 +212,10 @@
AUDIO_OUTPUT_FLAG_RAW | AUDIO_OUTPUT_FLAG_FAST,
AUDIO_OUTPUT_FLAG_DEEP_BUFFER),
::testing::Values(AUDIO_SESSION_NONE)));
+
+int main(int argc, char** argv) {
+ android::ProcessState::self()->startThreadPool();
+ ::testing::InitGoogleTest(&argc, argv);
+ ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+ return RUN_ALL_TESTS();
+}
diff --git a/media/libaudioclient/tests/test_execution_tracer.cpp b/media/libaudioclient/tests/test_execution_tracer.cpp
new file mode 100644
index 0000000..797bb4b
--- /dev/null
+++ b/media/libaudioclient/tests/test_execution_tracer.cpp
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TestExecutionTracer"
+
+#include "test_execution_tracer.h"
+
+#include <android-base/logging.h>
+
+void TestExecutionTracer::OnTestStart(const ::testing::TestInfo& test_info) {
+ TraceTestState("Started", test_info);
+}
+
+void TestExecutionTracer::OnTestEnd(const ::testing::TestInfo& test_info) {
+ TraceTestState("Finished", test_info);
+}
+
+void TestExecutionTracer::OnTestPartResult(const ::testing::TestPartResult& result) {
+ if (result.failed()) {
+ LOG(ERROR) << result;
+ } else {
+ LOG(INFO) << result;
+ }
+}
+
+// static
+void TestExecutionTracer::TraceTestState(const std::string& state,
+ const ::testing::TestInfo& test_info) {
+ LOG(INFO) << state << " " << test_info.test_suite_name() << "::" << test_info.name();
+}
diff --git a/media/libaudioclient/tests/test_execution_tracer.h b/media/libaudioclient/tests/test_execution_tracer.h
new file mode 100644
index 0000000..9031aaf
--- /dev/null
+++ b/media/libaudioclient/tests/test_execution_tracer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gtest/gtest.h>
+
+class TestExecutionTracer : public ::testing::EmptyTestEventListener {
+ public:
+ void OnTestStart(const ::testing::TestInfo& test_info) override;
+ void OnTestEnd(const ::testing::TestInfo& test_info) override;
+ void OnTestPartResult(const ::testing::TestPartResult& result) override;
+
+ private:
+ static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info);
+};
diff --git a/media/libaudioclient/tests/trackplayerbase_tests.cpp b/media/libaudioclient/tests/trackplayerbase_tests.cpp
index c9b704d..7317bf0 100644
--- a/media/libaudioclient/tests/trackplayerbase_tests.cpp
+++ b/media/libaudioclient/tests/trackplayerbase_tests.cpp
@@ -16,10 +16,12 @@
#define LOG_TAG "TrackPlayerBaseTest"
+#include <binder/ProcessState.h>
#include <gtest/gtest.h>
-
#include <media/TrackPlayerBase.h>
+#include "test_execution_tracer.h"
+
using namespace android;
using namespace android::media;
@@ -159,3 +161,10 @@
INSTANTIATE_TEST_SUITE_P(TrackPlayerTest, PauseTestParam,
::testing::Values(std::make_tuple(1.0, 75.0, 2, 24000)));
+
+int main(int argc, char** argv) {
+ android::ProcessState::self()->startThreadPool();
+ ::testing::InitGoogleTest(&argc, argv);
+ ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+ return RUN_ALL_TESTS();
+}
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index ae0457f..6dbf284 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -192,7 +192,8 @@
dst->append(
base::StringPrintf("%*s extra audio descriptor %zu:\n", eadSpaces, "", i));
dst->append(base::StringPrintf(
- "%*s- standard: %u\n", descSpaces, "", mExtraAudioDescriptors[i].standard));
+ "%*s- standard: %u\n", descSpaces, "",
+ static_cast<unsigned>(mExtraAudioDescriptors[i].standard)));
dst->append(base::StringPrintf("%*s- descriptor:", descSpaces, ""));
for (auto v : mExtraAudioDescriptors[i].audioDescriptor) {
dst->append(base::StringPrintf(" %02x", v));
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 3c05b0b..b8d0998 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -23,7 +23,6 @@
],
required: [
- "libaudiohal@4.0",
"libaudiohal@5.0",
"libaudiohal@6.0",
"libaudiohal@7.0",
diff --git a/media/libaudiohal/FactoryHal.cpp b/media/libaudiohal/FactoryHal.cpp
index c414e19..15cb297 100644
--- a/media/libaudiohal/FactoryHal.cpp
+++ b/media/libaudiohal/FactoryHal.cpp
@@ -50,13 +50,12 @@
* This list need to keep sync with AudioHalVersionInfo.VERSIONS in
* media/java/android/media/AudioHalVersionInfo.java.
*/
-static const std::array<AudioHalVersionInfo, 6> sAudioHALVersions = {
+static const std::array<AudioHalVersionInfo, 5> sAudioHALVersions = {
AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1),
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 0),
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0),
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 5, 0),
- AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 4, 0),
};
static const std::map<AudioHalVersionInfo::Type, InterfaceName> sDevicesHALInterfaces = {
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index fb1cc34..4d81f77 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -41,7 +41,7 @@
],
header_libs: [
"android.hardware.audio.common.util@all-versions",
- ]
+ ],
}
cc_defaults {
@@ -71,7 +71,7 @@
],
header_libs: [
"libaudioclient_headers",
- "libaudiohal_headers"
+ "libaudiohal_headers",
],
defaults: [
"latest_android_media_audio_common_types_cpp_export_shared",
@@ -83,36 +83,10 @@
}
cc_library_shared {
- name: "libaudiohal@4.0",
- defaults: [
- "libaudiohal_default",
- "libaudiohal_hidl_default"
- ],
- srcs: [
- ":audio_core_hal_client_sources",
- ":audio_effect_hidl_hal_client_sources",
- "EffectsFactoryHalEntry.cpp",
- ],
- shared_libs: [
- "android.hardware.audio.common@4.0",
- "android.hardware.audio.common@4.0-util",
- "android.hardware.audio.effect@4.0",
- "android.hardware.audio.effect@4.0-util",
- "android.hardware.audio@4.0",
- "android.hardware.audio@4.0-util",
- ],
- cflags: [
- "-DMAJOR_VERSION=4",
- "-DMINOR_VERSION=0",
- "-include common/all-versions/VersionMacro.h",
- ]
-}
-
-cc_library_shared {
name: "libaudiohal@5.0",
defaults: [
"libaudiohal_default",
- "libaudiohal_hidl_default"
+ "libaudiohal_hidl_default",
],
srcs: [
":audio_core_hal_client_sources",
@@ -131,14 +105,14 @@
"-DMAJOR_VERSION=5",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_library_shared {
name: "libaudiohal@6.0",
defaults: [
"libaudiohal_default",
- "libaudiohal_hidl_default"
+ "libaudiohal_hidl_default",
],
srcs: [
":audio_core_hal_client_sources",
@@ -157,14 +131,14 @@
"-DMAJOR_VERSION=6",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_library_static {
name: "libaudiohal.effect@7.0",
defaults: [
"libaudiohal_default",
- "libaudiohal_hidl_default"
+ "libaudiohal_hidl_default",
],
srcs: [
":audio_effect_hidl_hal_client_sources",
@@ -179,14 +153,14 @@
"-DMAJOR_VERSION=7",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_library_shared {
name: "libaudiohal@7.0",
defaults: [
"libaudiohal_default",
- "libaudiohal_hidl_default"
+ "libaudiohal_hidl_default",
],
srcs: [
":audio_core_hal_client_sources",
@@ -206,7 +180,7 @@
"-DMAJOR_VERSION=7",
"-DMINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_library_shared {
@@ -215,7 +189,7 @@
"latest_android_hardware_audio_core_sounddose_ndk_shared",
"latest_android_hardware_audio_sounddose_ndk_shared",
"libaudiohal_default",
- "libaudiohal_hidl_default"
+ "libaudiohal_hidl_default",
],
srcs: [
":audio_core_hal_client_sources",
@@ -242,7 +216,7 @@
"-DCOMMON_TYPES_MINOR_VERSION=0",
"-DCORE_TYPES_MINOR_VERSION=0",
"-include common/all-versions/VersionMacro.h",
- ]
+ ],
}
cc_defaults {
@@ -257,7 +231,7 @@
shared_libs: [
"android.hardware.common-V2-ndk",
"android.hardware.common.fmq-V1-ndk",
- "av-audio-types-aidl-ndk",
+ "av-audio-types-aidl-V1-ndk",
"libaudio_aidl_conversion_common_cpp",
"libaudio_aidl_conversion_common_ndk",
"libaudio_aidl_conversion_common_ndk_cpp",
@@ -287,9 +261,30 @@
],
srcs: [
"DevicesFactoryHalEntry.cpp",
+ "EffectsFactoryHalEntry.cpp",
+ ":audio_effect_hal_aidl_src_files",
+ ":core_audio_hal_aidl_src_files",
+ ],
+}
+
+filegroup {
+ name: "core_audio_hal_aidl_src_files",
+ srcs: [
+ "ConversionHelperAidl.cpp",
+ "DeviceHalAidl.cpp",
+ "DevicesFactoryHalAidl.cpp",
+ "Hal2AidlMapper.cpp",
+ "StreamHalAidl.cpp",
+ ],
+}
+
+filegroup {
+ name: "audio_effect_hal_aidl_src_files",
+ srcs: [
"EffectConversionHelperAidl.cpp",
"EffectBufferHalAidl.cpp",
"EffectHalAidl.cpp",
+ "EffectsFactoryHalAidl.cpp",
"effectsAidlConversion/AidlConversionAec.cpp",
"effectsAidlConversion/AidlConversionAgc1.cpp",
"effectsAidlConversion/AidlConversionAgc2.cpp",
@@ -306,21 +301,7 @@
"effectsAidlConversion/AidlConversionVendorExtension.cpp",
"effectsAidlConversion/AidlConversionVirtualizer.cpp",
"effectsAidlConversion/AidlConversionVisualizer.cpp",
- "EffectsFactoryHalAidl.cpp",
- "EffectsFactoryHalEntry.cpp",
":audio_effectproxy_src_files",
- ":core_audio_hal_aidl_src_files",
- ],
-}
-
-filegroup {
- name: "core_audio_hal_aidl_src_files",
- srcs: [
- "ConversionHelperAidl.cpp",
- "DeviceHalAidl.cpp",
- "DevicesFactoryHalAidl.cpp",
- "Hal2AidlMapper.cpp",
- "StreamHalAidl.cpp",
],
}
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.cpp b/media/libaudiohal/impl/ConversionHelperAidl.cpp
index 46abfda..7a32811 100644
--- a/media/libaudiohal/impl/ConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/ConversionHelperAidl.cpp
@@ -37,10 +37,6 @@
using ParameterScope = IHalAdapterVendorExtension::ParameterScope;
if (parameterKeys.size() == 0) return OK;
const String8 rawKeys = parameterKeys.keysToString();
- if (vendorExt == nullptr) {
- ALOGW("%s: unknown parameters, ignored: \"%s\"", __func__, rawKeys.c_str());
- return OK;
- }
std::vector<std::string> parameterIds;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(vendorExt->parseVendorParameterIds(
@@ -85,10 +81,6 @@
using ParameterScope = IHalAdapterVendorExtension::ParameterScope;
if (parameters.size() == 0) return OK;
const String8 rawKeysAndValues = parameters.toString();
- if (vendorExt == nullptr) {
- ALOGW("%s: unknown parameters, ignored: \"%s\"", __func__, rawKeysAndValues.c_str());
- return OK;
- }
std::vector<VendorParameter> syncParameters, asyncParameters;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(vendorExt->parseVendorParameters(
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index db9a9b1..f865603 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -274,15 +274,16 @@
return parseAndGetVendorParameters(mVendorExt, mModule, parameterKeys, values);
}
-status_t DeviceHalAidl::getInputBufferSize(const struct audio_config* config, size_t* size) {
+status_t DeviceHalAidl::getInputBufferSize(struct audio_config* config, size_t* size) {
ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
TIME_CHECK();
if (mModule == nullptr) return NO_INIT;
if (config == nullptr || size == nullptr) {
return BAD_VALUE;
}
+ constexpr bool isInput = true;
AudioConfig aidlConfig = VALUE_OR_RETURN_STATUS(
- ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, true /*isInput*/));
+ ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, isInput));
AudioDevice aidlDevice;
aidlDevice.type.type = AudioDeviceType::IN_DEFAULT;
AudioSource aidlSource = AudioSource::DEFAULT;
@@ -296,6 +297,9 @@
0 /*handle*/, aidlDevice, aidlFlags, aidlSource,
&cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
}
+ *config = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::aidl2legacy_AudioConfig_audio_config_t(aidlConfig, isInput));
+ if (mixPortConfig.id == 0) return BAD_VALUE; // HAL suggests a different config.
*size = aidlConfig.frameCount *
getFrameSizeInBytes(aidlConfig.base.format, aidlConfig.base.channelMask);
// Do not disarm cleanups to release temporary port configs.
@@ -401,8 +405,7 @@
*static_cast<StreamCallbackBase*>(this)),
StreamCallbackBaseHelper<StreamOutHalInterfaceLatencyModeCallback>(
*static_cast<StreamCallbackBase*>(this)) {}
- ndk::ScopedAStatus onCodecFormatChanged(const std::vector<uint8_t>& in_audioMetadata) override {
- std::basic_string<uint8_t> halMetadata(in_audioMetadata.begin(), in_audioMetadata.end());
+ ndk::ScopedAStatus onCodecFormatChanged(const std::vector<uint8_t>& halMetadata) override {
return StreamCallbackBaseHelper<StreamOutHalInterfaceEventCallback>::runCb(
[&halMetadata](auto cb) { cb->onCodecFormatChanged(halMetadata); });
}
@@ -586,7 +589,6 @@
// that the HAL module uses `int32_t` for patch IDs. The following assert ensures
// that both the framework and the HAL use the same value for "no ID":
static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
- int32_t aidlPatchId = static_cast<int32_t>(*patch);
// Upon conversion, mix port configs contain audio configuration, while
// device port configs contain device address. This data is used to find
@@ -608,11 +610,27 @@
::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
sinks[i], isInput, 0)));
}
+ int32_t aidlPatchId = static_cast<int32_t>(*patch);
Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
{
std::lock_guard l(mLock);
- RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
- aidlSources, aidlSinks, &aidlPatchId, &cleanups));
+ // Check for patches that only exist for the framework, or have a different HAL patch ID.
+ if (int32_t aidlHalPatchId = mMapper.findFwkPatch(aidlPatchId); aidlHalPatchId != 0) {
+ if (aidlHalPatchId == aidlPatchId) {
+ // This patch was previously released by the HAL. Thus we need to pass '0'
+ // to the HAL to obtain a new patch.
+ int32_t newAidlPatchId = 0;
+ RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
+ aidlSources, aidlSinks, &newAidlPatchId, &cleanups));
+ mMapper.updateFwkPatch(aidlPatchId, newAidlPatchId);
+ } else {
+ RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
+ aidlSources, aidlSinks, &aidlHalPatchId, &cleanups));
+ }
+ } else {
+ RETURN_STATUS_IF_ERROR(mMapper.createOrUpdatePatch(
+ aidlSources, aidlSinks, &aidlPatchId, &cleanups));
+ }
}
*patch = static_cast<audio_patch_handle_t>(aidlPatchId);
cleanups.disarmAll();
@@ -628,7 +646,19 @@
return BAD_VALUE;
}
std::lock_guard l(mLock);
- RETURN_STATUS_IF_ERROR(mMapper.releaseAudioPatch(static_cast<int32_t>(patch)));
+ // Check for patches that only exist for the framework, or have a different HAL patch ID.
+ int32_t aidlPatchId = static_cast<int32_t>(patch);
+ if (int32_t aidlHalPatchId = mMapper.findFwkPatch(aidlPatchId); aidlHalPatchId != 0) {
+ if (aidlHalPatchId == aidlPatchId) {
+ // This patch was previously released by the HAL, just need to finish its removal.
+ mMapper.eraseFwkPatch(aidlPatchId);
+ return OK;
+ } else {
+ // This patch has a HAL patch ID which is different; release it under the HAL ID.
+ aidlPatchId = aidlHalPatchId;
+ }
+ }
+ RETURN_STATUS_IF_ERROR(mMapper.releaseAudioPatch(aidlPatchId));
return OK;
}
@@ -985,7 +1015,7 @@
if (mModule == nullptr) return NO_INIT;
{
std::lock_guard l(mLock);
- mMapper.resetUnusedPatchesPortConfigsAndPorts();
+ mMapper.resetUnusedPatchesAndPortConfigs();
}
ModuleDebug debug{ .simulateDeviceConnections = enabled };
status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
@@ -1052,15 +1082,11 @@
(void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
parameters, String8(AudioParameter::keyReconfigA2dp),
[&](const String8& value) -> status_t {
- if (mVendorExt != nullptr) {
- std::vector<VendorParameter> result;
- RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
- mVendorExt->parseBluetoothA2dpReconfigureOffload(
- std::string(value.c_str()), &result)));
- reconfigureOffload = std::move(result);
- } else {
- reconfigureOffload = std::vector<VendorParameter>();
- }
+ std::vector<VendorParameter> result;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mVendorExt->parseBluetoothA2dpReconfigureOffload(
+ std::string(value.c_str()), &result)));
+ reconfigureOffload = std::move(result);
return OK;
}));
if (mBluetoothA2dp != nullptr && a2dpEnabled.has_value()) {
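With getInputBufferSize now taking a non-const config, the HAL's suggested configuration is propagated back to the caller together with BAD_VALUE when the requested config was not accepted. A hedged sketch of how a caller could take advantage of that write-back; the retry policy and the helper name are assumptions, not code from this change:

    #include <cstring>

    #include <media/audiohal/DeviceHalInterface.h>
    #include <system/audio.h>
    #include <utils/Errors.h>
    #include <utils/StrongPointer.h>

    using namespace android;

    // Hypothetical caller: retry once with the configuration suggested by the HAL.
    status_t queryInputBufferSize(const sp<DeviceHalInterface>& dev,
                                  const audio_config_t& requested, size_t* size) {
        audio_config_t config = requested;
        status_t status = dev->getInputBufferSize(&config, size);
        if (status == BAD_VALUE && memcmp(&config, &requested, sizeof(config)) != 0) {
            // The HAL rejected the request but filled 'config' with its suggestion.
            status = dev->getInputBufferSize(&config, size);
        }
        return status;
    }
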
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index f705db7..d925b46 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -113,7 +113,7 @@
status_t getParameters(const String8& keys, String8 *values) override;
// Returns audio input buffer size according to parameters passed.
- status_t getInputBufferSize(const struct audio_config* config, size_t* size) override;
+ status_t getInputBufferSize(struct audio_config* config, size_t* size) override;
// Creates and opens the audio hardware output stream. The stream is closed
// by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index e8e1f46..ea4258c 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -236,7 +236,7 @@
}
status_t DeviceHalHidl::getInputBufferSize(
- const struct audio_config *config, size_t *size) {
+ struct audio_config *config, size_t *size) {
TIME_CHECK();
if (mDevice == 0) return NO_INIT;
AudioConfig hidlConfig;
@@ -752,10 +752,14 @@
// the attributes reported by `getParameters` API.
struct audio_port_v7 temp = *devicePort;
AudioProfileAttributesMultimap attrsFromDevice;
- status_t status = getAudioPort(&temp);
- if (status == NO_ERROR) {
- attrsFromDevice = createAudioProfilesAttrMap(temp.audio_profiles, 0 /*first*/,
- temp.num_audio_profiles);
+ bool supportsPatches;
+ if (supportsAudioPatches(&supportsPatches) == OK && supportsPatches) {
+ // Audio patches have been supported since HAL 3.0, which is also the minimum HAL
+ // version required for the 'getAudioPort' API.
+ if (getAudioPort(&temp) == NO_ERROR) {
+ attrsFromDevice = createAudioProfilesAttrMap(temp.audio_profiles, 0 /*first*/,
+ temp.num_audio_profiles);
+ }
}
auto streamIt = mStreams.find(mixPort->ext.mix.handle);
if (streamIt == mStreams.end()) {
@@ -767,7 +771,7 @@
}
String8 formatsStr;
- status = getParametersFromStream(
+ status_t status = getParametersFromStream(
stream, AudioParameter::keyStreamSupportedFormats, nullptr /*extraParameters*/,
&formatsStr);
if (status != NO_ERROR) {
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 7a712df..1362dab 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -67,7 +67,7 @@
status_t getParameters(const String8& keys, String8 *values) override;
// Returns audio input buffer size according to parameters passed.
- status_t getInputBufferSize(const struct audio_config* config, size_t* size) override;
+ status_t getInputBufferSize(struct audio_config* config, size_t* size) override;
// Creates and opens the audio hardware output stream. The stream is closed
// by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index 3dbc14a..347afa6 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -17,13 +17,16 @@
#include <algorithm>
#include <map>
#include <memory>
+#include <mutex>
#include <string>
#define LOG_TAG "DevicesFactoryHalAidl"
//#define LOG_NDEBUG 0
#include <aidl/android/hardware/audio/core/IModule.h>
+#include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
#include <android/binder_manager.h>
+#include <cutils/properties.h>
#include <media/AidlConversionNdkCpp.h>
#include <media/AidlConversionUtil.h>
#include <utils/Log.h>
@@ -35,6 +38,7 @@
using aidl::android::hardware::audio::core::IConfig;
using aidl::android::hardware::audio::core::IModule;
using aidl::android::hardware::audio::core::SurroundSoundConfig;
+using aidl::android::hardware::audio::core::VendorParameter;
using aidl::android::media::audio::common::AudioHalEngineConfig;
using aidl::android::media::audio::IHalAdapterVendorExtension;
using android::detail::AudioHalVersionInfo;
@@ -62,10 +66,84 @@
return cpp;
}
+class HalAdapterVendorExtensionWrapper :
+ public ::aidl::android::media::audio::BnHalAdapterVendorExtension {
+ private:
+ template<typename F>
+ ndk::ScopedAStatus callWithRetryOnCrash(F method) {
+ ndk::ScopedAStatus status = ndk::ScopedAStatus::ok();
+ for (auto service = getService(); service != nullptr; service = getService(true)) {
+ status = method(service);
+ if (status.getStatus() != STATUS_DEAD_OBJECT) break;
+ }
+ return status;
+ }
+
+ ndk::ScopedAStatus parseVendorParameterIds(ParameterScope in_scope,
+ const std::string& in_rawKeys,
+ std::vector<std::string>* _aidl_return) override {
+ return callWithRetryOnCrash([&](auto service) {
+ return service->parseVendorParameterIds(in_scope, in_rawKeys, _aidl_return);
+ });
+ }
+
+ ndk::ScopedAStatus parseVendorParameters(
+ ParameterScope in_scope, const std::string& in_rawKeysAndValues,
+ std::vector<VendorParameter>* out_syncParameters,
+ std::vector<VendorParameter>* out_asyncParameters) override {
+ return callWithRetryOnCrash([&](auto service) {
+ return service->parseVendorParameters(in_scope, in_rawKeysAndValues,
+ out_syncParameters, out_asyncParameters);
+ });
+ }
+
+ ndk::ScopedAStatus parseBluetoothA2dpReconfigureOffload(
+ const std::string& in_rawValue, std::vector<VendorParameter>* _aidl_return) override {
+ return callWithRetryOnCrash([&](auto service) {
+ return service->parseBluetoothA2dpReconfigureOffload(in_rawValue, _aidl_return);
+ });
+ }
+
+ ndk::ScopedAStatus parseBluetoothLeReconfigureOffload(const std::string& in_rawValue,
+ std::vector<VendorParameter>* _aidl_return) override {
+ return callWithRetryOnCrash([&](auto service) {
+ return service->parseBluetoothLeReconfigureOffload(in_rawValue, _aidl_return);
+ });
+ }
+
+ ndk::ScopedAStatus processVendorParameters(ParameterScope in_scope,
+ const std::vector<VendorParameter>& in_parameters,
+ std::string* _aidl_return) override {
+ return callWithRetryOnCrash([&](auto service) {
+ return service->processVendorParameters(in_scope, in_parameters, _aidl_return);
+ });
+ }
+
+ std::shared_ptr<IHalAdapterVendorExtension> getService(bool reset = false) {
+ std::lock_guard l(mLock);
+ if (reset || !mVendorExt.has_value()) {
+ if (property_get_bool("ro.audio.ihaladaptervendorextension_enabled", false)) {
+ auto serviceName = std::string(IHalAdapterVendorExtension::descriptor) + "/default";
+ mVendorExt = std::shared_ptr<IHalAdapterVendorExtension>(
+ IHalAdapterVendorExtension::fromBinder(ndk::SpAIBinder(
+ AServiceManager_waitForService(serviceName.c_str()))));
+ } else {
+ mVendorExt = nullptr;
+ }
+ }
+ return mVendorExt.value();
+ }
+
+ std::mutex mLock;
+ std::optional<std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>>
+ mVendorExt GUARDED_BY(mLock);
+};
+
} // namespace
DevicesFactoryHalAidl::DevicesFactoryHalAidl(std::shared_ptr<IConfig> config)
- : mConfig(std::move(config)) {
+ : mConfig(std::move(config)),
+ mVendorExt(ndk::SharedRefBase::make<HalAdapterVendorExtensionWrapper>()) {
}
status_t DevicesFactoryHalAidl::getDeviceNames(std::vector<std::string> *names) {
@@ -110,7 +188,7 @@
ALOGE("%s fromBinder %s failed", __func__, serviceName.c_str());
return NO_INIT;
}
- *device = sp<DeviceHalAidl>::make(name, service, getVendorExtension());
+ *device = sp<DeviceHalAidl>::make(name, service, mVendorExt);
return OK;
}
@@ -149,20 +227,6 @@
return OK;
}
-std::shared_ptr<IHalAdapterVendorExtension> DevicesFactoryHalAidl::getVendorExtension() {
- if (!mVendorExt.has_value()) {
- auto serviceName = std::string(IHalAdapterVendorExtension::descriptor) + "/default";
- if (AServiceManager_isDeclared(serviceName.c_str())) {
- mVendorExt = std::shared_ptr<IHalAdapterVendorExtension>(
- IHalAdapterVendorExtension::fromBinder(ndk::SpAIBinder(
- AServiceManager_waitForService(serviceName.c_str()))));
- } else {
- mVendorExt = nullptr;
- }
- }
- return mVendorExt.value();
-}
-
// Main entry-point to the shared library.
extern "C" __attribute__((visibility("default"))) void* createIDevicesFactoryImpl() {
auto serviceName = std::string(IConfig::descriptor) + "/default";
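The factory now always hands DeviceHalAidl this wrapper: it resolves the real vendor service lazily, only when `ro.audio.ihaladaptervendorextension_enabled` is true, and re-binds once if the vendor process dies (DEAD_OBJECT). For completeness, a minimal vendor-side registration sketch with an invented class name and intentionally empty method bodies; a real implementation would translate the raw key/value strings into VendorParameter instances:

    #include <string>
    #include <vector>

    #include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
    #include <android/binder_manager.h>
    #include <android/binder_process.h>

    using ::aidl::android::hardware::audio::core::VendorParameter;
    using ::aidl::android::media::audio::BnHalAdapterVendorExtension;
    using ::aidl::android::media::audio::IHalAdapterVendorExtension;

    // Hypothetical vendor implementation (sketch only).
    class MyVendorExtension : public BnHalAdapterVendorExtension {
        ndk::ScopedAStatus parseVendorParameterIds(ParameterScope, const std::string&,
                std::vector<std::string>* _aidl_return) override {
            _aidl_return->clear();
            return ndk::ScopedAStatus::ok();
        }
        ndk::ScopedAStatus parseVendorParameters(ParameterScope, const std::string&,
                std::vector<VendorParameter>*, std::vector<VendorParameter>*) override {
            return ndk::ScopedAStatus::ok();
        }
        ndk::ScopedAStatus parseBluetoothA2dpReconfigureOffload(
                const std::string&, std::vector<VendorParameter>*) override {
            return ndk::ScopedAStatus::ok();
        }
        ndk::ScopedAStatus parseBluetoothLeReconfigureOffload(
                const std::string&, std::vector<VendorParameter>*) override {
            return ndk::ScopedAStatus::ok();
        }
        ndk::ScopedAStatus processVendorParameters(ParameterScope,
                const std::vector<VendorParameter>&, std::string* _aidl_return) override {
            _aidl_return->clear();
            return ndk::ScopedAStatus::ok();
        }
    };

    int main() {
        auto ext = ndk::SharedRefBase::make<MyVendorExtension>();
        const std::string name =
                std::string(IHalAdapterVendorExtension::descriptor) + "/default";
        // Registration must happen before the audio server connects to the HAL modules.
        AServiceManager_addService(ext->asBinder().get(), name.c_str());
        ABinderProcess_joinThreadPool();
        return 0;
    }
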
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.h b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
index 17bfe43..2a3a9e7 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
@@ -45,10 +45,7 @@
private:
const std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> mConfig;
- std::optional<std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension>>
- mVendorExt;
-
- std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> getVendorExtension();
+ const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
~DevicesFactoryHalAidl() = default;
};
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.cpp b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
index a701852..33fe3ed 100644
--- a/media/libaudiohal/impl/EffectBufferHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.cpp
@@ -58,25 +58,14 @@
}
EffectBufferHalAidl::~EffectBufferHalAidl() {
+ if (mAudioBuffer.raw) free(mAudioBuffer.raw);
}
status_t EffectBufferHalAidl::init() {
- int fd = ashmem_create_region("audioEffectAidl", mBufferSize);
- if (fd < 0) {
- ALOGE("%s create ashmem failed %d", __func__, fd);
- return fd;
+ if (0 != posix_memalign(&mAudioBuffer.raw, 32, mBufferSize)) {
+ return NO_MEMORY;
}
- ScopedFileDescriptor tempFd(fd);
- mAudioBuffer.raw = mmap(nullptr /* address */, mBufferSize /* length */, PROT_READ | PROT_WRITE,
- MAP_SHARED, fd, 0 /* offset */);
- if (mAudioBuffer.raw == MAP_FAILED) {
- ALOGE("mmap failed for fd %d", fd);
- mAudioBuffer.raw = nullptr;
- return INVALID_OPERATION;
- }
-
- mMemory = {std::move(tempFd), static_cast<int64_t>(mBufferSize)};
return OK;
}
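The effect buffers are now plain heap allocations instead of ashmem regions; the 32-byte alignment is presumably chosen to keep the float buffers friendly to SIMD access. A small self-contained sketch of the same allocate/free pairing, wrapped in RAII for illustration only (the patch itself frees the raw pointer in the destructor):

    #include <cstdlib>
    #include <memory>

    struct FreeDeleter {
        void operator()(void* p) const { std::free(p); }
    };

    // Returns nullptr on failure, which the caller can map to NO_MEMORY as above.
    std::unique_ptr<void, FreeDeleter> allocateAligned(size_t bytes, size_t alignment = 32) {
        void* raw = nullptr;
        if (posix_memalign(&raw, alignment, bytes) != 0) return nullptr;
        return std::unique_ptr<void, FreeDeleter>(raw);
    }
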
diff --git a/media/libaudiohal/impl/EffectBufferHalAidl.h b/media/libaudiohal/impl/EffectBufferHalAidl.h
index 035314b..cf6031f 100644
--- a/media/libaudiohal/impl/EffectBufferHalAidl.h
+++ b/media/libaudiohal/impl/EffectBufferHalAidl.h
@@ -50,7 +50,6 @@
const size_t mBufferSize;
bool mFrameCountChanged;
void* mExternalData;
- aidl::android::hardware::common::Ashmem mMemory;
audio_buffer_t mAudioBuffer;
// Can not be constructed directly by clients.
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index 39999a5..ff6126d 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -181,36 +181,40 @@
State state;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
if (state == State::INIT) {
- ALOGI("%s at state %s, opening effect with input %s output %s", __func__,
+ ALOGD("%s at state %s, opening effect with input %s output %s", __func__,
android::internal::ToString(state).c_str(), common.input.toString().c_str(),
common.output.toString().c_str());
IEffect::OpenEffectReturn openReturn;
RETURN_STATUS_IF_ERROR(
statusTFromBinderStatus(mEffect->open(common, std::nullopt, &openReturn)));
- updateMqs(openReturn);
-
- if (status_t status = updateEventFlags(); status != OK) {
- ALOGV("%s closing at status %d", __func__, status);
- mEffect->close();
- return status;
- }
+ updateMqsAndEventFlags(openReturn);
} else if (mCommon != common) {
- ALOGI("%s at state %s, setParameter", __func__, android::internal::ToString(state).c_str());
+ ALOGV("%s at state %s, setCommonParameter %s", __func__,
+ android::internal::ToString(state).c_str(), common.toString().c_str());
Parameter aidlParam = UNION_MAKE(Parameter, common, common);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
}
+ mOutputAccessMode = config->outputCfg.accessMode;
mCommon = common;
return *static_cast<int32_t*>(pReplyData) = OK;
}
-void EffectConversionHelperAidl::updateMqs(const IEffect::OpenEffectReturn& ret) {
+void EffectConversionHelperAidl::updateMqsAndEventFlags(const IEffect::OpenEffectReturn& ret) {
if (mIsProxyEffect) {
mStatusQ = std::static_pointer_cast<EffectProxy>(mEffect)->getStatusMQ();
+ } else {
+ mStatusQ = std::make_shared<StatusMQ>(ret.statusMQ);
+ }
+ updateEventFlags();
+ updateDataMqs(ret);
+}
+
+void EffectConversionHelperAidl::updateDataMqs(const IEffect::OpenEffectReturn& ret) {
+ if (mIsProxyEffect) {
mInputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getInputMQ();
mOutputQ = std::static_pointer_cast<EffectProxy>(mEffect)->getOutputMQ();
} else {
- mStatusQ = std::make_shared<StatusMQ>(ret.statusMQ);
mInputQ = std::make_shared<DataMQ>(ret.inputDataMQ);
mOutputQ = std::make_shared<DataMQ>(ret.outputDataMQ);
}
@@ -395,22 +399,20 @@
effect_offload_param_t* offload = (effect_offload_param_t*)pCmdData;
// send to proxy to update active sub-effect
if (mIsProxyEffect) {
- ALOGI("%s offload param offload %s ioHandle %d", __func__,
+ ALOGV("%s offload param offload %s ioHandle %d", __func__,
offload->isOffload ? "true" : "false", offload->ioHandle);
const auto& effectProxy = std::static_pointer_cast<EffectProxy>(mEffect);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(effectProxy->setOffloadParam(offload)));
if (mCommon.ioHandle != offload->ioHandle) {
- ALOGI("%s ioHandle update [%d to %d]", __func__, mCommon.ioHandle, offload->ioHandle);
+ ALOGV("%s ioHandle update [%d to %d]", __func__, mCommon.ioHandle, offload->ioHandle);
mCommon.ioHandle = offload->ioHandle;
Parameter aidlParam = UNION_MAKE(Parameter, common, mCommon);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
}
// update FMQs if the effect instance is already open
if (State state; effectProxy->getState(&state).isOk() && state != State::INIT) {
- mStatusQ = effectProxy->getStatusMQ();
- mInputQ = effectProxy->getInputMQ();
- mOutputQ = effectProxy->getOutputMQ();
- updateEventFlags();
+ IEffect::OpenEffectReturn openReturn;
+ updateMqsAndEventFlags(openReturn);
}
}
return *static_cast<int32_t*>(pReplyData) = OK;
@@ -512,7 +514,8 @@
IEffect::OpenEffectReturn openReturn;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->reopen(&openReturn)));
- updateMqs(openReturn);
+ // status MQ won't be changed after open
+ updateDataMqs(openReturn);
return OK;
}
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index 8b9efb3..29c5a83 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -49,6 +49,8 @@
::aidl::android::hardware::audio::effect::Descriptor getDescriptor() const;
status_t reopen();
+ uint8_t mOutputAccessMode = EFFECT_BUFFER_ACCESS_WRITE;
+
protected:
const int32_t mSessionId;
const int32_t mIoId;
@@ -69,9 +71,6 @@
void* pReplyData);
private:
- const aidl::android::media::audio::common::AudioFormatDescription kDefaultFormatDescription = {
- .type = aidl::android::media::audio::common::AudioFormatType::PCM,
- .pcm = aidl::android::media::audio::common::PcmType::FLOAT_32_BIT};
const bool mIsProxyEffect;
static constexpr int kDefaultframeCount = 0x100;
@@ -81,13 +80,16 @@
return pt ? std::to_string(*pt) : "nullptr";
}
- using AudioChannelLayout = aidl::android::media::audio::common::AudioChannelLayout;
const aidl::android::media::audio::common::AudioConfig kDefaultAudioConfig = {
.base = {.sampleRate = 44100,
- .channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
- AudioChannelLayout::LAYOUT_STEREO),
- .format = kDefaultFormatDescription},
+ .channelMask = aidl::android::media::audio::common::AudioChannelLayout::make<
+ aidl::android::media::audio::common::AudioChannelLayout::layoutMask>(
+ aidl::android::media::audio::common::AudioChannelLayout::
+ LAYOUT_STEREO),
+ .format = {.type = aidl::android::media::audio::common::AudioFormatType::PCM,
+ .pcm = aidl::android::media::audio::common::PcmType::FLOAT_32_BIT}},
.frameCount = kDefaultframeCount};
+
// command handler map
typedef status_t (EffectConversionHelperAidl::*CommandHandler)(uint32_t /* cmdSize */,
const void* /* pCmdData */,
@@ -98,7 +100,6 @@
std::shared_ptr<StatusMQ> mStatusQ = nullptr;
std::shared_ptr<DataMQ> mInputQ = nullptr, mOutputQ = nullptr;
-
struct EventFlagDeleter {
void operator()(::android::hardware::EventFlag* flag) const {
if (flag) {
@@ -108,8 +109,10 @@
};
std::shared_ptr<android::hardware::EventFlag> mEfGroup = nullptr;
status_t updateEventFlags();
-
- void updateMqs(const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn& ret);
+ void updateDataMqs(
+ const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn& ret);
+ void updateMqsAndEventFlags(
+ const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn& ret);
status_t handleInit(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
void* pReplyData);
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index 2836727..87d24a5 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -20,6 +20,7 @@
#include <memory>
+#include <audio_utils/primitives.h>
#include <error/expected_utils.h>
#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionEffect.h>
@@ -53,24 +54,27 @@
#include "effectsAidlConversion/AidlConversionVisualizer.h"
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::CommandId;
using ::aidl::android::hardware::audio::effect::Descriptor;
using ::aidl::android::hardware::audio::effect::IEffect;
using ::aidl::android::hardware::audio::effect::IFactory;
using ::aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate;
+using ::aidl::android::hardware::audio::effect::kReopenSupportedVersion;
using ::aidl::android::hardware::audio::effect::State;
namespace android {
namespace effect {
EffectHalAidl::EffectHalAidl(const std::shared_ptr<IFactory>& factory,
- const std::shared_ptr<IEffect>& effect,
- int32_t sessionId, int32_t ioId, const Descriptor& desc,
- bool isProxyEffect)
+ const std::shared_ptr<IEffect>& effect, int32_t sessionId,
+ int32_t ioId, const Descriptor& desc, bool isProxyEffect)
: mFactory(factory),
mEffect(effect),
mSessionId(sessionId),
mIoId(ioId),
mIsProxyEffect(isProxyEffect) {
+ assert(mFactory != nullptr);
+ assert(mEffect != nullptr);
createAidlConversion(effect, sessionId, ioId, desc);
}
@@ -184,10 +188,18 @@
return INVALID_OPERATION;
}
- if (uint32_t efState = 0;
- ::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState, 1 /* ns */,
- true /* retry */)) {
- ALOGI("%s %s receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str());
+ // use IFactory HAL version because IEffect can be an EffectProxy instance
+ static const int halVersion = [&]() {
+ int version = 0;
+ return mFactory->getInterfaceVersion(&version).isOk() ? version : 0;
+ }();
+
+ if (uint32_t efState = 0; halVersion >= kReopenSupportedVersion &&
+ ::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState,
+ 1 /* ns */, true /* retry */) &&
+ efState & kEventFlagDataMqUpdate) {
+ ALOGV("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(),
+ halVersion);
mConversion->reopen();
}
auto statusQ = mConversion->getStatusMQ();
@@ -229,16 +241,23 @@
mOutBuffer->getSize() / sizeof(float), available);
return INVALID_OPERATION;
}
+
+ float *outputRawBuffer = mOutBuffer->audioBuffer()->f32;
+ std::vector<float> tempBuffer;
+ if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+ tempBuffer.resize(floatsToRead);
+ outputRawBuffer = tempBuffer.data();
+ }
// always read floating point data for AIDL
- if (!mOutBuffer->audioBuffer() ||
- !outputQ->read(mOutBuffer->audioBuffer()->f32, floatsToRead)) {
+ if (!outputQ->read(outputRawBuffer, floatsToRead)) {
ALOGE("%s failed to read %zu from outputQ to audioBuffer %p", __func__, floatsToRead,
mOutBuffer->audioBuffer());
return INVALID_OPERATION;
}
+ if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+ accumulate_float(mOutBuffer->audioBuffer()->f32, outputRawBuffer, floatsToRead);
+ }
- ALOGD("%s %s consumed %zu produced %zu", __func__, effectName.c_str(), floatsToWrite,
- floatsToRead);
return OK;
}
@@ -275,6 +294,7 @@
status_t EffectHalAidl::close() {
TIME_CHECK();
+ mEffect->command(CommandId::STOP);
return statusTFromBinderStatus(mEffect->close());
}
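When the output access mode captured at configuration time is EFFECT_BUFFER_ACCESS_ACCUMULATE, the processed samples read from the output FMQ are mixed into the existing output rather than overwriting it; the patch does this with accumulate_float() from <audio_utils/primitives.h>. An illustrative stand-alone version of that accumulate semantics, a sketch only, assuming the usual dst += src behavior:

    #include <cstddef>

    // What "accumulate" means for the output buffer: mix, do not overwrite.
    void accumulateFloat(float* dst, const float* src, size_t count) {
        for (size_t i = 0; i < count; ++i) {
            dst[i] += src[i];
        }
    }
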
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
index d440ef8..af0f8f2 100644
--- a/media/libaudiohal/impl/EffectProxy.cpp
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -147,7 +147,7 @@
// close all opened effects if failure
if (!status.isOk()) {
- ALOGE("%s: closing all sub-effects with error %s", __func__,
+ ALOGW("%s: closing all sub-effects with error %s", __func__,
status.getDescription().c_str());
close();
}
@@ -208,6 +208,10 @@
if (desc.common.flags.volume == Flags::Volume::NONE) {
common.flags.volume = Flags::Volume::NONE;
}
+ // set to AUXILIARY if any sub-effect is of AUXILIARY type
+ if (desc.common.flags.type == Flags::Type::AUXILIARY) {
+ common.flags.type = Flags::Type::AUXILIARY;
+ }
}
// copy the type UUID from any of the sub-effects; all sub-effects should have the same type
@@ -256,7 +260,7 @@
std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func) {
ndk::ScopedAStatus status = runWithActiveSubEffect(func);
if (!status.isOk()) {
- ALOGE("%s active sub-effect return error %s", __func__, status.getDescription().c_str());
+ ALOGW("%s active sub-effect return error %s", __func__, status.getDescription().c_str());
}
// proceed with others
@@ -265,7 +269,7 @@
continue;
}
if (!mSubEffects[i].handle) {
- ALOGE("%s null sub-effect interface for %s", __func__,
+ ALOGW("%s null sub-effect interface for %s", __func__,
mSubEffects[i].descriptor.common.id.uuid.toString().c_str());
continue;
}
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 7d807b2..424fdb7 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -120,8 +120,6 @@
}
*pNumEffects = mEffectCount;
- ALOGD("%s %u non %zu proxyMap %zu proxyDesc %zu", __func__, *pNumEffects,
- mNonProxyDescList.size(), mProxyUuidDescriptorMap.size(), mProxyDescList.size());
return OK;
}
@@ -178,7 +176,7 @@
if (sessionId == AUDIO_SESSION_DEVICE && ioId == AUDIO_IO_HANDLE_NONE) {
return INVALID_OPERATION;
}
- ALOGI("%s session %d ioId %d", __func__, sessionId, ioId);
+ ALOGV("%s session %d ioId %d", __func__, sessionId, ioId);
AudioUuid aidlUuid =
VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
@@ -218,13 +216,11 @@
}
status_t EffectsFactoryHalAidl::allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) {
- ALOGI("%s size %zu buffer %p", __func__, size, buffer);
return EffectBufferHalAidl::allocate(size, buffer);
}
status_t EffectsFactoryHalAidl::mirrorBuffer(void* external, size_t size,
sp<EffectBufferHalInterface>* buffer) {
- ALOGI("%s extern %p size %zu buffer %p", __func__, external, size, buffer);
return EffectBufferHalAidl::mirror(external, size, buffer);
}
@@ -245,7 +241,6 @@
ALOGE("%s UUID not found in HAL and proxy list %s", __func__, toString(uuid).c_str());
return NAME_NOT_FOUND;
}
- ALOGI("%s UUID impl found %s", __func__, toString(uuid).c_str());
*pDescriptor = VALUE_OR_RETURN_STATUS(
::aidl::android::aidl2legacy_Descriptor_effect_descriptor(*matchIt));
@@ -267,7 +262,6 @@
ALOGW("%s UUID type not found in HAL and proxy list %s", __func__, toString(type).c_str());
return BAD_VALUE;
}
- ALOGI("%s UUID type found %zu \n %s", __func__, result.size(), toString(type).c_str());
*descriptors = VALUE_OR_RETURN_STATUS(
aidl::android::convertContainer<std::vector<effect_descriptor_t>>(
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 2b7f298..acc69ec 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -102,8 +102,8 @@
}
void Hal2AidlMapper::addStream(
- const sp<StreamHalInterface>& stream, int32_t portConfigId, int32_t patchId) {
- mStreams.insert(std::pair(stream, std::pair(portConfigId, patchId)));
+ const sp<StreamHalInterface>& stream, int32_t mixPortConfigId, int32_t patchId) {
+ mStreams.insert(std::pair(stream, std::pair(mixPortConfigId, patchId)));
}
bool Hal2AidlMapper::audioDeviceMatches(const AudioDevice& device, const AudioPort& p) {
@@ -698,20 +698,23 @@
return OK;
}
-bool Hal2AidlMapper::isPortBeingHeld(int32_t portId) {
- // It is assumed that mStreams has already been cleaned up.
- for (const auto& s : mStreams) {
- if (portConfigBelongsToPort(s.second.first, portId)) return true;
- }
- for (const auto& [_, patch] : mPatches) {
+std::set<int32_t> Hal2AidlMapper::getPatchIdsByPortId(int32_t portId) {
+ std::set<int32_t> result;
+ for (const auto& [patchId, patch] : mPatches) {
for (int32_t id : patch.sourcePortConfigIds) {
- if (portConfigBelongsToPort(id, portId)) return true;
+ if (portConfigBelongsToPort(id, portId)) {
+ result.insert(patchId);
+ break;
+ }
}
for (int32_t id : patch.sinkPortConfigIds) {
- if (portConfigBelongsToPort(id, portId)) return true;
+ if (portConfigBelongsToPort(id, portId)) {
+ result.insert(patchId);
+ break;
+ }
}
}
- return false;
+ return result;
}
status_t Hal2AidlMapper::prepareToDisconnectExternalDevice(const AudioPort& devicePort) {
@@ -730,7 +733,7 @@
this, __func__, ioHandle, device.toString().c_str(),
flags.toString().c_str(), toString(source).c_str(),
config->toString().c_str(), mixPortConfig->toString().c_str());
- resetUnusedPatchesPortConfigsAndPorts();
+ resetUnusedPatchesAndPortConfigs();
const AudioConfig initialConfig = *config;
// Find / create AudioPortConfigs for the device port and the mix port,
// then find / create a patch between them, and open a stream on the mix port.
@@ -843,39 +846,52 @@
return releaseAudioPatches({patchId});
}
+// Note: does not reset port configs.
+status_t Hal2AidlMapper::releaseAudioPatch(Patches::iterator it) {
+ const int32_t patchId = it->first;
+ if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
+ ALOGE("%s: error while resetting patch %d: %s",
+ __func__, patchId, status.getDescription().c_str());
+ return statusTFromBinderStatus(status);
+ }
+ mPatches.erase(it);
+ for (auto it = mFwkPatches.begin(); it != mFwkPatches.end(); ++it) {
+ if (it->second == patchId) {
+ mFwkPatches.erase(it);
+ break;
+ }
+ }
+ return OK;
+}
+
status_t Hal2AidlMapper::releaseAudioPatches(const std::set<int32_t>& patchIds) {
status_t result = OK;
for (const auto patchId : patchIds) {
if (auto it = mPatches.find(patchId); it != mPatches.end()) {
- mPatches.erase(it);
- if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
- ALOGE("%s: error while resetting patch %d: %s",
- __func__, patchId, status.getDescription().c_str());
- result = statusTFromBinderStatus(status);
- }
+ releaseAudioPatch(it);
} else {
ALOGE("%s: patch id %d not found", __func__, patchId);
result = BAD_VALUE;
}
}
- resetUnusedPortConfigsAndPorts();
+ resetUnusedPortConfigs();
return result;
}
void Hal2AidlMapper::resetPortConfig(int32_t portConfigId) {
if (auto it = mPortConfigs.find(portConfigId); it != mPortConfigs.end()) {
- mPortConfigs.erase(it);
if (ndk::ScopedAStatus status = mModule->resetAudioPortConfig(portConfigId);
!status.isOk()) {
ALOGE("%s: error while resetting port config %d: %s",
__func__, portConfigId, status.getDescription().c_str());
}
+ mPortConfigs.erase(it);
return;
}
ALOGE("%s: port config id %d not found", __func__, portConfigId);
}
-void Hal2AidlMapper::resetUnusedPatchesPortConfigsAndPorts() {
+void Hal2AidlMapper::resetUnusedPatchesAndPortConfigs() {
// Since patches can be created independently of streams via 'createOrUpdatePatch',
// here we only clean up patches for released streams.
std::set<int32_t> patchesToRelease;
@@ -889,52 +905,35 @@
it = mStreams.erase(it);
}
}
- // 'releaseAudioPatches' also resets unused port configs and ports.
+ // 'releaseAudioPatches' also resets unused port configs.
releaseAudioPatches(patchesToRelease);
}
-void Hal2AidlMapper::resetUnusedPortConfigsAndPorts() {
+void Hal2AidlMapper::resetUnusedPortConfigs() {
// The assumption is that port configs are used to create patches
// (or to open streams, but that involves creation of patches, too). Thus,
// orphaned port configs can and should be reset.
- std::map<int32_t, int32_t /*portID*/> portConfigIds;
+ std::set<int32_t> portConfigIdsToReset;
std::transform(mPortConfigs.begin(), mPortConfigs.end(),
- std::inserter(portConfigIds, portConfigIds.end()),
- [](const auto& pcPair) { return std::make_pair(pcPair.first, pcPair.second.portId); });
+ std::inserter(portConfigIdsToReset, portConfigIdsToReset.end()),
+ [](const auto& pcPair) { return pcPair.first; });
for (const auto& p : mPatches) {
- for (int32_t id : p.second.sourcePortConfigIds) portConfigIds.erase(id);
- for (int32_t id : p.second.sinkPortConfigIds) portConfigIds.erase(id);
+ for (int32_t id : p.second.sourcePortConfigIds) portConfigIdsToReset.erase(id);
+ for (int32_t id : p.second.sinkPortConfigIds) portConfigIdsToReset.erase(id);
}
for (int32_t id : mInitialPortConfigIds) {
- portConfigIds.erase(id);
+ portConfigIdsToReset.erase(id);
}
for (const auto& s : mStreams) {
- portConfigIds.erase(s.second.first);
+ portConfigIdsToReset.erase(s.second.first);
}
- std::set<int32_t> retryDeviceDisconnection;
- for (const auto& portConfigAndIdPair : portConfigIds) {
- resetPortConfig(portConfigAndIdPair.first);
- if (const auto it = mConnectedPorts.find(portConfigAndIdPair.second);
- it != mConnectedPorts.end() && it->second) {
- retryDeviceDisconnection.insert(portConfigAndIdPair.second);
- }
- }
- for (int32_t portId : retryDeviceDisconnection) {
- if (!isPortBeingHeld(portId)) {
- if (auto status = mModule->disconnectExternalDevice(portId); status.isOk()) {
- eraseConnectedPort(portId);
- ALOGD("%s: executed postponed external device disconnection for port ID %d",
- __func__, portId);
- }
- }
- }
- if (!retryDeviceDisconnection.empty()) {
- updateRoutes();
+ for (const auto& portConfigId : portConfigIdsToReset) {
+ resetPortConfig(portConfigId);
}
}
status_t Hal2AidlMapper::setDevicePortConnectedState(const AudioPort& devicePort, bool connected) {
- resetUnusedPatchesPortConfigsAndPorts();
+ resetUnusedPatchesAndPortConfigs();
if (connected) {
AudioDevice matchDevice = devicePort.ext.get<AudioPortExt::device>().device;
std::optional<AudioPort> templatePort;
@@ -980,7 +979,7 @@
"%s: module %s, duplicate port ID received from HAL: %s, existing port: %s",
__func__, mInstance.c_str(), connectedPort.toString().c_str(),
it->second.toString().c_str());
- mConnectedPorts[connectedPort.id] = false;
+ mConnectedPorts.insert(connectedPort.id);
if (erasePortAfterConnectionIt != mPorts.end()) {
mPorts.erase(erasePortAfterConnectionIt);
}
@@ -1007,17 +1006,27 @@
port.ext.get<AudioPortExt::Tag::device>().device = matchDevice;
port.profiles = portsIt->second.profiles;
}
- // Streams are closed by AudioFlinger independently from device disconnections.
- // It is possible that the stream has not been closed yet.
- if (!isPortBeingHeld(portId)) {
- RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
- mModule->disconnectExternalDevice(portId)));
- eraseConnectedPort(portId);
- } else {
- ALOGD("%s: since device port ID %d is used by a stream, "
- "external device disconnection postponed", __func__, portId);
- mConnectedPorts[portId] = true;
+
+ // Patches may still exist; the framework may reset or update them later.
+ // For disconnection to succeed, need to release these patches first.
+ if (std::set<int32_t> patchIdsToRelease = getPatchIdsByPortId(portId);
+ !patchIdsToRelease.empty()) {
+ FwkPatches releasedPatches;
+ status_t status = OK;
+ for (int32_t patchId : patchIdsToRelease) {
+ if (auto it = mPatches.find(patchId); it != mPatches.end()) {
+ if (status = releaseAudioPatch(it); status != OK) break;
+ releasedPatches.insert(std::make_pair(patchId, patchId));
+ }
+ }
+ resetUnusedPortConfigs();
+ mFwkPatches.merge(releasedPatches);
+ LOG_ALWAYS_FATAL_IF(!releasedPatches.empty(),
+ "mFwkPatches already contains some of released patches");
+ if (status != OK) return status;
}
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->disconnectExternalDevice(portId)));
+ eraseConnectedPort(portId);
}
return updateRoutes();
}
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index f937173..f302c23 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -49,7 +49,7 @@
const std::string& instance,
const std::shared_ptr<::aidl::android::hardware::audio::core::IModule>& module);
- void addStream(const sp<StreamHalInterface>& stream, int32_t portConfigId, int32_t patchId);
+ void addStream(const sp<StreamHalInterface>& stream, int32_t mixPortConfigId, int32_t patchId);
status_t createOrUpdatePatch(
const std::vector<::aidl::android::media::audio::common::AudioPortConfig>& sources,
const std::vector<::aidl::android::media::audio::common::AudioPortConfig>& sinks,
@@ -91,13 +91,32 @@
::aidl::android::media::audio::common::AudioPortConfig* portConfig,
Cleanups* cleanups = nullptr);
status_t releaseAudioPatch(int32_t patchId);
- void resetUnusedPatchesPortConfigsAndPorts();
+ void resetUnusedPatchesAndPortConfigs();
status_t setDevicePortConnectedState(
const ::aidl::android::media::audio::common::AudioPort& devicePort, bool connected);
+ // Methods to work with FwkPatches.
+ void eraseFwkPatch(int32_t fwkPatchId) { mFwkPatches.erase(fwkPatchId); }
+ int32_t findFwkPatch(int32_t fwkPatchId) {
+ const auto it = mFwkPatches.find(fwkPatchId);
+ return it != mFwkPatches.end() ? it->second : 0;
+ }
+ void updateFwkPatch(int32_t fwkPatchId, int32_t halPatchId) {
+ mFwkPatches[fwkPatchId] = halPatchId;
+ }
+
private:
- // IDs of ports for connected external devices, and whether they are held by streams.
- using ConnectedPorts = std::map<int32_t /*port ID*/, bool>;
+ // 'FwkPatches' is used to store patches that diverge from the framework's state.
+ // Uses framework patch ID (aka audio_patch_handle_t) values for indexing.
+ // When 'key == value', it means that Hal2AidlMapper has removed this patch, and it is absent
+ // from 'mPatches', but it still "exists" for the framework. The framework will either remove
+ // it or re-patch. If the framework re-patches, it will continue to use the same patch handle,
+ // but the HAL will use the new one (since the old patch was reset), thus 'key != value'
+ // for such patches. Since they "exist" both for the framework and the HAL, 'mPatches'
+ // contains their data under HAL patch ID ('value' of 'FwkPatches').
+ // To avoid confusion, all patchIDs used by Hal2AidlMapper are HAL IDs. Mapping between
+ // framework patch IDs and HAL patch IDs is done by DeviceHalAidl.
+ using FwkPatches = std::map<int32_t /*audio_patch_handle_t*/, int32_t /*patch ID*/>;
using Patches = std::map<int32_t /*patch ID*/,
::aidl::android::hardware::audio::core::AudioPatch>;
using PortConfigs = std::map<int32_t /*port config ID*/,
@@ -107,12 +126,12 @@
// Answers the question "whether portID 'first' is reachable from portID 'second'?"
// It's not a map because both portIDs are known. The matrix is symmetric.
using RoutingMatrix = std::set<std::pair<int32_t, int32_t>>;
- // There is always a port config ID set. The patch ID is set after stream
+ // There is always a mix port config ID set. The patch ID is set after stream
// creation, and can be set to '-1' later if the framework happens to create
// a patch between the same endpoints. In that case, the ownership of the patch
// is on the framework.
using Streams = std::map<wp<StreamHalInterface>,
- std::pair<int32_t /*port config ID*/, int32_t /*patch ID*/>>;
+ std::pair<int32_t /*mix port config ID*/, int32_t /*patch ID*/>>;
const std::string mInstance;
const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
@@ -168,7 +187,7 @@
const std::optional<::aidl::android::media::audio::common::AudioConfig>& config,
const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
int32_t ioHandle);
- bool isPortBeingHeld(int32_t portId);
+ std::set<int32_t> getPatchIdsByPortId(int32_t portId);
status_t prepareToOpenStreamHelper(
int32_t ioHandle, int32_t devicePortId, int32_t devicePortConfigId,
const ::aidl::android::media::audio::common::AudioIoFlags& flags,
@@ -181,10 +200,11 @@
auto it = mPortConfigs.find(portConfigId);
return it != mPortConfigs.end() && it->second.portId == portId;
}
+ status_t releaseAudioPatch(Patches::iterator it);
status_t releaseAudioPatches(const std::set<int32_t>& patchIds);
void resetPatch(int32_t patchId) { (void)releaseAudioPatch(patchId); }
void resetPortConfig(int32_t portConfigId);
- void resetUnusedPortConfigsAndPorts();
+ void resetUnusedPortConfigs();
status_t updateAudioPort(
int32_t portId, ::aidl::android::media::audio::common::AudioPort* port);
status_t updateRoutes();
@@ -197,13 +217,14 @@
std::optional<::aidl::android::media::audio::common::AudioPort> mRemoteSubmixOut;
int32_t mDefaultInputPortId = -1;
int32_t mDefaultOutputPortId = -1;
+ FwkPatches mFwkPatches;
PortConfigs mPortConfigs;
std::set<int32_t> mInitialPortConfigIds;
Patches mPatches;
Routes mRoutes;
RoutingMatrix mRoutingMatrix;
Streams mStreams;
- ConnectedPorts mConnectedPorts;
+ std::set<int32_t> mConnectedPorts;
std::pair<int32_t, ::aidl::android::media::audio::common::AudioPort>
mDisconnectedPortReplacement;
std::set<int32_t> mDynamicMixPortIds;
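The comment above describes mFwkPatches in the abstract; the key/value invariant is easier to see with concrete numbers. A toy std::map walkthrough with made-up IDs (42 for the framework handle, 57 for the re-created HAL patch), mirroring findFwkPatch/updateFwkPatch/eraseFwkPatch:

    #include <cassert>
    #include <cstdint>
    #include <map>

    int main() {
        // Toy model of mFwkPatches: framework handle -> HAL patch ID (IDs are made up).
        std::map<int32_t, int32_t> fwkPatches;

        // An external device disconnect releases HAL patch 42 while the framework still
        // holds handle 42: key == value marks "gone in the HAL, alive for the framework".
        fwkPatches[42] = 42;

        // The framework re-patches with the same handle; the HAL hands back a new patch ID 57.
        fwkPatches[42] = 57;  // key != value: handle 42 now resolves to HAL patch 57

        // A later release of framework handle 42 must therefore target HAL patch 57.
        auto it = fwkPatches.find(42);
        assert(it != fwkPatches.end() && it->second == 57);
        fwkPatches.erase(it);  // mirror of eraseFwkPatch(42)
        return 0;
    }
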
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 5f525d7..2a8ebc6 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -788,7 +788,7 @@
if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
parameters, String8(AudioParameter::keyOffloadCodecAverageBitRate),
[&](int value) {
- return value > 0 ?
+ return value >= 0 ?
mOffloadMetadata.averageBitRatePerSecond = value, OK : BAD_VALUE;
}))) {
updateMetadata = true;
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 72eadc6..77c75db 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -840,7 +840,7 @@
const android::hardware::hidl_vec<uint8_t>& audioMetadata) override {
sp<StreamOutHalHidl> stream = mStream.promote();
if (stream != nullptr) {
- std::basic_string<uint8_t> metadataBs(audioMetadata.begin(), audioMetadata.end());
+ std::vector<uint8_t> metadataBs(audioMetadata.begin(), audioMetadata.end());
stream->onCodecFormatChanged(metadataBs);
}
return Void();
@@ -967,7 +967,7 @@
callback->onError();
}
-void StreamOutHalHidl::onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs) {
+void StreamOutHalHidl::onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) {
sp<StreamOutHalInterfaceEventCallback> callback = mEventCallback.load().promote();
if (callback == nullptr) return;
ALOGV("asyncCodecFormatCallback %s", __func__);
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 5361047..48da633 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -194,7 +194,7 @@
status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
// Methods used by StreamCodecFormatCallback (HIDL).
- void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs);
+ void onCodecFormatChanged(const std::vector<uint8_t>& metadataBs);
status_t setLatencyMode(audio_latency_mode_t mode) override;
status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) override;
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
index 3cac591..642c370 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
@@ -71,7 +71,6 @@
status_t AidlConversionPresetReverb::getParameter(EffectParamWriter& param) {
uint32_t type = 0;
uint16_t value = 0;
- ALOGE("%s enter %s", __func__, param.toString().c_str());
if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
OK != param.readFromParameter(&type)) {
ALOGE("%s invalid param %s", __func__, param.toString().c_str());
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index 49e6827..da28204 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -16,17 +16,17 @@
#include <cstdint>
#include <cstring>
-#include <optional>
#define LOG_TAG "AidlConversionSpatializer"
//#define LOG_NDEBUG 0
#include <aidl/android/hardware/audio/effect/DefaultExtension.h>
#include <aidl/android/hardware/audio/effect/VendorExtension.h>
#include <error/expected_utils.h>
-#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionEffect.h>
+#include <media/AidlConversionNdk.h>
+#include <system/audio_effects/aidl_effects_utils.h>
#include <system/audio_effects/effect_spatializer.h>
-
#include <utils/Log.h>
#include "AidlConversionSpatializer.h"
@@ -34,38 +34,312 @@
namespace android {
namespace effect {
-using ::aidl::android::aidl_utils::statusTFromBinderStatus;
-using ::aidl::android::hardware::audio::effect::DefaultExtension;
-using ::aidl::android::hardware::audio::effect::Parameter;
-using ::aidl::android::hardware::audio::effect::VendorExtension;
-using ::android::status_t;
+using aidl::android::getParameterSpecificField;
+using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::hardware::audio::common::SourceMetadata;
+using aidl::android::hardware::audio::effect::DefaultExtension;
+using aidl::android::hardware::audio::effect::Parameter;
+using aidl::android::hardware::audio::effect::Range;
+using aidl::android::hardware::audio::effect::Spatializer;
+using aidl::android::hardware::audio::effect::VendorExtension;
+using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::HeadTracking;
+using aidl::android::media::audio::common::Spatialization;
+using aidl::android::media::audio::common::toString;
+using android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
+bool AidlConversionSpatializer::isSpatializerParameterSupported() {
+ return mIsSpatializerAidlParamSupported.value_or(
+ (mIsSpatializerAidlParamSupported =
+ [&]() {
+ ::aidl::android::hardware::audio::effect::Parameter aidlParam;
+ auto id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+ Spatializer::vendor);
+ // No range defined in descriptor capability means no Spatializer AIDL
+ // implementation BAD_VALUE return from getParameter indicates the
+ // parameter is not supported by HAL
+ return mDesc.capability.range.getTag() == Range::spatializer &&
+ mEffect->getParameter(id, &aidlParam).getStatus() !=
+ android::BAD_VALUE;
+ }())
+ .value());
+}
+
status_t AidlConversionSpatializer::setParameter(EffectParamReader& param) {
- Parameter aidlParam = VALUE_OR_RETURN_STATUS(
- ::aidl::android::legacy2aidl_EffectParameterReader_Parameter(param));
+ Parameter aidlParam;
+ if (isSpatializerParameterSupported()) {
+ uint32_t command = 0;
+ if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(int8_t)) ||
+ OK != param.readFromParameter(&command)) {
+ ALOGE("%s %d invalid param %s", __func__, __LINE__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+
+ switch (command) {
+ case SPATIALIZER_PARAM_LEVEL: {
+ Spatialization::Level level = Spatialization::Level::NONE;
+ if (OK != param.readFromValue(&level)) {
+ ALOGE("%s invalid level value %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer,
+ spatializationLevel, level);
+ break;
+ }
+ case SPATIALIZER_PARAM_HEADTRACKING_MODE: {
+ HeadTracking::Mode mode = HeadTracking::Mode::DISABLED;
+ if (OK != param.readFromValue(&mode)) {
+ ALOGE("%s invalid mode value %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer, headTrackingMode,
+ mode);
+ break;
+ }
+ case SPATIALIZER_PARAM_HEAD_TO_STAGE: {
+ const size_t valueSize = param.getValueSize();
+ if (valueSize / sizeof(float) > 6 || valueSize % sizeof(float) != 0) {
+ ALOGE("%s invalid parameter value size %zu", __func__, valueSize);
+ return BAD_VALUE;
+ }
+ std::array<float, 6> headToStage = {};
+ for (size_t i = 0; i < valueSize / sizeof(float); i++) {
+ if (OK != param.readFromValue(&headToStage[i])) {
+ ALOGE("%s failed to read headToStage from %s", __func__,
+ param.toString().c_str());
+ return BAD_VALUE;
+ }
+ }
+ HeadTracking::SensorData sensorData =
+ HeadTracking::SensorData::make<HeadTracking::SensorData::headToStage>(
+ headToStage);
+ aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer,
+ headTrackingSensorData, sensorData);
+ break;
+ }
+ case SPATIALIZER_PARAM_HEADTRACKING_CONNECTION: {
+ int32_t modeInt32 = 0;
+ int32_t sensorId = -1;
+ if (OK != param.readFromValue(&modeInt32) || OK != param.readFromValue(&sensorId)) {
+ ALOGE("%s %d invalid parameter value %s", __func__, __LINE__,
+ param.toString().c_str());
+ return BAD_VALUE;
+ }
+
+ const auto mode = static_cast<HeadTracking::ConnectionMode>(modeInt32);
+ if (mode < *ndk::enum_range<HeadTracking::ConnectionMode>().begin() ||
+ mode > *ndk::enum_range<HeadTracking::ConnectionMode>().end()) {
+ ALOGE("%s %d invalid mode %d", __func__, __LINE__, modeInt32);
+ return BAD_VALUE;
+ }
+ aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer,
+ headTrackingConnectionMode, mode);
+ if (status_t status = statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+ status != OK) {
+ ALOGE("%s failed to set headTrackingConnectionMode %s", __func__,
+ toString(mode).c_str());
+ return status;
+ }
+ aidlParam = MAKE_SPECIFIC_PARAMETER(Spatializer, spatializer, headTrackingSensorId,
+ sensorId);
+ return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+ }
+ default: {
+ ALOGE("%s %d invalid command %u", __func__, __LINE__, command);
+ return BAD_VALUE;
+ }
+ }
+ } else {
+ aidlParam = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::legacy2aidl_EffectParameterReader_Parameter(param));
+ }
+
return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
}
status_t AidlConversionSpatializer::getParameter(EffectParamWriter& param) {
- DefaultExtension defaultExt;
- // read parameters into DefaultExtension vector<uint8_t>
- defaultExt.bytes.resize(param.getParameterSize());
- if (OK != param.readFromParameter(defaultExt.bytes.data(), param.getParameterSize())) {
- ALOGE("%s invalid param %s", __func__, param.toString().c_str());
- param.setStatus(BAD_VALUE);
- return BAD_VALUE;
- }
+ if (isSpatializerParameterSupported()) {
+ uint32_t command = 0;
+ if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(int8_t)) ||
+ OK != param.readFromParameter(&command)) {
+ ALOGE("%s %d invalid param %s", __func__, __LINE__, param.toString().c_str());
+ return BAD_VALUE;
+ }
- VendorExtension idTag;
- idTag.extension.setParcelable(defaultExt);
- Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, idTag);
- Parameter aidlParam;
- RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
- // copy the AIDL extension data back to effect_param_t
- return VALUE_OR_RETURN_STATUS(
- ::aidl::android::aidl2legacy_Parameter_EffectParameterWriter(aidlParam, param));
+ switch (command) {
+ case SPATIALIZER_PARAM_SUPPORTED_LEVELS: {
+ const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+ mDesc.capability, Spatializer::spatializationLevel);
+ if (!range) {
+ return BAD_VALUE;
+ }
+ for (const auto level : ::ndk::enum_range<Spatialization::Level>()) {
+ const auto spatializer =
+ Spatializer::make<Spatializer::spatializationLevel>(level);
+ if (spatializer >= range->min && spatializer <= range->max) {
+ if (status_t status = param.writeToValue(&level); status != OK) {
+ ALOGW("%s %d: write level %s to value failed %d", __func__, __LINE__,
+ toString(level).c_str(), status);
+ return status;
+ }
+ }
+ }
+ return OK;
+ }
+ case SPATIALIZER_PARAM_LEVEL: {
+ Parameter aidlParam;
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+ Spatializer::spatializationLevel);
+ RETURN_STATUS_IF_ERROR(
+ statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ const auto level = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+ aidlParam, Spatializer, spatializer, Spatializer::spatializationLevel,
+ Spatialization::Level));
+ return param.writeToValue(&level);
+ }
+ case SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED: {
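+ // Head tracking support is inferred from the spatializationLevel capability range:
+ // a valid range extending beyond Spatialization::Level::NONE is reported as supported.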
+ const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+ mDesc.capability, Spatializer::spatializationLevel);
+ if (!range) {
+ ALOGE("%s %d: range not defined for spatializationLevel", __func__, __LINE__);
+ return BAD_VALUE;
+ }
+ const auto& nonSupport = Spatializer::make<Spatializer::spatializationLevel>(
+ Spatialization::Level::NONE);
+ const bool support = !(range->min > range->max ||
+ (range->min == nonSupport && range->max == nonSupport));
+ return param.writeToValue(&support);
+ }
+ case SPATIALIZER_PARAM_HEADTRACKING_MODE: {
+ Parameter aidlParam;
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+ Spatializer::headTrackingMode);
+ RETURN_STATUS_IF_ERROR(
+ statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ const auto mode = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+ aidlParam, Spatializer, spatializer, Spatializer::headTrackingMode,
+ HeadTracking::Mode));
+ return param.writeToValue(&mode);
+ }
+ case SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS: {
+ Parameter aidlParam;
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+ Spatializer::supportedChannelLayout);
+ RETURN_STATUS_IF_ERROR(
+ statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ const auto& supportedLayouts = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+ aidlParam, Spatializer, spatializer, Spatializer::supportedChannelLayout,
+ std::vector<AudioChannelLayout>));
+ for (const auto& layout : supportedLayouts) {
+ audio_channel_mask_t mask = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+ layout, false /* isInput */));
+ if (status_t status = param.writeToValue(&mask); status != OK) {
+ ALOGW("%s %d: write mask %s to value failed %d", __func__, __LINE__,
+ layout.toString().c_str(), status);
+ return status;
+ }
+ }
+ return OK;
+ }
+ case SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES: {
+ const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+ mDesc.capability, Spatializer::spatializationMode);
+ if (!range) {
+ return BAD_VALUE;
+ }
+ for (const auto mode : ::ndk::enum_range<Spatialization::Mode>()) {
+ if (const auto spatializer =
+ Spatializer::make<Spatializer::spatializationMode>(mode);
+ spatializer >= range->min && spatializer <= range->max) {
+ if (status_t status = param.writeToValue(&mode); status != OK) {
+ ALOGW("%s %d: write mode %s to value failed %d", __func__, __LINE__,
+ toString(mode).c_str(), status);
+ return status;
+ }
+ }
+ }
+ return OK;
+ }
+ case SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION: {
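+ // Report every HeadTracking::ConnectionMode that falls inside the advertised capability range.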
+ const auto& range = getRange<Range::spatializer, Range::SpatializerRange>(
+ mDesc.capability, Spatializer::headTrackingConnectionMode);
+ if (!range) {
+ return BAD_VALUE;
+ }
+ for (const auto mode : ::ndk::enum_range<HeadTracking::ConnectionMode>()) {
+ if (const auto spatializer =
+ Spatializer::make<Spatializer::headTrackingConnectionMode>(mode);
+ spatializer < range->min || spatializer > range->max) {
+ continue;
+ }
+ if (status_t status = param.writeToValue(&mode); status != OK) {
+ ALOGW("%s %d: write mode %s to value failed %d", __func__, __LINE__,
+ toString(mode).c_str(), status);
+ return status;
+ }
+ }
+ return OK;
+ }
+ case SPATIALIZER_PARAM_HEADTRACKING_CONNECTION: {
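+ // The legacy reply packs the connection mode (uint32) followed by the sensor id
+ // (int32), so two AIDL getParameter results are combined into one legacy reply.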
+ status_t status = OK;
+ Parameter aidlParam;
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(
+ Spatializer, spatializerTag, Spatializer::headTrackingConnectionMode);
+ RETURN_STATUS_IF_ERROR(
+ statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ const auto mode = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+ aidlParam, Spatializer, spatializer,
+ Spatializer::headTrackingConnectionMode, HeadTracking::ConnectionMode));
+
+ id = MAKE_SPECIFIC_PARAMETER_ID(Spatializer, spatializerTag,
+ Spatializer::headTrackingSensorId);
+ RETURN_STATUS_IF_ERROR(
+ statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ const auto sensorId = VALUE_OR_RETURN_STATUS(
+ GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Spatializer, spatializer,
+ Spatializer::headTrackingSensorId, int32_t));
+ const uint32_t modeInt32 = static_cast<uint32_t>(mode);
+ if (status = param.writeToValue(&modeInt32); status != OK) {
+ ALOGW("%s %d: write mode %s to value failed %d", __func__, __LINE__,
+ toString(mode).c_str(), status);
+ return status;
+ }
+ if (status = param.writeToValue(&sensorId); status != OK) {
+ ALOGW("%s %d: write sensorId %d to value failed %d", __func__, __LINE__,
+ sensorId, status);
+ return status;
+ }
+ return OK;
+ }
+ default: {
+ ALOGE("%s %d invalid command %u", __func__, __LINE__, command);
+ return BAD_VALUE;
+ }
+ }
+ } else {
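+ // HAL without AIDL Spatializer parameter support: round-trip the opaque parameter
+ // blob through the vendorEffectTag extension.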
+ Parameter aidlParam;
+ DefaultExtension defaultExt;
+ // read parameters into DefaultExtension vector<uint8_t>
+ defaultExt.bytes.resize(param.getParameterSize());
+ if (OK != param.readFromParameter(defaultExt.bytes.data(), param.getParameterSize())) {
+ ALOGE("%s %d invalid param %s", __func__, __LINE__, param.toString().c_str());
+ param.setStatus(BAD_VALUE);
+ return BAD_VALUE;
+ }
+
+ VendorExtension idTag;
+ idTag.extension.setParcelable(defaultExt);
+ Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, idTag);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ // copy the AIDL extension data back to effect_param_t
+ return VALUE_OR_RETURN_STATUS(
+ ::aidl::android::aidl2legacy_Parameter_EffectParameterWriter(aidlParam, param));
+ }
}
} // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
index 7c60b14..444e5a7 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.h
@@ -32,6 +32,8 @@
~AidlConversionSpatializer() {}
private:
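+ // Cached result (computed on first use) of whether the HAL implements the AIDL
+ // Spatializer parameter definitions; see isSpatializerParameterSupported().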
+ std::optional<bool> mIsSpatializerAidlParamSupported;
+ bool isSpatializerParameterSupported();
status_t setParameter(utils::EffectParamReader& param) override;
status_t getParameter(utils::EffectParamWriter& param) override;
};
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
index cad0068..db5cb9a 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
@@ -133,7 +133,6 @@
const audio_channel_mask_t chMask = ::aidl::android::
aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
angle.channel, false);
- ALOGW("%s aidl %d ch %d", __func__, angle.channel, chMask);
if (OK != param.writeToValue(&chMask) ||
OK != param.writeToValue(&angle.azimuthDegree) ||
OK != param.writeToValue(&angle.elevationDegree)) {
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index bb5f851..7f6c1fb 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -78,8 +78,9 @@
virtual status_t getParameters(const String8& keys, String8 *values) = 0;
// Returns audio input buffer size according to parameters passed.
- virtual status_t getInputBufferSize(const struct audio_config *config,
- size_t *size) = 0;
+ // If there is no possibility for the HAL to open an input with the provided
+ // parameters, the method will return BAD_VALUE and modify the provided `config`.
+ virtual status_t getInputBufferSize(struct audio_config *config, size_t *size) = 0;
// Creates and opens the audio hardware output stream. The stream is closed
// by releasing all references to the returned object.
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index a780a17..37615af 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -116,7 +116,7 @@
class StreamOutHalInterfaceEventCallback : public virtual RefBase {
public:
- virtual void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs) = 0;
+ virtual void onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) = 0;
protected:
StreamOutHalInterfaceEventCallback() = default;
diff --git a/media/libaudiohal/tests/Android.bp b/media/libaudiohal/tests/Android.bp
index 8f011c8..b9af0bf 100644
--- a/media/libaudiohal/tests/Android.bp
+++ b/media/libaudiohal/tests/Android.bp
@@ -17,6 +17,7 @@
// frameworks/av/include.
package {
+ default_team: "trendy_team_android_media_audio_framework",
default_applicable_licenses: ["frameworks_av_license"],
}
@@ -49,6 +50,7 @@
shared_libs: [
"libvibrator",
],
+ test_config_template: "AudioHalTestTemplate.xml",
}
cc_test {
@@ -63,3 +65,14 @@
],
header_libs: ["libaudiohalimpl_headers"],
}
+
+cc_test {
+ name: "EffectHalVersionCompatibilityTest",
+ srcs: [
+ "EffectHalVersionCompatibility_test.cpp",
+ ":audio_effect_hal_aidl_src_files",
+ ],
+ defaults: ["libaudiohal_aidl_test_default"],
+ header_libs: ["libaudiohalimpl_headers"],
+ static_libs: ["libgmock"],
+}
diff --git a/media/libaudiohal/tests/AudioHalTestTemplate.xml b/media/libaudiohal/tests/AudioHalTestTemplate.xml
new file mode 100644
index 0000000..b1cb2f0
--- /dev/null
+++ b/media/libaudiohal/tests/AudioHalTestTemplate.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Runs {MODULE}.">
+ <target_preparer class="com.android.tradefed.targetprep.RootTargetPreparer" />
+
+ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+ <option name="cleanup" value="true" />
+ <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+ </target_preparer>
+
+ <test class="com.android.tradefed.testtype.GTest" >
+ <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="module-name" value="{MODULE}" />
+ <option name="native-test-timeout" value="10m" />
+ </test>
+</configuration>
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 1204a3b..3541078 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -443,21 +443,6 @@
EXPECT_EQ(ScreenRotation::DEG_0, mModule->getScreenRotation());
}
-// Without a vendor extension, any unrecognized parameters must be ignored.
-TEST_F(DeviceHalAidlTest, VendorParameterIgnored) {
- EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
- EXPECT_EQ(0UL, mModule->getSyncParameters().size());
- EXPECT_EQ(OK, mDevice->setParameters(createParameterString("random_name", "random_value")));
- EXPECT_EQ(0UL, mModule->getAsyncParameters().size());
- EXPECT_EQ(0UL, mModule->getSyncParameters().size());
-
- EXPECT_EQ(0UL, mModule->getRetrievedParameterIds().size());
- String8 values;
- EXPECT_EQ(OK, mDevice->getParameters(String8("random_name"), &values));
- EXPECT_EQ(0UL, mModule->getRetrievedParameterIds().size());
- EXPECT_EQ(0UL, values.length());
-}
-
class DeviceHalAidlVendorParametersTest : public testing::Test {
public:
void SetUp() override {
diff --git a/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
new file mode 100644
index 0000000..e8731ea
--- /dev/null
+++ b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
@@ -0,0 +1,319 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+#include <unordered_map>
+#define LOG_TAG "EffectHalVersionCompatibilityTest"
+
+#include <EffectHalAidl.h>
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include <aidl/android/hardware/audio/effect/IFactory.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <system/audio_aidl_utils.h>
+#include <system/audio_config.h>
+#include <system/audio_effects/audio_effects_utils.h>
+#include <system/audio_effects/effect_uuid.h>
+#include <utils/Log.h>
+
+using aidl::android::hardware::audio::effect::CommandId;
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::IFactory;
+using aidl::android::hardware::audio::effect::kReopenSupportedVersion;
+using aidl::android::hardware::audio::effect::Parameter;
+using aidl::android::hardware::audio::effect::Processing;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::media::audio::common::AudioUuid;
+using android::OK;
+using android::sp;
+using android::effect::EffectHalAidl;
+using testing::_;
+using testing::Eq;
+
+namespace {
+
+/**
+ * Maps of parameters to the AIDL version in which they were introduced.
+ */
+// parameters defined directly in the Parameter union, except Parameter::specific (defined in
+// kParamIdEffectVersionMap).
+static const std::unordered_map<Parameter::Tag, int /* version */> kParamTagVersionMap = {
+ {Parameter::common, 1}, {Parameter::deviceDescription, 1},
+ {Parameter::mode, 1}, {Parameter::source, 1},
+ {Parameter::offload, 1}, {Parameter::volumeStereo, 1},
+ {Parameter::sourceMetadata, 2}, {Parameter::sinkMetadata, 2},
+};
+
+// Map of the version in which each effect-specific Parameter::Id tag was introduced.
+// Id tags defined in the Parameter::Id union, except Parameter::Id::commonTag (covered by
+// kParamTagVersionMap).
+static const std::unordered_map<Parameter::Id::Tag, int /* version */> kParamIdEffectVersionMap = {
+ {Parameter::Id::vendorEffectTag, 1},
+ {Parameter::Id::acousticEchoCancelerTag, 1},
+ {Parameter::Id::automaticGainControlV1Tag, 1},
+ {Parameter::Id::automaticGainControlV2Tag, 1},
+ {Parameter::Id::bassBoostTag, 1},
+ {Parameter::Id::downmixTag, 1},
+ {Parameter::Id::dynamicsProcessingTag, 1},
+ {Parameter::Id::environmentalReverbTag, 1},
+ {Parameter::Id::equalizerTag, 1},
+ {Parameter::Id::hapticGeneratorTag, 1},
+ {Parameter::Id::loudnessEnhancerTag, 1},
+ {Parameter::Id::noiseSuppressionTag, 1},
+ {Parameter::Id::presetReverbTag, 1},
+ {Parameter::Id::virtualizerTag, 1},
+ {Parameter::Id::visualizerTag, 1},
+ {Parameter::Id::volumeTag, 1},
+ {Parameter::Id::spatializerTag, 2},
+};
+// Tags defined in the Parameter::Specific union.
+static const std::unordered_map<Parameter::Specific::Tag, int /* version */>
+ kParamEffectVersionMap = {
+ {Parameter::Specific::vendorEffect, 1},
+ {Parameter::Specific::acousticEchoCanceler, 1},
+ {Parameter::Specific::automaticGainControlV1, 1},
+ {Parameter::Specific::automaticGainControlV2, 1},
+ {Parameter::Specific::bassBoost, 1},
+ {Parameter::Specific::downmix, 1},
+ {Parameter::Specific::dynamicsProcessing, 1},
+ {Parameter::Specific::environmentalReverb, 1},
+ {Parameter::Specific::equalizer, 1},
+ {Parameter::Specific::hapticGenerator, 1},
+ {Parameter::Specific::loudnessEnhancer, 1},
+ {Parameter::Specific::noiseSuppression, 1},
+ {Parameter::Specific::presetReverb, 1},
+ {Parameter::Specific::virtualizer, 1},
+ {Parameter::Specific::visualizer, 1},
+ {Parameter::Specific::volume, 1},
+ {Parameter::Specific::spatializer, 2},
+};
+
+class MockFactory : public IFactory {
+ public:
+ explicit MockFactory(int version) : IFactory(), mVersion(version) {}
+ MOCK_METHOD(ndk::ScopedAStatus, queryEffects,
+ (const std::optional<AudioUuid>& in_type_uuid,
+ const std::optional<AudioUuid>& in_impl_uuid,
+ const std::optional<AudioUuid>& in_proxy_uuid,
+ std::vector<Descriptor>* _aidl_return),
+ (override));
+
+ MOCK_METHOD(ndk::ScopedAStatus, queryProcessing,
+ (const std::optional<Processing::Type>& in_type,
+ std::vector<Processing>* _aidl_return),
+ (override));
+
+ MOCK_METHOD(ndk::ScopedAStatus, createEffect,
+ (const AudioUuid& in_impl_uuid, std::shared_ptr<IEffect>* _aidl_return),
+ (override));
+
+ MOCK_METHOD(ndk::ScopedAStatus, destroyEffect, (const std::shared_ptr<IEffect>& in_handle),
+ (override));
+
+ ndk::ScopedAStatus getInterfaceVersion(int32_t* _aidl_return) {
+ *_aidl_return = mVersion;
+ return ndk::ScopedAStatus::ok();
+ }
+
+ // These must be implemented but are not used in these tests.
+ ::ndk::SpAIBinder asBinder() { return ::ndk::SpAIBinder(); }
+ bool isRemote() { return false; }
+ ::ndk::ScopedAStatus getInterfaceHash(std::string*) { return ndk::ScopedAStatus::ok(); }
+
+ private:
+ const int mVersion;
+};
+
+class MockEffect : public IEffect {
+ public:
+ explicit MockEffect(int version) : IEffect(), mVersion(version) {}
+ MOCK_METHOD(ndk::ScopedAStatus, open,
+ (const Parameter::Common& common,
+ const std::optional<Parameter::Specific>& specific,
+ IEffect::OpenEffectReturn* ret),
+ (override));
+ MOCK_METHOD(ndk::ScopedAStatus, close, (), (override));
+ MOCK_METHOD(binder_status_t, dump, (int fd, const char** args, uint32_t numArgs), (override));
+ MOCK_METHOD(ndk::ScopedAStatus, command, (CommandId id), (override));
+ MOCK_METHOD(ndk::ScopedAStatus, getState, (State * state), (override));
+ MOCK_METHOD(ndk::ScopedAStatus, getDescriptor, (Descriptor * desc), (override));
+ MOCK_METHOD(ndk::ScopedAStatus, destroy, (), ());
+
+ // reopen introduced in version kReopenSupportedVersion
+ ndk::ScopedAStatus reopen(IEffect::OpenEffectReturn*) override {
+ return mVersion < kReopenSupportedVersion
+ ? ndk::ScopedAStatus::fromStatus(STATUS_UNKNOWN_TRANSACTION)
+ : ndk::ScopedAStatus::ok();
+ }
+
+ // Handle setParameter for all Parameter tags, gated on the version each tag was introduced in.
+ ndk::ScopedAStatus setParameter(const Parameter& param) override {
+ const auto paramTag = param.getTag();
+ switch (paramTag) {
+ case Parameter::common:
+ case Parameter::deviceDescription:
+ case Parameter::mode:
+ case Parameter::source:
+ case Parameter::offload:
+ case Parameter::volumeStereo:
+ case Parameter::sinkMetadata:
+ FALLTHROUGH_INTENDED;
+ case Parameter::sourceMetadata: {
+ if (kParamTagVersionMap.find(paramTag) != kParamTagVersionMap.end() &&
+ kParamTagVersionMap.at(paramTag) >= mVersion) {
+ return ndk::ScopedAStatus::ok();
+ }
+ break;
+ }
+ case Parameter::specific: {
+ // TODO
+ break;
+ }
+ }
+ return ndk::ScopedAStatus::fromStatus(STATUS_BAD_VALUE);
+ }
+
+ /**
+ * Only version compatibility is checked here:
+ * @return BAD_VALUE if a tag is not supported by the current AIDL version.
+ * @return OK if a tag is supported by the current AIDL version.
+ */
+ ndk::ScopedAStatus getParameter(const Parameter::Id& id, Parameter*) override {
+ const auto idTag = id.getTag();
+ switch (idTag) {
+ case Parameter::Id::commonTag: {
+ const auto paramTag = id.get<Parameter::Id::commonTag>();
+ if (kParamTagVersionMap.find(paramTag) != kParamTagVersionMap.end() &&
+ kParamTagVersionMap.at(paramTag) >= mVersion) {
+ return ndk::ScopedAStatus::ok();
+ }
+ break;
+ }
+ case Parameter::Id::vendorEffectTag:
+ case Parameter::Id::acousticEchoCancelerTag:
+ case Parameter::Id::automaticGainControlV1Tag:
+ case Parameter::Id::automaticGainControlV2Tag:
+ case Parameter::Id::bassBoostTag:
+ case Parameter::Id::downmixTag:
+ case Parameter::Id::dynamicsProcessingTag:
+ case Parameter::Id::environmentalReverbTag:
+ case Parameter::Id::equalizerTag:
+ case Parameter::Id::hapticGeneratorTag:
+ case Parameter::Id::loudnessEnhancerTag:
+ case Parameter::Id::noiseSuppressionTag:
+ case Parameter::Id::presetReverbTag:
+ case Parameter::Id::virtualizerTag:
+ case Parameter::Id::visualizerTag:
+ case Parameter::Id::volumeTag:
+ FALLTHROUGH_INTENDED;
+ case Parameter::Id::spatializerTag: {
+ if (kParamIdEffectVersionMap.find(idTag) != kParamIdEffectVersionMap.end() &&
+ kParamIdEffectVersionMap.at(idTag) >= mVersion) {
+ return ndk::ScopedAStatus::ok();
+ }
+ break;
+ }
+ }
+ return ndk::ScopedAStatus::fromStatus(STATUS_BAD_VALUE);
+ }
+
+ ndk::ScopedAStatus getInterfaceVersion(int32_t* _aidl_return) {
+ *_aidl_return = mVersion;
+ return ndk::ScopedAStatus::ok();
+ }
+
+ // These must be implemented but are not used in these tests.
+ ::ndk::SpAIBinder asBinder() { return ::ndk::SpAIBinder(); }
+ bool isRemote() { return false; }
+ ::ndk::ScopedAStatus getInterfaceHash(std::string*) { return ndk::ScopedAStatus::ok(); }
+
+ private:
+ const int mVersion;
+};
+
+static const std::vector<AudioUuid> kTestParamUUIDs = {
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidAcousticEchoCanceler(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV1(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV2(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidBassBoost(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidEnvReverb(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidEqualizer(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidNoiseSuppression(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidPresetReverb(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidSpatializer(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidVirtualizer(),
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer(),
+ ::aidl::android::hardware::audio::effect::getEffectUuidNull(),
+};
+static const std::vector<int> kTestParamVersion = {1, 2}; // Effect AIDL HAL versions to test
+
+enum ParamName { UUID, VERSION };
+using TestParam = std::tuple<AudioUuid, int /* version */>;
+
+class EffectHalVersionCompatibilityTest : public ::testing::TestWithParam<TestParam> {
+ public:
+ void SetUp() override {
+ mMockFactory = ndk::SharedRefBase::make<MockFactory>(mVersion);
+ ASSERT_NE(mMockFactory, nullptr);
+ mMockEffect = ndk::SharedRefBase::make<MockEffect>(mVersion);
+ ASSERT_NE(mMockEffect, nullptr);
+ mEffectHalAidl = sp<EffectHalAidl>::make(mMockFactory, mMockEffect, 0, 0, mDesc, false);
+ ASSERT_NE(mEffectHalAidl, nullptr);
+ }
+
+ void TearDown() override {
+ EXPECT_CALL(*mMockFactory, destroyEffect(_));
+ mEffectHalAidl.clear();
+ mMockEffect.reset();
+ mMockFactory.reset();
+ }
+
+ protected:
+ const int mVersion = std::get<VERSION>(GetParam());
+ const AudioUuid mTypeUuid = std::get<UUID>(GetParam());
+ const Descriptor mDesc = {.common.id.type = mTypeUuid};
+ std::shared_ptr<MockFactory> mMockFactory = nullptr;
+ std::shared_ptr<MockEffect> mMockEffect = nullptr;
+ sp<EffectHalAidl> mEffectHalAidl = nullptr;
+};
+
+TEST_P(EffectHalVersionCompatibilityTest, testEffectAidlHalCreateDestroy) {
+ // Effect creation and destruction are exercised by SetUp() and TearDown().
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ EffectHalVersionCompatibilityTestWithVersion, EffectHalVersionCompatibilityTest,
+ ::testing::Combine(testing::ValuesIn(kTestParamUUIDs),
+ testing::ValuesIn(kTestParamVersion)),
+ [](const testing::TestParamInfo<EffectHalVersionCompatibilityTest::ParamType>& info) {
+ auto version = std::to_string(std::get<VERSION>(info.param));
+ auto uuid = android::audio::utils::toString(std::get<UUID>(info.param));
+ std::string name = "EffectHalVersionCompatibilityTest_V" + version + "_" + uuid;
+ std::replace_if(
+ name.begin(), name.end(), [](const char c) { return !std::isalnum(c); }, '_');
+ return name;
+ });
+
+} // namespace
\ No newline at end of file
diff --git a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
index 0cb654c..d783c64 100644
--- a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
+++ b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
@@ -21,10 +21,13 @@
#include <cstdint>
#include <cstring>
#include <memory>
+#include <string>
#include <utility>
#define LOG_TAG "EffectsFactoryHalInterfaceTest"
#include <aidl/android/media/audio/common/AudioUuid.h>
+#include <android/media/audio/common/HeadTracking.h>
+#include <android/media/audio/common/Spatialization.h>
#include <gtest/gtest.h>
#include <media/AidlConversionCppNdk.h>
#include <media/audiohal/EffectsFactoryHalInterface.h>
@@ -40,15 +43,18 @@
#include <system/audio_effects/effect_hapticgenerator.h>
#include <system/audio_effects/effect_loudnessenhancer.h>
#include <system/audio_effects/effect_ns.h>
+#include <system/audio_effects/effect_spatializer.h>
#include <utils/RefBase.h>
#include <vibrator/ExternalVibrationUtils.h>
namespace android {
-using ::aidl::android::media::audio::common::AudioUuid;
-using ::android::audio::utils::toString;
+using aidl::android::media::audio::common::AudioUuid;
+using android::audio::utils::toString;
using effect::utils::EffectParamReader;
using effect::utils::EffectParamWriter;
+using media::audio::common::HeadTracking;
+using media::audio::common::Spatialization;
// EffectsFactoryHalInterface
TEST(libAudioHalTest, createEffectsFactoryHalInterface) {
@@ -144,34 +150,68 @@
EXPECT_NE(0, version.getMajorVersion());
}
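+// Some parameters are write-only (e.g. the head-to-stage pose) or read-only (capability
+// queries), so each combination declares which legacy paths it exercises.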
+enum ParamSetGetType { SET_N_GET, SET_ONLY, GET_ONLY };
class EffectParamCombination {
public:
template <typename P, typename V>
- void init(const P& p, const V& v, size_t len) {
- setBuffer.resize(sizeof(effect_param_t) + sizeof(p) + sizeof(v) + 4);
- getBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
- expectBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
- parameterSet =
- std::make_shared<EffectParamReader>(createEffectParam(setBuffer.data(), p, v));
- parameterGet =
- std::make_shared<EffectParamReader>(createEffectParam(getBuffer.data(), p, v));
- parameterExpect =
- std::make_shared<EffectParamReader>(createEffectParam(expectBuffer.data(), p, v));
- valueSize = len;
+ void init(const P& p, const V& v, size_t len, ParamSetGetType type) {
+ if (type != GET_ONLY) {
+ mSetBuffer.resize(sizeof(effect_param_t) + sizeof(p) + sizeof(v) + 4);
+ mParameterSet =
+ std::make_shared<EffectParamReader>(createEffectParam(mSetBuffer.data(), p, v));
+ }
+
+ if (type != SET_ONLY) {
+ mGetBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
+ mExpectBuffer.resize(sizeof(effect_param_t) + sizeof(p) + len + 4);
+ mParameterGet =
+ std::make_shared<EffectParamReader>(createEffectParam(mGetBuffer.data(), p, v));
+ mParameterExpect = std::make_shared<EffectParamReader>(
+ createEffectParam(mExpectBuffer.data(), p, v));
+ mValueSize = len;
+ }
+ mType = type;
}
- std::shared_ptr<EffectParamReader> parameterSet; /* setParameter */
- std::shared_ptr<EffectParamReader> parameterGet; /* getParameter */
- std::shared_ptr<EffectParamReader> parameterExpect; /* expected from getParameter */
- size_t valueSize; /* ValueSize expect to write in reply data buffer */
+ std::shared_ptr<EffectParamReader> mParameterSet; /* setParameter */
+ std::shared_ptr<EffectParamReader> mParameterGet; /* getParameter */
+ std::shared_ptr<EffectParamReader> mParameterExpect; /* expected from getParameter */
+ size_t mValueSize = 0ul; /* ValueSize expect to write in reply data buffer */
+ ParamSetGetType mType = SET_N_GET;
+
+ std::string toString() {
+ uint32_t command = 0;
+ std::string str = "Command: ";
+ if (mType != GET_ONLY) {
+ str += (OK == mParameterSet->readFromParameter(&command) ? std::to_string(command)
+ : mParameterSet->toString());
+ } else {
+ str += (OK == mParameterGet->readFromParameter(&command) ? std::to_string(command)
+ : mParameterGet->toString());
+ }
+ str += "_";
+ str += toString(mType);
+ return str;
+ }
+
+ static std::string toString(ParamSetGetType type) {
+ switch (type) {
+ case SET_N_GET:
+ return "Type:SetAndGet";
+ case SET_ONLY:
+ return "Type:SetOnly";
+ case GET_ONLY:
+ return "Type:GetOnly";
+ }
+ }
private:
- std::vector<uint8_t> setBuffer;
- std::vector<uint8_t> getBuffer;
- std::vector<uint8_t> expectBuffer;
+ std::vector<uint8_t> mSetBuffer;
+ std::vector<uint8_t> mGetBuffer;
+ std::vector<uint8_t> mExpectBuffer;
template <typename P, typename V>
- EffectParamReader createEffectParam(void* buf, const P& p, const V& v) {
+ static EffectParamReader createEffectParam(void* buf, const P& p, const V& v) {
effect_param_t* paramRet = (effect_param_t*)buf;
paramRet->psize = sizeof(P);
paramRet->vsize = sizeof(V);
@@ -184,48 +224,106 @@
};
template <typename P, typename V>
-std::shared_ptr<EffectParamCombination> createEffectParamCombination(const P& p, const V& v,
- size_t len) {
+std::shared_ptr<EffectParamCombination> createEffectParamCombination(
+ const P& p, const V& v, size_t len, ParamSetGetType type = SET_N_GET) {
auto comb = std::make_shared<EffectParamCombination>();
- comb->init(p, v, len);
+ comb->init(p, v, len, type);
return comb;
}
-enum ParamName { TUPLE_UUID, TUPLE_PARAM_COMBINATION };
-using EffectParamTestTuple =
- std::tuple<const effect_uuid_t* /* type UUID */, std::shared_ptr<EffectParamCombination>>;
-
+enum ParamName { TUPLE_UUID, TUPLE_IS_INPUT, TUPLE_PARAM_COMBINATION };
+using EffectParamTestTuple = std::tuple<const effect_uuid_t* /* type UUID */, bool /* isInput */,
+ std::vector<std::shared_ptr<EffectParamCombination>>>;
static const effect_uuid_t EXTEND_EFFECT_TYPE_UUID = {
0xfa81dbde, 0x588b, 0x11ed, 0x9b6a, {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
constexpr std::array<uint8_t, 10> kVendorExtensionData({0xff, 0x5, 0x50, 0xab, 0xcd, 0x00, 0xbd,
0xdb, 0xee, 0xff});
-std::vector<EffectParamTestTuple> testPairs = {
- std::make_tuple(FX_IID_AEC,
- createEffectParamCombination(AEC_PARAM_ECHO_DELAY, 0xff /* echoDelayMs */,
- sizeof(int32_t) /* returnValueSize */)),
- std::make_tuple(FX_IID_AGC,
- createEffectParamCombination(AGC_PARAM_TARGET_LEVEL, 20 /* targetLevel */,
- sizeof(int16_t) /* returnValueSize */)),
- std::make_tuple(SL_IID_BASSBOOST,
- createEffectParamCombination(BASSBOOST_PARAM_STRENGTH, 20 /* strength */,
- sizeof(int16_t) /* returnValueSize */)),
- std::make_tuple(EFFECT_UIID_DOWNMIX,
- createEffectParamCombination(DOWNMIX_PARAM_TYPE, DOWNMIX_TYPE_FOLD,
- sizeof(int16_t) /* returnValueSize */)),
- std::make_tuple(SL_IID_DYNAMICSPROCESSING,
- createEffectParamCombination(
- std::array<uint32_t, 2>({DP_PARAM_INPUT_GAIN, 0 /* channel */}),
- 30 /* gainDb */, sizeof(int32_t) /* returnValueSize */)),
+static std::vector<EffectParamTestTuple> testPairs = {
std::make_tuple(
- FX_IID_LOUDNESS_ENHANCER,
- createEffectParamCombination(LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB, 5 /* gain */,
- sizeof(int32_t) /* returnValueSize */)),
- std::make_tuple(FX_IID_NS,
- createEffectParamCombination(NS_PARAM_LEVEL, 1 /* level */,
- sizeof(int32_t) /* returnValueSize */)),
- std::make_tuple(&EXTEND_EFFECT_TYPE_UUID,
- createEffectParamCombination(8, kVendorExtensionData,
- sizeof(kVendorExtensionData)))};
+ FX_IID_AEC, true /* isInput */,
+ std::vector<std::shared_ptr<EffectParamCombination>>{
+ createEffectParamCombination(AEC_PARAM_ECHO_DELAY, 0xff /* echoDelayMs */,
+ sizeof(int32_t) /* returnValueSize */)}),
+ std::make_tuple(
+ FX_IID_AGC, false /* isInput */,
+ std::vector<std::shared_ptr<EffectParamCombination>>{
+ createEffectParamCombination(AGC_PARAM_TARGET_LEVEL, 20 /* targetLevel */,
+ sizeof(int16_t) /* returnValueSize */)}),
+ std::make_tuple(
+ SL_IID_BASSBOOST, false /* isInput */,
+ std::vector<std::shared_ptr<EffectParamCombination>>{
+ createEffectParamCombination(BASSBOOST_PARAM_STRENGTH, 20 /* strength */,
+ sizeof(int16_t) /* returnValueSize */)}),
+ std::make_tuple(
+ EFFECT_UIID_DOWNMIX, false /* isInput */,
+ std::vector<std::shared_ptr<EffectParamCombination>>{
+ createEffectParamCombination(DOWNMIX_PARAM_TYPE, DOWNMIX_TYPE_FOLD,
+ sizeof(int16_t) /* returnValueSize */)}),
+ std::make_tuple(
+ SL_IID_DYNAMICSPROCESSING, false /* isInput */,
+ std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+ std::array<uint32_t, 2>({DP_PARAM_INPUT_GAIN, 0 /* channel */}),
+ 30 /* gainDb */, sizeof(int32_t) /* returnValueSize */)}),
+ std::make_tuple(
+ FX_IID_LOUDNESS_ENHANCER, false /* isInput */,
+ std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+ LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB, 5 /* gain */,
+ sizeof(int32_t) /* returnValueSize */)}),
+ std::make_tuple(
+ FX_IID_NS, true /* isInput */,
+ std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+ NS_PARAM_LEVEL, 1 /* level */, sizeof(int32_t) /* returnValueSize */)}),
+ std::make_tuple(
+ FX_IID_SPATIALIZER, false /* isInput */,
+ std::vector<std::shared_ptr<EffectParamCombination>>{
+ createEffectParamCombination(SPATIALIZER_PARAM_LEVEL,
+ SPATIALIZATION_LEVEL_MULTICHANNEL,
+ sizeof(uint8_t), SET_N_GET),
+ createEffectParamCombination(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ HeadTracking::Mode::RELATIVE_WORLD,
+ sizeof(uint8_t), SET_N_GET),
+ createEffectParamCombination(
+ SPATIALIZER_PARAM_HEAD_TO_STAGE,
+ std::array<float, 6>{.55f, 0.2f, 1.f, .999f, .43f, 19.f},
+ sizeof(std::array<float, 6>), SET_ONLY),
+ createEffectParamCombination(
+ SPATIALIZER_PARAM_HEADTRACKING_CONNECTION,
+ std::array<uint32_t, 2>{
+ static_cast<uint32_t>(HeadTracking::ConnectionMode::
+ DIRECT_TO_SENSOR_TUNNEL),
+ 0x5e /* sensorId */},
+ sizeof(std::array<uint32_t, 2>), SET_N_GET),
+ createEffectParamCombination(
+ SPATIALIZER_PARAM_SUPPORTED_LEVELS,
+ std::array<Spatialization::Level, 3>{
+ Spatialization::Level::NONE,
+ Spatialization::Level::MULTICHANNEL,
+ Spatialization::Level::BED_PLUS_OBJECTS},
+ sizeof(std::array<uint8_t, 3>), GET_ONLY),
+ createEffectParamCombination(SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED, true,
+ sizeof(bool), GET_ONLY),
+ createEffectParamCombination(SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS,
+ AUDIO_CHANNEL_OUT_5POINT1, sizeof(uint8_t),
+ GET_ONLY),
+ createEffectParamCombination(
+ SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES,
+ std::array<Spatialization::Mode, 2>{
+ Spatialization::Mode::BINAURAL,
+ Spatialization::Mode::TRANSAURAL},
+ sizeof(std::array<uint8_t, 2>), GET_ONLY),
+ createEffectParamCombination(
+ SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION,
+ std::array<HeadTracking::ConnectionMode, 3>{
+ HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED,
+ HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW,
+ HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL},
+ sizeof(std::array<uint8_t, 3>), GET_ONLY),
+ }),
+ std::make_tuple(
+ &EXTEND_EFFECT_TYPE_UUID, false /* isInput */,
+ std::vector<std::shared_ptr<EffectParamCombination>>{createEffectParamCombination(
+ uint32_t{8}, kVendorExtensionData, sizeof(kVendorExtensionData))}),
+};
class libAudioHalEffectParamTest : public ::testing::TestWithParam<EffectParamTestTuple> {
public:
@@ -233,13 +331,8 @@
: mParamTuple(GetParam()),
mFactory(EffectsFactoryHalInterface::create()),
mTypeUuid(std::get<TUPLE_UUID>(mParamTuple)),
- mCombination(std::get<TUPLE_PARAM_COMBINATION>(mParamTuple)),
- mExpectedValue([&]() {
- std::vector<uint8_t> expectData(mCombination->valueSize);
- mCombination->parameterExpect->readFromValue(expectData.data(),
- mCombination->valueSize);
- return expectData;
- }()),
+ mCombinations(std::get<TUPLE_PARAM_COMBINATION>(mParamTuple)),
+ mIsInput(std::get<TUPLE_IS_INPUT>(mParamTuple)),
mDescs([&]() {
std::vector<effect_descriptor_t> descs;
if (mFactory && mTypeUuid && OK == mFactory->getDescriptors(mTypeUuid, &descs)) {
@@ -263,7 +356,8 @@
uint32_t reply = 0;
uint32_t replySize = sizeof(reply);
ASSERT_EQ(OK, interface->command(EFFECT_CMD_INIT, 0, nullptr, &replySize, &reply));
- ASSERT_EQ(OK, interface->command(EFFECT_CMD_SET_CONFIG, sizeof(mEffectConfig),
+
+ ASSERT_EQ(OK, interface->command(EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
&mEffectConfig, &replySize, &reply));
}
@@ -284,60 +378,85 @@
}
void setAndGetParameter(const sp<EffectHalInterface>& interface) {
- uint32_t replySize = sizeof(uint32_t);
- uint8_t reply[replySize];
- auto parameterSet = mCombination->parameterSet;
- ASSERT_EQ(OK,
- interface->command(EFFECT_CMD_SET_PARAM, (uint32_t)parameterSet->getTotalSize(),
- const_cast<effect_param_t*>(¶meterSet->getEffectParam()),
- &replySize, &reply))
- << parameterSet->toString();
- ASSERT_EQ(replySize, sizeof(uint32_t));
+ for (const auto combination : mCombinations) {
+ uint32_t replySize = kSetParamReplySize;
+ uint8_t reply[replySize];
+ const auto type = combination->mType;
+ if (type != GET_ONLY) {
+ const auto& set = combination->mParameterSet;
+ ASSERT_EQ(OK,
+ interface->command(EFFECT_CMD_SET_PARAM, (uint32_t)set->getTotalSize(),
+ const_cast<effect_param_t*>(&set->getEffectParam()),
+ &replySize, &reply))
+ << set->toString();
+ ASSERT_EQ(replySize, kSetParamReplySize);
+ }
- effect_param_t* getParam =
- const_cast<effect_param_t*>(&mCombination->parameterGet->getEffectParam());
- size_t maxReplySize = mCombination->valueSize + sizeof(effect_param_t) +
- sizeof(parameterSet->getPaddedParameterSize());
- replySize = maxReplySize;
- EXPECT_EQ(OK,
- interface->command(EFFECT_CMD_GET_PARAM, (uint32_t)parameterSet->getTotalSize(),
- const_cast<effect_param_t*>(¶meterSet->getEffectParam()),
- &replySize, getParam));
- EffectParamReader parameterGet(*getParam);
- EXPECT_EQ(replySize, parameterGet.getTotalSize()) << parameterGet.toString();
- if (mCombination->valueSize) {
- std::vector<uint8_t> response(mCombination->valueSize);
- EXPECT_EQ(OK, parameterGet.readFromValue(response.data(), mCombination->valueSize))
- << " try get valueSize " << mCombination->valueSize << " from "
- << parameterGet.toString() << " set " << parameterSet->toString();
- EXPECT_EQ(response, mExpectedValue);
+ if (type != SET_ONLY) {
+ auto get = combination->mParameterGet;
+ auto expect = combination->mParameterExpect;
+ effect_param_t* getParam = const_cast<effect_param_t*>(&get->getEffectParam());
+ size_t maxReplySize = combination->mValueSize + sizeof(effect_param_t) +
+ sizeof(expect->getPaddedParameterSize());
+ replySize = maxReplySize;
+ EXPECT_EQ(OK,
+ interface->command(EFFECT_CMD_GET_PARAM, (uint32_t)expect->getTotalSize(),
+ const_cast<effect_param_t*>(&expect->getEffectParam()),
+ &replySize, getParam));
+
+ EffectParamReader getReader(*getParam);
+ EXPECT_EQ(replySize, getReader.getTotalSize()) << getReader.toString();
+ if (combination->mValueSize) {
+ std::vector<uint8_t> expectedData(combination->mValueSize);
+ EXPECT_EQ(OK, expect->readFromValue(expectedData.data(), expectedData.size()))
+ << combination->toString();
+ std::vector<uint8_t> response(combination->mValueSize);
+ EXPECT_EQ(OK, getReader.readFromValue(response.data(), combination->mValueSize))
+ << " try get valueSize " << combination->mValueSize << " from:\n"
+ << getReader.toString() << "\nexpect:\n"
+ << expect->toString();
+ EXPECT_EQ(expectedData, response) << combination->toString();
+ }
+ }
}
}
+ static constexpr size_t kSetParamReplySize = sizeof(uint32_t);
const EffectParamTestTuple mParamTuple;
const sp<EffectsFactoryHalInterface> mFactory;
const effect_uuid_t* mTypeUuid;
- std::shared_ptr<EffectParamCombination> mCombination;
- const std::vector<uint8_t> mExpectedValue;
+ std::vector<std::shared_ptr<EffectParamCombination>> mCombinations{};
+ const bool mIsInput;
const std::vector<effect_descriptor_t> mDescs;
- std::vector<sp<EffectHalInterface>> mHalInterfaces;
- effect_config_t mEffectConfig = {.inputCfg = {.accessMode = EFFECT_BUFFER_ACCESS_READ,
- .format = AUDIO_FORMAT_PCM_FLOAT,
- .bufferProvider.getBuffer = nullptr,
- .bufferProvider.releaseBuffer = nullptr,
- .bufferProvider.cookie = nullptr,
- .mask = EFFECT_CONFIG_ALL,
- .samplingRate = 48000,
- .channels = AUDIO_CHANNEL_IN_STEREO},
-
- .outputCfg = {.accessMode = EFFECT_BUFFER_ACCESS_WRITE,
- .format = AUDIO_FORMAT_PCM_FLOAT,
- .bufferProvider.getBuffer = nullptr,
- .bufferProvider.releaseBuffer = nullptr,
- .bufferProvider.cookie = nullptr,
- .mask = EFFECT_CONFIG_ALL,
- .samplingRate = 48000,
- .channels = AUDIO_CHANNEL_OUT_STEREO}};
+ std::vector<sp<EffectHalInterface>> mHalInterfaces{};
+ effect_config_t mEffectConfig = {
+ .inputCfg =
+ {
+ .buffer = {.frameCount = 0x100},
+ .samplingRate = 48000,
+ .channels = mIsInput ? AUDIO_CHANNEL_IN_VOICE_CALL_MONO
+ : AUDIO_CHANNEL_IN_STEREO,
+ .bufferProvider = {.getBuffer = nullptr,
+ .releaseBuffer = nullptr,
+ .cookie = nullptr},
+ .format = AUDIO_FORMAT_PCM_FLOAT,
+ .accessMode = EFFECT_BUFFER_ACCESS_READ,
+ .mask = EFFECT_CONFIG_ALL,
+ },
+ .outputCfg =
+ {
+ .buffer = {.frameCount = 0x100},
+ .samplingRate = 48000,
+ .channels = mIsInput ? AUDIO_CHANNEL_IN_VOICE_CALL_MONO
+ : AUDIO_CHANNEL_OUT_STEREO,
+ .bufferProvider = {.getBuffer = nullptr,
+ .releaseBuffer = nullptr,
+ .cookie = nullptr},
+ .format = AUDIO_FORMAT_PCM_FLOAT,
+ .accessMode = EFFECT_BUFFER_ACCESS_WRITE,
+ .mask = EFFECT_CONFIG_ALL,
+ },
+ };
};
TEST_P(libAudioHalEffectParamTest, setAndGetParam) {
@@ -392,7 +511,8 @@
AudioUuid uuid = ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(
*std::get<TUPLE_UUID>(info.param))
.value();
- std::string name = "UUID_" + toString(uuid);
+ std::string name = "UUID_" + toString(uuid) + "_";
+ name += std::get<TUPLE_IS_INPUT>(info.param) ? "input" : "output";
std::replace_if(
name.begin(), name.end(), [](const char c) { return !std::isalnum(c); }, '_');
return name;
@@ -404,6 +524,4 @@
return RUN_ALL_TESTS();
}
-// TODO: b/263986405 Add multi-thread testing
-
} // namespace android
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 57b860d..f9ae2d4 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -441,10 +441,10 @@
track->prepareForAdjustChannels(mFrameCount);
}
} break;
- case HAPTIC_INTENSITY: {
- const os::HapticScale hapticIntensity = static_cast<os::HapticScale>(valueInt);
- if (track->mHapticIntensity != hapticIntensity) {
- track->mHapticIntensity = hapticIntensity;
+ case HAPTIC_SCALE: {
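+ // The haptic scale is now passed by pointer as an os::HapticScale struct instead of
+ // an integer intensity value.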
+ const os::HapticScale hapticScale = *reinterpret_cast<os::HapticScale*>(value);
+ if (track->mHapticScale != hapticScale) {
+ track->mHapticScale = hapticScale;
}
} break;
case HAPTIC_MAX_AMPLITUDE: {
@@ -585,7 +585,7 @@
t->mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
// haptic
t->mHapticPlaybackEnabled = false;
- t->mHapticIntensity = os::HapticScale::NONE;
+ t->mHapticScale = {/*level=*/os::HapticLevel::NONE };
t->mHapticMaxAmplitude = NAN;
t->mMixerHapticChannelMask = AUDIO_CHANNEL_NONE;
t->mMixerHapticChannelCount = 0;
@@ -636,7 +636,7 @@
switch (t->mMixerFormat) {
// Mixer format should be AUDIO_FORMAT_PCM_FLOAT.
case AUDIO_FORMAT_PCM_FLOAT: {
- os::scaleHapticData((float*) buffer, sampleCount, t->mHapticIntensity,
+ os::scaleHapticData((float*) buffer, sampleCount, t->mHapticScale,
t->mHapticMaxAmplitude);
} break;
default:
diff --git a/media/libaudioprocessing/AudioResamplerSinc.cpp b/media/libaudioprocessing/AudioResamplerSinc.cpp
index f2c386d..1a08a03 100644
--- a/media/libaudioprocessing/AudioResamplerSinc.cpp
+++ b/media/libaudioprocessing/AudioResamplerSinc.cpp
@@ -17,7 +17,6 @@
#define LOG_TAG "AudioResamplerSinc"
//#define LOG_NDEBUG 0
-#define __STDC_CONSTANT_MACROS
#include <malloc.h>
#include <pthread.h>
#include <string.h>
diff --git a/media/libaudioprocessing/include/media/AudioMixer.h b/media/libaudioprocessing/include/media/AudioMixer.h
index b39fb92..f558fd5 100644
--- a/media/libaudioprocessing/include/media/AudioMixer.h
+++ b/media/libaudioprocessing/include/media/AudioMixer.h
@@ -49,7 +49,7 @@
DOWNMIX_TYPE = 0x4004,
// for haptic
HAPTIC_ENABLED = 0x4007, // Set haptic data from this track should be played or not.
- HAPTIC_INTENSITY = 0x4008, // Set the intensity to play haptic data.
+ HAPTIC_SCALE = 0x4008, // Set the scale to play haptic data.
HAPTIC_MAX_AMPLITUDE = 0x4009, // Set the max amplitude allowed for haptic data.
// for target TIMESTRETCH
PLAYBACK_RATE = 0x4300, // Configure timestretch on this track name;
@@ -141,7 +141,7 @@
// Haptic
bool mHapticPlaybackEnabled;
- os::HapticScale mHapticIntensity;
+ os::HapticScale mHapticScale;
float mHapticMaxAmplitude;
audio_channel_mask_t mHapticChannelMask;
uint32_t mHapticChannelCount;
diff --git a/media/libaudioprocessing/tests/Android.bp b/media/libaudioprocessing/tests/Android.bp
index ad402db..a33bf55 100644
--- a/media/libaudioprocessing/tests/Android.bp
+++ b/media/libaudioprocessing/tests/Android.bp
@@ -1,6 +1,7 @@
// Build the unit tests for libaudioprocessing
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaudioprocessing/tests/fuzzer/Android.bp b/media/libaudioprocessing/tests/fuzzer/Android.bp
index 8fb6fff..b96ec6b 100644
--- a/media/libaudioprocessing/tests/fuzzer/Android.bp
+++ b/media/libaudioprocessing/tests/fuzzer/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -8,23 +9,23 @@
}
cc_fuzz {
- name: "libaudioprocessing_resampler_fuzzer",
- srcs: [
- "libaudioprocessing_resampler_fuzzer.cpp",
- ],
- defaults: ["libaudioprocessing_test_defaults"],
- static_libs: [
- "libsndfile",
- ],
+ name: "libaudioprocessing_resampler_fuzzer",
+ srcs: [
+ "libaudioprocessing_resampler_fuzzer.cpp",
+ ],
+ defaults: ["libaudioprocessing_test_defaults"],
+ static_libs: [
+ "libsndfile",
+ ],
}
cc_fuzz {
- name: "libaudioprocessing_record_buffer_converter_fuzzer",
- srcs: [
- "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
- ],
- defaults: ["libaudioprocessing_test_defaults"],
- static_libs: [
- "libsndfile",
- ],
+ name: "libaudioprocessing_record_buffer_converter_fuzzer",
+ srcs: [
+ "libaudioprocessing_record_buffer_converter_fuzzer.cpp",
+ ],
+ defaults: ["libaudioprocessing_test_defaults"],
+ static_libs: [
+ "libsndfile",
+ ],
}
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index b56872c..0b25327 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -1,5 +1,6 @@
// Multichannel downmix effect library
package {
+ default_team: "trendy_team_media_framework_audio",
default_applicable_licenses: [
"frameworks_av_media_libeffects_downmix_license",
],
@@ -60,7 +61,7 @@
],
header_libs: [
"libaudioeffects",
- "libhardware_headers"
+ "libhardware_headers",
],
shared_libs: [
"libaudioutils",
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index 5fb44b5..593e16f 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -76,18 +76,15 @@
DownmixContext::DownmixContext(int statusDepth, const Parameter::Common& common)
: EffectContext(statusDepth, common) {
- LOG(DEBUG) << __func__;
mState = DOWNMIX_STATE_UNINITIALIZED;
init_params(common);
}
DownmixContext::~DownmixContext() {
- LOG(DEBUG) << __func__;
mState = DOWNMIX_STATE_UNINITIALIZED;
}
RetCode DownmixContext::enable() {
- LOG(DEBUG) << __func__;
if (mState != DOWNMIX_STATE_INITIALIZED) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -96,7 +93,6 @@
}
RetCode DownmixContext::disable() {
- LOG(DEBUG) << __func__;
if (mState != DOWNMIX_STATE_ACTIVE) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -105,7 +101,6 @@
}
void DownmixContext::reset() {
- LOG(DEBUG) << __func__;
disable();
resetBuffer();
}
@@ -127,7 +122,6 @@
return status;
}
- LOG(DEBUG) << __func__ << " start processing";
bool accumulate = false;
int frames = samples * sizeof(float) / getInputFrameSize();
if (mType == Downmix::Type::STRIP) {
@@ -152,9 +146,6 @@
}
}
int producedSamples = (samples / mInputChannelCount) << 1;
- LOG(DEBUG) << __func__ << " done processing " << samples << " samples, generated "
- << producedSamples << " frameSize: " << getInputFrameSize() << " - "
- << getOutputFrameSize();
return {STATUS_OK, samples, producedSamples};
}
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index c82c23b..de60ca4 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -14,10 +14,12 @@
* limitations under the License.
*/
+#define ATRACE_TAG ATRACE_TAG_AUDIO
#define LOG_TAG "AHAL_DownmixImpl"
#include <android-base/logging.h>
#include <system/audio_effects/effect_uuid.h>
+#include <utils/Trace.h>
#include "EffectDownmix.h"
@@ -36,7 +38,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<DownmixImpl>();
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -66,7 +67,6 @@
ndk::ScopedAStatus DownmixImpl::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << __func__ << kDescriptor.toString();
*_aidl_return = kDescriptor;
return ndk::ScopedAStatus::ok();
}
@@ -171,6 +171,7 @@
}
void DownmixImpl::process() {
+ ATRACE_NAME("Downmix::process");
/**
* wait for the EventFlag without lock, it's ok because the mEfGroup pointer will not change
* in the life cycle of workerThread (threadLoop).
@@ -195,14 +196,14 @@
const auto availableToWrite = outputMQ->availableToWrite() *
mImplContext->getInputFrameSize() /
mImplContext->getOutputFrameSize();
+ assert(mImplContext->getWorkBufferSize() >=
+ std::max(availableToRead(), availableToWrite));
auto processSamples = std::min(availableToRead, availableToWrite);
if (processSamples) {
inputMQ->read(buffer, processSamples);
IEffect::Status status = effectProcessImpl(buffer, buffer, processSamples);
outputMQ->write(buffer, status.fmqProduced);
statusMQ->writeBlocking(&status, 1);
- LOG(VERBOSE) << getEffectName() << __func__ << ": done processing, effect consumed "
- << status.fmqConsumed << " produced " << status.fmqProduced;
}
}
}
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.h b/media/libeffects/downmix/aidl/EffectDownmix.h
index 54557dc..b7d621a 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.h
+++ b/media/libeffects/downmix/aidl/EffectDownmix.h
@@ -28,11 +28,8 @@
public:
static const std::string kEffectName;
static const Descriptor kDescriptor;
- DownmixImpl() { LOG(DEBUG) << __func__; }
- ~DownmixImpl() {
- cleanUp();
- LOG(DEBUG) << __func__;
- }
+ DownmixImpl() = default;
+ ~DownmixImpl() { cleanUp(); }
ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
diff --git a/media/libeffects/downmix/benchmark/Android.bp b/media/libeffects/downmix/benchmark/Android.bp
index 10f14e2..5b62a0c 100644
--- a/media/libeffects/downmix/benchmark/Android.bp
+++ b/media/libeffects/downmix/benchmark/Android.bp
@@ -1,5 +1,6 @@
// Build testbench for downmix module.
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_media_libeffects_downmix_license"
diff --git a/media/libeffects/downmix/tests/Android.bp b/media/libeffects/downmix/tests/Android.bp
index 392a6fa..77d8f83 100644
--- a/media/libeffects/downmix/tests/Android.bp
+++ b/media/libeffects/downmix/tests/Android.bp
@@ -1,5 +1,6 @@
// Build testbench for downmix module.
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_media_libeffects_downmix_license"
@@ -14,7 +15,7 @@
//
// Use "atest downmix_tests" to run.
cc_test {
- name:"downmix_tests",
+ name: "downmix_tests",
gtest: true,
host_supported: true,
vendor: true,
@@ -45,7 +46,7 @@
// test application and outputs then compares files in a local directory
// on device (/data/local/tmp/downmixtest/).
cc_test {
- name:"downmixtest",
+ name: "downmixtest",
host_supported: false,
proprietary: true,
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index 1fedea4..fdc16e3 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -41,7 +41,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<DynamicsProcessingImpl>();
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -206,7 +205,6 @@
ndk::ScopedAStatus DynamicsProcessingImpl::open(const Parameter::Common& common,
const std::optional<Parameter::Specific>& specific,
OpenEffectReturn* ret) {
- LOG(DEBUG) << __func__;
// effect only support 32bits float
RETURN_IF(common.input.base.format.pcm != common.output.base.format.pcm ||
common.input.base.format.pcm != PcmType::FLOAT_32_BIT,
@@ -215,6 +213,10 @@
RETURN_OK_IF(mState != State::INIT);
mImplContext = createContext(common);
RETURN_IF(!mContext || !mImplContext, EX_NULL_POINTER, "createContextFailed");
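+ // Query the negotiated AIDL interface version and store it on the context.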
+ int version = 0;
+ RETURN_IF(!getInterfaceVersion(&version).isOk(), EX_UNSUPPORTED_OPERATION,
+ "FailedToGetInterfaceVersion");
+ mImplContext->setVersion(version);
mEventFlag = mImplContext->getStatusEventFlag();
if (specific.has_value()) {
@@ -236,7 +238,6 @@
ndk::ScopedAStatus DynamicsProcessingImpl::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << __func__ << kDescriptor.toString();
*_aidl_return = kDescriptor;
return ndk::ScopedAStatus::ok();
}
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
index 4897888..e850ba4 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
@@ -30,11 +30,8 @@
static const Descriptor kDescriptor;
static const Capability kCapability;
- DynamicsProcessingImpl() { LOG(DEBUG) << __func__; }
- ~DynamicsProcessingImpl() {
- cleanUp();
- LOG(DEBUG) << __func__;
- }
+ DynamicsProcessingImpl() = default;
+ ~DynamicsProcessingImpl() { cleanUp(); }
ndk::ScopedAStatus open(const Parameter::Common& common,
const std::optional<Parameter::Specific>& specific,
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index 042b063..9c440df 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -29,16 +29,10 @@
DynamicsProcessingContext::DynamicsProcessingContext(int statusDepth,
const Parameter::Common& common)
: EffectContext(statusDepth, common) {
- LOG(DEBUG) << __func__;
init();
}
-DynamicsProcessingContext::~DynamicsProcessingContext() {
- LOG(DEBUG) << __func__;
-}
-
RetCode DynamicsProcessingContext::enable() {
- std::lock_guard lg(mMutex);
if (mState != DYNAMICS_PROCESSING_STATE_INITIALIZED) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -47,7 +41,6 @@
}
RetCode DynamicsProcessingContext::disable() {
- std::lock_guard lg(mMutex);
if (mState != DYNAMICS_PROCESSING_STATE_ACTIVE) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -56,7 +49,6 @@
}
void DynamicsProcessingContext::reset() {
- std::lock_guard lg(mMutex);
if (mDpFreq != nullptr) {
mDpFreq.reset();
}
@@ -68,12 +60,10 @@
}
mCommon = common;
init();
- LOG(INFO) << __func__ << common.toString();
return RetCode::SUCCESS;
}
RetCode DynamicsProcessingContext::setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) {
- std::lock_guard lg(mMutex);
dp_fx::DPChannel* leftChannel = mDpFreq->getChannel(0);
dp_fx::DPChannel* rightChannel = mDpFreq->getChannel(1);
if (leftChannel != nullptr) {
@@ -99,8 +89,8 @@
int32_t sampleRate = mCommon.input.base.sampleRate;
int32_t minBlockSize = (int32_t)dp_fx::DPFrequency::getMinBockSize();
int32_t block = engine.preferredProcessingDurationMs * sampleRate / 1000.0f;
- LOG(INFO) << __func__ << " sampleRate " << sampleRate << " block length "
- << engine.preferredProcessingDurationMs << " ms (" << block << "samples)";
+ LOG(VERBOSE) << __func__ << " sampleRate " << sampleRate << " block length "
+ << engine.preferredProcessingDurationMs << " ms (" << block << "samples)";
if (block < minBlockSize) {
block = minBlockSize;
} else if (!powerof2(block)) {
@@ -112,7 +102,6 @@
RetCode DynamicsProcessingContext::setEngineArchitecture(
const DynamicsProcessing::EngineArchitecture& engineArchitecture) {
- std::lock_guard lg(mMutex);
if (!mEngineInited || mEngineArchitecture != engineArchitecture) {
if (engineArchitecture.resolutionPreference ==
DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION) {
@@ -124,34 +113,29 @@
mEngineInited = true;
mEngineArchitecture = engineArchitecture;
}
- LOG(INFO) << __func__ << engineArchitecture.toString();
return RetCode::SUCCESS;
}
RetCode DynamicsProcessingContext::setPreEq(
const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
- std::lock_guard lg(mMutex);
return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.preEqStage.inUse,
StageType::PREEQ);
}
RetCode DynamicsProcessingContext::setPostEq(
const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
- std::lock_guard lg(mMutex);
return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.postEqStage.inUse,
StageType::POSTEQ);
}
RetCode DynamicsProcessingContext::setMbc(
const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
- std::lock_guard lg(mMutex);
return setDpChannels_l<dp_fx::DPMbc>(channels, mEngineArchitecture.mbcStage.inUse,
StageType::MBC);
}
RetCode DynamicsProcessingContext::setPreEqBand(
const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
- std::lock_guard lg(mMutex);
RETURN_VALUE_IF(!mEngineArchitecture.preEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
"preEqNotInUse");
RETURN_VALUE_IF(
@@ -162,7 +146,6 @@
RetCode DynamicsProcessingContext::setPostEqBand(
const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
- std::lock_guard lg(mMutex);
RETURN_VALUE_IF(!mEngineArchitecture.postEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
"postEqNotInUse");
RETURN_VALUE_IF(
@@ -173,7 +156,6 @@
RetCode DynamicsProcessingContext::setMbcBand(
const std::vector<DynamicsProcessing::MbcBandConfig>& bands) {
- std::lock_guard lg(mMutex);
RETURN_VALUE_IF(!mEngineArchitecture.mbcStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
"mbcNotInUse");
RETURN_VALUE_IF(
@@ -184,7 +166,6 @@
RetCode DynamicsProcessingContext::setLimiter(
const std::vector<DynamicsProcessing::LimiterConfig>& limiters) {
- std::lock_guard lg(mMutex);
RETURN_VALUE_IF(!mEngineArchitecture.limiterInUse, RetCode::ERROR_ILLEGAL_PARAMETER,
"limiterNotInUse");
RETURN_VALUE_IF(!validateLimiterConfig(limiters, mChannelCount),
@@ -194,15 +175,12 @@
RetCode DynamicsProcessingContext::setInputGain(
const std::vector<DynamicsProcessing::InputGain>& inputGains) {
- std::lock_guard lg(mMutex);
RETURN_VALUE_IF(!validateInputGainConfig(inputGains, mChannelCount),
RetCode::ERROR_ILLEGAL_PARAMETER, "inputGainNotValid");
return setBands_l<DynamicsProcessing::InputGain>(inputGains, StageType::INPUTGAIN);
}
DynamicsProcessing::EngineArchitecture DynamicsProcessingContext::getEngineArchitecture() {
- std::lock_guard lg(mMutex);
- LOG(INFO) << __func__ << mEngineArchitecture.toString();
return mEngineArchitecture;
}
@@ -228,8 +206,6 @@
std::vector<DynamicsProcessing::MbcBandConfig> DynamicsProcessingContext::getMbcBand() {
std::vector<DynamicsProcessing::MbcBandConfig> bands;
-
- std::lock_guard lg(mMutex);
auto maxBand = mEngineArchitecture.mbcStage.bandCount;
for (int32_t ch = 0; ch < mChannelCount; ch++) {
auto mbc = getMbc_l(ch);
@@ -261,8 +237,6 @@
std::vector<DynamicsProcessing::LimiterConfig> DynamicsProcessingContext::getLimiter() {
std::vector<DynamicsProcessing::LimiterConfig> ret;
-
- std::lock_guard lg(mMutex);
for (int32_t ch = 0; ch < mChannelCount; ch++) {
auto limiter = getLimiter_l(ch);
if (!limiter) {
@@ -282,8 +256,6 @@
std::vector<DynamicsProcessing::InputGain> DynamicsProcessingContext::getInputGain() {
std::vector<DynamicsProcessing::InputGain> ret;
-
- std::lock_guard lg(mMutex);
for (int32_t ch = 0; ch < mChannelCount; ch++) {
auto channel = getChannel_l(ch);
if (!channel) {
@@ -295,26 +267,20 @@
}
IEffect::Status DynamicsProcessingContext::dpeProcess(float* in, float* out, int samples) {
- LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!in, status, "nullInput");
RETURN_VALUE_IF(!out, status, "nullOutput");
status = {EX_ILLEGAL_STATE, 0, 0};
- LOG(DEBUG) << __func__ << " start processing";
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(mState != DynamicsProcessingState::DYNAMICS_PROCESSING_STATE_ACTIVE, status,
- "notInActiveState");
- RETURN_VALUE_IF(!mDpFreq, status, "engineNotInited");
- mDpFreq->processSamples(in, out, samples);
- }
+ RETURN_VALUE_IF(mState != DynamicsProcessingState::DYNAMICS_PROCESSING_STATE_ACTIVE, status,
+ "notInActiveState");
+ RETURN_VALUE_IF(!mDpFreq, status, "engineNotInited");
+ mDpFreq->processSamples(in, out, samples);
return {STATUS_OK, samples, samples};
}
void DynamicsProcessingContext::init() {
- std::lock_guard lg(mMutex);
if (mState == DYNAMICS_PROCESSING_STATE_UNINITIALIZED) {
mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
}
@@ -399,7 +365,6 @@
StageType type) {
std::vector<DynamicsProcessing::ChannelConfig> ret;
- std::lock_guard lg(mMutex);
for (int32_t ch = 0; ch < mChannelCount; ch++) {
auto stage = getStageWithType_l(type, ch);
if (!stage) {
@@ -414,7 +379,6 @@
StageType type) {
std::vector<DynamicsProcessing::EqBandConfig> eqBands;
- std::lock_guard lg(mMutex);
auto maxBand = mEngineArchitecture.preEqStage.bandCount;
for (int32_t ch = 0; ch < mChannelCount; ch++) {
auto eq = getEqWithType_l(type, ch);
@@ -485,7 +449,7 @@
if (!stageInUse) {
LOG(WARNING) << __func__ << " not in use " << ::android::internal::ToString(channels);
- return RetCode::SUCCESS;
+ return RetCode::ERROR_ILLEGAL_PARAMETER;
}
RETURN_VALUE_IF(!stageInUse, RetCode::ERROR_ILLEGAL_PARAMETER, "stageNotInUse");
@@ -509,7 +473,6 @@
continue;
}
if (dp->isEnabled() != it.enable) {
- LOG(INFO) << __func__ << it.toString();
dp->setEnabled(it.enable);
}
}
@@ -590,7 +553,6 @@
ret = RetCode::ERROR_ILLEGAL_PARAMETER;
continue;
}
- LOG(INFO) << __func__ << it.toString();
}
return ret;
}
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index 839c6dd..a059dd0 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -16,7 +16,6 @@
#pragma once
-#include <android-base/thread_annotations.h>
#include <audio_effects/effect_dynamicsprocessing.h>
#include "effect-impl/EffectContext.h"
@@ -37,8 +36,7 @@
class DynamicsProcessingContext final : public EffectContext {
public:
DynamicsProcessingContext(int statusDepth, const Parameter::Common& common);
- ~DynamicsProcessingContext();
-
+ ~DynamicsProcessingContext() = default;
RetCode enable();
RetCode disable();
void reset();
@@ -73,12 +71,11 @@
private:
static constexpr float kPreferredProcessingDurationMs = 10.0f;
static constexpr int kBandCount = 5;
- std::mutex mMutex;
- int mChannelCount GUARDED_BY(mMutex) = 0;
- DynamicsProcessingState mState GUARDED_BY(mMutex) = DYNAMICS_PROCESSING_STATE_UNINITIALIZED;
- std::unique_ptr<dp_fx::DPFrequency> mDpFreq GUARDED_BY(mMutex) = nullptr;
- bool mEngineInited GUARDED_BY(mMutex) = false;
- DynamicsProcessing::EngineArchitecture mEngineArchitecture GUARDED_BY(mMutex) = {
+ int mChannelCount = 0;
+ DynamicsProcessingState mState = DYNAMICS_PROCESSING_STATE_UNINITIALIZED;
+ std::unique_ptr<dp_fx::DPFrequency> mDpFreq = nullptr;
+ bool mEngineInited = false;
+ DynamicsProcessing::EngineArchitecture mEngineArchitecture = {
.resolutionPreference =
DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION,
.preferredProcessingDurationMs = kPreferredProcessingDurationMs,
@@ -92,22 +89,21 @@
void init();
- void dpSetFreqDomainVariant_l(const DynamicsProcessing::EngineArchitecture& engine)
- REQUIRES(mMutex);
- dp_fx::DPChannel* getChannel_l(int ch) REQUIRES(mMutex);
- dp_fx::DPEq* getPreEq_l(int ch) REQUIRES(mMutex);
- dp_fx::DPEq* getPostEq_l(int ch) REQUIRES(mMutex);
- dp_fx::DPMbc* getMbc_l(int ch) REQUIRES(mMutex);
- dp_fx::DPLimiter* getLimiter_l(int ch) REQUIRES(mMutex);
- dp_fx::DPBandStage* getStageWithType_l(StageType type, int ch) REQUIRES(mMutex);
- dp_fx::DPEq* getEqWithType_l(StageType type, int ch) REQUIRES(mMutex);
+ void dpSetFreqDomainVariant_l(const DynamicsProcessing::EngineArchitecture& engine);
+ dp_fx::DPChannel* getChannel_l(int ch);
+ dp_fx::DPEq* getPreEq_l(int ch);
+ dp_fx::DPEq* getPostEq_l(int ch);
+ dp_fx::DPMbc* getMbc_l(int ch);
+ dp_fx::DPLimiter* getLimiter_l(int ch);
+ dp_fx::DPBandStage* getStageWithType_l(StageType type, int ch);
+ dp_fx::DPEq* getEqWithType_l(StageType type, int ch);
template <typename D>
RetCode setDpChannels_l(const std::vector<DynamicsProcessing::ChannelConfig>& channels,
- bool stageInUse, StageType type) REQUIRES(mMutex);
+ bool stageInUse, StageType type);
template <typename T /* BandConfig */>
- RetCode setBands_l(const std::vector<T>& bands, StageType type) REQUIRES(mMutex);
+ RetCode setBands_l(const std::vector<T>& bands, StageType type);
RetCode setDpChannelBand_l(const std::any& anyConfig, StageType type,
- std::set<std::pair<int, int>>& chBandSet) REQUIRES(mMutex);
+ std::set<std::pair<int, int>>& chBandSet);
std::vector<DynamicsProcessing::EqBandConfig> getEqBandConfigs(StageType type);
std::vector<DynamicsProcessing::ChannelConfig> getChannelConfig(StageType type);
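This header change drops the per-context mutex and its GUARDED_BY/REQUIRES annotations; judging by the REQUIRES(mImplMutex) overrides elsewhere in this change, serialization is assumed to move up into the effect implementation that owns the context. A hedged sketch of that ownership model, with ExampleImpl and ExampleContext as illustrative names only:

#include <memory>
#include <mutex>

class ExampleContext {
  public:
    // No internal lock: callers are expected to serialize access.
    void setGain(int gainMb) { mGainMb = gainMb; }
    int gain() const { return mGainMb; }

  private:
    int mGainMb = 0;
};

class ExampleImpl {
  public:
    void setGain(int gainMb) {
        std::lock_guard lg(mImplMutex);  // single lock guards all context access
        mContext->setGain(gainMb);
    }

  private:
    std::mutex mImplMutex;
    std::unique_ptr<ExampleContext> mContext = std::make_unique<ExampleContext>();
};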
diff --git a/media/libeffects/factory/Android.bp b/media/libeffects/factory/Android.bp
index d94093e..ad5188f 100644
--- a/media/libeffects/factory/Android.bp
+++ b/media/libeffects/factory/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -20,10 +21,10 @@
name: "libeffects",
vendor: true,
srcs: [
- "EffectsFactory.c",
- "EffectsConfigLoader.c",
- "EffectsFactoryState.c",
- "EffectsXmlConfigLoader.cpp",
+ "EffectsFactory.c",
+ "EffectsConfigLoader.c",
+ "EffectsFactoryState.c",
+ "EffectsXmlConfigLoader.cpp",
],
shared_libs: [
@@ -34,7 +35,7 @@
],
cflags: ["-fvisibility=hidden"],
- local_include_dirs:["include/media"],
+ local_include_dirs: ["include/media"],
header_libs: [
"libaudioeffects",
@@ -61,5 +62,8 @@
"libeffectsconfig",
"libeffects",
],
- local_include_dirs:[".", "include"],
+ local_include_dirs: [
+ ".",
+ "include",
+ ],
}
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
index e89e501..f60d616 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -114,10 +114,11 @@
std::stringstream ss;
ss << "\t\tHaptic setting:\n";
ss << "\t\t- tracks intensity map:\n";
- for (const auto&[id, intensity] : param.id2Intensity) {
- ss << "\t\t\t- id=" << id << ", intensity=" << (int) intensity;
+ for (const auto&[id, hapticScale] : param.id2HapticScale) {
+ ss << "\t\t\t- id=" << id << ", hapticLevel=" << (int) hapticScale.getLevel()
+ << ", adaptiveScaleFactor=" << hapticScale.getAdaptiveScaleFactor();
}
- ss << "\t\t- max intensity: " << (int) param.maxHapticIntensity << '\n';
+ ss << "\t\t- max scale level: " << (int) param.maxHapticScale.getLevel() << '\n';
ss << "\t\t- max haptic amplitude: " << param.maxHapticAmplitude << '\n';
return ss.str();
}
@@ -145,7 +146,7 @@
memset(context->param.hapticChannelSource, 0, sizeof(context->param.hapticChannelSource));
context->param.hapticChannelCount = 0;
context->param.audioChannelCount = 0;
- context->param.maxHapticIntensity = os::HapticScale::MUTE;
+ context->param.maxHapticScale = os::HapticScale::mute();
context->param.resonantFrequency = DEFAULT_RESONANT_FREQUENCY;
context->param.bpfQ = 1.0f;
@@ -312,21 +313,25 @@
void *value) {
switch (param) {
case HG_PARAM_HAPTIC_INTENSITY: {
- if (value == nullptr || size != (uint32_t) (2 * sizeof(int))) {
+ if (value == nullptr || size != (uint32_t) (2 * sizeof(int) + sizeof(float))) {
return -EINVAL;
}
- int id = *(int *) value;
- os::HapticScale hapticIntensity = static_cast<os::HapticScale>(*((int *) value + 1));
- ALOGD("Setting haptic intensity as %d", hapticIntensity);
- if (hapticIntensity == os::HapticScale::MUTE) {
- context->param.id2Intensity.erase(id);
+ const int id = *(int *) value;
+ const os::HapticLevel hapticLevel = static_cast<os::HapticLevel>(*((int *) value + 1));
+ const float adaptiveScaleFactor = (*((float *) value + 2));
+ const os::HapticScale hapticScale = {hapticLevel, adaptiveScaleFactor};
+ ALOGD("Updating haptic scale, hapticLevel=%d, adaptiveScaleFactor=%f",
+ static_cast<int>(hapticLevel), adaptiveScaleFactor);
+ if (hapticScale.isScaleMute()) {
+ context->param.id2HapticScale.erase(id);
} else {
- context->param.id2Intensity.emplace(id, hapticIntensity);
+ context->param.id2HapticScale.emplace(id, hapticScale);
}
- context->param.maxHapticIntensity = hapticIntensity;
- for (const auto&[id, intensity] : context->param.id2Intensity) {
- context->param.maxHapticIntensity = std::max(
- context->param.maxHapticIntensity, intensity);
+ context->param.maxHapticScale = hapticScale;
+ for (const auto&[id, scale] : context->param.id2HapticScale) {
+ if (scale.getLevel() > context->param.maxHapticScale.getLevel()) {
+ context->param.maxHapticScale = scale;
+ }
}
break;
}
@@ -478,7 +483,7 @@
return -ENODATA;
}
- if (context->param.maxHapticIntensity == os::HapticScale::MUTE) {
+ if (context->param.maxHapticScale.isScaleMute()) {
// Haptic channels are muted, no need to generate haptic data.
return 0;
}
@@ -504,8 +509,9 @@
float* hapticOutBuffer = HapticGenerator_runProcessingChain(
context->processingChain, context->inputBuffer.data(),
context->outputBuffer.data(), inBuffer->frameCount);
- os::scaleHapticData(hapticOutBuffer, hapticSampleCount, context->param.maxHapticIntensity,
- context->param.maxHapticAmplitude);
+ os::scaleHapticData(hapticOutBuffer, hapticSampleCount,
+ context->param.maxHapticScale,
+ context->param.maxHapticAmplitude);
// For haptic data, the haptic playback thread will copy the data from effect input buffer,
// which contains haptic data at the end of the buffer, directly to sink buffer.
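With this change the HG_PARAM_HAPTIC_INTENSITY payload carries a track id, a haptic level and an adaptive scale factor, matching the size check of 2 * sizeof(int) + sizeof(float) above. A sketch of how a caller might pack that blob; packHapticIntensityParam is a hypothetical helper, not part of the effect API:

#include <cstdint>
#include <cstring>
#include <vector>

std::vector<uint8_t> packHapticIntensityParam(int32_t trackId, int32_t hapticLevel,
                                              float adaptiveScaleFactor) {
    // Layout expected by the parser: int id, int level, float adaptive scale factor.
    std::vector<uint8_t> blob(2 * sizeof(int32_t) + sizeof(float));
    std::memcpy(blob.data(), &trackId, sizeof(trackId));
    std::memcpy(blob.data() + sizeof(trackId), &hapticLevel, sizeof(hapticLevel));
    std::memcpy(blob.data() + 2 * sizeof(int32_t), &adaptiveScaleFactor,
                sizeof(adaptiveScaleFactor));
    return blob;
}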
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
index 85e961f..dbfc5ea 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -48,9 +48,9 @@
uint32_t audioChannelCount;
uint32_t hapticChannelCount;
- // A map from track id to haptic intensity.
- std::map<int, os::HapticScale> id2Intensity;
- os::HapticScale maxHapticIntensity; // max intensity will be used to scale haptic data.
+ // A map from track id to haptic scale.
+ std::map<int, os::HapticScale> id2HapticScale;
+ os::HapticScale maxHapticScale; // max haptic scale will be used to scale haptic data.
float maxHapticAmplitude; // max amplitude will be used to limit haptic data absolute values.
float resonantFrequency;
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
index 031477f..b803ee4 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
@@ -37,7 +37,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<HapticGeneratorImpl>();
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -67,7 +66,6 @@
ndk::ScopedAStatus HapticGeneratorImpl::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << __func__ << kDescriptor.toString();
*_aidl_return = kDescriptor;
return ndk::ScopedAStatus::ok();
}
@@ -185,7 +183,7 @@
IEffect::Status HapticGeneratorImpl::effectProcessImpl(float* in, float* out, int samples) {
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!mContext, status, "nullContext");
- return mContext->lvmProcess(in, out, samples);
+ return mContext->process(in, out, samples);
}
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
index 53dcd49..a775f06 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
@@ -27,11 +27,8 @@
public:
static const std::string kEffectName;
static const Descriptor kDescriptor;
- HapticGeneratorImpl() { LOG(DEBUG) << __func__; }
- ~HapticGeneratorImpl() {
- cleanUp();
- LOG(DEBUG) << __func__;
- }
+ HapticGeneratorImpl() = default;
+ ~HapticGeneratorImpl() { cleanUp(); }
ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index 354ee00..0a04250 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -28,7 +28,6 @@
HapticGeneratorContext::HapticGeneratorContext(int statusDepth, const Parameter::Common& common)
: EffectContext(statusDepth, common) {
- LOG(DEBUG) << __func__;
mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
mSampleRate = common.input.base.sampleRate;
mFrameCount = common.input.frameCount;
@@ -36,7 +35,6 @@
}
HapticGeneratorContext::~HapticGeneratorContext() {
- LOG(DEBUG) << __func__;
mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
}
@@ -70,7 +68,6 @@
RetCode HapticGeneratorContext::setHgHapticScales(
const std::vector<HapticGenerator::HapticScale>& hapticScales) {
- std::lock_guard lg(mMutex);
for (auto hapticScale : hapticScales) {
mParams.mHapticScales.insert_or_assign(hapticScale.id, hapticScale.scale);
}
@@ -82,13 +79,11 @@
}
HapticGenerator::VibratorInformation HapticGeneratorContext::getHgVibratorInformation() {
- std::lock_guard lg(mMutex);
return mParams.mVibratorInfo;
}
std::vector<HapticGenerator::HapticScale> HapticGeneratorContext::getHgHapticScales() {
std::vector<HapticGenerator::HapticScale> result;
- std::lock_guard lg(mMutex);
for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
result.push_back({id, vibratorScale});
}
@@ -97,30 +92,23 @@
RetCode HapticGeneratorContext::setHgVibratorInformation(
const HapticGenerator::VibratorInformation& vibratorInfo) {
- {
- std::lock_guard lg(mMutex);
- mParams.mVibratorInfo = vibratorInfo;
+ mParams.mVibratorInfo = vibratorInfo;
- if (mProcessorsRecord.bpf != nullptr) {
- mProcessorsRecord.bpf->setCoefficients(
- ::android::audio_effect::haptic_generator::bpfCoefs(
- mParams.mVibratorInfo.resonantFrequencyHz, DEFAULT_BPF_Q, mSampleRate));
- }
- if (mProcessorsRecord.bsf != nullptr) {
- mProcessorsRecord.bsf->setCoefficients(
- ::android::audio_effect::haptic_generator::bsfCoefs(
- mParams.mVibratorInfo.resonantFrequencyHz,
- mParams.mVibratorInfo.qFactor, mParams.mVibratorInfo.qFactor / 2.0f,
- mSampleRate));
- }
+ if (mProcessorsRecord.bpf != nullptr) {
+ mProcessorsRecord.bpf->setCoefficients(::android::audio_effect::haptic_generator::bpfCoefs(
+ mParams.mVibratorInfo.resonantFrequencyHz, DEFAULT_BPF_Q, mSampleRate));
}
+ if (mProcessorsRecord.bsf != nullptr) {
+ mProcessorsRecord.bsf->setCoefficients(::android::audio_effect::haptic_generator::bsfCoefs(
+ mParams.mVibratorInfo.resonantFrequencyHz, mParams.mVibratorInfo.qFactor,
+ mParams.mVibratorInfo.qFactor / 2.0f, mSampleRate));
+ }
+
configure();
return RetCode::SUCCESS;
}
-IEffect::Status HapticGeneratorContext::lvmProcess(float* in, float* out, int samples) {
- LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
-
+IEffect::Status HapticGeneratorContext::process(float* in, float* out, int samples) {
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!in, status, "nullInput");
RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -129,17 +117,11 @@
auto frameSize = getInputFrameSize();
RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
- LOG(DEBUG) << __func__ << " start processing";
// The audio data must not be modified but just written to
// output buffer according to the access mode.
- bool accumulate = false;
if (in != out) {
for (int i = 0; i < samples; i++) {
- if (accumulate) {
- out[i] += in[i];
- } else {
- out[i] = in[i];
- }
+ out[i] = in[i];
}
}
@@ -147,7 +129,6 @@
return status;
}
- std::lock_guard lg(mMutex);
if (mParams.mMaxVibratorScale == HapticGenerator::VibratorScale::MUTE) {
// Haptic channels are muted, no need to generate haptic data.
return {STATUS_OK, samples, samples};
@@ -174,7 +155,7 @@
runProcessingChain(mInputBuffer.data(), mOutputBuffer.data(), mFrameCount);
::android::os::scaleHapticData(
hapticOutBuffer, hapticSampleCount,
- static_cast<::android::os::HapticScale>(mParams.mMaxVibratorScale),
+ {/*level=*/static_cast<::android::os::HapticLevel>(mParams.mMaxVibratorScale) },
mParams.mVibratorInfo.qFactor);
// For haptic data, the haptic playback thread will copy the data from effect input
@@ -189,7 +170,6 @@
void HapticGeneratorContext::init_params(media::audio::common::AudioChannelLayout inputChMask,
media::audio::common::AudioChannelLayout outputChMask) {
- std::lock_guard lg(mMutex);
mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
@@ -210,7 +190,6 @@
float HapticGeneratorContext::getDistortionOutputGain() {
float distortionOutputGain = getFloatProperty(
"vendor.audio.hapticgenerator.distortion.output.gain", DEFAULT_DISTORTION_OUTPUT_GAIN);
- LOG(DEBUG) << "Using distortion output gain as " << distortionOutputGain;
return distortionOutputGain;
}
@@ -237,7 +216,6 @@
* Build haptic generator processing chain.
*/
void HapticGeneratorContext::buildProcessingChain() {
- std::lock_guard lg(mMutex);
const size_t channelCount = mParams.mHapticChannelCount;
float highPassCornerFrequency = 50.0f;
auto hpf = ::android::audio_effect::haptic_generator::createHPF2(highPassCornerFrequency,
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index 26e69e4..3a2ad1c 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -16,7 +16,6 @@
#pragma once
-#include <android-base/thread_annotations.h>
#include <vibrator/ExternalVibrationUtils.h>
#include <map>
@@ -75,7 +74,7 @@
RetCode setHgVibratorInformation(const HapticGenerator::VibratorInformation& vibratorInfo);
HapticGenerator::VibratorInformation getHgVibratorInformation();
- IEffect::Status lvmProcess(float* in, float* out, int samples);
+ IEffect::Status process(float* in, float* out, int samples);
private:
static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
@@ -88,9 +87,8 @@
static constexpr float DEFAULT_DISTORTION_INPUT_GAIN = 0.3f;
static constexpr float DEFAULT_DISTORTION_CUBE_THRESHOLD = 0.1f;
- std::mutex mMutex;
HapticGeneratorState mState;
- HapticGeneratorParam mParams GUARDED_BY(mMutex);
+ HapticGeneratorParam mParams;
int mSampleRate;
int64_t mFrameCount = 0;
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
index a7d9282..f89606e 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
@@ -37,7 +37,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<LoudnessEnhancerImpl>();
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -67,7 +66,6 @@
ndk::ScopedAStatus LoudnessEnhancerImpl::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << __func__ << kDescriptor.toString();
*_aidl_return = kDescriptor;
return ndk::ScopedAStatus::ok();
}
@@ -178,7 +176,7 @@
IEffect::Status LoudnessEnhancerImpl::effectProcessImpl(float* in, float* out, int samples) {
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!mContext, status, "nullContext");
- return mContext->lvmProcess(in, out, samples);
+ return mContext->process(in, out, samples);
}
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
index e2e716c..98bdc6b 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
@@ -27,11 +27,8 @@
public:
static const std::string kEffectName;
static const Descriptor kDescriptor;
- LoudnessEnhancerImpl() { LOG(DEBUG) << __func__; }
- ~LoudnessEnhancerImpl() {
- cleanUp();
- LOG(DEBUG) << __func__;
- }
+ LoudnessEnhancerImpl() = default;
+ ~LoudnessEnhancerImpl() { cleanUp(); }
ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
index bc3fa45..d8bcfc0 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
@@ -24,16 +24,10 @@
LoudnessEnhancerContext::LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common)
: EffectContext(statusDepth, common) {
- LOG(DEBUG) << __func__;
init_params();
}
-LoudnessEnhancerContext::~LoudnessEnhancerContext() {
- LOG(DEBUG) << __func__;
-}
-
RetCode LoudnessEnhancerContext::enable() {
- std::lock_guard lg(mMutex);
if (mState != LOUDNESS_ENHANCER_STATE_INITIALIZED) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -42,7 +36,6 @@
}
RetCode LoudnessEnhancerContext::disable() {
- std::lock_guard lg(mMutex);
if (mState != LOUDNESS_ENHANCER_STATE_ACTIVE) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -52,7 +45,6 @@
void LoudnessEnhancerContext::reset() {
float targetAmp = pow(10, mGain / 2000.0f); // mB to linear amplification
- std::lock_guard lg(mMutex);
if (mCompressor != nullptr) {
// Get samplingRate from input
mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
@@ -65,9 +57,7 @@
return RetCode::SUCCESS;
}
-IEffect::Status LoudnessEnhancerContext::lvmProcess(float* in, float* out, int samples) {
- LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
-
+IEffect::Status LoudnessEnhancerContext::process(float* in, float* out, int samples) {
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!in, status, "nullInput");
RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -76,11 +66,9 @@
auto frameSize = getInputFrameSize();
RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
- std::lock_guard lg(mMutex);
status = {STATUS_INVALID_OPERATION, 0, 0};
RETURN_VALUE_IF(mState != LOUDNESS_ENHANCER_STATE_ACTIVE, status, "stateNotActive");
- LOG(DEBUG) << __func__ << " start processing";
// PcmType is always expected to be Float 32 bit.
constexpr float scale = 1 << 15; // power of 2 is lossless conversion to int16_t range
constexpr float inverseScale = 1.f / scale;
@@ -124,9 +112,8 @@
mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
float targetAmp = pow(10, mGain / 2000.0f); // mB to linear amplification
- LOG(DEBUG) << __func__ << "Target gain = " << mGain << "mB <=> factor = " << targetAmp;
+ LOG(VERBOSE) << __func__ << "Target gain = " << mGain << "mB <=> factor = " << targetAmp;
- std::lock_guard lg(mMutex);
mCompressor = std::make_unique<le_fx::AdaptiveDynamicRangeCompression>();
mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
mState = LOUDNESS_ENHANCER_STATE_INITIALIZED;
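The context keeps converting the target gain from millibels to a linear factor with pow(10, mGain / 2000.0f). A small worked example of that conversion (1 dB = 100 mB, and linear amplitude is 10^(dB/20), hence the 2000 divisor); the gain value here is illustrative:

#include <cmath>
#include <cstdio>

int main() {
    const int gainMb = 600;  // 600 mB == 6 dB, hypothetical value
    const float targetAmp = std::pow(10.0f, gainMb / 2000.0f);  // ~2.0 for 6 dB
    std::printf("%d mB -> linear factor %.3f\n", gainMb, targetAmp);
    return 0;
}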
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
index 9a1ec4c..192b212 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
@@ -16,7 +16,6 @@
#pragma once
-#include <android-base/thread_annotations.h>
#include <audio_effects/effect_loudnessenhancer.h>
#include "dsp/core/dynamic_range_compression.h"
@@ -33,7 +32,7 @@
class LoudnessEnhancerContext final : public EffectContext {
public:
LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common);
- ~LoudnessEnhancerContext();
+ ~LoudnessEnhancerContext() = default;
RetCode enable();
RetCode disable();
@@ -42,15 +41,14 @@
RetCode setLeGain(int gainMb);
int getLeGain() const { return mGain; }
- IEffect::Status lvmProcess(float* in, float* out, int samples);
+ IEffect::Status process(float* in, float* out, int samples);
private:
- std::mutex mMutex;
- LoudnessEnhancerState mState GUARDED_BY(mMutex) = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
+ LoudnessEnhancerState mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
int mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
// In this implementation, there is no coupling between the compression on the left and right
// channels
- std::unique_ptr<le_fx::AdaptiveDynamicRangeCompression> mCompressor GUARDED_BY(mMutex);
+ std::unique_ptr<le_fx::AdaptiveDynamicRangeCompression> mCompressor;
void init_params();
};
diff --git a/media/libeffects/lvm/benchmarks/Android.bp b/media/libeffects/lvm/benchmarks/Android.bp
index c21c5f2..8036983 100644
--- a/media/libeffects/lvm/benchmarks/Android.bp
+++ b/media/libeffects/lvm/benchmarks/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index 7998879..c1a77f0 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -1,5 +1,6 @@
// Music bundle
package {
+ default_team: "trendy_team_media_framework_audio",
default_applicable_licenses: [
"frameworks_av_media_libeffects_lvm_lib_license",
],
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 0568fbd..c32e91e 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -1,6 +1,7 @@
// Build the unit tests for effects
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -12,7 +13,7 @@
cc_test {
name: "EffectReverbTest",
defaults: [
- "libeffects-test-defaults",
+ "libeffects-test-defaults",
],
srcs: [
"EffectReverbTest.cpp",
@@ -29,7 +30,7 @@
cc_test {
name: "EffectBundleTest",
defaults: [
- "libeffects-test-defaults",
+ "libeffects-test-defaults",
],
srcs: [
"EffectBundleTest.cpp",
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index bb7e4c6..90406e9 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -36,20 +36,16 @@
BundleContext::BundleContext(int statusDepth, const Parameter::Common& common,
const lvm::BundleEffectType& type)
: EffectContext(statusDepth, common), mType(type) {
- LOG(DEBUG) << __func__ << type;
-
int inputChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
common.input.base.channelMask);
mSamplesPerSecond = common.input.base.sampleRate * inputChannelCount;
}
BundleContext::~BundleContext() {
- LOG(DEBUG) << __func__;
deInit();
}
RetCode BundleContext::init() {
- std::lock_guard lg(mMutex);
// init with pre-defined preset NORMAL
for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
mBandGainmB[i] = lvm::kSoftPresets[0 /* normal */][i] * 100;
@@ -88,7 +84,6 @@
}
void BundleContext::deInit() {
- std::lock_guard lg(mMutex);
if (mInstance) {
LVM_DelInstanceHandle(&mInstance);
mInstance = nullptr;
@@ -102,27 +97,23 @@
bool tempDisabled = false;
switch (mType) {
case lvm::BundleEffectType::EQUALIZER:
- LOG(DEBUG) << __func__ << " enable bundle EQ";
if (mSamplesToExitCountEq <= 0) mNumberEffectsEnabled++;
mSamplesToExitCountEq = (mSamplesPerSecond * 0.1);
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
break;
case lvm::BundleEffectType::BASS_BOOST:
- LOG(DEBUG) << __func__ << " enable bundle BB";
if (mSamplesToExitCountBb <= 0) mNumberEffectsEnabled++;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
mSamplesToExitCountBb = (mSamplesPerSecond * 0.1);
tempDisabled = mBassTempDisabled;
break;
case lvm::BundleEffectType::VIRTUALIZER:
- LOG(DEBUG) << __func__ << " enable bundle VR";
if (mSamplesToExitCountVirt <= 0) mNumberEffectsEnabled++;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
mSamplesToExitCountVirt = (mSamplesPerSecond * 0.1);
tempDisabled = mVirtualizerTempDisabled;
break;
case lvm::BundleEffectType::VOLUME:
- LOG(DEBUG) << __func__ << " enable bundle VOL";
if ((mEffectInDrain & (1 << int(lvm::BundleEffectType::VOLUME))) == 0)
mNumberEffectsEnabled++;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
@@ -134,30 +125,24 @@
RetCode BundleContext::enableOperatingMode() {
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
- switch (mType) {
- case lvm::BundleEffectType::EQUALIZER:
- LOG(DEBUG) << __func__ << " enable bundle EQ";
- params.EQNB_OperatingMode = LVM_EQNB_ON;
- break;
- case lvm::BundleEffectType::BASS_BOOST:
- LOG(DEBUG) << __func__ << " enable bundle BB";
- params.BE_OperatingMode = LVM_BE_ON;
- break;
- case lvm::BundleEffectType::VIRTUALIZER:
- LOG(DEBUG) << __func__ << " enable bundle VR";
- params.VirtualizerOperatingMode = LVM_MODE_ON;
- break;
- case lvm::BundleEffectType::VOLUME:
- LOG(DEBUG) << __func__ << " enable bundle VOL";
- break;
- }
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
+ switch (mType) {
+ case lvm::BundleEffectType::EQUALIZER:
+ params.EQNB_OperatingMode = LVM_EQNB_ON;
+ break;
+ case lvm::BundleEffectType::BASS_BOOST:
+ params.BE_OperatingMode = LVM_BE_ON;
+ break;
+ case lvm::BundleEffectType::VIRTUALIZER:
+ params.VirtualizerOperatingMode = LVM_MODE_ON;
+ break;
+ case lvm::BundleEffectType::VOLUME:
+ break;
}
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+
return limitLevel();
}
@@ -165,19 +150,15 @@
if (!mEnabled) return RetCode::ERROR_ILLEGAL_PARAMETER;
switch (mType) {
case lvm::BundleEffectType::EQUALIZER:
- LOG(DEBUG) << __func__ << " disable bundle EQ";
mEffectInDrain |= 1 << int(lvm::BundleEffectType::EQUALIZER);
break;
case lvm::BundleEffectType::BASS_BOOST:
- LOG(DEBUG) << __func__ << " disable bundle BB";
mEffectInDrain |= 1 << int(lvm::BundleEffectType::BASS_BOOST);
break;
case lvm::BundleEffectType::VIRTUALIZER:
- LOG(DEBUG) << __func__ << " disable bundle VR";
mEffectInDrain |= 1 << int(lvm::BundleEffectType::VIRTUALIZER);
break;
case lvm::BundleEffectType::VOLUME:
- LOG(DEBUG) << __func__ << " disable bundle VOL";
mEffectInDrain |= 1 << int(lvm::BundleEffectType::VOLUME);
break;
}
@@ -187,30 +168,23 @@
RetCode BundleContext::disableOperatingMode() {
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
- switch (mType) {
- case lvm::BundleEffectType::EQUALIZER:
- LOG(DEBUG) << __func__ << " disable bundle EQ";
- params.EQNB_OperatingMode = LVM_EQNB_OFF;
- break;
- case lvm::BundleEffectType::BASS_BOOST:
- LOG(DEBUG) << __func__ << " disable bundle BB";
- params.BE_OperatingMode = LVM_BE_OFF;
- break;
- case lvm::BundleEffectType::VIRTUALIZER:
- LOG(DEBUG) << __func__ << " disable bundle VR";
- params.VirtualizerOperatingMode = LVM_MODE_OFF;
- break;
- case lvm::BundleEffectType::VOLUME:
- LOG(DEBUG) << __func__ << " disable bundle VOL";
- break;
- }
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failGetControlParams");
+ switch (mType) {
+ case lvm::BundleEffectType::EQUALIZER:
+ params.EQNB_OperatingMode = LVM_EQNB_OFF;
+ break;
+ case lvm::BundleEffectType::BASS_BOOST:
+ params.BE_OperatingMode = LVM_BE_OFF;
+ break;
+ case lvm::BundleEffectType::VIRTUALIZER:
+ params.VirtualizerOperatingMode = LVM_MODE_OFF;
+ break;
+ case lvm::BundleEffectType::VOLUME:
+ break;
}
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
mEnabled = false;
return limitLevel();
}
@@ -223,89 +197,80 @@
float energyBassBoost = 0;
float crossCorrection = 0;
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- bool eqEnabled = params.EQNB_OperatingMode == LVM_EQNB_ON;
- bool bbEnabled = params.BE_OperatingMode == LVM_BE_ON;
- bool viEnabled = params.VirtualizerOperatingMode == LVM_MODE_ON;
+ bool eqEnabled = params.EQNB_OperatingMode == LVM_EQNB_ON;
+ bool bbEnabled = params.BE_OperatingMode == LVM_BE_ON;
+ bool viEnabled = params.VirtualizerOperatingMode == LVM_MODE_ON;
+
+ if (eqEnabled) {
+ for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ float bandFactor = mBandGainmB[i] / 1500.0;
+ float bandCoefficient = lvm::kBandEnergyCoefficient[i];
+ float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
+ if (bandEnergy > 0) energyContribution += bandEnergy;
+ }
+
+ // cross EQ coefficients
+ float bandFactorSum = 0;
+ for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
+ float bandFactor1 = mBandGainmB[i] / 1500.0;
+ float bandFactor2 = mBandGainmB[i + 1] / 1500.0;
+
+ if (bandFactor1 > 0 && bandFactor2 > 0) {
+ float crossEnergy =
+ bandFactor1 * bandFactor2 * lvm::kBandEnergyCrossCoefficient[i];
+ bandFactorSum += bandFactor1 * bandFactor2;
+
+ if (crossEnergy > 0) energyCross += crossEnergy;
+ }
+ }
+ bandFactorSum -= 1.0;
+ if (bandFactorSum > 0) crossCorrection = bandFactorSum * 0.7;
+ }
+ // BassBoost contribution
+ if (bbEnabled) {
+ float boostFactor = mBassStrengthSaved / 1000.0;
+ float boostCoefficient = lvm::kBassBoostEnergyCoefficient;
+
+ energyContribution += boostFactor * boostCoefficient * boostCoefficient;
if (eqEnabled) {
for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
float bandFactor = mBandGainmB[i] / 1500.0;
- float bandCoefficient = lvm::kBandEnergyCoefficient[i];
- float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
- if (bandEnergy > 0) energyContribution += bandEnergy;
- }
-
- // cross EQ coefficients
- float bandFactorSum = 0;
- for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
- float bandFactor1 = mBandGainmB[i] / 1500.0;
- float bandFactor2 = mBandGainmB[i + 1] / 1500.0;
-
- if (bandFactor1 > 0 && bandFactor2 > 0) {
- float crossEnergy =
- bandFactor1 * bandFactor2 * lvm::kBandEnergyCrossCoefficient[i];
- bandFactorSum += bandFactor1 * bandFactor2;
-
- if (crossEnergy > 0) energyCross += crossEnergy;
- }
- }
- bandFactorSum -= 1.0;
- if (bandFactorSum > 0) crossCorrection = bandFactorSum * 0.7;
- }
- // BassBoost contribution
- if (bbEnabled) {
- float boostFactor = mBassStrengthSaved / 1000.0;
- float boostCoefficient = lvm::kBassBoostEnergyCoefficient;
-
- energyContribution += boostFactor * boostCoefficient * boostCoefficient;
-
- if (eqEnabled) {
- for (unsigned int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
- float bandFactor = mBandGainmB[i] / 1500.0;
- float bandCrossCoefficient = lvm::kBassBoostEnergyCrossCoefficient[i];
- float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
- if (bandEnergy > 0) energyBassBoost += bandEnergy;
- }
+ float bandCrossCoefficient = lvm::kBassBoostEnergyCrossCoefficient[i];
+ float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
+ if (bandEnergy > 0) energyBassBoost += bandEnergy;
}
}
- // Virtualizer contribution
- if (viEnabled) {
- energyContribution += lvm::kVirtualizerContribution * lvm::kVirtualizerContribution;
- }
+ }
+ // Virtualizer contribution
+ if (viEnabled) {
+ energyContribution += lvm::kVirtualizerContribution * lvm::kVirtualizerContribution;
+ }
- double totalEnergyEstimation =
- sqrt(energyContribution + energyCross + energyBassBoost) - crossCorrection;
- LOG(INFO) << " TOTAL energy estimation: " << totalEnergyEstimation << " dB";
+ double totalEnergyEstimation =
+ sqrt(energyContribution + energyCross + energyBassBoost) - crossCorrection;
- // roundoff
- int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
- if (maxLevelRound + mVolume > 0) {
- gainCorrection = maxLevelRound + mVolume;
- }
+ // roundoff
+ int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
+ if (maxLevelRound + mVolumedB > 0) {
+ gainCorrection = maxLevelRound + mVolumedB;
+ }
- params.VC_EffectLevel = mVolume - gainCorrection;
- if (params.VC_EffectLevel < -96) {
- params.VC_EffectLevel = -96;
- }
- LOG(INFO) << "\tVol: " << mVolume << ", GainCorrection: " << gainCorrection
- << ", Actual vol: " << params.VC_EffectLevel;
+ params.VC_EffectLevel = mVolumedB - gainCorrection;
+ if (params.VC_EffectLevel < -96) {
+ params.VC_EffectLevel = -96;
+ }
+ /* Activate the initial settings */
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- /* Activate the initial settings */
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
-
- if (mFirstVolume) {
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetVolumeNoSmoothing(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setVolumeNoSmoothingFailed");
- LOG(INFO) << "\tLVM_VOLUME: Disabling Smoothing for first volume change to remove "
- "spikes/clicks";
- mFirstVolume = false;
- }
+ if (mFirstVolume) {
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetVolumeNoSmoothing(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setVolumeNoSmoothingFailed");
+ mFirstVolume = false;
}
return RetCode::SUCCESS;
@@ -439,17 +404,13 @@
float maxdB = std::max(leftdB, rightdB);
float pandB = rightdB - leftdB;
setVolumeLevel(maxdB);
- LOG(DEBUG) << __func__ << " pandB: " << pandB << " maxdB " << maxdB;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, "");
- params.VC_Balance = pandB;
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "");
+ params.VC_Balance = pandB;
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, "");
- }
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, "");
mVolumeStereo = volume;
return RetCode::SUCCESS;
}
@@ -469,7 +430,6 @@
RetCode ret = updateControlParameter(bandLevels);
if (RetCode::SUCCESS == ret) {
mCurPresetIdx = presetIdx;
- LOG(INFO) << __func__ << " success with " << presetIdx;
} else {
LOG(ERROR) << __func__ << " failed to setPreset " << presetIdx;
}
@@ -483,7 +443,6 @@
RetCode ret = updateControlParameter(bandLevels);
if (RetCode::SUCCESS == ret) {
mCurPresetIdx = lvm::PRESET_CUSTOM;
- LOG(INFO) << __func__ << " succeed with " << ::android::internal::ToString(bandLevels);
} else {
LOG(ERROR) << __func__ << " failed with " << ::android::internal::ToString(bandLevels);
}
@@ -502,14 +461,11 @@
std::vector<int32_t> BundleContext::getEqualizerCenterFreqs() {
std::vector<int32_t> freqs;
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- /* Get the current settings */
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params), freqs,
- " getControlParamFailed");
- for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
- freqs.push_back((int32_t)params.pEQNB_BandDefinition[i].Frequency * 1000);
- }
+ /* Get the current settings */
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params), freqs,
+ " getControlParamFailed");
+ for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ freqs.push_back((int32_t)params.pEQNB_BandDefinition[i].Frequency * 1000);
}
return freqs;
@@ -533,68 +489,59 @@
}
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
- params.pEQNB_BandDefinition[i].Frequency = lvm::kPresetsFrequencies[i];
- params.pEQNB_BandDefinition[i].QFactor = lvm::kPresetsQFactors[i];
- params.pEQNB_BandDefinition[i].Gain =
- tempLevel[i] > 0 ? (tempLevel[i] + 50) / 100 : (tempLevel[i] - 50) / 100;
- }
-
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+ for (std::size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ params.pEQNB_BandDefinition[i].Frequency = lvm::kPresetsFrequencies[i];
+ params.pEQNB_BandDefinition[i].QFactor = lvm::kPresetsQFactors[i];
+ params.pEQNB_BandDefinition[i].Gain =
+ tempLevel[i] > 0 ? (tempLevel[i] + 50) / 100 : (tempLevel[i] - 50) / 100;
}
- mBandGainmB = tempLevel;
- LOG(DEBUG) << __func__ << " update bandGain to " << ::android::internal::ToString(mBandGainmB)
- << "mdB";
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
+ mBandGainmB = tempLevel;
return RetCode::SUCCESS;
}
RetCode BundleContext::setBassBoostStrength(int strength) {
// Update Control Parameter
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.BE_EffectLevel = (LVM_INT16)((15 * strength) / 1000);
- params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
+ params.BE_EffectLevel = (LVM_INT16)((15 * strength) / 1000);
+ params.BE_CentreFreq = LVM_BE_CENTRE_90Hz;
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
mBassStrengthSaved = strength;
- LOG(INFO) << __func__ << " success with strength " << strength;
return limitLevel();
}
RetCode BundleContext::setVolumeLevel(float level) {
if (mMuteEnabled) {
- mLevelSaved = level;
+ mLevelSaveddB = level;
} else {
- mVolume = level;
+ mVolumedB = level;
}
- LOG(INFO) << __func__ << " success with level " << level;
return limitLevel();
}
float BundleContext::getVolumeLevel() const {
- return (mMuteEnabled ? mLevelSaved : mVolume);
+ return (mMuteEnabled ? mLevelSaveddB : mVolumedB);
}
RetCode BundleContext::setVolumeMute(bool mute) {
mMuteEnabled = mute;
if (mMuteEnabled) {
- mLevelSaved = mVolume;
- mVolume = -96;
+ mLevelSaveddB = mVolumedB;
+ mVolumedB = -96;
} else {
- mVolume = mLevelSaved;
+ mVolumedB = mLevelSaveddB;
}
return limitLevel();
}
@@ -602,19 +549,15 @@
RetCode BundleContext::setVirtualizerStrength(int strength) {
// Update Control Parameter
LVM_ControlParams_t params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.CS_EffectLevel = ((strength * 32767) / 1000);
+ params.CS_EffectLevel = ((strength * 32767) / 1000);
- RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
mVirtStrengthSaved = strength;
- LOG(INFO) << __func__ << " success with strength " << strength;
return limitLevel();
}
@@ -747,7 +690,7 @@
return angles;
}
-IEffect::Status BundleContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status BundleContext::process(float* in, float* out, int samples) {
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!in, status, "nullInput");
RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -760,29 +703,24 @@
auto frameSize = getInputFrameSize();
RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
- LOG(DEBUG) << __func__ << " start processing";
if ((mEffectProcessCalled & 1 << int(mType)) != 0) {
const int undrainedEffects = mEffectInDrain & ~mEffectProcessCalled;
if ((undrainedEffects & 1 << int(lvm::BundleEffectType::EQUALIZER)) != 0) {
- LOG(DEBUG) << "Draining EQUALIZER";
mSamplesToExitCountEq = 0;
--mNumberEffectsEnabled;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
}
if ((undrainedEffects & 1 << int(lvm::BundleEffectType::BASS_BOOST)) != 0) {
- LOG(DEBUG) << "Draining BASS_BOOST";
mSamplesToExitCountBb = 0;
--mNumberEffectsEnabled;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
}
if ((undrainedEffects & 1 << int(lvm::BundleEffectType::VIRTUALIZER)) != 0) {
- LOG(DEBUG) << "Draining VIRTUALIZER";
mSamplesToExitCountVirt = 0;
--mNumberEffectsEnabled;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
}
if ((undrainedEffects & 1 << int(lvm::BundleEffectType::VOLUME)) != 0) {
- LOG(DEBUG) << "Draining VOLUME";
--mNumberEffectsEnabled;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
}
@@ -800,7 +738,6 @@
mNumberEffectsEnabled--;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::EQUALIZER));
}
- LOG(DEBUG) << "Effect_process() this is the last frame for EQUALIZER";
}
break;
case lvm::BundleEffectType::BASS_BOOST:
@@ -813,7 +750,6 @@
mNumberEffectsEnabled--;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::BASS_BOOST));
}
- LOG(DEBUG) << "Effect_process() this is the last frame for BASS_BOOST";
}
break;
case lvm::BundleEffectType::VIRTUALIZER:
@@ -826,7 +762,6 @@
mNumberEffectsEnabled--;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VIRTUALIZER));
}
- LOG(DEBUG) << "Effect_process() this is the last frame for VIRTUALIZER";
}
break;
case lvm::BundleEffectType::VOLUME:
@@ -835,14 +770,13 @@
mNumberEffectsEnabled--;
mEffectInDrain &= ~(1 << int(lvm::BundleEffectType::VOLUME));
}
- LOG(DEBUG) << "Effect_process() LVM_VOLUME Effect is not enabled";
break;
}
}
if (isDataAvailable) {
mNumberEffectsCalled++;
}
- bool accumulate = false;
+
if (mNumberEffectsCalled >= mNumberEffectsEnabled) {
// We expect the # effects called to be equal to # effects enabled in sequence (including
// draining effects). Warn if this is not the case due to inconsistent calls.
@@ -850,37 +784,33 @@
"%s Number of effects called %d is greater than number of effects enabled %d",
__func__, mNumberEffectsCalled, mNumberEffectsEnabled);
mEffectProcessCalled = 0; // reset our consistency check.
- if (!isDataAvailable) {
- LOG(DEBUG) << "Effect_process() processing last frame";
- }
mNumberEffectsCalled = 0;
- float* outTmp = (accumulate ? getWorkBuffer() : out);
- /* Process the samples */
- LVM_ReturnStatus_en lvmStatus;
- {
- std::lock_guard lg(mMutex);
-
- lvmStatus = LVM_Process(mInstance, in, outTmp, inputFrameCount, 0);
+ int frames = samples * sizeof(float) / frameSize;
+ int bufferIndex = 0;
+ // LVM library supports max of int16_t frames at a time and should be multiple of
+ // kBlockSizeMultiple.
+ constexpr int kBlockSizeMultiple = 4;
+ constexpr int kMaxBlockFrames =
+ (std::numeric_limits<int16_t>::max() / kBlockSizeMultiple) * kBlockSizeMultiple;
+ while (frames > 0) {
+ /* Process the samples */
+ LVM_ReturnStatus_en lvmStatus;
+ int processFrames = std::min(frames, kMaxBlockFrames);
+ lvmStatus = LVM_Process(mInstance, in + bufferIndex, out + bufferIndex,
+ processFrames, 0);
if (lvmStatus != LVM_SUCCESS) {
- LOG(ERROR) << __func__ << lvmStatus;
+ LOG(ERROR) << "LVM_Process failed with error: " << lvmStatus;
return {EX_UNSUPPORTED_OPERATION, 0, 0};
}
- if (accumulate) {
- for (int i = 0; i < samples; i++) {
- out[i] += outTmp[i];
- }
- }
+ frames -= processFrames;
+ int processedSize = processFrames * frameSize / sizeof(float);
+ bufferIndex += processedSize;
}
} else {
for (int i = 0; i < samples; i++) {
- if (accumulate) {
- out[i] += in[i];
- } else {
- out[i] = in[i];
- }
+ out[i] = in[i];
}
}
- LOG(DEBUG) << __func__ << " done processing";
return {STATUS_OK, samples, samples};
}
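The rewritten process() above feeds LVM in bounded chunks because LVM_Process accepts at most int16_t frames per call and the block size is kept a multiple of 4. A self-contained sketch of that chunking loop, with processBlock as a hypothetical stand-in for LVM_Process and the frame size assumed to be channels * sizeof(float):

#include <algorithm>
#include <cstdint>
#include <limits>

void processInBlocks(const float* in, float* out, int frames, int channels,
                     void (*processBlock)(const float*, float*, int)) {
    constexpr int kBlockSizeMultiple = 4;
    constexpr int kMaxBlockFrames =
            (std::numeric_limits<int16_t>::max() / kBlockSizeMultiple) * kBlockSizeMultiple;
    int offset = 0;  // offset in samples, advanced by frames * channels per block
    while (frames > 0) {
        const int processFrames = std::min(frames, kMaxBlockFrames);
        processBlock(in + offset, out + offset, processFrames);
        frames -= processFrames;
        offset += processFrames * channels;
    }
}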
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index 809f402..ba3997a 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -17,7 +17,6 @@
#pragma once
#include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
#include <array>
#include <cstddef>
@@ -85,15 +84,14 @@
RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
Parameter::VolumeStereo getVolumeStereo() override { return {1.0f, 1.0f}; }
- IEffect::Status lvmProcess(float* in, float* out, int samples);
+ IEffect::Status process(float* in, float* out, int samples);
IEffect::Status processEffect(float* in, float* out, int sampleToProcess);
private:
- std::mutex mMutex;
const lvm::BundleEffectType mType;
bool mEnabled = false;
- LVM_Handle_t mInstance GUARDED_BY(mMutex);
+ LVM_Handle_t mInstance;
aidl::android::media::audio::common::AudioDeviceDescription mVirtualizerForcedDevice;
@@ -124,8 +122,8 @@
bool mVirtualizerTempDisabled = false;
::aidl::android::media::audio::common::AudioDeviceDescription mForceDevice;
// Volume
- float mLevelSaved = 0; /* for when mute is set, level must be saved */
- float mVolume = 0;
+ float mLevelSaveddB = 0; /* for when mute is set, level must be saved */
+ float mVolumedB = 0;
bool mMuteEnabled = false; /* Must store as mute = -96dB level */
RetCode initControlParameter(LVM_ControlParams_t& params) const;
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
index 143329d..daabdb7 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
@@ -129,8 +129,7 @@
.implementor = "NXP Software Ltd."},
.capability = kVirtualizerCap};
-static const std::vector<Range::VolumeRange> kVolumeRanges = {
- MAKE_RANGE(Volume, levelDb, -9600, 0)};
+static const std::vector<Range::VolumeRange> kVolumeRanges = {MAKE_RANGE(Volume, levelDb, -96, 0)};
static const Capability kVolumeCap = {.range = kVolumeRanges};
static const std::string kVolumeEffectName = "Volume";
static const Descriptor kVolumeDesc = {
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
index 257e972..70c276d 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -55,7 +55,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<EffectBundleAidl>(*uuid);
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -83,7 +82,6 @@
namespace aidl::android::hardware::audio::effect {
EffectBundleAidl::EffectBundleAidl(const AudioUuid& uuid) {
- LOG(DEBUG) << __func__ << uuid.toString();
if (uuid == getEffectImplUuidEqualizerBundle()) {
mType = lvm::BundleEffectType::EQUALIZER;
mDescriptor = &lvm::kEqualizerDesc;
@@ -107,12 +105,10 @@
EffectBundleAidl::~EffectBundleAidl() {
cleanUp();
- LOG(DEBUG) << __func__;
}
ndk::ScopedAStatus EffectBundleAidl::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << _aidl_return->toString();
*_aidl_return = *mDescriptor;
return ndk::ScopedAStatus::ok();
}
@@ -154,7 +150,6 @@
}
ndk::ScopedAStatus EffectBundleAidl::setParameterSpecific(const Parameter::Specific& specific) {
- LOG(DEBUG) << __func__ << " specific " << specific.toString();
RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
auto tag = specific.getTag();
@@ -458,7 +453,7 @@
IEffect::Status EffectBundleAidl::effectProcessImpl(float* in, float* out, int sampleToProcess) {
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!mContext, status, "nullContext");
- return mContext->lvmProcess(in, out, sampleToProcess);
+ return mContext->process(in, out, sampleToProcess);
}
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h b/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
index d31763b..ea1a8fe 100644
--- a/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
+++ b/media/libeffects/lvm/wrapper/Aidl/GlobalSession.h
@@ -21,7 +21,6 @@
#include <unordered_map>
#include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
#include "BundleContext.h"
#include "BundleTypes.h"
@@ -41,11 +40,6 @@
return instance;
}
- bool isSessionIdExist(int sessionId) {
- std::lock_guard lg(mMutex);
- return mSessionMap.count(sessionId);
- }
-
static bool findBundleTypeInList(std::vector<std::shared_ptr<BundleContext>>& list,
const lvm::BundleEffectType& type, bool remove = false) {
auto itor = std::find_if(list.begin(), list.end(),
@@ -69,8 +63,7 @@
std::shared_ptr<BundleContext> createSession(const lvm::BundleEffectType& type, int statusDepth,
const Parameter::Common& common) {
int sessionId = common.session;
- LOG(DEBUG) << __func__ << type << " with sessionId " << sessionId;
- std::lock_guard lg(mMutex);
+ LOG(VERBOSE) << __func__ << type << " with sessionId " << sessionId;
if (mSessionMap.count(sessionId) == 0 && mSessionMap.size() >= MAX_BUNDLE_SESSIONS) {
LOG(ERROR) << __func__ << " exceed max bundle session";
return nullptr;
@@ -97,8 +90,7 @@
}
void releaseSession(const lvm::BundleEffectType& type, int sessionId) {
- LOG(DEBUG) << __func__ << type << " sessionId " << sessionId;
- std::lock_guard lg(mMutex);
+ LOG(VERBOSE) << __func__ << type << " sessionId " << sessionId;
if (mSessionMap.count(sessionId)) {
auto& list = mSessionMap[sessionId];
if (!findBundleTypeInList(list, type, true /* remove */)) {
@@ -112,11 +104,9 @@
}
private:
- // Lock for mSessionMap access.
- std::mutex mMutex;
// Max session number supported.
static constexpr int MAX_BUNDLE_SESSIONS = 32;
std::unordered_map<int /* session ID */, std::vector<std::shared_ptr<BundleContext>>>
- mSessionMap GUARDED_BY(mMutex);
+ mSessionMap;
};
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index 62837b9..781aad6 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -1,5 +1,6 @@
// music bundle wrapper
package {
+ default_team: "trendy_team_media_framework_audio",
default_applicable_licenses: [
"frameworks_av_media_libeffects_lvm_wrapper_license",
],
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
index f9afe69..4d369b1 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
@@ -55,7 +55,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<EffectReverb>(*uuid);
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -82,7 +81,6 @@
namespace aidl::android::hardware::audio::effect {
EffectReverb::EffectReverb(const AudioUuid& uuid) {
- LOG(DEBUG) << __func__ << uuid.toString();
if (uuid == getEffectImplUuidAuxEnvReverb()) {
mType = lvm::ReverbEffectType::AUX_ENV;
mDescriptor = &lvm::kAuxEnvReverbDesc;
@@ -106,18 +104,16 @@
EffectReverb::~EffectReverb() {
cleanUp();
- LOG(DEBUG) << __func__;
}
ndk::ScopedAStatus EffectReverb::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << _aidl_return->toString();
*_aidl_return = *mDescriptor;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectReverb::setParameterSpecific(const Parameter::Specific& specific) {
- LOG(DEBUG) << __func__ << " specific " << specific.toString();
+ LOG(VERBOSE) << __func__ << " specific " << specific.toString();
RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
auto tag = specific.getTag();
@@ -390,7 +386,7 @@
IEffect::Status EffectReverb::effectProcessImpl(float* in, float* out, int sampleToProcess) {
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!mContext, status, "nullContext");
- return mContext->lvmProcess(in, out, sampleToProcess);
+ return mContext->process(in, out, sampleToProcess);
}
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 468b268..67518af 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -68,24 +68,21 @@
// allocate lvm reverb instance
LVREV_ReturnStatus_en status = LVREV_SUCCESS;
- {
- std::lock_guard lg(mMutex);
- LVREV_InstanceParams_st params = {
- .MaxBlockSize = lvm::kMaxCallSize,
- // Max format, could be mono during process
- .SourceFormat = LVM_STEREO,
- .NumDelays = LVREV_DELAYLINES_4,
- };
- /* Init sets the instance handle */
- status = LVREV_GetInstanceHandle(&mInstance, &params);
- GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_GetInstanceHandleFailed");
+ LVREV_InstanceParams_st params = {
+ .MaxBlockSize = lvm::kMaxCallSize,
+ // Max format, could be mono during process
+ .SourceFormat = LVM_STEREO,
+ .NumDelays = LVREV_DELAYLINES_4,
+ };
+ /* Init sets the instance handle */
+ status = LVREV_GetInstanceHandle(&mInstance, &params);
+ GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_GetInstanceHandleFailed");
- // set control
- LVREV_ControlParams_st controlParams;
- initControlParameter(controlParams);
- status = LVREV_SetControlParameters(mInstance, &controlParams);
- GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_SetControlParametersFailed");
- }
+ // set control
+ LVREV_ControlParams_st controlParams;
+ initControlParameter(controlParams);
+ status = LVREV_SetControlParameters(mInstance, &controlParams);
+ GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_SetControlParametersFailed");
return RetCode::SUCCESS;
@@ -95,7 +92,6 @@
}
void ReverbContext::deInit() {
- std::lock_guard lg(mMutex);
if (mInstance) {
LVREV_FreeInstance(mInstance);
mInstance = nullptr;
@@ -143,19 +139,16 @@
RetCode ReverbContext::setEnvironmentalReverbRoomLevel(int roomLevel) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- // Sum of room and reverb level controls
- // needs to subtract max levels for both room level and reverb level
- int combinedLevel = (roomLevel + mLevel) - lvm::kMaxReverbLevel;
- params.Level = convertLevel(combinedLevel);
+ // Sum of room and reverb level controls
+ // needs to subtract max levels for both room level and reverb level
+ int combinedLevel = (roomLevel + mLevel) - lvm::kMaxReverbLevel;
+ params.Level = convertLevel(combinedLevel);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
mRoomLevel = roomLevel;
return RetCode::SUCCESS;
}
@@ -163,16 +156,13 @@
RetCode ReverbContext::setEnvironmentalReverbRoomHfLevel(int roomHfLevel) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.LPF = convertHfLevel(roomHfLevel);
+ params.LPF = convertHfLevel(roomHfLevel);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
mRoomHfLevel = roomHfLevel;
return RetCode::SUCCESS;
}
@@ -185,17 +175,15 @@
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.T60 = (LVM_UINT16)time;
- mSamplesToExitCount = (params.T60 * mCommon.input.base.sampleRate) / 1000;
+ params.T60 = (LVM_UINT16)time;
+ mSamplesToExitCount = (params.T60 * mCommon.input.base.sampleRate) / 1000;
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
mDecayTime = time;
return RetCode::SUCCESS;
}
@@ -203,16 +191,13 @@
RetCode ReverbContext::setEnvironmentalReverbDecayHfRatio(int decayHfRatio) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.Damping = (LVM_INT16)(decayHfRatio / 20);
+ params.Damping = (LVM_INT16)(decayHfRatio / 20);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
mDecayHfRatio = decayHfRatio;
return RetCode::SUCCESS;
}
@@ -220,19 +205,17 @@
RetCode ReverbContext::setEnvironmentalReverbLevel(int level) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- // Sum of room and reverb level controls
- // needs to subtract max levels for both room level and level
- int combinedLevel = (level + mRoomLevel) - lvm::kMaxReverbLevel;
- params.Level = convertLevel(combinedLevel);
+ // Sum of room and reverb level controls
+ // needs to subtract max levels for both room level and level
+ int combinedLevel = (level + mRoomLevel) - lvm::kMaxReverbLevel;
+ params.Level = convertLevel(combinedLevel);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
mLevel = level;
return RetCode::SUCCESS;
}
@@ -245,16 +228,14 @@
RetCode ReverbContext::setEnvironmentalReverbDiffusion(int diffusion) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.Density = (LVM_INT16)(diffusion / 10);
+ params.Density = (LVM_INT16)(diffusion / 10);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
mDiffusion = diffusion;
return RetCode::SUCCESS;
}
@@ -262,16 +243,14 @@
RetCode ReverbContext::setEnvironmentalReverbDensity(int density) {
// Update Control Parameter
LVREV_ControlParams_st params;
- {
- std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
- params.RoomSize = (LVM_INT16)(((density * 99) / 1000) + 1);
+ params.RoomSize = (LVM_INT16)(((density * 99) / 1000) + 1);
- RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
- RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
- }
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, &params),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+
mDensity = density;
return RetCode::SUCCESS;
}
@@ -352,7 +331,7 @@
return kDefaultLPF;
}
-IEffect::Status ReverbContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status ReverbContext::process(float* in, float* out, int samples) {
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!in, status, "nullInput");
RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -362,9 +341,6 @@
RETURN_VALUE_IF(inputFrameCount != outputFrameCount, status, "FrameCountMismatch");
RETURN_VALUE_IF(0 == getInputFrameSize(), status, "zeroFrameSize");
- LOG(DEBUG) << __func__ << " start processing";
- std::lock_guard lg(mMutex);
-
int channels = ::aidl::android::hardware::audio::common::getChannelCount(
mCommon.input.base.channelMask);
int outChannels = ::aidl::android::hardware::audio::common::getChannelCount(
@@ -405,7 +381,6 @@
} else {
if (!mEnabled && mSamplesToExitCount > 0) {
std::fill(outFrames.begin(), outFrames.end(), 0);
- LOG(VERBOSE) << "Zeroing " << channels << " samples per frame at the end of call ";
}
/* Process the samples, producing a stereo output */
@@ -415,7 +390,7 @@
outFrames.data(), /* Output buffer */
frameCount); /* Number of samples to read */
if (lvrevStatus != LVREV_SUCCESS) {
- LOG(ERROR) << __func__ << lvrevStatus;
+ LOG(ERROR) << __func__ << " LVREV_Process error: " << lvrevStatus;
return {EX_UNSUPPORTED_OPERATION, 0, 0};
}
}
@@ -464,19 +439,10 @@
}
}
- bool accumulate = false;
if (outChannels > 2) {
- // Accumulate if required
- if (accumulate) {
- for (int i = 0; i < frameCount; i++) {
- out[outChannels * i] += outFrames[FCC_2 * i];
- out[outChannels * i + 1] += outFrames[FCC_2 * i + 1];
- }
- } else {
- for (int i = 0; i < frameCount; i++) {
- out[outChannels * i] = outFrames[FCC_2 * i];
- out[outChannels * i + 1] = outFrames[FCC_2 * i + 1];
- }
+ for (int i = 0; i < frameCount; i++) {
+ out[outChannels * i] = outFrames[FCC_2 * i];
+ out[outChannels * i + 1] = outFrames[FCC_2 * i + 1];
}
if (!isAuxiliary()) {
for (int i = 0; i < frameCount; i++) {
@@ -487,29 +453,15 @@
}
}
} else {
- if (accumulate) {
- if (outChannels == FCC_1) {
- for (int i = 0; i < frameCount; i++) {
- out[i] += ((outFrames[i * FCC_2] + outFrames[i * FCC_2 + 1]) * 0.5f);
- }
- } else {
- for (int i = 0; i < frameCount * FCC_2; i++) {
- out[i] += outFrames[i];
- }
- }
+ if (outChannels == FCC_1) {
+ From2iToMono_Float(outFrames.data(), out, frameCount);
} else {
- if (outChannels == FCC_1) {
- From2iToMono_Float(outFrames.data(), out, frameCount);
- } else {
- for (int i = 0; i < frameCount * FCC_2; i++) {
- out[i] = outFrames[i];
- }
+ for (int i = 0; i < frameCount * FCC_2; i++) {
+ out[i] = outFrames[i];
}
}
}
- LOG(DEBUG) << __func__ << " done processing";
-
if (!mEnabled && mSamplesToExitCount > 0) {
// signed - unsigned will trigger integer overflow if result becomes negative.
mSamplesToExitCount -= samples;
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
index d11a081..44391f2 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
@@ -17,7 +17,6 @@
#pragma once
#include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
#include <unordered_map>
#include "ReverbTypes.h"
@@ -100,7 +99,7 @@
}
bool getReflectionsLevel() const { return mReflectionsLevelMb; }
- IEffect::Status lvmProcess(float* in, float* out, int samples);
+ IEffect::Status process(float* in, float* out, int samples);
private:
static constexpr inline float kUnitVolume = 1;
@@ -158,10 +157,9 @@
{-400, -600, 1800, 700, -2000, 30, -1400, 60, 1000, 1000}},
{PresetReverb::Presets::PLATE, {-400, -200, 1300, 900, 0, 2, 0, 10, 1000, 750}}};
- std::mutex mMutex;
const lvm::ReverbEffectType mType;
bool mEnabled = false;
- LVREV_Handle_t mInstance GUARDED_BY(mMutex);
+ LVREV_Handle_t mInstance = LVM_NULL;
int mRoomLevel = 0;
int mRoomHfLevel = 0;
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index 994b061..d658536 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -1,5 +1,6 @@
// audio preprocessing wrapper
package {
+ default_team: "trendy_team_media_framework_audio",
default_applicable_licenses: [
"frameworks_av_media_libeffects_preprocessing_license",
],
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
index 7552804..87d267b 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
@@ -50,7 +50,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<EffectPreProcessing>(*uuid);
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -78,7 +77,6 @@
namespace aidl::android::hardware::audio::effect {
EffectPreProcessing::EffectPreProcessing(const AudioUuid& uuid) {
- LOG(DEBUG) << __func__ << uuid.toString();
if (uuid == getEffectImplUuidAcousticEchoCancelerSw()) {
mType = PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION;
mDescriptor = &kAcousticEchoCancelerDesc;
@@ -102,18 +100,16 @@
EffectPreProcessing::~EffectPreProcessing() {
cleanUp();
- LOG(DEBUG) << __func__;
}
ndk::ScopedAStatus EffectPreProcessing::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
- LOG(DEBUG) << _aidl_return->toString();
*_aidl_return = *mDescriptor;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectPreProcessing::setParameterSpecific(const Parameter::Specific& specific) {
- LOG(DEBUG) << __func__ << " specific " << specific.toString();
+ LOG(VERBOSE) << __func__ << " specific " << specific.toString();
RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
auto tag = specific.getTag();
@@ -446,7 +442,7 @@
IEffect::Status EffectPreProcessing::effectProcessImpl(float* in, float* out, int sampleToProcess) {
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!mContext, status, "nullContext");
- return mContext->lvmProcess(in, out, sampleToProcess);
+ return mContext->process(in, out, sampleToProcess);
}
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
index 2c44e5c..2d549ef 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
@@ -26,7 +26,6 @@
using aidl::android::media::audio::common::AudioDeviceType;
RetCode PreProcessingContext::init(const Parameter::Common& common) {
- std::lock_guard lg(mMutex);
webrtc::AudioProcessingBuilder apBuilder;
mAudioProcessingModule = apBuilder.Create();
if (mAudioProcessingModule == nullptr) {
@@ -64,7 +63,6 @@
}
RetCode PreProcessingContext::deInit() {
- std::lock_guard lg(mMutex);
mAudioProcessingModule = nullptr;
mState = PRE_PROC_STATE_UNINITIALIZED;
return RetCode::SUCCESS;
@@ -75,7 +73,6 @@
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
int typeMsk = (1 << int(mType));
- std::lock_guard lg(mMutex);
// Check if effect is already enabled.
if ((mEnabledMsk & typeMsk) == typeMsk) {
return RetCode::ERROR_ILLEGAL_PARAMETER;
@@ -110,7 +107,6 @@
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
int typeMsk = (1 << int(mType));
- std::lock_guard lg(mMutex);
// Check if effect is already disabled.
if ((mEnabledMsk & typeMsk) != typeMsk) {
return RetCode::ERROR_ILLEGAL_PARAMETER;
@@ -160,7 +156,6 @@
RetCode PreProcessingContext::setAcousticEchoCancelerEchoDelay(int echoDelayUs) {
mEchoDelayUs = echoDelayUs;
- std::lock_guard lg(mMutex);
mAudioProcessingModule->set_stream_delay_ms(mEchoDelayUs / 1000);
return RetCode::SUCCESS;
}
@@ -171,7 +166,6 @@
RetCode PreProcessingContext::setAcousticEchoCancelerMobileMode(bool mobileMode) {
mMobileMode = mobileMode;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.echo_canceller.mobile_mode = mobileMode;
mAudioProcessingModule->ApplyConfig(config);
@@ -184,7 +178,6 @@
RetCode PreProcessingContext::setAutomaticGainControlV1TargetPeakLevel(int targetPeakLevel) {
mTargetPeakLevel = targetPeakLevel;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.gain_controller1.target_level_dbfs = -(mTargetPeakLevel / 100);
mAudioProcessingModule->ApplyConfig(config);
@@ -197,7 +190,6 @@
RetCode PreProcessingContext::setAutomaticGainControlV1MaxCompressionGain(int maxCompressionGain) {
mMaxCompressionGain = maxCompressionGain;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.gain_controller1.compression_gain_db = mMaxCompressionGain / 100;
mAudioProcessingModule->ApplyConfig(config);
@@ -210,7 +202,6 @@
RetCode PreProcessingContext::setAutomaticGainControlV1EnableLimiter(bool enableLimiter) {
mEnableLimiter = enableLimiter;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.gain_controller1.enable_limiter = mEnableLimiter;
mAudioProcessingModule->ApplyConfig(config);
@@ -223,7 +214,6 @@
RetCode PreProcessingContext::setAutomaticGainControlV2DigitalGain(int gain) {
mDigitalGain = gain;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.gain_controller2.fixed_digital.gain_db = mDigitalGain;
mAudioProcessingModule->ApplyConfig(config);
@@ -256,7 +246,6 @@
RetCode PreProcessingContext::setNoiseSuppressionLevel(NoiseSuppression::Level level) {
mLevel = level;
- std::lock_guard lg(mMutex);
auto config = mAudioProcessingModule->GetConfig();
config.noise_suppression.level =
(webrtc::AudioProcessing::Config::NoiseSuppression::Level)level;
@@ -268,7 +257,7 @@
return mLevel;
}
-IEffect::Status PreProcessingContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status PreProcessingContext::process(float* in, float* out, int samples) {
IEffect::Status status = {EX_NULL_POINTER, 0, 0};
RETURN_VALUE_IF(!in, status, "nullInput");
RETURN_VALUE_IF(!out, status, "nullOutput");
@@ -278,9 +267,6 @@
RETURN_VALUE_IF(inputFrameCount != outputFrameCount, status, "FrameCountMismatch");
RETURN_VALUE_IF(0 == getInputFrameSize(), status, "zeroFrameSize");
- LOG(DEBUG) << __func__ << " start processing";
- std::lock_guard lg(mMutex);
-
mProcessedMsk |= (1 << int(mType));
// webrtc implementation clear out was_stream_delay_set every time after ProcessStream() call
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.h b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
index 9ba1bbe..11a2bea 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.h
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
@@ -17,7 +17,6 @@
#pragma once
#include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
#include <audio_processing.h>
#include <unordered_map>
@@ -37,10 +36,9 @@
PreProcessingContext(int statusDepth, const Parameter::Common& common,
const PreProcessingEffectType& type)
: EffectContext(statusDepth, common), mType(type) {
- LOG(DEBUG) << __func__ << type;
mState = PRE_PROC_STATE_UNINITIALIZED;
}
- ~PreProcessingContext() override { LOG(DEBUG) << __func__; }
+ ~PreProcessingContext() = default;
RetCode init(const Parameter::Common& common);
RetCode deInit();
@@ -76,7 +74,7 @@
RetCode setNoiseSuppressionLevel(NoiseSuppression::Level level);
NoiseSuppression::Level getNoiseSuppressionLevel() const;
- IEffect::Status lvmProcess(float* in, float* out, int samples);
+ IEffect::Status process(float* in, float* out, int samples);
private:
static constexpr inline int kAgcDefaultTargetLevel = 3;
@@ -85,20 +83,19 @@
static constexpr inline webrtc::AudioProcessing::Config::NoiseSuppression::Level
kNsDefaultLevel = webrtc::AudioProcessing::Config::NoiseSuppression::kModerate;
- std::mutex mMutex;
const PreProcessingEffectType mType;
PreProcEffectState mState; // current state
// handle on webRTC audio processing module (APM)
- rtc::scoped_refptr<webrtc::AudioProcessing> mAudioProcessingModule GUARDED_BY(mMutex);
+ rtc::scoped_refptr<webrtc::AudioProcessing> mAudioProcessingModule;
- int mEnabledMsk GUARDED_BY(mMutex); // bit field containing IDs of enabled pre processors
- int mProcessedMsk GUARDED_BY(mMutex); // bit field containing IDs of pre processors already
+ int mEnabledMsk; // bit field containing IDs of enabled pre processors
+ int mProcessedMsk; // bit field containing IDs of pre processors already
// processed in current round
- int mRevEnabledMsk GUARDED_BY(mMutex); // bit field containing IDs of enabled pre processors
+ int mRevEnabledMsk; // bit field containing IDs of enabled pre processors
// with reverse channel
- int mRevProcessedMsk GUARDED_BY(mMutex); // bit field containing IDs of pre processors with
- // reverse channel already processed in current round
+ int mRevProcessedMsk; // bit field containing IDs of pre processors with
+ // reverse channel already processed in current round
webrtc::StreamConfig mInputConfig; // input stream configuration
webrtc::StreamConfig mOutputConfig; // output stream configuration
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingSession.h b/media/libeffects/preprocessing/aidl/PreProcessingSession.h
index 877292f..4a66e81 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingSession.h
+++ b/media/libeffects/preprocessing/aidl/PreProcessingSession.h
@@ -21,7 +21,6 @@
#include <unordered_map>
#include <android-base/logging.h>
-#include <android-base/thread_annotations.h>
#include "PreProcessingContext.h"
#include "PreProcessingTypes.h"
@@ -67,7 +66,6 @@
const Parameter::Common& common) {
int sessionId = common.session;
LOG(DEBUG) << __func__ << type << " with sessionId " << sessionId;
- std::lock_guard lg(mMutex);
if (mSessionMap.count(sessionId) == 0 && mSessionMap.size() >= MAX_PRE_PROC_SESSIONS) {
LOG(ERROR) << __func__ << " exceed max bundle session";
return nullptr;
@@ -95,7 +93,6 @@
void releaseSession(const PreProcessingEffectType& type, int sessionId) {
LOG(DEBUG) << __func__ << type << " sessionId " << sessionId;
- std::lock_guard lg(mMutex);
if (mSessionMap.count(sessionId)) {
auto& list = mSessionMap[sessionId];
if (!findPreProcessingTypeInList(list, type, true /* remove */)) {
@@ -109,11 +106,9 @@
}
private:
- // Lock for mSessionMap access.
- std::mutex mMutex;
// Max session number supported.
static constexpr int MAX_PRE_PROC_SESSIONS = 8;
std::unordered_map<int /* session ID */, std::vector<std::shared_ptr<PreProcessingContext>>>
- mSessionMap GUARDED_BY(mMutex);
+ mSessionMap;
};
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/preprocessing/benchmarks/Android.bp b/media/libeffects/preprocessing/benchmarks/Android.bp
index fbbcab4..ca99bf8 100644
--- a/media/libeffects/preprocessing/benchmarks/Android.bp
+++ b/media/libeffects/preprocessing/benchmarks/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_media_libeffects_preprocessing_license"
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
index d80b135..ad8d84d 100644
--- a/media/libeffects/preprocessing/tests/Android.bp
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -1,5 +1,6 @@
// audio preprocessing unit test
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_media_libeffects_preprocessing_license"
diff --git a/media/libeffects/spatializer/benchmarks/Android.bp b/media/libeffects/spatializer/benchmarks/Android.bp
index ab7e468..2d07a9b 100644
--- a/media/libeffects/spatializer/benchmarks/Android.bp
+++ b/media/libeffects/spatializer/benchmarks/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libeffects/spatializer/tests/Android.bp b/media/libeffects/spatializer/tests/Android.bp
index 704e873..ddfcff3 100644
--- a/media/libeffects/spatializer/tests/Android.bp
+++ b/media/libeffects/spatializer/tests/Android.bp
@@ -1,6 +1,7 @@
// Build the unit tests for spatializer effect
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -12,7 +13,7 @@
cc_test {
name: "SpatializerTest",
defaults: [
- "libeffects-test-defaults",
+ "libeffects-test-defaults",
],
host_supported: false,
srcs: [
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index 0303842..9c2b71e 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -37,7 +37,6 @@
}
if (instanceSpp) {
*instanceSpp = ndk::SharedRefBase::make<VisualizerImpl>();
- LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
return EX_NONE;
} else {
LOG(ERROR) << __func__ << " invalid input parameter!";
@@ -59,10 +58,11 @@
const std::string VisualizerImpl::kEffectName = "Visualizer";
const std::vector<Range::VisualizerRange> VisualizerImpl::kRanges = {
MAKE_RANGE(Visualizer, latencyMs, 0, VisualizerContext::kMaxLatencyMs),
- MAKE_RANGE(Visualizer, captureSamples, 0, VisualizerContext::kMaxCaptureBufSize),
+ MAKE_RANGE(Visualizer, captureSamples, VisualizerContext::kMinCaptureBufSize,
+ VisualizerContext::kMaxCaptureBufSize),
/* get only parameters, set invalid range (min > max) to indicate not support set */
- MAKE_RANGE(Visualizer, measurement, Visualizer::Measurement({.peak = 1, .rms = 1}),
- Visualizer::Measurement({.peak = 0, .rms = 0})),
+ MAKE_RANGE(Visualizer, measurement, Visualizer::Measurement({.rms = 1, .peak = 1}),
+ Visualizer::Measurement({.rms = 0, .peak = 0})),
MAKE_RANGE(Visualizer, captureSampleBuffer, std::vector<uint8_t>({1}),
std::vector<uint8_t>({0}))};
const Capability VisualizerImpl::kCapability = {
diff --git a/media/libeffects/visualizer/aidl/Visualizer.h b/media/libeffects/visualizer/aidl/Visualizer.h
index b48c85e..3180972 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.h
+++ b/media/libeffects/visualizer/aidl/Visualizer.h
@@ -29,11 +29,8 @@
static const std::string kEffectName;
static const Capability kCapability;
static const Descriptor kDescriptor;
- VisualizerImpl() { LOG(DEBUG) << __func__; }
- ~VisualizerImpl() {
- cleanUp();
- LOG(DEBUG) << __func__;
- }
+ VisualizerImpl() = default;
+ ~VisualizerImpl() { cleanUp(); }
ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index 5d2bb3a..1e08674 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -38,14 +38,10 @@
}
VisualizerContext::~VisualizerContext() {
- std::lock_guard lg(mMutex);
- LOG(DEBUG) << __func__;
mState = State::UNINITIALIZED;
}
RetCode VisualizerContext::initParams(const Parameter::Common& common) {
- std::lock_guard lg(mMutex);
- LOG(DEBUG) << __func__;
if (common.input != common.output) {
LOG(ERROR) << __func__ << " mismatch input: " << common.input.toString()
<< " and output: " << common.output.toString();
@@ -66,7 +62,6 @@
}
RetCode VisualizerContext::enable() {
- std::lock_guard lg(mMutex);
if (mState != State::INITIALIZED) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -75,7 +70,6 @@
}
RetCode VisualizerContext::disable() {
- std::lock_guard lg(mMutex);
if (mState != State::ACTIVE) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -84,48 +78,39 @@
}
void VisualizerContext::reset() {
- std::lock_guard lg(mMutex);
std::fill(mCaptureBuf.begin(), mCaptureBuf.end(), 0x80);
}
RetCode VisualizerContext::setCaptureSamples(int samples) {
- std::lock_guard lg(mMutex);
mCaptureSamples = samples;
return RetCode::SUCCESS;
}
-int VisualizerContext::getCaptureSamples() {
- std::lock_guard lg(mMutex);
+int32_t VisualizerContext::getCaptureSamples() {
return mCaptureSamples;
}
RetCode VisualizerContext::setMeasurementMode(Visualizer::MeasurementMode mode) {
- std::lock_guard lg(mMutex);
mMeasurementMode = mode;
return RetCode::SUCCESS;
}
Visualizer::MeasurementMode VisualizerContext::getMeasurementMode() {
- std::lock_guard lg(mMutex);
return mMeasurementMode;
}
RetCode VisualizerContext::setScalingMode(Visualizer::ScalingMode mode) {
- std::lock_guard lg(mMutex);
mScalingMode = mode;
return RetCode::SUCCESS;
}
Visualizer::ScalingMode VisualizerContext::getScalingMode() {
- std::lock_guard lg(mMutex);
return mScalingMode;
}
RetCode VisualizerContext::setDownstreamLatency(int latency) {
- std::lock_guard lg(mMutex);
mDownstreamLatency = latency;
return RetCode::SUCCESS;
}
int VisualizerContext::getDownstreamLatency() {
- std::lock_guard lg(mMutex);
return mDownstreamLatency;
}
@@ -152,7 +137,6 @@
uint8_t nbValidMeasurements = 0;
{
- std::lock_guard lg(mMutex);
// reset measurements if last measurement was too long ago (which implies stored
// measurements aren't relevant anymore and shouldn't bias the new one)
const uint32_t delayMs = getDeltaTimeMsFromUpdatedTime_l();
@@ -185,13 +169,12 @@
// convert from I16 sample values to mB and write results
measure.rms = (rms < 0.000016f) ? -9600 : (int32_t)(2000 * log10(rms / 32767.0f));
measure.peak = (peakU16 == 0) ? -9600 : (int32_t)(2000 * log10(peakU16 / 32767.0f));
- LOG(INFO) << __func__ << " peak " << peakU16 << " (" << measure.peak << "mB), rms " << rms
- << " (" << measure.rms << "mB)";
+ LOG(VERBOSE) << __func__ << " peak " << peakU16 << " (" << measure.peak << "mB), rms " << rms
+ << " (" << measure.rms << "mB)";
return measure;
}
std::vector<uint8_t> VisualizerContext::capture() {
- std::lock_guard lg(mMutex);
uint32_t captureSamples = mCaptureSamples;
std::vector<uint8_t> result(captureSamples, 0x80);
// cts android.media.audio.cts.VisualizerTest expecting silence data when effect not running
@@ -205,7 +188,6 @@
// clear the capture buffer to return silence
if ((mLastCaptureIdx == mCaptureIdx) && (mBufferUpdateTime.tv_sec != 0) &&
(deltaMs > kMaxStallTimeMs)) {
- LOG(INFO) << __func__ << " capture going to idle";
mBufferUpdateTime.tv_sec = 0;
return result;
}
@@ -247,10 +229,8 @@
IEffect::Status result = {STATUS_NOT_ENOUGH_DATA, 0, 0};
RETURN_VALUE_IF(in == nullptr || out == nullptr || samples == 0, result, "dataBufferError");
- std::lock_guard lg(mMutex);
result.status = STATUS_INVALID_OPERATION;
RETURN_VALUE_IF(mState != State::ACTIVE, result, "stateNotActive");
- LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
// perform measurements if needed
if (mMeasurementMode == Visualizer::MeasurementMode::PEAK_RMS) {
// find the peak and RMS squared for the new buffer
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.h b/media/libeffects/visualizer/aidl/VisualizerContext.h
index 958035f..9715e20 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.h
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.h
@@ -16,8 +16,8 @@
#pragma once
-#include <android-base/thread_annotations.h>
#include <audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_visualizer.h>
#include "effect-impl/EffectContext.h"
@@ -25,8 +25,11 @@
class VisualizerContext final : public EffectContext {
public:
- static const uint32_t kMaxCaptureBufSize = 65536;
- static const uint32_t kMaxLatencyMs = 3000; // 3 seconds of latency for audio pipeline
+ // need align the min/max capture size to VISUALIZER_CAPTURE_SIZE_MIN and
+ // VISUALIZER_CAPTURE_SIZE_MAX because of limitation in audio_utils fixedfft.
+ static constexpr int32_t kMinCaptureBufSize = VISUALIZER_CAPTURE_SIZE_MIN;
+ static constexpr int32_t kMaxCaptureBufSize = VISUALIZER_CAPTURE_SIZE_MAX;
+ static constexpr uint32_t kMaxLatencyMs = 3000; // 3 seconds of latency for audio pipeline
VisualizerContext(int statusDepth, const Parameter::Common& common);
~VisualizerContext();
@@ -38,8 +41,8 @@
// keep all parameters and reset buffer.
void reset();
- RetCode setCaptureSamples(int captureSize);
- int getCaptureSamples();
+ RetCode setCaptureSamples(int32_t captureSize);
+ int32_t getCaptureSamples();
RetCode setMeasurementMode(Visualizer::MeasurementMode mode);
Visualizer::MeasurementMode getMeasurementMode();
RetCode setScalingMode(Visualizer::ScalingMode mode);
@@ -75,28 +78,26 @@
// note: buffer index is stored in uint8_t
static const uint32_t kMeasurementWindowMaxSizeInBuffers = 25;
- // serialize process() and parameter setting
- std::mutex mMutex;
- Parameter::Common mCommon GUARDED_BY(mMutex);
- State mState GUARDED_BY(mMutex) = State::UNINITIALIZED;
- uint32_t mCaptureIdx GUARDED_BY(mMutex) = 0;
- uint32_t mLastCaptureIdx GUARDED_BY(mMutex) = 0;
- Visualizer::ScalingMode mScalingMode GUARDED_BY(mMutex) = Visualizer::ScalingMode::NORMALIZED;
- struct timespec mBufferUpdateTime GUARDED_BY(mMutex);
+ Parameter::Common mCommon;
+ State mState = State::UNINITIALIZED;
+ uint32_t mCaptureIdx = 0;
+ uint32_t mLastCaptureIdx = 0;
+ Visualizer::ScalingMode mScalingMode = Visualizer::ScalingMode::NORMALIZED;
+ struct timespec mBufferUpdateTime;
// capture buf with 8 bits mono PCM samples
- std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf GUARDED_BY(mMutex);
- uint32_t mDownstreamLatency GUARDED_BY(mMutex) = 0;
- uint32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
+ std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf;
+ uint32_t mDownstreamLatency = 0;
+ int32_t mCaptureSamples = kMaxCaptureBufSize;
// to avoid recomputing it every time a buffer is processed
- uint8_t mChannelCount GUARDED_BY(mMutex) = 0;
- Visualizer::MeasurementMode mMeasurementMode GUARDED_BY(mMutex) =
+ uint8_t mChannelCount = 0;
+ Visualizer::MeasurementMode mMeasurementMode =
Visualizer::MeasurementMode::NONE;
uint8_t mMeasurementWindowSizeInBuffers = kMeasurementWindowMaxSizeInBuffers;
- uint8_t mMeasurementBufferIdx GUARDED_BY(mMutex) = 0;
+ uint8_t mMeasurementBufferIdx = 0;
std::array<BufferStats, kMeasurementWindowMaxSizeInBuffers> mPastMeasurements;
void init_params();
- uint32_t getDeltaTimeMsFromUpdatedTime_l() REQUIRES(mMutex);
+ uint32_t getDeltaTimeMsFromUpdatedTime_l();
};
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libheadtracking/Android.bp b/media/libheadtracking/Android.bp
index 9955862..70a242d 100644
--- a/media/libheadtracking/Android.bp
+++ b/media/libheadtracking/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -11,18 +12,18 @@
name: "libheadtracking",
host_supported: true,
srcs: [
- "HeadTrackingProcessor.cpp",
- "ModeSelector.cpp",
- "Pose.cpp",
- "PoseBias.cpp",
- "PoseDriftCompensator.cpp",
- "PosePredictor.cpp",
- "PoseRateLimiter.cpp",
- "QuaternionUtil.cpp",
- "ScreenHeadFusion.cpp",
- "StillnessDetector.cpp",
- "Twist.cpp",
- "VectorRecorder.cpp",
+ "HeadTrackingProcessor.cpp",
+ "ModeSelector.cpp",
+ "Pose.cpp",
+ "PoseBias.cpp",
+ "PoseDriftCompensator.cpp",
+ "PosePredictor.cpp",
+ "PoseRateLimiter.cpp",
+ "QuaternionUtil.cpp",
+ "ScreenHeadFusion.cpp",
+ "StillnessDetector.cpp",
+ "Twist.cpp",
+ "VectorRecorder.cpp",
],
shared_libs: [
"libaudioutils",
@@ -51,7 +52,7 @@
cc_library {
name: "libheadtracking-binding",
srcs: [
- "SensorPoseProvider.cpp",
+ "SensorPoseProvider.cpp",
],
shared_libs: [
"libbase",
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 085a7e4..ee4075f 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -32,6 +32,7 @@
#include <private/media/VideoFrame.h>
#include <utils/Log.h>
#include <utils/RefBase.h>
+#include <algorithm>
#include <vector>
HeifDecoder* createHeifDecoder() {
@@ -42,7 +43,10 @@
void initFrameInfo(HeifFrameInfo *info, const VideoFrame *videoFrame) {
info->mWidth = videoFrame->mDisplayWidth;
- info->mHeight = videoFrame->mDisplayHeight;
+ // Number of scanlines is mDisplayHeight. Clamp it to mHeight to guard
+ // against malformed streams claiming that mDisplayHeight is greater than
+ // mHeight.
+ info->mHeight = std::min(videoFrame->mDisplayHeight, videoFrame->mHeight);
info->mRotationAngle = videoFrame->mRotationAngle;
info->mBytesPerPixel = videoFrame->mBytesPerPixel;
info->mDurationUs = videoFrame->mDurationUs;
@@ -746,7 +750,9 @@
(videoFrame->mRowBytes * (mCurScanline + videoFrame->mDisplayTop)) +
(videoFrame->mBytesPerPixel * videoFrame->mDisplayLeft);
mCurScanline++;
- memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mDisplayWidth);
+ // Do not try to copy more than |videoFrame->mWidth| pixels.
+ uint32_t width = std::min(videoFrame->mDisplayWidth, videoFrame->mWidth);
+ memcpy(dst, src, videoFrame->mBytesPerPixel * width);
return true;
}
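[Editor's note] Not part of the change above: a small sketch of the clamping idea applied in HeifDecoderImpl, assuming display and coded dimensions read from a possibly malformed stream. Names are illustrative.

// clamp_copy_sketch.cpp -- illustrative only
#include <algorithm>
#include <cstdint>
#include <cstring>

// Never let stream-provided display dimensions exceed the coded (allocated)
// dimensions before copying a scanline.
void copyScanlineClamped(uint8_t* dst, const uint8_t* src, uint32_t displayWidth,
                         uint32_t codedWidth, uint32_t bytesPerPixel) {
    const uint32_t width = std::min(displayWidth, codedWidth);  // guard against bad metadata
    std::memcpy(dst, src, static_cast<std::size_t>(bytesPerPixel) * width);
}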
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 89348a4..3ab32f0 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -111,9 +111,12 @@
// To collect the encoder usage for the battery app
static void addBatteryData(uint32_t params) {
sp<IBinder> binder =
- defaultServiceManager()->getService(String16("media.player"));
+ defaultServiceManager()->waitForService(String16("media.player"));
sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
- CHECK(service.get() != NULL);
+ if (service.get() == nullptr) {
+ ALOGE("%s: Failed to get media.player service", __func__);
+ return;
+ }
service->addBatteryData(params);
}
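[Editor's note] Not part of the change above: the general shape of the service-lookup hardening used in addBatteryData(), sketched for an arbitrary Binder interface; waitForService() blocks until the service is registered, and a null check replaces the aborting CHECK(). Helper name and structure are illustrative only.

// service_lookup_sketch.cpp -- illustrative only
#include <binder/IInterface.h>
#include <binder/IServiceManager.h>
#include <utils/Log.h>

// Look up a Binder service by name without aborting the process when it is missing.
template <typename Interface>
android::sp<Interface> getServiceOrNull(const char* name) {
    android::sp<android::IBinder> binder =
            android::defaultServiceManager()->waitForService(android::String16(name));
    android::sp<Interface> service = android::interface_cast<Interface>(binder);
    if (service == nullptr) {
        ALOGE("%s: %s service is unavailable", __func__, name);
    }
    return service;
}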
@@ -1453,29 +1456,44 @@
}
status_t StagefrightRecorder::setupAACRecording() {
- // FIXME:
- // Add support for OUTPUT_FORMAT_AAC_ADIF
- CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_AAC_ADTS);
+ // TODO(b/324512842): Add support for OUTPUT_FORMAT_AAC_ADIF
+ if (mOutputFormat != OUTPUT_FORMAT_AAC_ADTS) {
+ ALOGE("Invalid output format %d used for AAC recording", mOutputFormat);
+ return BAD_VALUE;
+ }
- CHECK(mAudioEncoder == AUDIO_ENCODER_AAC ||
- mAudioEncoder == AUDIO_ENCODER_HE_AAC ||
- mAudioEncoder == AUDIO_ENCODER_AAC_ELD);
- CHECK(mAudioSource != AUDIO_SOURCE_CNT);
+ if (mAudioEncoder != AUDIO_ENCODER_AAC
+ && mAudioEncoder != AUDIO_ENCODER_HE_AAC
+ && mAudioEncoder != AUDIO_ENCODER_AAC_ELD) {
+ ALOGE("Invalid encoder %d used for AAC recording", mAudioEncoder);
+ return BAD_VALUE;
+ }
+
+ if (mAudioSource == AUDIO_SOURCE_CNT) {
+ ALOGE("Audio source hasn't been set correctly");
+ return BAD_VALUE;
+ }
mWriter = new AACWriter(mOutputFd);
return setupRawAudioRecording();
}
status_t StagefrightRecorder::setupOggRecording() {
- CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_OGG);
+ if (mOutputFormat != OUTPUT_FORMAT_OGG) {
+ ALOGE("Invalid output format %d used for OGG recording", mOutputFormat);
+ return BAD_VALUE;
+ }
mWriter = new OggWriter(mOutputFd);
return setupRawAudioRecording();
}
status_t StagefrightRecorder::setupAMRRecording() {
- CHECK(mOutputFormat == OUTPUT_FORMAT_AMR_NB ||
- mOutputFormat == OUTPUT_FORMAT_AMR_WB);
+ if (mOutputFormat != OUTPUT_FORMAT_AMR_NB
+ && mOutputFormat != OUTPUT_FORMAT_AMR_WB) {
+ ALOGE("Invalid output format %d used for AMR recording", mOutputFormat);
+ return BAD_VALUE;
+ }
if (mOutputFormat == OUTPUT_FORMAT_AMR_NB) {
if (mAudioEncoder != AUDIO_ENCODER_DEFAULT &&
@@ -1528,7 +1546,10 @@
}
status_t StagefrightRecorder::setupRTPRecording() {
- CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_RTP_AVP);
+ if (mOutputFormat != OUTPUT_FORMAT_RTP_AVP) {
+ ALOGE("Invalid output format %d used for RTP recording", mOutputFormat);
+ return BAD_VALUE;
+ }
if ((mAudioSource != AUDIO_SOURCE_CNT
&& mVideoSource != VIDEO_SOURCE_LIST_END)
@@ -1571,7 +1592,10 @@
}
status_t StagefrightRecorder::setupMPEG2TSRecording() {
- CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
+ if (mOutputFormat != OUTPUT_FORMAT_MPEG2TS) {
+ ALOGE("Invalid output format %d used for MPEG2TS recording", mOutputFormat);
+ return BAD_VALUE;
+ }
sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 9332e40..74b0a85 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -135,14 +135,19 @@
],
defaults: [
"libmediaplayerserviceFuzzer_defaults",
+ "libmediaplayerservice_defaults",
],
static_libs: [
"libplayerservice_datasource",
],
shared_libs: [
+ "libmediaplayerservice",
"libdatasource",
"libdrmframework",
+ "libstagefright_httplive",
+ "libmediaextractorservice",
],
+ include_dirs: ["frameworks/av/services/mediaextractor"],
}
cc_fuzz {
diff --git a/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp
index a7cb689..857223d 100644
--- a/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/metadataretriever_fuzzer.cpp
@@ -15,6 +15,8 @@
*
*/
+#include <MediaExtractorService.h>
+#include <MediaPlayerService.h>
#include <StagefrightMetadataRetriever.h>
#include <binder/ProcessState.h>
#include <datasource/FileSource.h>
@@ -54,58 +56,96 @@
MEDIA_MIMETYPE_CONTAINER_MPEG2PS, MEDIA_MIMETYPE_CONTAINER_HEIF,
MEDIA_MIMETYPE_TEXT_3GPP, MEDIA_MIMETYPE_TEXT_SUBRIP,
MEDIA_MIMETYPE_TEXT_VTT, MEDIA_MIMETYPE_TEXT_CEA_608,
- MEDIA_MIMETYPE_TEXT_CEA_708, MEDIA_MIMETYPE_DATA_TIMED_ID3};
+ MEDIA_MIMETYPE_TEXT_CEA_708, MEDIA_MIMETYPE_DATA_TIMED_ID3,
+ MEDIA_MIMETYPE_IMAGE_AVIF, MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1, MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4, MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4, MEDIA_MIMETYPE_AUDIO_DTS,
+ MEDIA_MIMETYPE_AUDIO_DTS_HD, MEDIA_MIMETYPE_AUDIO_DTS_HD_MA,
+ MEDIA_MIMETYPE_AUDIO_DTS_UHD, MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1,
+ MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2, MEDIA_MIMETYPE_AUDIO_EVRC,
+ MEDIA_MIMETYPE_AUDIO_EVRCB, MEDIA_MIMETYPE_AUDIO_EVRCWB,
+ MEDIA_MIMETYPE_AUDIO_EVRCNW, MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS,
+ MEDIA_MIMETYPE_AUDIO_APTX, MEDIA_MIMETYPE_AUDIO_DRA,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_MAT, MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_1_0,MEDIA_MIMETYPE_AUDIO_AAC_MP4,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_0,MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_1,
+ MEDIA_MIMETYPE_AUDIO_AAC_MAIN, MEDIA_MIMETYPE_AUDIO_AAC_LC,
+ MEDIA_MIMETYPE_AUDIO_AAC_SSR, MEDIA_MIMETYPE_AUDIO_AAC_LTP,
+ MEDIA_MIMETYPE_AUDIO_AAC_HE_V1, MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE,
+ MEDIA_MIMETYPE_AUDIO_AAC_ERLC, MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN,
+ MEDIA_MIMETYPE_AUDIO_AAC_HE_V2, MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1,
+ MEDIA_MIMETYPE_AUDIO_AAC_XHE, MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2,
+ MEDIA_MIMETYPE_AUDIO_AAC_LD, MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR, MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADIF, MEDIA_MIMETYPE_AUDIO_IEC60958,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC,MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD,
+ MEDIA_MIMETYPE_AUDIO_AAC_ELD, MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE, MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD, MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2,
+ MEDIA_MIMETYPE_AUDIO_IEC61937,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE,};
+
+constexpr size_t kMaxSize = 100;
class MetadataRetrieverFuzzer {
public:
MetadataRetrieverFuzzer(const uint8_t *data, size_t size)
- : mFdp(data, size),
- mMdRetriever(new StagefrightMetadataRetriever()),
- mDataSourceFd(memfd_create("InputFile", MFD_ALLOW_SEALING)) {}
- ~MetadataRetrieverFuzzer() { close(mDataSourceFd); }
+ : mFdp(data, size), mMdRetriever(new StagefrightMetadataRetriever()) {}
bool setDataSource(const uint8_t *data, size_t size);
void getData();
private:
FuzzedDataProvider mFdp;
sp<StagefrightMetadataRetriever> mMdRetriever = nullptr;
- const int32_t mDataSourceFd;
+ int32_t mDataSourceFd;
};
void MetadataRetrieverFuzzer::getData() {
- int64_t timeUs = mFdp.ConsumeIntegral<int64_t>();
- int32_t option = mFdp.ConsumeIntegral<int32_t>();
- int32_t colorFormat = mFdp.ConsumeIntegral<int32_t>();
- bool metaOnly = mFdp.ConsumeBool();
- mMdRetriever->getFrameAtTime(timeUs, option, colorFormat, metaOnly);
-
- int32_t index = mFdp.ConsumeIntegral<int32_t>();
- colorFormat = mFdp.ConsumeIntegral<int32_t>();
- metaOnly = mFdp.ConsumeBool();
- bool thumbnail = mFdp.ConsumeBool();
- mMdRetriever->getImageAtIndex(index, colorFormat, metaOnly, thumbnail);
-
- index = mFdp.ConsumeIntegral<int32_t>();
- colorFormat = mFdp.ConsumeIntegral<int32_t>();
- int32_t left = mFdp.ConsumeIntegral<int32_t>();
- int32_t top = mFdp.ConsumeIntegral<int32_t>();
- int32_t right = mFdp.ConsumeIntegral<int32_t>();
- int32_t bottom = mFdp.ConsumeIntegral<int32_t>();
- mMdRetriever->getImageRectAtIndex(index, colorFormat, left, top, right, bottom);
-
- index = mFdp.ConsumeIntegral<int32_t>();
- colorFormat = mFdp.ConsumeIntegral<int32_t>();
- metaOnly = mFdp.ConsumeBool();
- mMdRetriever->getFrameAtIndex(index, colorFormat, metaOnly);
-
- mMdRetriever->extractAlbumArt();
-
- int32_t keyCode = mFdp.ConsumeIntegral<int32_t>();
- mMdRetriever->extractMetadata(keyCode);
+ while (mFdp.remaining_bytes()) {
+ auto invokeMediaApi = mFdp.PickValueInArray<const std::function<void()>>({
+ [&]() {
+ mMdRetriever->getFrameAtTime(mFdp.ConsumeIntegral<int64_t>() /* timeUs */,
+ mFdp.ConsumeIntegral<int32_t>() /* option */,
+ mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+ mFdp.ConsumeBool() /* metaOnly */);
+ },
+ [&]() {
+ mMdRetriever->getImageAtIndex(mFdp.ConsumeIntegral<int32_t>() /* index */,
+ mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+ mFdp.ConsumeBool() /* metaOnly */,
+ mFdp.ConsumeBool() /* thumbnail */);
+ },
+ [&]() {
+ mMdRetriever->getImageRectAtIndex(
+ mFdp.ConsumeIntegral<int32_t>() /* index */,
+ mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+ mFdp.ConsumeIntegral<int32_t>() /* left */,
+ mFdp.ConsumeIntegral<int32_t>() /* top */,
+ mFdp.ConsumeIntegral<int32_t>() /* right */,
+ mFdp.ConsumeIntegral<int32_t>() /* bottom */);
+ },
+ [&]() {
+ mMdRetriever->getFrameAtIndex(mFdp.ConsumeIntegral<int32_t>() /* index */,
+ mFdp.ConsumeIntegral<int32_t>() /* colorFormat */,
+ mFdp.ConsumeBool() /* metaOnly */);
+ },
+ [&]() { mMdRetriever->extractAlbumArt(); },
+ [&]() {
+ mMdRetriever->extractMetadata(mFdp.ConsumeIntegral<int32_t>() /* keyCode */);
+ },
+ });
+ invokeMediaApi();
+ }
}
bool MetadataRetrieverFuzzer::setDataSource(const uint8_t *data, size_t size) {
status_t status = -1;
+ std::unique_ptr<std::FILE, decltype(&fclose)> fp(tmpfile(), &fclose);
+ mDataSourceFd = fileno(fp.get());
+ if (mDataSourceFd < 0) {
+ return false;
+ }
enum DataSourceChoice {FromHttp, FromFd, FromFileSource, kMaxValue = FromFileSource};
switch (mFdp.ConsumeEnum<DataSourceChoice>()) {
@@ -114,7 +154,7 @@
mHeaders.add(String8(mFdp.ConsumeRandomLengthString().c_str()),
String8(mFdp.ConsumeRandomLengthString().c_str()));
- uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, size);
+ uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, min(kMaxSize,size));
vector<uint8_t> uriSuffix = mFdp.ConsumeBytes<uint8_t>(dataBlobSize);
string uri("data:");
@@ -146,6 +186,12 @@
return true;
}
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+ MediaPlayerService::instantiate();
+ MediaExtractorService::instantiate();
+ return 0;
+}
+
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
MetadataRetrieverFuzzer mrtFuzzer(data, size);
ProcessState::self()->startThreadPool();
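
A minimal, self-contained sketch of the fuzzer-loop pattern introduced above: keep picking one API at random until the FuzzedDataProvider runs out of bytes. fooApi and barApi are hypothetical stand-ins for the retriever calls, not part of this patch.

    #include <fuzzer/FuzzedDataProvider.h>
    #include <functional>

    static void fooApi(int) {}     // placeholder for an API under test
    static void barApi(bool) {}    // placeholder for an API under test

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        while (fdp.remaining_bytes()) {
            // Pick exactly one API per iteration, with fuzz-derived arguments.
            auto invokeApi = fdp.PickValueInArray<const std::function<void()>>({
                    [&]() { fooApi(fdp.ConsumeIntegral<int>()); },
                    [&]() { barApi(fdp.ConsumeBool()); },
            });
            invokeApi();
        }
        return 0;
    }
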
diff --git a/media/libnbaio/SourceAudioBufferProvider.cpp b/media/libnbaio/SourceAudioBufferProvider.cpp
index d58619f..157ebd7 100644
--- a/media/libnbaio/SourceAudioBufferProvider.cpp
+++ b/media/libnbaio/SourceAudioBufferProvider.cpp
@@ -32,7 +32,7 @@
// negotiate with source
NBAIO_Format counterOffers[1];
size_t numCounterOffers = 1;
- ssize_t index = source->negotiate(NULL, 0, counterOffers, numCounterOffers);
+ [[maybe_unused]] ssize_t index = source->negotiate(NULL, 0, counterOffers, numCounterOffers);
ALOG_ASSERT(index == (ssize_t) NEGOTIATE && numCounterOffers > 0);
numCounterOffers = 0;
index = source->negotiate(counterOffers, 1, NULL, numCounterOffers);
diff --git a/media/libshmem/Android.bp b/media/libshmem/Android.bp
index 6e48078..486a34f 100644
--- a/media/libshmem/Android.bp
+++ b/media/libshmem/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 2145dd9..f9ceef2 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -2188,7 +2188,7 @@
int32_t colorFormat = OMX_COLOR_FormatUnused;
OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
if (!outputFormat->findInt32("color-format", &colorFormat)) {
- ALOGE("ouptut port did not have a color format (wrong domain?)");
+ ALOGE("output port did not have a color format (wrong domain?)");
return BAD_VALUE;
}
ALOGD("[%s] Requested output format %#x and got %#x.",
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 712b405..896e021 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -315,6 +315,7 @@
"libaudioclient_aidl_conversion",
"packagemanager_aidl-cpp",
"server_configurable_flags",
+ "aconfig_mediacodec_flags_c_lib",
],
static_libs: [
diff --git a/media/libstagefright/CryptoAsync.cpp b/media/libstagefright/CryptoAsync.cpp
index 8b5c8ed..0fc78ec 100644
--- a/media/libstagefright/CryptoAsync.cpp
+++ b/media/libstagefright/CryptoAsync.cpp
@@ -30,6 +30,36 @@
namespace android {
+CryptoAsync::CryptoAsyncInfo::CryptoAsyncInfo(const std::unique_ptr<CodecCryptoInfo> &info) {
+ if (info == nullptr) {
+ return;
+ }
+ size_t key_len = (info->mKey != nullptr)? 16 : 0;
+ size_t iv_len = (info->mIv != nullptr)? 16 : 0;
+ mNumSubSamples = info->mNumSubSamples;
+ mMode = info->mMode;
+ mPattern = info->mPattern;
+ if (key_len > 0) {
+ mKeyBuffer = ABuffer::CreateAsCopy((void*)info->mKey, key_len);
+ mKey = (uint8_t*)(mKeyBuffer.get() != nullptr ? mKeyBuffer.get()->data() : nullptr);
+ }
+ if (iv_len > 0) {
+ mIvBuffer = ABuffer::CreateAsCopy((void*)info->mIv, iv_len);
+ mIv = (uint8_t*)(mIvBuffer.get() != nullptr ? mIvBuffer.get()->data() : nullptr);
+ }
+ mSubSamplesBuffer =
+ new ABuffer(sizeof(CryptoPlugin::SubSample) * mNumSubSamples);
+ if (mSubSamplesBuffer.get()) {
+ CryptoPlugin::SubSample * samples =
+ (CryptoPlugin::SubSample *)(mSubSamplesBuffer.get()->data());
+ for (int s = 0 ; s < mNumSubSamples ; s++) {
+ samples[s].mNumBytesOfClearData = info->mSubSamples[s].mNumBytesOfClearData;
+ samples[s].mNumBytesOfEncryptedData = info->mSubSamples[s].mNumBytesOfEncryptedData;
+ }
+ mSubSamples = (CryptoPlugin::SubSample *)mSubSamplesBuffer.get()->data();
+ }
+}
+
CryptoAsync::~CryptoAsync() {
}
@@ -79,23 +109,27 @@
sp<ABuffer> keyBuffer;
sp<ABuffer> ivBuffer;
sp<ABuffer> subSamplesBuffer;
- msg->findInt32("encryptBlocks", (int32_t*)&pattern.mEncryptBlocks);
- msg->findInt32("skipBlocks", (int32_t*)&pattern.mSkipBlocks);
- msg->findBuffer("key", &keyBuffer);
- msg->findBuffer("iv", &ivBuffer);
- msg->findBuffer("subSamples", &subSamplesBuffer);
- msg->findInt32("secure", &secure);
- msg->findSize("numSubSamples", &numSubSamples);
- msg->findObject("buffer", &obj);
- msg->findInt32("mode", (int32_t*)&mode);
AString errorDetailMsg;
- const uint8_t * key = keyBuffer.get() != nullptr ? keyBuffer.get()->data() : nullptr;
- const uint8_t * iv = ivBuffer.get() != nullptr ? ivBuffer.get()->data() : nullptr;
- const CryptoPlugin::SubSample * subSamples =
- (CryptoPlugin::SubSample *)(subSamplesBuffer.get()->data());
+ msg->findObject("buffer", &obj);
+ msg->findInt32("secure", &secure);
sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
- err = channel->queueSecureInputBuffer(buffer, secure, key, iv, mode,
- pattern, subSamples, numSubSamples, &errorDetailMsg);
+ if (buffer->meta()->findObject("cryptoInfos", &obj)) {
+ err = channel->queueSecureInputBuffers(buffer, secure, &errorDetailMsg);
+ } else {
+ msg->findInt32("encryptBlocks", (int32_t*)&pattern.mEncryptBlocks);
+ msg->findInt32("skipBlocks", (int32_t*)&pattern.mSkipBlocks);
+ msg->findBuffer("key", &keyBuffer);
+ msg->findBuffer("iv", &ivBuffer);
+ msg->findBuffer("subSamples", &subSamplesBuffer);
+ msg->findSize("numSubSamples", &numSubSamples);
+ msg->findInt32("mode", (int32_t*)&mode);
+ const uint8_t * key = keyBuffer.get() != nullptr ? keyBuffer.get()->data() : nullptr;
+ const uint8_t * iv = ivBuffer.get() != nullptr ? ivBuffer.get()->data() : nullptr;
+ const CryptoPlugin::SubSample * subSamples =
+ (CryptoPlugin::SubSample *)(subSamplesBuffer.get()->data());
+ err = channel->queueSecureInputBuffer(buffer, secure, key, iv, mode,
+ pattern, subSamples, numSubSamples, &errorDetailMsg);
+ }
if (err != OK) {
std::list<sp<AMessage>> errorList;
msg->removeEntryByName("buffer");
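
The CryptoAsyncInfo constructor added above deep-copies the borrowed key, IV, and subsample data into buffers the object owns, then points the base-class raw pointers at those copies so they stay valid after the caller's info is gone. A simplified sketch of that ownership idiom, with hypothetical stand-in types:

    #include <cstdint>
    #include <vector>

    struct BorrowedInfo {            // stand-in for CodecCryptoInfo
        const uint8_t* key;          // 16 bytes, owned by the caller
        const uint8_t* iv;           // 16 bytes, owned by the caller
    };

    struct OwnedInfo {
        const uint8_t* key = nullptr;
        const uint8_t* iv = nullptr;

        explicit OwnedInfo(const BorrowedInfo& in) {
            if (in.key) {
                mKeyCopy.assign(in.key, in.key + 16);
                key = mKeyCopy.data();   // points into memory this object owns
            }
            if (in.iv) {
                mIvCopy.assign(in.iv, in.iv + 16);
                iv = mIvCopy.data();
            }
        }

    private:
        std::vector<uint8_t> mKeyCopy;
        std::vector<uint8_t> mIvCopy;
    };
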
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 0ab954f..46703bb 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -48,6 +48,9 @@
static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
static const size_t kRetryCount = 100; // must be >0
static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
+// For the codec, 0 is the highest importance; the higher the number, the less important the codec.
+// To make the thumbnail codec less important, give it a value greater than 0.
+static const int kThumbnailImportance = 1;
sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
@@ -102,13 +105,6 @@
displayTop = 0;
}
}
- if (displayWidth > width) {
- displayWidth = width;
- }
- if (displayHeight > height) {
- displayHeight = height;
- }
-
if (allocRotated) {
if (rotationAngle == 90 || rotationAngle == 270) {
@@ -592,6 +588,9 @@
}
}
+ // Set the importance for thumbnail.
+ videoFormat->setInt32(KEY_IMPORTANCE, kThumbnailImportance);
+
int32_t frameRate;
if (trackMeta()->findInt32(kKeyFrameRate, &frameRate) && frameRate > 0) {
mDefaultSampleDurationUs = 1000000LL / frameRate;
@@ -909,6 +908,10 @@
videoFormat->setInt32("android._num-input-buffers", 1);
videoFormat->setInt32("android._num-output-buffers", 1);
}
+
+ // Set the importance for thumbnail.
+ videoFormat->setInt32(KEY_IMPORTANCE, kThumbnailImportance);
+
return videoFormat;
}
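
A short sketch of how a caller might deprioritize a decoder through the importance key, following the convention noted above (0 = most important, larger values = less important). The helper name makeThumbnailFormat is hypothetical; it assumes the stagefright headers are available.

    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/MediaCodecConstants.h>

    using namespace android;

    static sp<AMessage> makeThumbnailFormat(const char* mime, int32_t w, int32_t h) {
        sp<AMessage> format = new AMessage;
        format->setString("mime", mime);
        format->setInt32("width", w);
        format->setInt32("height", h);
        // Any value greater than 0 deprioritizes this codec relative to
        // sessions using the default importance of 0.
        format->setInt32(KEY_IMPORTANCE, 1);
        return format;
    }
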
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a0a2891..a18dbfe 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -172,6 +172,7 @@
const char *getTrackType() const;
void resetInternal();
int64_t trackMetaDataSize();
+ bool isTimestampValid(int64_t timeUs);
private:
// A helper class to handle faster write box with table entries
@@ -1639,6 +1640,11 @@
ALOGV("buffer->range_length:%lld", (long long)buffer->range_length());
if (buffer->meta_data().findInt64(kKeySampleFileOffset, &offset)) {
ALOGV("offset:%lld, old_offset:%lld", (long long)offset, (long long)old_offset);
+ if (mMaxOffsetAppend > offset) {
+ // This has already been appended, skip updating mOffset value.
+ *bytesWritten = buffer->range_length();
+ return offset;
+ }
if (old_offset == offset) {
mOffset += buffer->range_length();
} else {
@@ -2430,6 +2436,47 @@
return OK;
}
+bool MPEG4Writer::isSampleMetadataValid(size_t trackIndex, int64_t timeUs) {
+ // Track index starts from zero, so it must be strictly less than the number of tracks.
+ if (trackIndex >= mTracks.size()) {
+ ALOGE("Incorrect trackIndex %zu, mTracks->size() %zu", trackIndex, mTracks.size());
+ return false;
+ }
+
+ List<Track *>::iterator it = mTracks.begin();
+
+ // (*it) is already pointing to trackIndex 0.
+ for (int i = 1; i <= trackIndex; i++) {
+ it++;
+ }
+
+ return (*it)->isTimestampValid(timeUs);
+}
+
+bool MPEG4Writer::Track::isTimestampValid(int64_t timeUs) {
+ // No timescale if HEIF
+ if (mIsHeif) {
+ return true;
+ }
+
+ // Make sure abs(timeUs) does not overflow
+ if (timeUs == INT64_MIN) {
+ return false;
+ }
+
+ // Ensure that the timeUs value does not have extremely low or high values
+ // that would cause an underflow or overflow, like in the calculation -
+ // mdhdDuration = (trakDurationUs * mTimeScale + 5E5) / 1E6
+ if (abs(timeUs) >= (INT64_MAX - 5E5) / mTimeScale) {
+ return false;
+ }
+ // Limit check for calculations in ctts box
+ if (abs(timeUs) + kMaxCttsOffsetTimeUs >= INT64_MAX / mTimeScale) {
+ return false;
+ }
+ return true;
+}
+
bool MPEG4Writer::Track::isExifData(
MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const {
if (!mIsHeif) {
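
A standalone illustration of the overflow guard used by isTimestampValid() above: before computing something like (timeUs * timeScale + 5E5) / 1E6, make sure the multiplication cannot exceed INT64_MAX. The helper name is hypothetical and timeScale is assumed to be positive.

    #include <cstdint>
    #include <cstdlib>

    static bool fitsMdhdComputation(int64_t timeUs, int32_t timeScale) {
        if (timeUs == INT64_MIN) {
            return false;                       // llabs(INT64_MIN) is undefined
        }
        // timeUs * timeScale + 5E5 must stay below INT64_MAX, so require
        // |timeUs| < (INT64_MAX - 5E5) / timeScale.
        return std::llabs(timeUs) < (INT64_MAX - 500000LL) / timeScale;
    }
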
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 3c80f28..e4f3b83 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -271,6 +271,37 @@
// XXX suppress until we get our representation right
static bool kEmitHistogram = false;
+typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+
+// Multi access unit helpers
+static status_t generateFlagsFromAccessUnitInfo(
+ sp<AMessage> &msg, const sp<BufferInfosWrapper> &bufferInfos) {
+ msg->setInt64("timeUs", bufferInfos->value[0].mTimestamp);
+ msg->setInt32("flags", bufferInfos->value[0].mFlags);
+ // will prevent any access-unit info copy.
+ if (bufferInfos->value.size() > 1) {
+ uint32_t bufferFlags = 0;
+ uint32_t flagsInAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODEC_CONFIG;
+ uint32_t andFlags = flagsInAllAU;
+ int infoIdx = 0;
+ bool foundEndOfStream = false;
+ for ( ; infoIdx < bufferInfos->value.size() && !foundEndOfStream; ++infoIdx) {
+ bufferFlags |= bufferInfos->value[infoIdx].mFlags;
+ andFlags &= bufferInfos->value[infoIdx].mFlags;
+ if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
+ foundEndOfStream = true;
+ }
+ }
+ bufferFlags = bufferFlags & (andFlags | (~flagsInAllAU));
+ if (infoIdx != bufferInfos->value.size()) {
+ ALOGE("Error: incorrect access-units");
+ return -EINVAL;
+ }
+ msg->setInt32("flags", bufferFlags);
+ }
+ return OK;
+}
+
static int64_t getId(IResourceManagerClient const * client) {
return (int64_t) client;
}
@@ -2222,7 +2253,10 @@
static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
bool reverse);
-mediametrics_handle_t MediaCodec::createMediaMetrics(const sp<AMessage>& format, uint32_t flags) {
+mediametrics_handle_t MediaCodec::createMediaMetrics(const sp<AMessage>& format,
+ uint32_t flags,
+ status_t* err) {
+ *err = OK;
mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
bool isEncoder = (flags & CONFIGURE_FLAG_ENCODE);
@@ -2302,7 +2336,9 @@
mErrorLog.log(LOG_TAG, base::StringPrintf(
"Invalid size(s), width=%d, height=%d", mWidth, mHeight));
mediametrics_delete(nextMetricsHandle);
- return BAD_VALUE;
+ // Set the error code and return null handle.
+ *err = BAD_VALUE;
+ return 0;
}
} else {
@@ -2385,7 +2421,11 @@
updateCodecImportance(format);
// Create and set up metrics for this codec.
- mediametrics_handle_t nextMetricsHandle = createMediaMetrics(format, flags);
+ status_t err = OK;
+ mediametrics_handle_t nextMetricsHandle = createMediaMetrics(format, flags, &err);
+ if (err != OK) {
+ return err;
+ }
sp<AMessage> msg = new AMessage(kWhatConfigure, this);
msg->setMessage("format", format);
@@ -2420,7 +2460,6 @@
sp<AMessage> callback = mCallback;
- status_t err;
std::vector<MediaResourceParcel> resources;
resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
toMediaResourceSubType(mIsHardware, mDomain)));
@@ -3174,7 +3213,49 @@
msg->setInt64("timeUs", presentationTimeUs);
msg->setInt32("flags", flags);
msg->setPointer("errorDetailMsg", errorDetailMsg);
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+status_t MediaCodec::queueInputBuffers(
+ size_t index,
+ size_t offset,
+ size_t size,
+ const sp<BufferInfosWrapper> &infos,
+ AString *errorDetailMsg) {
+ sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
+ uint32_t bufferFlags = 0;
+ uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
+ uint32_t andFlags = flagsinAllAU;
+ if (infos == nullptr || infos->value.empty()) {
+ ALOGE("ERROR: Large Audio frame with no BufferInfo");
+ return BAD_VALUE;
+ }
+ int infoIdx = 0;
+ std::vector<AccessUnitInfo> &accessUnitInfo = infos->value;
+ int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
+ bool foundEndOfStream = false;
+ for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
+ bufferFlags |= accessUnitInfo[infoIdx].mFlags;
+ andFlags &= accessUnitInfo[infoIdx].mFlags;
+ if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
+ foundEndOfStream = true;
+ }
+ }
+ bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
+ if (infoIdx != accessUnitInfo.size()) {
+ ALOGE("queueInputBuffers has incorrect access-units");
+ return -EINVAL;
+ }
+ msg->setSize("index", index);
+ msg->setSize("offset", offset);
+ msg->setSize("size", size);
+ msg->setInt64("timeUs", minTimeUs);
+ // Make this represent the flags for the entire buffer;
+ // the decode-only flag is set only when all access units are decode-only.
+ msg->setInt32("flags", bufferFlags);
+ msg->setObject("accessUnitInfo", infos);
+ msg->setPointer("errorDetailMsg", errorDetailMsg);
sp<AMessage> response;
return PostAndAwaitResponse(msg, &response);
}
@@ -3215,27 +3296,50 @@
return err;
}
-status_t MediaCodec::queueBuffer(
+status_t MediaCodec::queueSecureInputBuffers(
size_t index,
- const std::shared_ptr<C2Buffer> &buffer,
- int64_t presentationTimeUs,
- uint32_t flags,
- const sp<AMessage> &tunings,
+ size_t offset,
+ size_t size,
+ const sp<BufferInfosWrapper> &auInfo,
+ const sp<CryptoInfosWrapper> &cryptoInfos,
AString *errorDetailMsg) {
if (errorDetailMsg != NULL) {
errorDetailMsg->clear();
}
-
sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
- msg->setSize("index", index);
- sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
- new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
- msg->setObject("c2buffer", obj);
- msg->setInt64("timeUs", presentationTimeUs);
- msg->setInt32("flags", flags);
- if (tunings && tunings->countEntries() > 0) {
- msg->setMessage("tunings", tunings);
+ uint32_t bufferFlags = 0;
+ uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
+ uint32_t andFlags = flagsinAllAU;
+ if (auInfo == nullptr
+ || auInfo->value.empty()
+ || cryptoInfos == nullptr
+ || cryptoInfos->value.empty()) {
+ ALOGE("ERROR: Large Audio frame with no BufferInfo/CryptoInfo");
+ return BAD_VALUE;
}
+ int infoIdx = 0;
+ std::vector<AccessUnitInfo> &accessUnitInfo = auInfo->value;
+ int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
+ bool foundEndOfStream = false;
+ for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
+ bufferFlags |= accessUnitInfo[infoIdx].mFlags;
+ andFlags &= accessUnitInfo[infoIdx].mFlags;
+ if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
+ foundEndOfStream = true;
+ }
+ }
+ bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
+ if (infoIdx != accessUnitInfo.size()) {
+ ALOGE("queueInputBuffers has incorrect access-units");
+ return -EINVAL;
+ }
+ msg->setSize("index", index);
+ msg->setSize("offset", offset);
+ msg->setSize("ssize", size);
+ msg->setInt64("timeUs", minTimeUs);
+ msg->setInt32("flags", bufferFlags);
+ msg->setObject("accessUnitInfo", auInfo);
+ msg->setObject("cryptoInfos", cryptoInfos);
msg->setPointer("errorDetailMsg", errorDetailMsg);
sp<AMessage> response;
@@ -3244,46 +3348,77 @@
return err;
}
-status_t MediaCodec::queueEncryptedBuffer(
+status_t MediaCodec::queueBuffer(
size_t index,
- const sp<hardware::HidlMemory> &buffer,
- size_t offset,
- const CryptoPlugin::SubSample *subSamples,
- size_t numSubSamples,
- const uint8_t key[16],
- const uint8_t iv[16],
- CryptoPlugin::Mode mode,
- const CryptoPlugin::Pattern &pattern,
- int64_t presentationTimeUs,
- uint32_t flags,
+ const std::shared_ptr<C2Buffer> &buffer,
+ const sp<BufferInfosWrapper> &bufferInfos,
const sp<AMessage> &tunings,
AString *errorDetailMsg) {
if (errorDetailMsg != NULL) {
errorDetailMsg->clear();
}
+ if (bufferInfos == nullptr || bufferInfos->value.empty()) {
+ return BAD_VALUE;
+ }
+ status_t err = OK;
+ sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
+ msg->setSize("index", index);
+ sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
+ new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
+ msg->setObject("c2buffer", obj);
+ if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
+ return err;
+ }
+ msg->setObject("accessUnitInfo", bufferInfos);
+ if (tunings && tunings->countEntries() > 0) {
+ msg->setMessage("tunings", tunings);
+ }
+ msg->setPointer("errorDetailMsg", errorDetailMsg);
+ sp<AMessage> response;
+ err = PostAndAwaitResponse(msg, &response);
+ return err;
+}
+
+status_t MediaCodec::queueEncryptedBuffer(
+ size_t index,
+ const sp<hardware::HidlMemory> &buffer,
+ size_t offset,
+ size_t size,
+ const sp<BufferInfosWrapper> &bufferInfos,
+ const sp<CryptoInfosWrapper> &cryptoInfos,
+ const sp<AMessage> &tunings,
+ AString *errorDetailMsg) {
+ if (errorDetailMsg != NULL) {
+ errorDetailMsg->clear();
+ }
+ if (bufferInfos == nullptr || bufferInfos->value.empty()) {
+ return BAD_VALUE;
+ }
+ status_t err = OK;
sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
msg->setSize("index", index);
sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
msg->setObject("memory", memory);
msg->setSize("offset", offset);
- msg->setPointer("subSamples", (void *)subSamples);
- msg->setSize("numSubSamples", numSubSamples);
- msg->setPointer("key", (void *)key);
- msg->setPointer("iv", (void *)iv);
- msg->setInt32("mode", mode);
- msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
- msg->setInt32("skipBlocks", pattern.mSkipBlocks);
- msg->setInt64("timeUs", presentationTimeUs);
- msg->setInt32("flags", flags);
+ if (cryptoInfos != nullptr) {
+ msg->setSize("ssize", size);
+ msg->setObject("cryptoInfos", cryptoInfos);
+ } else {
+ msg->setSize("size", size);
+ }
+ msg->setObject("accessUnitInfo", bufferInfos);
+ if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
+ return err;
+ }
if (tunings && tunings->countEntries() > 0) {
msg->setMessage("tunings", tunings);
}
msg->setPointer("errorDetailMsg", errorDetailMsg);
sp<AMessage> response;
- status_t err = PostAndAwaitResponse(msg, &response);
+ err = PostAndAwaitResponse(msg, &response);
return err;
}
@@ -4758,6 +4893,35 @@
}
}
}
+ int32_t largeFrameParamMax = 0, largeFrameParamThreshold = 0;
+ if (format->findInt32(KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, &largeFrameParamMax) ||
+ format->findInt32(KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
+ &largeFrameParamThreshold)) {
+ if (largeFrameParamMax > 0 || largeFrameParamThreshold > 0) {
+ if (mComponentName.startsWith("OMX")) {
+ mErrorLog.log(LOG_TAG,
+ "Large Frame params are not supported on OMX codecs."
+ "Currently only supported on C2 audio codec.");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+ if (!mime.startsWith("audio")) {
+ mErrorLog.log(LOG_TAG,
+ "Large Frame params only works with audio codec");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+ if (!(mFlags & kFlagIsAsync)) {
+ mErrorLog.log(LOG_TAG, "Large Frame audio" \
+ "config works only with async mode");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+ }
+ }
+
mReplyID = replyID;
setState(CONFIGURING);
@@ -5906,10 +6070,10 @@
status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
size_t index;
- size_t offset;
- size_t size;
- int64_t timeUs;
- uint32_t flags;
+ size_t offset = 0;
+ size_t size = 0;
+ int64_t timeUs = 0;
+ uint32_t flags = 0;
CHECK(msg->findSize("index", &index));
CHECK(msg->findInt64("timeUs", &timeUs));
CHECK(msg->findInt32("flags", (int32_t *)&flags));
@@ -5954,22 +6118,26 @@
mErrorLog.log(LOG_TAG, "queuing secure buffer without mCrypto or mDescrambler!");
return -EINVAL;
}
- CHECK(msg->findPointer("subSamples", (void **)&subSamples));
- CHECK(msg->findSize("numSubSamples", &numSubSamples));
- CHECK(msg->findPointer("key", (void **)&key));
- CHECK(msg->findPointer("iv", (void **)&iv));
- CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
- CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
+ sp<RefBase> obj;
+ if (msg->findObject("cryptoInfos", &obj)) {
+ CHECK(msg->findSize("ssize", &size));
+ } else {
+ CHECK(msg->findPointer("subSamples", (void **)&subSamples));
+ CHECK(msg->findSize("numSubSamples", &numSubSamples));
+ CHECK(msg->findPointer("key", (void **)&key));
+ CHECK(msg->findPointer("iv", (void **)&iv));
+ CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
+ CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
- int32_t tmp;
- CHECK(msg->findInt32("mode", &tmp));
+ int32_t tmp;
+ CHECK(msg->findInt32("mode", &tmp));
- mode = (CryptoPlugin::Mode)tmp;
-
- size = 0;
- for (size_t i = 0; i < numSubSamples; ++i) {
- size += subSamples[i].mNumBytesOfClearData;
- size += subSamples[i].mNumBytesOfEncryptedData;
+ mode = (CryptoPlugin::Mode)tmp;
+ size = 0;
+ for (size_t i = 0; i < numSubSamples; ++i) {
+ size += subSamples[i].mNumBytesOfClearData;
+ size += subSamples[i].mNumBytesOfEncryptedData;
+ }
}
}
@@ -5991,9 +6159,13 @@
"client does not own the buffer #%zu", index));
return -EACCES;
}
- auto setInputBufferParams = [this, &buffer]
+ auto setInputBufferParams = [this, &msg, &buffer]
(int64_t timeUs, uint32_t flags = 0) -> status_t {
status_t err = OK;
+ sp<RefBase> obj;
+ if (msg->findObject("accessUnitInfo", &obj)) {
+ buffer->meta()->setObject("accessUnitInfo", obj);
+ }
buffer->meta()->setInt64("timeUs", timeUs);
if (flags & BUFFER_FLAG_EOS) {
buffer->meta()->setInt32("eos", true);
@@ -6030,35 +6202,48 @@
return err;
};
auto buildCryptoInfoAMessage = [&](const sp<AMessage> & cryptoInfo, int32_t action) {
- size_t key_len = (key != nullptr)? 16 : 0;
- size_t iv_len = (iv != nullptr)? 16 : 0;
- sp<ABuffer> shared_key;
- sp<ABuffer> shared_iv;
- if (key_len > 0) {
- shared_key = ABuffer::CreateAsCopy((void*)key, key_len);
- }
- if (iv_len > 0) {
- shared_iv = ABuffer::CreateAsCopy((void*)iv, iv_len);
- }
- sp<ABuffer> subSamples_buffer =
- new ABuffer(sizeof(CryptoPlugin::SubSample) * numSubSamples);
- CryptoPlugin::SubSample * samples =
- (CryptoPlugin::SubSample *)(subSamples_buffer.get()->data());
- for (int s = 0 ; s < numSubSamples ; s++) {
- samples[s].mNumBytesOfClearData = subSamples[s].mNumBytesOfClearData;
- samples[s].mNumBytesOfEncryptedData = subSamples[s].mNumBytesOfEncryptedData;
- }
// set decrypt Action
cryptoInfo->setInt32("action", action);
cryptoInfo->setObject("buffer", buffer);
cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
- cryptoInfo->setBuffer("key", shared_key);
- cryptoInfo->setBuffer("iv", shared_iv);
- cryptoInfo->setInt32("mode", (int)mode);
- cryptoInfo->setInt32("encryptBlocks", pattern.mEncryptBlocks);
- cryptoInfo->setInt32("skipBlocks", pattern.mSkipBlocks);
- cryptoInfo->setBuffer("subSamples", subSamples_buffer);
- cryptoInfo->setSize("numSubSamples", numSubSamples);
+ sp<RefBase> obj;
+ if (msg->findObject("cryptoInfos", &obj)) {
+ sp<CryptoInfosWrapper> infos{(CryptoInfosWrapper*)obj.get()};
+ sp<CryptoInfosWrapper> asyncInfos{
+ new CryptoInfosWrapper(std::vector<std::unique_ptr<CodecCryptoInfo>>())};
+ for (std::unique_ptr<CodecCryptoInfo> &info : infos->value) {
+ if (info) {
+ asyncInfos->value.emplace_back(new CryptoAsync::CryptoAsyncInfo(info));
+ }
+ }
+ buffer->meta()->setObject("cryptoInfos", asyncInfos);
+ } else {
+ size_t key_len = (key != nullptr)? 16 : 0;
+ size_t iv_len = (iv != nullptr)? 16 : 0;
+ sp<ABuffer> shared_key;
+ sp<ABuffer> shared_iv;
+ if (key_len > 0) {
+ shared_key = ABuffer::CreateAsCopy((void*)key, key_len);
+ }
+ if (iv_len > 0) {
+ shared_iv = ABuffer::CreateAsCopy((void*)iv, iv_len);
+ }
+ sp<ABuffer> subSamples_buffer =
+ new ABuffer(sizeof(CryptoPlugin::SubSample) * numSubSamples);
+ CryptoPlugin::SubSample * samples =
+ (CryptoPlugin::SubSample *)(subSamples_buffer.get()->data());
+ for (int s = 0 ; s < numSubSamples ; s++) {
+ samples[s].mNumBytesOfClearData = subSamples[s].mNumBytesOfClearData;
+ samples[s].mNumBytesOfEncryptedData = subSamples[s].mNumBytesOfEncryptedData;
+ }
+ cryptoInfo->setBuffer("key", shared_key);
+ cryptoInfo->setBuffer("iv", shared_iv);
+ cryptoInfo->setInt32("mode", (int)mode);
+ cryptoInfo->setInt32("encryptBlocks", pattern.mEncryptBlocks);
+ cryptoInfo->setInt32("skipBlocks", pattern.mSkipBlocks);
+ cryptoInfo->setBuffer("subSamples", subSamples_buffer);
+ cryptoInfo->setSize("numSubSamples", numSubSamples);
+ }
};
if (c2Buffer || memory) {
sp<AMessage> tunings = NULL;
@@ -6068,15 +6253,37 @@
status_t err = OK;
if (c2Buffer) {
err = mBufferChannel->attachBuffer(c2Buffer, buffer);
+ // To avoid an unnecessary copy in the single-info case.
+ if (msg->findObject("accessUnitInfo", &obj)) {
+ sp<BufferInfosWrapper> infos{(BufferInfosWrapper*)(obj.get())};
+ if (infos->value.size() == 1) {
+ msg->removeEntryByName("accessUnitInfo");
+ }
+ }
} else if (memory) {
AString errorDetailMsg;
- err = mBufferChannel->attachEncryptedBuffer(
- memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
- offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
+ if (msg->findObject("cryptoInfos", &obj)) {
+ buffer->meta()->setSize("ssize", size);
+ buffer->meta()->setObject("cryptoInfos", obj);
+ if (msg->findObject("accessUnitInfo", &obj)) {
+ // The same accessUnitInfo reference is also attached by the
+ // setInputBufferParams lambda.
+ buffer->meta()->setObject("accessUnitInfo", obj);
+ }
+ err = mBufferChannel->attachEncryptedBuffers(
+ memory,
+ offset,
+ buffer,
+ (mFlags & kFlagIsSecure),
+ &errorDetailMsg);
+ } else {
+ err = mBufferChannel->attachEncryptedBuffer(
+ memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
+ offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
+ }
if (err != OK && hasCryptoOrDescrambler()
&& (mFlags & kFlagUseCryptoAsync)) {
// create error detail
- AString errorDetailMsg;
sp<AMessage> cryptoErrorInfo = new AMessage();
buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
cryptoErrorInfo->setInt32("err", err);
@@ -6148,10 +6355,17 @@
}
}
if (mCryptoAsync) {
+ // TODO b/316565675 - enable async path for audio
// prepare a message and enqueue
sp<AMessage> cryptoInfo = new AMessage();
buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
mCryptoAsync->decrypt(cryptoInfo);
+ } else if (msg->findObject("cryptoInfos", &obj)) {
+ buffer->meta()->setObject("cryptoInfos", obj);
+ err = mBufferChannel->queueSecureInputBuffers(
+ buffer,
+ (mFlags & kFlagIsSecure),
+ errorDetailMsg);
} else {
err = mBufferChannel->queueSecureInputBuffer(
buffer,
@@ -6503,10 +6717,11 @@
if (discardDecodeOnlyOutputBuffer(index)) {
continue;
}
+ sp<AMessage> msg = mCallback->dup();
const sp<MediaCodecBuffer> &buffer =
mPortBuffers[kPortIndexOutput][index].mData;
- sp<AMessage> msg = mCallback->dup();
- msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE);
+ int32_t outputCallbackID = CB_OUTPUT_AVAILABLE;
+ sp<RefBase> accessUnitInfoObj;
msg->setInt32("index", index);
msg->setSize("offset", buffer->offset());
msg->setSize("size", buffer->size());
@@ -6520,6 +6735,15 @@
CHECK(buffer->meta()->findInt32("flags", &flags));
msg->setInt32("flags", flags);
+ buffer->meta()->findObject("accessUnitInfo", &accessUnitInfoObj);
+ if (accessUnitInfoObj) {
+ outputCallbackID = CB_LARGE_FRAME_OUTPUT_AVAILABLE;
+ msg->setObject("accessUnitInfo", accessUnitInfoObj);
+ sp<BufferInfosWrapper> auInfo(
+ (decltype(auInfo.get()))accessUnitInfoObj.get());
+ auInfo->value.back().mFlags |= flags & BUFFER_FLAG_END_OF_STREAM;
+ }
+ msg->setInt32("callbackID", outputCallbackID);
statsBufferReceived(timeUs, buffer);
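
A sketch of the flag-aggregation rule used by generateFlagsFromAccessUnitInfo() and queueInputBuffers() above: most flags are OR-ed across access units, but flags that only make sense when set on every unit (decode-only, codec-config) survive only if every unit carries them. The flag constants below are hypothetical placeholders, not the real BUFFER_FLAG values.

    #include <cstdint>
    #include <vector>

    static constexpr uint32_t kFlagDecodeOnly  = 1u << 0;  // placeholder value
    static constexpr uint32_t kFlagCodecConfig = 1u << 1;  // placeholder value
    static constexpr uint32_t kFlagEndOfStream = 1u << 2;  // placeholder value

    static uint32_t aggregateFlags(const std::vector<uint32_t>& perUnitFlags) {
        const uint32_t mustBeInAll = kFlagDecodeOnly | kFlagCodecConfig;
        uint32_t orFlags = 0;
        uint32_t andFlags = mustBeInAll;
        for (uint32_t f : perUnitFlags) {
            orFlags |= f;
            andFlags &= f;
            if (orFlags & kFlagEndOfStream) {
                break;  // nothing after EOS is considered
            }
        }
        // Keep OR-ed flags, but drop "must be in all" flags unless every unit had them.
        return orFlags & (andFlags | ~mustBeInAll);
    }
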
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 22885c9..dd6da15 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -1,16 +1,4 @@
{
- // tests which require dynamic content
- // invoke with: atest -- --enable-module-dynamic-download=true
- // TODO(b/148094059): unit tests not allowed to download content
- "dynamic-presubmit": [
- // writerTest fails about 5 out of 66
- // { "name": "writerTest" },
-
- { "name": "HEVCUtilsUnitTest" },
- { "name": "ExtractorFactoryTest" }
-
- ],
-
"presubmit-large": [
{
"name": "CtsMediaMiscTestCases",
@@ -95,8 +83,16 @@
}
],
"postsubmit": [
+ // writerTest fails about 5 out of 66
+ // { "name": "writerTest" },
{
"name": "BatteryChecker_test"
+ },
+ {
+ "name": "ExtractorFactoryTest"
+ },
+ {
+ "name": "HEVCUtilsUnitTest"
}
]
}
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 24020d1..dc7d787 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -330,6 +330,7 @@
<!-- Video Quality control -->
<!-- supports QP bounding with standard keys -->
<Feature name="qp-bounds" />
+ <Feature name="bitrate-modes" value="VBR,CBR" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.vp8.encoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 0927653..bffb294 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -61,6 +61,36 @@
using hardware::cas::native::V1_0::IDescrambler;
+struct AccessUnitInfo {
+ uint32_t mFlags;
+ uint32_t mSize;
+ int64_t mTimestamp;
+ AccessUnitInfo(uint32_t flags, uint32_t size, int64_t ptsUs)
+ :mFlags(flags), mSize(size), mTimestamp(ptsUs) {
+ }
+ ~AccessUnitInfo() {}
+};
+
+struct CodecCryptoInfo {
+ size_t mNumSubSamples{0};
+ CryptoPlugin::SubSample *mSubSamples{nullptr};
+ uint8_t *mIv{nullptr};
+ uint8_t *mKey{nullptr};
+ enum CryptoPlugin::Mode mMode;
+ CryptoPlugin::Pattern mPattern;
+
+ virtual ~CodecCryptoInfo() {}
+protected:
+ CodecCryptoInfo():
+ mNumSubSamples(0),
+ mSubSamples(nullptr),
+ mIv(nullptr),
+ mKey(nullptr),
+ mMode{CryptoPlugin::kMode_Unencrypted},
+ mPattern{0, 0} {
+ }
+};
+
struct CodecParameterDescriptor {
std::string name;
AMessage::Type type;
@@ -362,6 +392,30 @@
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
AString *errorDetailMsg) = 0;
+
+ /**
+ * Queue a secure input buffer with multiple access units into the buffer channel.
+ *
+ * @param buffer The buffer to queue. The access unit delimiters and crypto
+ * subsample information are included in the buffer metadata.
+ * @param secure Whether the buffer is secure.
+ * @param errorDetailMsg The error message to be set in case of error.
+ * @return OK if successful;
+ * -ENOENT if the buffer is not known;
+ * -ENOSYS if mCrypto is not set so that decryption is not
+ * possible;
+ * other errors if decryption failed.
+ */
+ virtual status_t queueSecureInputBuffers(
+ const sp<MediaCodecBuffer> &buffer,
+ bool secure,
+ AString *errorDetailMsg) {
+ (void)buffer;
+ (void)secure;
+ (void)errorDetailMsg;
+ return -ENOSYS;
+ }
+
/**
* Attach a Codec 2.0 buffer to MediaCodecBuffer.
*
@@ -408,6 +462,34 @@
(void)errorDetailMsg;
return -ENOSYS;
}
+
+ /**
+ * Attach an encrypted HidlMemory buffer containing multiple access units to an index
+ *
+ * @param memory The memory to attach.
+ * @param offset The offset into the memory region at which the data begins.
+ * @param buffer The MediaCodecBuffer to attach the memory to. The access
+ * unit delimiters and crypto subsample information are included
+ * in the buffer metadata.
+ * @param secure Whether the buffer is secure.
+ * @param errorDetailMsg The error message to be set if an error occurs.
+ * @return OK if successful;
+ * -ENOENT if index is not recognized
+ * -ENOSYS if attaching buffer is not possible or not supported
+ */
+ virtual status_t attachEncryptedBuffers(
+ const sp<hardware::HidlMemory> &memory,
+ size_t offset,
+ const sp<MediaCodecBuffer> &buffer,
+ bool secure,
+ AString* errorDetailMsg) {
+ (void)memory;
+ (void)offset;
+ (void)buffer;
+ (void)secure;
+ (void)errorDetailMsg;
+ return -ENOSYS;
+ }
/**
* Request buffer rendering at specified time.
*
diff --git a/media/libstagefright/include/media/stagefright/CryptoAsync.h b/media/libstagefright/include/media/stagefright/CryptoAsync.h
index b675518..acb3dae 100644
--- a/media/libstagefright/include/media/stagefright/CryptoAsync.h
+++ b/media/libstagefright/include/media/stagefright/CryptoAsync.h
@@ -85,6 +85,18 @@
kActionDecrypt = (1 << 0),
kActionAttachEncryptedBuffer = (1 << 1)
};
+
+ // This struct is meant to copy the mapped contents from the original info.
+ struct CryptoAsyncInfo : public CodecCryptoInfo {
+ public:
+ explicit CryptoAsyncInfo(const std::unique_ptr<CodecCryptoInfo> &info);
+ virtual ~CryptoAsyncInfo() = default;
+ protected:
+ // Backing buffers that keep the base object's raw pointers valid.
+ sp<ABuffer> mKeyBuffer;
+ sp<ABuffer> mIvBuffer;
+ sp<ABuffer> mSubSamplesBuffer;
+ };
protected:
// Message types for the looper
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 054a4b8..1ff8acf 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -77,6 +77,9 @@
virtual void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
virtual int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }
virtual status_t setNextFd(int fd);
+ // Returns true if the timestamp is valid and compatible with the MPEG4 format.
+ // Note that this overrides the method in the base class.
+ bool isSampleMetadataValid(size_t trackIndex, int64_t timeUs) override;
protected:
virtual ~MPEG4Writer();
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 2f94e5e..9ecb12e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -28,6 +28,7 @@
#include <media/MediaMetrics.h>
#include <media/MediaProfiles.h>
#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/CodecErrorLog.h>
#include <media/stagefright/FrameRenderTracker.h>
#include <media/stagefright/MediaHistogram.h>
@@ -56,7 +57,9 @@
struct AString;
struct BatteryChecker;
class BufferChannelBase;
+struct AccessUnitInfo;
struct CodecBase;
+struct CodecCryptoInfo;
struct CodecParameterDescriptor;
class IBatteryStats;
struct ICrypto;
@@ -78,6 +81,9 @@
using aidl::android::media::MediaResourceParcel;
using aidl::android::media::ClientConfigParcel;
+typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
+typedef WrapperObject<std::vector<std::unique_ptr<CodecCryptoInfo>>> CryptoInfosWrapper;
+
struct MediaCodec : public AHandler {
enum Domain {
DOMAIN_UNKNOWN = 0,
@@ -115,6 +121,7 @@
CB_OUTPUT_FORMAT_CHANGED = 4,
CB_RESOURCE_RECLAIMED = 5,
CB_CRYPTO_ERROR = 6,
+ CB_LARGE_FRAME_OUTPUT_AVAILABLE = 7,
};
static const pid_t kNoPid = -1;
@@ -185,6 +192,13 @@
uint32_t flags,
AString *errorDetailMsg = NULL);
+ status_t queueInputBuffers(
+ size_t index,
+ size_t offset,
+ size_t size,
+ const sp<BufferInfosWrapper> &accessUnitInfo,
+ AString *errorDetailMsg = NULL);
+
status_t queueSecureInputBuffer(
size_t index,
size_t offset,
@@ -198,11 +212,18 @@
uint32_t flags,
AString *errorDetailMsg = NULL);
+ status_t queueSecureInputBuffers(
+ size_t index,
+ size_t offset,
+ size_t size,
+ const sp<BufferInfosWrapper> &accessUnitInfo,
+ const sp<CryptoInfosWrapper> &cryptoInfos,
+ AString *errorDetailMsg = NULL);
+
status_t queueBuffer(
size_t index,
const std::shared_ptr<C2Buffer> &buffer,
- int64_t presentationTimeUs,
- uint32_t flags,
+ const sp<BufferInfosWrapper> &bufferInfos,
const sp<AMessage> &tunings,
AString *errorDetailMsg = NULL);
@@ -210,14 +231,9 @@
size_t index,
const sp<hardware::HidlMemory> &memory,
size_t offset,
- const CryptoPlugin::SubSample *subSamples,
- size_t numSubSamples,
- const uint8_t key[16],
- const uint8_t iv[16],
- CryptoPlugin::Mode mode,
- const CryptoPlugin::Pattern &pattern,
- int64_t presentationTimeUs,
- uint32_t flags,
+ size_t size,
+ const sp<BufferInfosWrapper> &bufferInfos,
+ const sp<CryptoInfosWrapper> &cryptoInfos,
const sp<AMessage> &tunings,
AString *errorDetailMsg = NULL);
@@ -321,7 +337,10 @@
friend struct ResourceManagerClient;
// to create the metrics associated with this codec.
- mediametrics_handle_t createMediaMetrics(const sp<AMessage>& format, uint32_t flags);
+ // Any error in this function will be captured by the output argument err.
+ mediametrics_handle_t createMediaMetrics(const sp<AMessage>& format,
+ uint32_t flags,
+ status_t* err);
private:
enum State {
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index f4c40e1..24ac2e8 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -697,6 +697,7 @@
inline constexpr char FEATURE_AdaptivePlayback[] = "adaptive-playback";
inline constexpr char FEATURE_EncodingStatistics[] = "encoding-statistics";
inline constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
+inline constexpr char FEATURE_MultipleFrames[] = "multiple-frames";
inline constexpr char FEATURE_PartialFrame[] = "partial-frame";
inline constexpr char FEATURE_QpBounds[] = "qp-bounds";
inline constexpr char FEATURE_SecurePlayback[] = "secure-playback";
diff --git a/media/libstagefright/include/media/stagefright/SkipCutBuffer.h b/media/libstagefright/include/media/stagefright/SkipCutBuffer.h
index 0fb5690..66f0bb1 100644
--- a/media/libstagefright/include/media/stagefright/SkipCutBuffer.h
+++ b/media/libstagefright/include/media/stagefright/SkipCutBuffer.h
@@ -59,6 +59,12 @@
int32_t mReadHead;
int32_t mCapacity;
char* mCutBuffer;
+
+ /*
+ * Added so the Codec2 framework can reuse SkipCutBuffer for access units.
+ */
+ friend class MultiAccessUnitSkipCutBuffer;
+
DISALLOW_EVIL_CONSTRUCTORS(SkipCutBuffer);
};
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 959f43e..458ac9c 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -626,6 +626,10 @@
// ACodec is waiting for all buffers to be returned, do NOT
// submit any more buffers to the codec.
bufferSource->onOmxIdle();
+ } else if (param == OMX_StateExecuting) {
+ // Initiating transition from Idle -> Executing
+ // Start submitting buffers to codec.
+ bufferSource->onOmxExecuting();
} else if (param == OMX_StateLoaded) {
// Initiating transition from Idle/Executing -> Loaded
// Buffers are about to be freed.
@@ -2404,13 +2408,6 @@
asString(event), event, arg1String, arg1, arg2String, arg2);
const sp<IOMXBufferSource> bufferSource(getBufferSource());
- if (bufferSource != NULL
- && event == OMX_EventCmdComplete
- && arg1 == OMX_CommandStateSet
- && arg2 == OMX_StateExecuting) {
- bufferSource->onOmxExecuting();
- }
-
// allow configuration if we return to the loaded state
if (event == OMX_EventCmdComplete
&& arg1 == OMX_CommandStateSet
diff --git a/media/libstagefright/timedtext/TEST_MAPPING b/media/libstagefright/timedtext/TEST_MAPPING
index 35a5b11..f011d04 100644
--- a/media/libstagefright/timedtext/TEST_MAPPING
+++ b/media/libstagefright/timedtext/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/libstagefright/timedtext
{
- // tests which require dynamic content
- // invoke with: atest -- --enable-module-dynamic-download=true
- // TODO(b/148094059): unit tests not allowed to download content
- "dynamic-presubmit": [
+ "postsubmit": [
{ "name": "TimedTextUnitTest" }
]
}
diff --git a/media/libstagefright/webm/Android.bp b/media/libstagefright/webm/Android.bp
index 6ed3e0e..723131d 100644
--- a/media/libstagefright/webm/Android.bp
+++ b/media/libstagefright/webm/Android.bp
@@ -10,8 +10,6 @@
cc_library_static {
name: "libstagefright_webm",
- cppflags: ["-D__STDINT_LIMITS"],
-
cflags: [
"-Werror",
"-Wall",
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 19f9549..2341af1 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -1,4 +1,3 @@
-
package {
default_applicable_licenses: ["frameworks_av_media_mediaserver_license"],
}
@@ -86,7 +85,16 @@
"-Wall",
],
- vintf_fragments: ["manifest_media_c2_software.xml"],
+ // AIDL is only used when release_aidl_use_unfrozen is true
+ // because the swcodec mainline module is a prebuilt from an
+ // Android U branch in that case.
+ // TODO(b/327508501)
+ vintf_fragments: ["manifest_media_c2_software_hidl.xml"],
+ product_variables: {
+ release_aidl_use_unfrozen: {
+ vintf_fragments: ["manifest_media_c2_software_aidl.xml"],
+ },
+ },
soong_config_variables: {
TARGET_DYNAMIC_64_32_MEDIASERVER: {
diff --git a/media/mediaserver/manifest_media_c2_software_aidl.xml b/media/mediaserver/manifest_media_c2_software_aidl.xml
new file mode 100644
index 0000000..e6bcafa
--- /dev/null
+++ b/media/mediaserver/manifest_media_c2_software_aidl.xml
@@ -0,0 +1,7 @@
+<manifest version="1.0" type="framework">
+ <hal format="aidl">
+ <name>android.hardware.media.c2</name>
+ <version>1</version>
+ <fqname>IComponentStore/software</fqname>
+ </hal>
+</manifest>
diff --git a/media/mediaserver/manifest_media_c2_software.xml b/media/mediaserver/manifest_media_c2_software_hidl.xml
similarity index 64%
rename from media/mediaserver/manifest_media_c2_software.xml
rename to media/mediaserver/manifest_media_c2_software_hidl.xml
index d7fb1a0..69a27be 100644
--- a/media/mediaserver/manifest_media_c2_software.xml
+++ b/media/mediaserver/manifest_media_c2_software_hidl.xml
@@ -1,5 +1,5 @@
<manifest version="1.0" type="framework">
- <hal>
+ <hal format="hidl" max-level="8">
<name>android.hardware.media.c2</name>
<transport>hwbinder</transport>
<version>1.2</version>
@@ -8,9 +8,4 @@
<instance>software</instance>
</interface>
</hal>
- <hal format="aidl">
- <name>android.hardware.media.c2</name>
- <version>1</version>
- <fqname>IComponentStore/software</fqname>
- </hal>
</manifest>
diff --git a/media/module/codecs/amrnb/TEST_MAPPING b/media/module/codecs/amrnb/TEST_MAPPING
index 343d08a..306921f 100644
--- a/media/module/codecs/amrnb/TEST_MAPPING
+++ b/media/module/codecs/amrnb/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrnb
{
- // tests which require dynamic content
- // invoke with: atest -- --enable-module-dynamic-download=true
- // TODO(b/148094059): unit tests not allowed to download content
- "dynamic-presubmit": [
+ "postsubmit": [
{ "name": "AmrnbDecoderTest"},
{ "name": "AmrnbEncoderTest"}
]
diff --git a/media/module/codecs/amrwb/TEST_MAPPING b/media/module/codecs/amrwb/TEST_MAPPING
new file mode 100644
index 0000000..3f05c90
--- /dev/null
+++ b/media/module/codecs/amrwb/TEST_MAPPING
@@ -0,0 +1,6 @@
+{
+ "postsubmit": [
+ { "name": "AmrwbDecoderTest"},
+ { "name": "AmrwbEncoderTest"}
+ ]
+}
diff --git a/media/module/codecs/amrwb/dec/TEST_MAPPING b/media/module/codecs/amrwb/dec/TEST_MAPPING
deleted file mode 100644
index 0278d26..0000000
--- a/media/module/codecs/amrwb/dec/TEST_MAPPING
+++ /dev/null
@@ -1,10 +0,0 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrwb
-{
- // tests which require dynamic content
- // invoke with: atest -- --enable-module-dynamic-download=true
- // TODO(b/148094059): unit tests not allowed to download content
- "dynamic-presubmit": [
- { "name": "AmrwbDecoderTest"}
-
- ]
-}
diff --git a/media/module/codecs/amrwb/enc/TEST_MAPPING b/media/module/codecs/amrwb/enc/TEST_MAPPING
deleted file mode 100644
index 045e8b3..0000000
--- a/media/module/codecs/amrwb/enc/TEST_MAPPING
+++ /dev/null
@@ -1,10 +0,0 @@
-// mappings for frameworks/av/media/libstagefright/codecs/amrwbenc
-{
- // tests which require dynamic content
- // invoke with: atest -- --enable-module-dynamic-download=true
- // TODO(b/148094059): unit tests not allowed to download content
- "dynamic-presubmit": [
- { "name": "AmrwbEncoderTest"}
-
- ]
-}
diff --git a/media/module/codecs/flac/TEST_MAPPING b/media/module/codecs/flac/TEST_MAPPING
new file mode 100644
index 0000000..725ea90
--- /dev/null
+++ b/media/module/codecs/flac/TEST_MAPPING
@@ -0,0 +1,5 @@
+{
+ "postsubmit": [
+ { "name": "FlacDecoderTest"}
+ ]
+}
diff --git a/media/module/codecs/flac/dec/test/Android.bp b/media/module/codecs/flac/dec/test/Android.bp
index a4c2735..8004c4a 100644
--- a/media/module/codecs/flac/dec/test/Android.bp
+++ b/media/module/codecs/flac/dec/test/Android.bp
@@ -28,6 +28,7 @@
cc_test {
name: "FlacDecoderTest",
gtest: true,
+ test_suites: ["device-tests"],
srcs: [
"FlacDecoderTest.cpp",
diff --git a/media/module/codecs/m4v_h263/TEST_MAPPING b/media/module/codecs/m4v_h263/TEST_MAPPING
index ba3ff1c..8599fa5 100644
--- a/media/module/codecs/m4v_h263/TEST_MAPPING
+++ b/media/module/codecs/m4v_h263/TEST_MAPPING
@@ -1,18 +1,6 @@
-// mappings for frameworks/av/media/libstagefright/codecs/m4v_h263
{
- // tests which require dynamic content
- // invoke with: atest -- --enable-module-dynamic-download=true
- // TODO(b/148094059): unit tests not allowed to download content
- "dynamic-presubmit": [
-
- // the decoder reports something bad about an unexpected newline in the *config file
- // and the config file looks like the AndroidTest.xml file that we put in there.
- // I don't get this from the Encoder -- and I don't see any substantive difference
- // between decode and encode AndroidTest.xml files -- except that encode does NOT
- // finish with a newline.
- // strange.
+ "postsubmit": [
{ "name": "Mpeg4H263DecoderTest"},
{ "name": "Mpeg4H263EncoderTest"}
-
]
}
diff --git a/media/module/codecs/mp3dec/TEST_MAPPING b/media/module/codecs/mp3dec/TEST_MAPPING
index 4ef4317..5faece6 100644
--- a/media/module/codecs/mp3dec/TEST_MAPPING
+++ b/media/module/codecs/mp3dec/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/libstagefright/codecs/mp3dec
{
- // tests which require dynamic content
- // invoke with: atest -- --enable-module-dynamic-download=true
- // TODO(b/148094059): unit tests not allowed to download content
- "dynamic-presubmit": [
+ "postsubmit": [
{ "name": "Mp3DecoderTest"}
]
}
diff --git a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
index f95fc4d..4fbfab1 100644
--- a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
+++ b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
@@ -25,6 +25,7 @@
#include <C2Component.h>
#include <C2PlatformSupport.h>
+#include <android/hidl/manager/1.2/IServiceManager.h>
#include <codec2/hidl/1.0/ComponentStore.h>
#include <codec2/hidl/1.1/ComponentStore.h>
#include <codec2/hidl/1.2/ComponentStore.h>
@@ -818,31 +819,44 @@
}
bool registered = false;
- if (platformVersion >= __ANDROID_API_V__) {
- if (!aidlStore) {
- aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(
- std::make_shared<H2C2ComponentStore>(nullptr));
- }
- const std::string serviceName =
- std::string(c2_aidl::IComponentStore::descriptor) + "/software";
- binder_exception_t ex = AServiceManager_addService(
- aidlStore->asBinder().get(), serviceName.c_str());
- if (ex == EX_NONE) {
- registered = true;
- } else {
- LOG(ERROR) << "Cannot register software Codec2 AIDL service.";
+ const std::string aidlServiceName =
+ std::string(c2_aidl::IComponentStore::descriptor) + "/software";
+ if (__builtin_available(android __ANDROID_API_S__, *)) {
+ if (AServiceManager_isDeclared(aidlServiceName.c_str())) {
+ if (!aidlStore) {
+ aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(
+ std::make_shared<H2C2ComponentStore>(nullptr));
+ }
+ binder_exception_t ex = AServiceManager_addService(
+ aidlStore->asBinder().get(), aidlServiceName.c_str());
+ if (ex == EX_NONE) {
+ registered = true;
+ } else {
+ LOG(WARNING) << "Cannot register software Codec2 AIDL service. Exception: " << ex;
+ }
}
}
- if (!hidlStore) {
- hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(
- std::make_shared<H2C2ComponentStore>(nullptr));
- hidlVer = "1.2";
- }
- if (hidlStore->registerAsService("software") == android::OK) {
- registered = true;
+ // If the software component store isn't declared in the manifest, we don't
+ // need to create the service and register it.
+ using ::android::hidl::manager::V1_2::IServiceManager;
+ IServiceManager::Transport transport =
+ android::hardware::defaultServiceManager1_2()->getTransport(
+ V1_2::utils::ComponentStore::descriptor, "software");
+ if (transport == IServiceManager::Transport::HWBINDER) {
+ if (!hidlStore) {
+ hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(
+ std::make_shared<H2C2ComponentStore>(nullptr));
+ hidlVer = "1.2";
+ }
+ if (hidlStore->registerAsService("software") == android::OK) {
+ registered = true;
+ } else {
+ LOG(ERROR) << "Cannot register software Codec2 v" << hidlVer << " service.";
+ }
} else {
- LOG(ERROR) << "Cannot register software Codec2 v" << hidlVer << " service.";
+ LOG(INFO) << "The HIDL software Codec2 service is deprecated"
+ " so it is not being registered with hwservicemanager.";
}
if (registered) {
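
A minimal sketch of the "register only when declared" pattern applied above for the AIDL service: consult the service manager first so a process on a device whose manifest omits the entry does not attempt registration. The helper name registerIfDeclared is hypothetical.

    #include <android/binder_manager.h>
    #include <android/binder_status.h>
    #include <string>

    static bool registerIfDeclared(const std::string& instance, AIBinder* binder) {
        if (!AServiceManager_isDeclared(instance.c_str())) {
            return false;  // not in the VINTF manifest on this device
        }
        return AServiceManager_addService(binder, instance.c_str()) == EX_NONE;
    }
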
diff --git a/media/module/esds/TEST_MAPPING b/media/module/esds/TEST_MAPPING
index 9368b6d..0337743 100644
--- a/media/module/esds/TEST_MAPPING
+++ b/media/module/esds/TEST_MAPPING
@@ -1,9 +1,5 @@
-// mappings for frameworks/av/media/module/esds
{
- // tests which require dynamic content
- // invoke with: atest -- --enable-module-dynamic-download=true
- // TODO(b/148094059): unit tests not allowed to download content
- "dynamic-presubmit": [
+ "postsubmit": [
{ "name": "ESDSTest" }
]
}
diff --git a/media/module/esds/tests/Android.bp b/media/module/esds/tests/Android.bp
index aea611e..427b275 100644
--- a/media/module/esds/tests/Android.bp
+++ b/media/module/esds/tests/Android.bp
@@ -25,6 +25,7 @@
cc_test {
name: "ESDSTest",
gtest: true,
+ test_suites: ["device-tests"],
srcs: [
"ESDSTest.cpp",
diff --git a/media/module/foundation/TEST_MAPPING b/media/module/foundation/TEST_MAPPING
index a70c352..ea4e4fd 100644
--- a/media/module/foundation/TEST_MAPPING
+++ b/media/module/foundation/TEST_MAPPING
@@ -1,9 +1,6 @@
-// mappings for frameworks/av/media/libstagefright/foundation
{
- // tests which require dynamic content
- // invoke with: atest -- --enable-module-dynamic-download=true
- // TODO(b/148094059): unit tests not allowed to download content
- "dynamic-presubmit": [
+ "postsubmit": [
+ { "name": "AVCUtilsUnitTest" },
{ "name": "OpusHeaderTest" }
],
diff --git a/media/module/foundation/include/media/stagefright/foundation/AMessage.h b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
index 7594565..b301c53 100644
--- a/media/module/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
@@ -356,6 +356,16 @@
DISALLOW_EVIL_CONSTRUCTORS(AMessage);
};
+/*
+ * Helper struct for wrapping any object with RefBase.
+ */
+template <typename T>
+struct WrapperObject : public RefBase {
+ WrapperObject(const T& v) : value(v) {}
+ WrapperObject(T&& v) : value(std::move(v)) {}
+ T value;
+};
+
} // namespace android
#endif // A_MESSAGE_H_
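
A sketch of how the WrapperObject helper above is used elsewhere in this change: wrap a plain value type in a RefBase so it can travel through AMessage::setObject()/findObject(). The function names and the "timestamps" key are illustrative only.

    #include <media/stagefright/foundation/AMessage.h>
    #include <vector>

    using namespace android;

    void attachPayload(const sp<AMessage>& msg, std::vector<int64_t> timestamps) {
        sp<WrapperObject<std::vector<int64_t>>> wrapped{
                new WrapperObject<std::vector<int64_t>>{std::move(timestamps)}};
        msg->setObject("timestamps", wrapped);
    }

    bool readPayload(const sp<AMessage>& msg, std::vector<int64_t>* out) {
        sp<RefBase> obj;
        if (!msg->findObject("timestamps", &obj)) {
            return false;
        }
        auto* wrapped = static_cast<WrapperObject<std::vector<int64_t>>*>(obj.get());
        *out = wrapped->value;
        return true;
    }
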
diff --git a/media/module/foundation/tests/AVCUtils/Android.bp b/media/module/foundation/tests/AVCUtils/Android.bp
index ee7db21..c306c73 100644
--- a/media/module/foundation/tests/AVCUtils/Android.bp
+++ b/media/module/foundation/tests/AVCUtils/Android.bp
@@ -28,6 +28,7 @@
cc_test {
name: "AVCUtilsUnitTest",
gtest: true,
+ test_suites: ["device-tests"],
srcs: [
"AVCUtilsUnitTest.cpp",
diff --git a/media/module/foundation/tests/AVCUtils/AndroidTest.xml b/media/module/foundation/tests/AVCUtils/AndroidTest.xml
index e30bfbf..315373f 100644
--- a/media/module/foundation/tests/AVCUtils/AndroidTest.xml
+++ b/media/module/foundation/tests/AVCUtils/AndroidTest.xml
@@ -16,7 +16,7 @@
<configuration description="Test module config for AVC Utils unit tests">
<option name="test-suite-tag" value="AVCUtilsUnitTest" />
<target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
- <option name="cleanup" value="false" />
+ <option name="cleanup" value="true" />
<option name="push" value="AVCUtilsUnitTest->/data/local/tmp/AVCUtilsUnitTest" />
</target_preparer>
diff --git a/media/module/id3/TEST_MAPPING b/media/module/id3/TEST_MAPPING
index 6106908..497d984 100644
--- a/media/module/id3/TEST_MAPPING
+++ b/media/module/id3/TEST_MAPPING
@@ -1,9 +1,5 @@
-// frameworks/av/media/libstagefright/id3
{
- // tests which require dynamic content
- // invoke with: atest -- --enable-module-dynamic-download=true
- // TODO(b/148094059): unit tests not allowed to download content
- "dynamic-presubmit": [
+ "postsubmit": [
{ "name": "ID3Test" }
],
diff --git a/media/module/mpeg2ts/ATSParser.cpp b/media/module/mpeg2ts/ATSParser.cpp
index 86187bd..88c3cc2 100644
--- a/media/module/mpeg2ts/ATSParser.cpp
+++ b/media/module/mpeg2ts/ATSParser.cpp
@@ -440,6 +440,10 @@
ATSParser::CADescriptor *caDescriptor) {
bool found = false;
while (infoLength > 2) {
+ if (br->numBitsLeft() < 16) {
+ ALOGE("Not enough data left in bitreader");
+ return false;
+ }
unsigned descriptor_tag = br->getBits(8);
ALOGV(" tag = 0x%02x", descriptor_tag);
@@ -452,6 +456,10 @@
}
if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
found = true;
+ if (br->numBitsLeft() < 32) {
+ ALOGE("Not enough data left in bitreader");
+ return false;
+ }
caDescriptor->mSystemID = br->getBits(16);
caDescriptor->mPID = br->getBits(16) & 0x1fff;
infoLength -= 4;
@@ -460,14 +468,24 @@
break;
} else {
infoLength -= descriptor_length;
- br->skipBits(descriptor_length * 8);
+ if (!br->skipBits(descriptor_length * 8)) {
+ ALOGE("Not enough data left in bitreader");
+ return false;
+ }
}
}
- br->skipBits(infoLength * 8);
+ if (!br->skipBits(infoLength * 8)) {
+ ALOGE("Not enough data left in bitreader");
+ return false;
+ }
return found;
}
status_t ATSParser::Program::parseProgramMap(ABitReader *br) {
+ if (br->numBitsLeft() < 10) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
unsigned table_id = br->getBits(8);
ALOGV(" table_id = %u", table_id);
if (table_id != 0x02u) {
@@ -482,6 +500,10 @@
}
br->skipBits(1); // '0'
+ if (br->numBitsLeft() < 86) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
MY_LOGV(" reserved = %u", br->getBits(2));
unsigned section_length = br->getBits(12);
@@ -526,6 +548,10 @@
while (infoBytesRemaining >= 5) {
StreamInfo info;
+ if (br->numBitsLeft() < 40) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
info.mType = br->getBits(8);
ALOGV(" stream_type = 0x%02x", info.mType);
MY_LOGV(" reserved = %u", br->getBits(3));
@@ -545,6 +571,10 @@
info.mAudioPresentations.clear();
bool hasStreamCA = false;
while (ES_info_length > 2 && infoBytesRemaining >= 0) {
+ if (br->numBitsLeft() < 16) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
unsigned descriptor_tag = br->getBits(8);
ALOGV(" tag = 0x%02x", descriptor_tag);
@@ -562,21 +592,39 @@
if (descriptor_tag == DESCRIPTOR_DTS) {
info.mType = STREAMTYPE_DTS;
ES_info_length -= descriptor_length;
- br->skipBits(descriptor_length * 8);
+ if (!br->skipBits(descriptor_length * 8)) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
} else if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
hasStreamCA = true;
+ if (br->numBitsLeft() < 32) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
streamCA.mSystemID = br->getBits(16);
streamCA.mPID = br->getBits(16) & 0x1fff;
ES_info_length -= descriptor_length;
descriptor_length -= 4;
streamCA.mPrivateData.assign(br->data(), br->data() + descriptor_length);
- br->skipBits(descriptor_length * 8);
+ if (!br->skipBits(descriptor_length * 8)) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
} else if (info.mType == STREAMTYPE_PES_PRIVATE_DATA &&
descriptor_tag == DESCRIPTOR_DVB_EXTENSION && descriptor_length >= 1) {
+ if (br->numBitsLeft() < 8) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
unsigned descTagExt = br->getBits(8);
ALOGV(" tag_ext = 0x%02x", descTagExt);
ES_info_length -= descriptor_length;
descriptor_length--;
+ if (br->numBitsLeft() < (descriptor_length * 8)) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
// The AC4 descriptor is used in the PSI PMT to identify streams which carry AC4
// audio.
if (descTagExt == EXT_DESCRIPTOR_DVB_AC4) {
@@ -594,6 +642,10 @@
br->skipBits(descriptor_length * 8);
} else if (descTagExt == EXT_DESCRIPTOR_DVB_AUDIO_PRESELECTION &&
descriptor_length >= 1) {
+ if (br->numBitsLeft() < 8) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
// DVB BlueBook A038 Table 110
unsigned num_preselections = br->getBits(5);
br->skipBits(3); // reserved
@@ -671,11 +723,17 @@
info.mAudioPresentations.push_back(std::move(ap));
}
} else {
- br->skipBits(descriptor_length * 8);
+ if (!br->skipBits(descriptor_length * 8)) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
}
} else {
ES_info_length -= descriptor_length;
- br->skipBits(descriptor_length * 8);
+ if (!br->skipBits(descriptor_length * 8)) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
}
}
if (hasStreamCA && !mParser->mCasManager->addStream(
@@ -694,6 +752,10 @@
if (infoBytesRemaining != 0) {
ALOGW("Section data remains unconsumed");
}
+ if (br->numBitsLeft() < 32) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
unsigned crc = br->getBits(32);
if (crc != mPMT_CRC) {
audioPresentationsChanged = true;
@@ -1261,6 +1323,10 @@
status_t ATSParser::Stream::parsePES(ABitReader *br, SyncEvent *event) {
const uint8_t *basePtr = br->data();
+ if (br->numBitsLeft() < 48) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
unsigned packet_startcode_prefix = br->getBits(24);
ALOGV("packet_startcode_prefix = 0x%08x", packet_startcode_prefix);
@@ -1286,10 +1352,14 @@
&& stream_id != 0xff // program_stream_directory
&& stream_id != 0xf2 // DSMCC
&& stream_id != 0xf8) { // H.222.1 type E
- if (br->getBits(2) != 2u) {
+ if (br->numBitsLeft() < 2 || br->getBits(2) != 2u) {
return ERROR_MALFORMED;
}
+ if (br->numBitsLeft() < 22) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
unsigned PES_scrambling_control = br->getBits(2);
ALOGV("PES_scrambling_control = %u", PES_scrambling_control);
@@ -1328,19 +1398,19 @@
return ERROR_MALFORMED;
}
- if (br->getBits(4) != PTS_DTS_flags) {
+ if (br->numBitsLeft() < 7 || br->getBits(4) != PTS_DTS_flags) {
return ERROR_MALFORMED;
}
PTS = ((uint64_t)br->getBits(3)) << 30;
- if (br->getBits(1) != 1u) {
+ if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
return ERROR_MALFORMED;
}
PTS |= ((uint64_t)br->getBits(15)) << 15;
- if (br->getBits(1) != 1u) {
+ if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
return ERROR_MALFORMED;
}
PTS |= br->getBits(15);
- if (br->getBits(1) != 1u) {
+ if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
return ERROR_MALFORMED;
}
@@ -1353,20 +1423,20 @@
return ERROR_MALFORMED;
}
- if (br->getBits(4) != 1u) {
+ if (br->numBitsLeft() < 7 || br->getBits(4) != 1u) {
return ERROR_MALFORMED;
}
DTS = ((uint64_t)br->getBits(3)) << 30;
- if (br->getBits(1) != 1u) {
+ if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
return ERROR_MALFORMED;
}
DTS |= ((uint64_t)br->getBits(15)) << 15;
- if (br->getBits(1) != 1u) {
+ if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
return ERROR_MALFORMED;
}
DTS |= br->getBits(15);
- if (br->getBits(1) != 1u) {
+ if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
return ERROR_MALFORMED;
}
@@ -1381,22 +1451,30 @@
return ERROR_MALFORMED;
}
+ if (br->numBitsLeft() < 5) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
br->getBits(2);
uint64_t ESCR = ((uint64_t)br->getBits(3)) << 30;
- if (br->getBits(1) != 1u) {
+ if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
return ERROR_MALFORMED;
}
ESCR |= ((uint64_t)br->getBits(15)) << 15;
- if (br->getBits(1) != 1u) {
+ if (br->numBitsLeft() < 16 || br->getBits(1) != 1u) {
return ERROR_MALFORMED;
}
ESCR |= br->getBits(15);
- if (br->getBits(1) != 1u) {
+ if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
return ERROR_MALFORMED;
}
ALOGV("ESCR = %" PRIu64, ESCR);
+ if (br->numBitsLeft() < 10) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
MY_LOGV("ESCR_extension = %u", br->getBits(9));
if (br->getBits(1) != 1u) {
@@ -1411,18 +1489,25 @@
return ERROR_MALFORMED;
}
- if (br->getBits(1) != 1u) {
+ if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
+ return ERROR_MALFORMED;
+ }
+ if (br->numBitsLeft() < 22) {
+ ALOGE("Not enough data left in bitreader!");
return ERROR_MALFORMED;
}
MY_LOGV("ES_rate = %u", br->getBits(22));
- if (br->getBits(1) != 1u) {
+ if (br->numBitsLeft() < 1 || br->getBits(1) != 1u) {
return ERROR_MALFORMED;
}
optional_bytes_remaining -= 3;
}
- br->skipBits(optional_bytes_remaining * 8);
+ if (!br->skipBits(optional_bytes_remaining * 8)) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
// ES data follows.
int32_t pesOffset = br->data() - basePtr;
@@ -1450,7 +1535,10 @@
PTS_DTS_flags, PTS, DTS, PES_scrambling_control,
br->data(), dataLength, pesOffset, event);
- br->skipBits(dataLength * 8);
+ if (!br->skipBits(dataLength * 8)) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
} else {
onPayloadData(
PTS_DTS_flags, PTS, DTS, PES_scrambling_control,
@@ -1465,15 +1553,13 @@
payloadSizeBits / 8, pesOffset);
}
} else if (stream_id == 0xbe) { // padding_stream
- if (PES_packet_length == 0u) {
+ if (PES_packet_length == 0u || !br->skipBits(PES_packet_length * 8)) {
return ERROR_MALFORMED;
}
- br->skipBits(PES_packet_length * 8);
} else {
- if (PES_packet_length == 0u) {
+ if (PES_packet_length == 0u || !br->skipBits(PES_packet_length * 8)) {
return ERROR_MALFORMED;
}
- br->skipBits(PES_packet_length * 8);
}
return OK;
@@ -1481,6 +1567,10 @@
uint32_t ATSParser::Stream::getPesScramblingControl(
ABitReader *br, int32_t *pesOffset) {
+ if (br->numBitsLeft() < 24) {
+ ALOGE("Not enough data left in bitreader!");
+ return 0;
+ }
unsigned packet_startcode_prefix = br->getBits(24);
ALOGV("packet_startcode_prefix = 0x%08x", packet_startcode_prefix);
@@ -1491,6 +1581,7 @@
}
if (br->numBitsLeft() < 48) {
+ ALOGE("Not enough data left in bitreader!");
return 0;
}
@@ -1987,12 +2078,20 @@
}
void ATSParser::parseProgramAssociationTable(ABitReader *br) {
+ if (br->numBitsLeft() < 8) {
+ ALOGE("Not enough data left in bitreader!");
+ return;
+ }
unsigned table_id = br->getBits(8);
ALOGV(" table_id = %u", table_id);
if (table_id != 0x00u) {
ALOGE("PAT data error!");
return ;
}
+ if (br->numBitsLeft() < 56) {
+ ALOGE("Not enough data left in bitreader!");
+ return;
+ }
unsigned section_syntax_indictor = br->getBits(1);
ALOGV(" section_syntax_indictor = %u", section_syntax_indictor);
@@ -2009,9 +2108,17 @@
MY_LOGV(" section_number = %u", br->getBits(8));
MY_LOGV(" last_section_number = %u", br->getBits(8));
+ // check for unsigned integer overflow before assigning it to numProgramBytes
+ if (section_length < 9) {
+ return;
+ }
size_t numProgramBytes = (section_length - 5 /* header */ - 4 /* crc */);
for (size_t i = 0; i < numProgramBytes / 4; ++i) {
+ if (br->numBitsLeft() < 32) {
+ ALOGE("Not enough data left in bitreader!");
+ return;
+ }
unsigned program_number = br->getBits(16);
ALOGV(" program_number = %u", program_number);
@@ -2049,6 +2156,10 @@
}
}
+ if (br->numBitsLeft() < 32) {
+ ALOGE("Not enough data left in bitreader!");
+ return;
+ }
MY_LOGV(" CRC = 0x%08x", br->getBits(32));
}
@@ -2070,9 +2181,16 @@
section->clear();
}
+ if (br->numBitsLeft() < 8) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
unsigned skip = br->getBits(8);
section->setSkipBytes(skip + 1); // skip filler bytes + pointer field itself
- br->skipBits(skip * 8);
+ if (!br->skipBits(skip * 8)) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
}
if (br->numBitsLeft() % 8 != 0) {
@@ -2157,6 +2275,10 @@
status_t ATSParser::parseAdaptationField(
ABitReader *br, unsigned PID, unsigned *random_access_indicator) {
*random_access_indicator = 0;
+ if (br->numBitsLeft() < 8) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
unsigned adaptation_field_length = br->getBits(8);
if (adaptation_field_length > 0) {
@@ -2227,6 +2349,10 @@
status_t ATSParser::parseTS(ABitReader *br, SyncEvent *event) {
ALOGV("---");
+ if (br->numBitsLeft() < 32) {
+ ALOGE("Not enough data left in bitreader!");
+ return ERROR_MALFORMED;
+ }
unsigned sync_byte = br->getBits(8);
if (sync_byte != 0x47u) {
ALOGE("[error] parseTS: return error as sync_byte=0x%x", sync_byte);
diff --git a/media/module/mpeg2ts/TEST_MAPPING b/media/module/mpeg2ts/TEST_MAPPING
index 9f4bbdf..536450f 100644
--- a/media/module/mpeg2ts/TEST_MAPPING
+++ b/media/module/mpeg2ts/TEST_MAPPING
@@ -1,9 +1,5 @@
-// frameworks/av/media/libstagefright/mpeg2ts
{
- // tests which require dynamic content
- // invoke with: atest -- --enable-module-dynamic-download=true
- // TODO(b/148094059): unit tests not allowed to download content
- "dynamic-presubmit": [
+ "postsubmit": [
{ "name": "Mpeg2tsUnitTest" }
]
}
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 2946398..4926e46 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
#define LOG_TAG "ServiceUtilities"
#include <audio_utils/clock.h>
@@ -27,7 +28,6 @@
#include <media/AidlConversionUtil.h>
#include <android/content/AttributionSourceState.h>
-#include <com_android_media_audio.h>
#include <iterator>
#include <algorithm>
#include <pwd.h>
@@ -114,8 +114,9 @@
return std::optional<AttributionSourceState>{myAttributionSource};
}
-static bool checkRecordingInternal(const AttributionSourceState& attributionSource,
- const String16& msg, bool start, audio_source_t source) {
+ static bool checkRecordingInternal(const AttributionSourceState &attributionSource,
+ const uint32_t virtualDeviceId,
+ const String16 &msg, bool start, audio_source_t source) {
// Okay to not track in app ops as audio server or media server is us and if
// device is rooted security model is considered compromised.
// system_server loses its RECORD_AUDIO permission when a secondary
@@ -127,7 +128,7 @@
// We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
// may open a record track on behalf of a client. Note that pid may be a tid.
// IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
- const std::optional<AttributionSourceState> resolvedAttributionSource =
+ std::optional<AttributionSourceState> resolvedAttributionSource =
resolveAttributionSource(attributionSource);
if (!resolvedAttributionSource.has_value()) {
return false;
@@ -136,6 +137,7 @@
const int32_t attributedOpCode = getOpForSource(source);
permission::PermissionChecker permissionChecker;
+ resolvedAttributionSource.value().deviceId = virtualDeviceId;
bool permitted = false;
if (start) {
permitted = (permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
@@ -150,13 +152,24 @@
return permitted;
}
-bool recordingAllowed(const AttributionSourceState& attributionSource, audio_source_t source) {
- return checkRecordingInternal(attributionSource, String16(), /*start*/ false, source);
+static constexpr int DEVICE_ID_DEFAULT = 0;
+
+bool recordingAllowed(const AttributionSourceState &attributionSource, audio_source_t source) {
+ return checkRecordingInternal(attributionSource, DEVICE_ID_DEFAULT, String16(), /*start*/ false,
+ source);
+}
+
+bool recordingAllowed(const AttributionSourceState &attributionSource,
+ const uint32_t virtualDeviceId,
+ audio_source_t source) {
+ return checkRecordingInternal(attributionSource, virtualDeviceId,
+ String16(), /*start*/ false, source);
}
bool startRecording(const AttributionSourceState& attributionSource, const String16& msg,
audio_source_t source) {
- return checkRecordingInternal(attributionSource, msg, /*start*/ true, source);
+ return checkRecordingInternal(attributionSource, DEVICE_ID_DEFAULT, msg, /*start*/ true,
+ source);
}
void finishRecording(const AttributionSourceState& attributionSource, audio_source_t source) {
@@ -388,10 +401,6 @@
*/
bool mustAnonymizeBluetoothAddress(
const AttributionSourceState& attributionSource, const String16& caller) {
- if (!com::android::media::audio::bluetooth_mac_address_anonymization()) {
- return false;
- }
-
uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
if (isAudioServerOrSystemServerUid(uid)) {
return false;
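The ServiceUtilities.cpp change above keeps the existing recordingAllowed() signature working by adding an overload that forwards a virtual device id to the extended internal helper, with a default id of 0 for legacy callers. A minimal sketch of that forwarding pattern, using illustrative stand-in types rather than the real AttributionSourceState and permission checker:

    // Sketch of the overload-forwarding pattern used for recordingAllowed() above.
    // Source and the constants are hypothetical stand-ins for illustration only.
    #include <cstdint>
    #include <cstdio>
    #include <string>

    static constexpr uint32_t DEVICE_ID_DEFAULT = 0;

    struct Source { int uid; uint32_t deviceId; };

    // Extended helper: everything, including the virtual device id, is explicit.
    static bool checkRecordingInternal(Source source, uint32_t virtualDeviceId,
                                       const std::string& msg, bool start) {
        source.deviceId = virtualDeviceId;     // attach the device before the check
        std::printf("check uid=%d device=%u start=%d msg=%s\n",
                    source.uid, source.deviceId, start, msg.c_str());
        return true;                           // real permission logic elided
    }

    // Existing API keeps its signature and forwards the default device id.
    bool recordingAllowed(const Source& source) {
        return checkRecordingInternal(source, DEVICE_ID_DEFAULT, "", /*start=*/false);
    }

    // New overload for callers that know the virtual device.
    bool recordingAllowed(const Source& source, uint32_t virtualDeviceId) {
        return checkRecordingInternal(source, virtualDeviceId, "", /*start=*/false);
    }

    int main() {
        Source s{10099, 0};
        recordingAllowed(s);        // legacy caller, device id 0
        recordingAllowed(s, 42);    // virtual-device-aware caller
        return 0;
    }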
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index bd9a462..0a047c1 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 0b3a3f9..aa9e120 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -87,6 +87,10 @@
bool recordingAllowed(const AttributionSourceState& attributionSource,
audio_source_t source = AUDIO_SOURCE_DEFAULT);
+
+bool recordingAllowed(const AttributionSourceState &attributionSource,
+ uint32_t virtualDeviceId,
+ audio_source_t source);
bool startRecording(const AttributionSourceState& attributionSource,
const String16& msg, audio_source_t source);
void finishRecording(const AttributionSourceState& attributionSource, audio_source_t source);
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 3fdc6eb..a68569a 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/Android.mk b/services/Android.mk
deleted file mode 100644
index c86a226..0000000
--- a/services/Android.mk
+++ /dev/null
@@ -1 +0,0 @@
-$(eval $(call declare-1p-copy-files,frameworks/av/services/audiopolicy,))
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index bdbd4d6..725e5a6 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1957,7 +1957,7 @@
mHardwareStatus = AUDIO_HW_IDLE;
// Change parameters of the configuration each iteration until we find a
- // configuration that the device will support.
+ // configuration that the device will support, or HAL suggests what it supports.
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
for (auto testChannelMask : channelMasks) {
config.channel_mask = testChannelMask;
@@ -1967,11 +1967,16 @@
config.sample_rate = testSampleRate;
size_t bytes = 0;
+ audio_config_t loopConfig = config;
status_t result = dev->getInputBufferSize(&config, &bytes);
+ if (result == BAD_VALUE) {
+ // Retry with the config suggested by the HAL.
+ result = dev->getInputBufferSize(&config, &bytes);
+ }
if (result != OK || bytes == 0) {
+ config = loopConfig;
continue;
}
-
if (config.sample_rate != sampleRate || config.channel_mask != channelMask ||
config.format != format) {
uint32_t dstChannelCount = audio_channel_count_from_in_mask(channelMask);
@@ -2171,30 +2176,24 @@
sp<IAfThreadBase> thread;
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
- if (mPlaybackThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
- ALOG_ASSERT(thread == 0);
- thread = mPlaybackThreads.valueAt(i);
+ thread = mPlaybackThreads.valueAt(i);
+ if (thread->getEffect(sessionId, effectId) != 0) {
+ return thread;
}
}
- if (thread != nullptr) {
- return thread;
- }
for (size_t i = 0; i < mRecordThreads.size(); i++) {
- if (mRecordThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
- ALOG_ASSERT(thread == 0);
- thread = mRecordThreads.valueAt(i);
+ thread = mRecordThreads.valueAt(i);
+ if (thread->getEffect(sessionId, effectId) != 0) {
+ return thread;
}
}
- if (thread != nullptr) {
- return thread;
- }
for (size_t i = 0; i < mMmapThreads.size(); i++) {
- if (mMmapThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
- ALOG_ASSERT(thread == 0);
- thread = mMmapThreads.valueAt(i);
+ thread = mMmapThreads.valueAt(i);
+ if (thread->getEffect(sessionId, effectId) != 0) {
+ return thread;
}
}
- return thread;
+ return nullptr;
}
// ----------------------------------------------------------------------------
@@ -2547,6 +2546,7 @@
bool mm;
if (OK == dev->getMasterMute(&mm)) {
mMasterMute = mm;
+ ALOGI_IF(mMasterMute, "%s: applying mute from HAL %s", __func__, name);
}
}
@@ -4387,11 +4387,12 @@
sp<IAfThreadBase> thread = getEffectThread_l(sessionId, effectId);
if (thread == nullptr) {
- return;
+ return;
}
audio_utils::lock_guard _sl(thread->mutex());
- sp<IAfEffectModule> effect = thread->getEffect_l(sessionId, effectId);
- thread->setEffectSuspended_l(&effect->desc().type, suspended, sessionId);
+ if (const auto& effect = thread->getEffect_l(sessionId, effectId)) {
+ thread->setEffectSuspended_l(&effect->desc().type, suspended, sessionId);
+ }
}
@@ -4784,7 +4785,7 @@
case TransactionCode::GET_AUDIO_POLICY_CONFIG:
case TransactionCode::GET_AUDIO_MIX_PORT:
ALOGW("%s: transaction %d received from PID %d",
- __func__, code, IPCThreadState::self()->getCallingPid());
+ __func__, static_cast<int>(code), IPCThreadState::self()->getCallingPid());
// return status only for non void methods
switch (code) {
case TransactionCode::SET_RECORD_SILENCED:
@@ -4817,7 +4818,8 @@
case TransactionCode::SUPPORTS_BLUETOOTH_VARIABLE_LATENCY: {
if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
- __func__, code, IPCThreadState::self()->getCallingPid(),
+ __func__, static_cast<int>(code),
+ IPCThreadState::self()->getCallingPid(),
IPCThreadState::self()->getCallingUid());
// return status only for non-void methods
switch (code) {
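One of the AudioFlinger.cpp changes above turns the input buffer size probe into a negotiate-by-retry loop: if getInputBufferSize() returns BAD_VALUE, the HAL has written a suggested configuration back into `config`, so the call is retried once with that suggestion before restoring the original candidate and moving on. A small stand-alone sketch of the control flow, with a fake HAL function in place of the real interface:

    // Hedged sketch of the "retry with the HAL-suggested config" probing loop.
    // FakeHal types and values are illustrative stand-ins, not the real audio HAL.
    #include <cstddef>
    #include <cstdio>

    struct Config { int sampleRate; };
    enum Status { OK = 0, BAD_VALUE = -1 };

    // Pretends to be getInputBufferSize(): only 48000 Hz is supported; on rejection
    // it overwrites *config with the closest supported value, as an AIDL HAL may.
    Status fakeGetInputBufferSize(Config* config, std::size_t* bytes) {
        if (config->sampleRate == 48000) {
            *bytes = 1920;              // arbitrary non-zero buffer size
            return OK;
        }
        config->sampleRate = 48000;     // suggestion reported back to the caller
        return BAD_VALUE;
    }

    int main() {
        std::size_t bytes = 0;
        for (int testRate : {8000, 16000, 44100}) {
            Config config{testRate};
            Config loopConfig = config;                 // keep the value we asked for
            Status result = fakeGetInputBufferSize(&config, &bytes);
            if (result == BAD_VALUE) {
                // Retry once with whatever the HAL wrote back into `config`.
                result = fakeGetInputBufferSize(&config, &bytes);
            }
            if (result != OK || bytes == 0) {
                config = loopConfig;                    // restore before the next probe
                continue;
            }
            std::printf("asked %d Hz, got %d Hz, %zu bytes\n",
                        testRate, config.sampleRate, bytes);
            break;
        }
        return 0;
    }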
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index a02657e..819f2d6 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -31,6 +31,7 @@
#include <media/AudioContainers.h>
#include <media/AudioDeviceTypeAddr.h>
#include <media/AudioEffect.h>
+#include <media/EffectClientAsyncProxy.h>
#include <media/ShmemCompat.h>
#include <media/TypeConverter.h>
#include <media/audiohal/EffectHalInterface.h>
@@ -1536,7 +1537,7 @@
return IAfEffectModule::isSpatializer(&mDescriptor.type);
}
-status_t EffectModule::setHapticIntensity_l(int id, os::HapticScale intensity) {
+status_t EffectModule::setHapticScale_l(int id, os::HapticScale hapticScale) {
if (mStatus != NO_ERROR) {
return mStatus;
}
@@ -1545,13 +1546,16 @@
return INVALID_OPERATION;
}
- std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t));
+ std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t) + sizeof(float));
effect_param_t *param = (effect_param_t*) request.data();
param->psize = sizeof(int32_t);
- param->vsize = sizeof(int32_t) * 2;
+ param->vsize = sizeof(int32_t) * 2 + sizeof(float);
*(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
- *((int32_t*)param->data + 1) = id;
- *((int32_t*)param->data + 2) = static_cast<int32_t>(intensity);
+ int32_t* hapticScalePtr = reinterpret_cast<int32_t*>(param->data + sizeof(int32_t));
+ hapticScalePtr[0] = id;
+ hapticScalePtr[1] = static_cast<int32_t>(hapticScale.getLevel());
+ float* adaptiveScaleFactorPtr = reinterpret_cast<float*>(param->data + 3 * sizeof(int32_t));
+ *adaptiveScaleFactorPtr = hapticScale.getAdaptiveScaleFactor();
std::vector<uint8_t> response;
status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
if (status == NO_ERROR) {
@@ -1726,7 +1730,8 @@
const sp<media::IEffectClient>& effectClient,
int32_t priority, bool notifyFramesProcessed)
: BnEffect(),
- mEffect(effect), mEffectClient(effectClient), mClient(client), mCblk(NULL),
+ mEffect(effect), mEffectClient(media::EffectClientAsyncProxy::makeIfNeeded(effectClient)),
+ mClient(client), mCblk(nullptr),
mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false),
mNotifyFramesProcessed(notifyFramesProcessed)
{
@@ -2674,11 +2679,11 @@
return false;
}
-void EffectChain::setHapticIntensity_l(int id, os::HapticScale intensity)
+void EffectChain::setHapticScale_l(int id, os::HapticScale hapticScale)
{
audio_utils::lock_guard _l(mutex());
for (size_t i = 0; i < mEffects.size(); ++i) {
- mEffects[i]->setHapticIntensity_l(id, intensity);
+ mEffects[i]->setHapticScale_l(id, hapticScale);
}
}
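The Effects.cpp change above grows the haptic-generator parameter blob so it carries the adaptive scale factor in addition to the track id and scale level: psize stays one int32 key, while vsize becomes two int32 values plus one float. A simplified sketch of that byte layout (the constants and header shape are illustrative, not the real effect_param_t definition):

    // Sketch of packing the haptic-scale parameter blob: one int32 key, then
    // two int32 values and a float, mirroring the layout built in setHapticScale_l.
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <vector>

    constexpr int32_t HG_PARAM_HAPTIC_INTENSITY = 1;           // placeholder value
    constexpr std::size_t kHeaderSize = 2 * sizeof(uint32_t);  // psize + vsize

    std::vector<uint8_t> buildHapticScaleRequest(int32_t trackId, int32_t level,
                                                 float adaptiveScaleFactor) {
        const uint32_t psize = sizeof(int32_t);                      // one key
        const uint32_t vsize = 2 * sizeof(int32_t) + sizeof(float);  // id, level, factor
        std::vector<uint8_t> request(kHeaderSize + psize + vsize);

        uint8_t* p = request.data();
        std::memcpy(p, &psize, sizeof(psize));
        std::memcpy(p + sizeof(uint32_t), &vsize, sizeof(vsize));

        uint8_t* data = p + kHeaderSize;
        const int32_t key = HG_PARAM_HAPTIC_INTENSITY;
        std::memcpy(data, &key, sizeof(key));                        // parameter key
        std::memcpy(data + sizeof(int32_t), &trackId, sizeof(trackId));
        std::memcpy(data + 2 * sizeof(int32_t), &level, sizeof(level));
        std::memcpy(data + 3 * sizeof(int32_t), &adaptiveScaleFactor,
                    sizeof(adaptiveScaleFactor));
        return request;
    }

    int main() {
        const auto request = buildHapticScaleRequest(/*trackId=*/7, /*level=*/2,
                                                     /*factor=*/0.5f);
        std::printf("request is %zu bytes\n", request.size());  // 8 header + 4 key + 12 values
        return 0;
    }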
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 6e34e9b..46c44a6 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -233,7 +233,7 @@
bool isHapticGenerator() const final;
bool isSpatializer() const final;
- status_t setHapticIntensity_l(int id, os::HapticScale intensity) final
+ status_t setHapticScale_l(int id, os::HapticScale hapticScale) final
REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex;
status_t setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo) final
REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex;
@@ -520,7 +520,7 @@
// Requires either IAfThreadBase::mutex() or EffectChain::mutex() held
bool containsHapticGeneratingEffect_l() final;
- void setHapticIntensity_l(int id, os::HapticScale intensity) final
+ void setHapticScale_l(int id, os::HapticScale hapticScale) final
REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
sp<EffectCallbackInterface> effectCallback() const final { return mEffectCallback; }
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index 82436a3..fd4dd62 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -180,7 +180,7 @@
static bool isSpatializer(const effect_uuid_t* type);
virtual bool isSpatializer() const = 0;
- virtual status_t setHapticIntensity_l(int id, os::HapticScale intensity)
+ virtual status_t setHapticScale_l(int id, os::HapticScale hapticScale)
REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex = 0;
virtual status_t setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo)
REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex = 0;
@@ -319,7 +319,7 @@
virtual bool containsHapticGeneratingEffect_l() = 0;
- virtual void setHapticIntensity_l(int id, os::HapticScale intensity)
+ virtual void setHapticScale_l(int id, os::HapticScale hapticScale)
REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
virtual sp<EffectCallbackInterface> effectCallback() const = 0;
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index 2302e13..8ed44c6 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -18,6 +18,7 @@
#include <android/media/BnAudioRecord.h>
#include <android/media/BnAudioTrack.h>
+#include <audio_utils/mutex.h>
#include <audiomanager/IAudioManager.h>
#include <binder/IMemory.h>
#include <fastpath/FastMixerDumpState.h>
@@ -338,12 +339,12 @@
/** Set haptic playback of the track is enabled or not, should be
* set after query or get callback from vibrator service */
virtual void setHapticPlaybackEnabled(bool hapticPlaybackEnabled) = 0;
- /** Return at what intensity to play haptics, used in mixer. */
- virtual os::HapticScale getHapticIntensity() const = 0;
+ /** Return the haptics scale, used in mixer. */
+ virtual os::HapticScale getHapticScale() const = 0;
/** Return the maximum amplitude allowed for haptics data, used in mixer. */
virtual float getHapticMaxAmplitude() const = 0;
- /** Set intensity of haptic playback, should be set after querying vibrator service. */
- virtual void setHapticIntensity(os::HapticScale hapticIntensity) = 0;
+ /** Set scale for haptic playback, should be set after querying vibrator service. */
+ virtual void setHapticScale(os::HapticScale hapticScale) = 0;
/** Set maximum amplitude allowed for haptic data, should be set after querying
* vibrator service.
*/
@@ -351,7 +352,8 @@
virtual sp<os::ExternalVibration> getExternalVibration() const = 0;
// This function should be called with holding thread lock.
- virtual void updateTeePatches_l() = 0;
+ virtual void updateTeePatches_l() REQUIRES(audio_utils::ThreadBase_Mutex)
+ EXCLUDES_BELOW_ThreadBase_Mutex = 0;
// Argument teePatchesToUpdate is by value, use std::move to optimize.
virtual void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) = 0;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index b4cb805..6c22e21 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -174,15 +174,15 @@
void setHapticPlaybackEnabled(bool hapticPlaybackEnabled) final {
mHapticPlaybackEnabled = hapticPlaybackEnabled;
}
- /** Return at what intensity to play haptics, used in mixer. */
- os::HapticScale getHapticIntensity() const final { return mHapticIntensity; }
+ /** Return the haptics scale, used in mixer. */
+ os::HapticScale getHapticScale() const final { return mHapticScale; }
/** Return the maximum amplitude allowed for haptics data, used in mixer. */
float getHapticMaxAmplitude() const final { return mHapticMaxAmplitude; }
/** Set intensity of haptic playback, should be set after querying vibrator service. */
- void setHapticIntensity(os::HapticScale hapticIntensity) final {
- if (os::isValidHapticScale(hapticIntensity)) {
- mHapticIntensity = hapticIntensity;
- setHapticPlaybackEnabled(mHapticIntensity != os::HapticScale::MUTE);
+ void setHapticScale(os::HapticScale hapticScale) final {
+ if (os::isValidHapticScale(hapticScale)) {
+ mHapticScale = hapticScale;
+ setHapticPlaybackEnabled(!mHapticScale.isScaleMute());
}
}
/** Set maximum amplitude allowed for haptic data, should be set after querying
@@ -194,7 +194,8 @@
sp<os::ExternalVibration> getExternalVibration() const final { return mExternalVibration; }
// This function should be called with holding thread lock.
- void updateTeePatches_l() final;
+ void updateTeePatches_l() final REQUIRES(audio_utils::ThreadBase_Mutex)
+ EXCLUDES_BELOW_ThreadBase_Mutex;
void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) final;
void tallyUnderrunFrames(size_t frames) final {
@@ -328,8 +329,8 @@
sp<OpPlayAudioMonitor> mOpPlayAudioMonitor;
bool mHapticPlaybackEnabled = false; // indicates haptic playback enabled or not
- // intensity to play haptic data
- os::HapticScale mHapticIntensity = os::HapticScale::MUTE;
+ // scale to play haptic data
+ os::HapticScale mHapticScale = os::HapticScale::mute();
// max amplitude allowed for haptic data
float mHapticMaxAmplitude = NAN;
class AudioVibrationController : public os::BnExternalVibrationController {
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 671e27f..b519289 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -223,6 +223,8 @@
static const int kPriorityAudioApp = 2;
static const int kPriorityFastMixer = 3;
static const int kPriorityFastCapture = 3;
+// Request real-time priority for PlaybackThread in ARC
+static const int kPriorityPlaybackThreadArc = 1;
// IAudioFlinger::createTrack() has an in/out parameter 'pFrameCount' for the total size of the
// track buffer in shared memory. Zero on input means to use a default value. For fast tracks,
@@ -722,8 +724,9 @@
{
audio_utils::unique_lock _l(event->mutex());
while (event->mWaitStatus) {
- if (event->mCondition.wait_for(_l, std::chrono::nanoseconds(kConfigEventTimeoutNs))
- == std::cv_status::timeout) {
+ if (event->mCondition.wait_for(
+ _l, std::chrono::nanoseconds(kConfigEventTimeoutNs), getTid())
+ == std::cv_status::timeout) {
event->mStatus = TIMED_OUT;
event->mWaitStatus = false;
}
@@ -2898,7 +2901,7 @@
// Unlock due to VibratorService will lock for this call and will
// call Tracks.mute/unmute which also require thread's lock.
mutex().unlock();
- const os::HapticScale intensity = afutils::onExternalVibrationStart(
+ const os::HapticScale hapticScale = afutils::onExternalVibrationStart(
track->getExternalVibration());
std::optional<media::AudioVibratorInfo> vibratorInfo;
{
@@ -2908,7 +2911,7 @@
vibratorInfo = std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
}
mutex().lock();
- track->setHapticIntensity(intensity);
+ track->setHapticScale(hapticScale);
if (vibratorInfo) {
track->setHapticMaxAmplitude(vibratorInfo->maxAmplitude);
}
@@ -2924,7 +2927,7 @@
// Set haptic intensity for effect
if (chain != nullptr) {
- chain->setHapticIntensity_l(track->id(), intensity);
+ chain->setHapticScale_l(track->id(), hapticScale);
}
}
@@ -3054,7 +3057,7 @@
}
void PlaybackThread::onCodecFormatChanged(
- const std::basic_string<uint8_t>& metadataBs)
+ const std::vector<uint8_t>& metadataBs)
{
const auto weakPointerThis = wp<PlaybackThread>::fromExisting(this);
std::thread([this, metadataBs, weakPointerThis]() {
@@ -3317,7 +3320,11 @@
return {}; // nothing to do
}
StreamOutHalInterface::SourceMetadata metadata;
- if (com_android_media_audio_stereo_spatialization()) {
+ static const bool stereo_spatialization_property =
+ property_get_bool("ro.audio.stereo_spatialization_enabled", false);
+ const bool stereo_spatialization_enabled =
+ stereo_spatialization_property && com_android_media_audio_stereo_spatialization();
+ if (stereo_spatialization_enabled) {
std::map<audio_session_t, std::vector<playback_track_metadata_v7_t> >allSessionsMetadata;
for (const sp<IAfTrack>& track : mActiveTracks) {
std::vector<playback_track_metadata_v7_t>& sessionMetadata =
@@ -3501,7 +3508,7 @@
char *endptr;
unsigned long ul = strtoul(value, &endptr, 0);
if (*endptr == '\0' && ul != 0) {
- ALOGD("Silence is golden");
+ ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
// The setprop command will not allow a property to be changed after
// the first time it is set, so we don't have to worry about un-muting.
setMasterMute_l(true);
@@ -3949,6 +3956,27 @@
stream()->setHalThreadPriority(priorityBoost);
}
}
+ } else if (property_get_bool("ro.boot.container", false /* default_value */)) {
+ // In ARC experiments (b/73091832), the scheduling latency under CFS at any priority is
+ // too high for PlaybackThread to process audio data in time. We request the lowest
+ // real-time priority, SCHED_FIFO=1, for PlaybackThread in ARC. ro.boot.container is true
+ // only on ARC.
+ const pid_t tid = getTid();
+ if (tid == -1) {
+ ALOGW("%s: Cannot update PlaybackThread priority for ARC, no tid", __func__);
+ } else {
+ const status_t status = requestPriority(getpid(),
+ tid,
+ kPriorityPlaybackThreadArc,
+ false /* isForApp */,
+ true /* asynchronous */);
+ if (status != OK) {
+ ALOGW("%s: Cannot update PlaybackThread priority for ARC, status %d", __func__,
+ status);
+ } else {
+ stream()->setHalThreadPriority(kPriorityPlaybackThreadArc);
+ }
+ }
}
Vector<sp<IAfTrack>> tracksToRemove;
@@ -4160,6 +4188,30 @@
metadataUpdate = updateMetadata_l();
+ // Acquire a local copy of active tracks with lock (release w/o lock).
+ //
+ // Control methods on the track acquire the ThreadBase lock (e.g. start(),
+ // stop(), pause(), etc.), but the threadLoop is entitled to call audio
+ // data / buffer methods on tracks from activeTracks without the ThreadBase lock.
+ activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
+
+ setHalLatencyMode_l();
+
+ // updateTeePatches_l will acquire the ThreadBase_Mutex of other threads,
+ // so this is done before we lock our effect chains.
+ for (const auto& track : mActiveTracks) {
+ track->updateTeePatches_l();
+ }
+
+ // signal actual start of output stream when the render position reported by
+ // the kernel starts moving.
+ if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
+ && (mKernelPositionOnStandby
+ != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
+ mHalStarted = true;
+ mWaitHalStartCV.notify_all();
+ }
+
// prevent any changes in effect chain list and in each effect chain
// during mixing and effect process as the audio buffers could be deleted
// or modified if an effect is created or deleted
@@ -4187,28 +4239,6 @@
}
}
}
-
- // Acquire a local copy of active tracks with lock (release w/o lock).
- //
- // Control methods on the track acquire the ThreadBase lock (e.g. start()
- // stop(), pause(), etc.), but the threadLoop is entitled to call audio
- // data / buffer methods on tracks from activeTracks without the ThreadBase lock.
- activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
-
- setHalLatencyMode_l();
-
- for (const auto &track : mActiveTracks ) {
- track->updateTeePatches_l();
- }
-
- // signal actual start of output stream when the render position reported by the kernel
- // starts moving.
- if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
- && (mKernelPositionOnStandby
- != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
- mHalStarted = true;
- mWaitHalStartCV.notify_all();
- }
} // mutex() scope ends
if (mBytesRemaining == 0) {
@@ -4780,7 +4810,7 @@
// When the track is stop, set the haptic intensity as MUTE
// for the HapticGenerator effect.
if (chain != nullptr) {
- chain->setHapticIntensity_l(track->id(), os::HapticScale::MUTE);
+ chain->setHapticScale_l(track->id(), os::HapticScale::mute());
}
}
@@ -5140,7 +5170,7 @@
// audio to FastMixer
fastTrack->mFormat = mFormat; // mPipeSink format for audio to FastMixer
fastTrack->mHapticPlaybackEnabled = mHapticChannelMask != AUDIO_CHANNEL_NONE;
- fastTrack->mHapticIntensity = os::HapticScale::NONE;
+ fastTrack->mHapticScale = {/*level=*/os::HapticLevel::NONE };
fastTrack->mHapticMaxAmplitude = NAN;
fastTrack->mGeneration++;
state->mFastTracksGen++;
@@ -5698,7 +5728,7 @@
fastTrack->mChannelMask = track->channelMask();
fastTrack->mFormat = track->format();
fastTrack->mHapticPlaybackEnabled = track->getHapticPlaybackEnabled();
- fastTrack->mHapticIntensity = track->getHapticIntensity();
+ fastTrack->mHapticScale = track->getHapticScale();
fastTrack->mHapticMaxAmplitude = track->getHapticMaxAmplitude();
fastTrack->mGeneration++;
state->mTrackMask |= 1 << j;
@@ -6059,10 +6089,11 @@
trackId,
AudioMixer::TRACK,
AudioMixer::HAPTIC_ENABLED, (void *)(uintptr_t)track->getHapticPlaybackEnabled());
+ const os::HapticScale hapticScale = track->getHapticScale();
mAudioMixer->setParameter(
- trackId,
- AudioMixer::TRACK,
- AudioMixer::HAPTIC_INTENSITY, (void *)(uintptr_t)track->getHapticIntensity());
+ trackId,
+ AudioMixer::TRACK,
+ AudioMixer::HAPTIC_SCALE, (void *)&hapticScale);
const float hapticMaxAmplitude = track->getHapticMaxAmplitude();
mAudioMixer->setParameter(
trackId,
@@ -7863,6 +7894,11 @@
if (mSupportedLatencyModes.empty()) {
return;
}
+ // Do not update the HAL latency mode if no track is active
+ if (mActiveTracks.isEmpty()) {
+ return;
+ }
+
audio_latency_mode_t latencyMode = AUDIO_LATENCY_MODE_FREE;
if (mSupportedLatencyModes.size() == 1) {
// If the HAL only support one latency mode currently, confirm the choice
@@ -7873,16 +7909,12 @@
// (mRequestedLatencyMode = AUDIO_LATENCY_MODE_LOW)
// AND
// - At least one active track is spatialized
- bool hasSpatializedActiveTrack = false;
for (const auto& track : mActiveTracks) {
if (track->isSpatialized()) {
- hasSpatializedActiveTrack = true;
+ latencyMode = mRequestedLatencyMode;
break;
}
}
- if (hasSpatializedActiveTrack && mRequestedLatencyMode == AUDIO_LATENCY_MODE_LOW) {
- latencyMode = AUDIO_LATENCY_MODE_LOW;
- }
}
if (latencyMode != mSetLatencyMode) {
@@ -7896,7 +7928,7 @@
}
status_t SpatializerThread::setRequestedLatencyMode(audio_latency_mode_t mode) {
- if (mode != AUDIO_LATENCY_MODE_LOW && mode != AUDIO_LATENCY_MODE_FREE) {
+ if (mode < 0 || mode >= AUDIO_LATENCY_MODE_CNT) {
return BAD_VALUE;
}
audio_utils::lock_guard _l(mutex());
@@ -8185,7 +8217,6 @@
inputStandBy();
reacquire_wakelock:
- sp<IAfRecordTrack> activeTrack;
{
audio_utils::lock_guard _l(mutex());
acquireWakeLock_l();
@@ -8201,6 +8232,8 @@
// loop while there is work to do
for (int64_t loopCount = 0;; ++loopCount) { // loopCount used for statistics tracking
+ // Note: these sp<> are released at the end of the for loop outside of the mutex() lock.
+ sp<IAfRecordTrack> activeTrack;
Vector<sp<IAfEffectChain>> effectChains;
// activeTracks accumulates a copy of a subset of mActiveTracks
@@ -10637,6 +10670,16 @@
}
}
+ // For mmap streams, once the routing has changed, they will be disconnected. It should be
+ // okay to notify the client early, before the new patch is created.
+ if (mDeviceId != deviceId) {
+ if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
+ // The aaudioservice handles the routing changed event asynchronously, so it is
+ // safe to hold the lock here.
+ callback->onRoutingChanged(deviceId);
+ }
+ }
+
if (mAudioHwDev->supportsAudioPatches()) {
status = mHalDevice->createAudioPatch(patch->num_sources, patch->sources, patch->num_sinks,
patch->sinks, handle);
@@ -10662,12 +10705,6 @@
sendIoConfigEvent_l(AUDIO_INPUT_CONFIG_CHANGED);
mInDeviceTypeAddr = sourceDeviceTypeAddr;
}
- sp<MmapStreamCallback> callback = mCallback.promote();
- if (mDeviceId != deviceId && callback != 0) {
- mutex().unlock();
- callback->onRoutingChanged(deviceId);
- mutex().lock();
- }
mPatch = *patch;
mDeviceId = deviceId;
}
@@ -10819,22 +10856,19 @@
void MmapThread::checkInvalidTracks_l()
{
- sp<MmapStreamCallback> callback;
for (const sp<IAfMmapTrack>& track : mActiveTracks) {
if (track->isInvalid()) {
- callback = mCallback.promote();
- if (callback == nullptr && mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+ if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
+ // The aaudioservice handles the routing changed event asynchronously, so it is
+ // safe to hold the lock here.
+ callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+ } else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
mNoCallbackWarningCount++;
}
break;
}
}
- if (callback != 0) {
- mutex().unlock();
- callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
- mutex().lock();
- }
}
void MmapThread::dumpInternals_l(int fd, const Vector<String16>& /* args */)
@@ -11093,7 +11127,7 @@
char *endptr;
unsigned long ul = strtoul(value, &endptr, 0);
if (*endptr == '\0' && ul != 0) {
- ALOGD("Silence is golden");
+ ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
// The setprop command will not allow a property to be changed after
// the first time it is set, so we don't have to worry about un-muting.
setMasterMute_l(true);
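Among the Threads.cpp changes above, the spatializer latency-mode selection is simplified: bail out when no track is active, confirm the single mode when the HAL offers only one, and otherwise default to FREE unless at least one active track is spatialized, in which case the requested mode is used directly. A toy sketch of that decision (types and the early-return handling are simplified stand-ins, not the real thread code):

    // Hedged sketch of the simplified latency-mode selection; in the real code the
    // "no active tracks" case returns early without touching the HAL at all.
    #include <cstdio>
    #include <vector>

    enum LatencyMode { MODE_FREE = 0, MODE_LOW = 1 };
    struct Track { bool spatialized; };

    LatencyMode pickLatencyMode(const std::vector<LatencyMode>& supported,
                                const std::vector<Track>& activeTracks,
                                LatencyMode requested) {
        if (supported.empty() || activeTracks.empty()) {
            return MODE_FREE;              // simplification: nothing to update
        }
        if (supported.size() == 1) {
            return supported[0];           // only one choice, confirm it
        }
        LatencyMode mode = MODE_FREE;
        for (const Track& t : activeTracks) {
            if (t.spatialized) {
                mode = requested;          // first spatialized track decides
                break;
            }
        }
        return mode;
    }

    int main() {
        std::vector<LatencyMode> supported = {MODE_FREE, MODE_LOW};
        std::vector<Track> tracks = {{false}, {true}};
        std::printf("mode = %d\n", pickLatencyMode(supported, tracks, MODE_LOW));  // 1
        return 0;
    }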
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index a6888c0..86e1894 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -964,7 +964,7 @@
protected:
// StreamHalInterfaceCodecFormatCallback implementation
void onCodecFormatChanged(
- const std::basic_string<uint8_t>& metadataBs) final;
+ const std::vector<uint8_t>& metadataBs) final;
// ThreadBase virtuals
void preExit() final EXCLUDES_ThreadBase_Mutex;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 4e82173..4d07821 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1233,7 +1233,7 @@
&& (state == IDLE || state == STOPPED || state == FLUSHED)) {
mFrameMap.reset();
- if (!isFastTrack() && (isDirect() || isOffloaded())) {
+ if (!isFastTrack()) {
// Start point of track -> sink frame map. If the HAL returns a
// frame position smaller than the first written frame in
// updateTrackFrameInfo, the timestamp can be interpolated
@@ -1674,7 +1674,7 @@
if (result == OK) {
ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
- int(muteState), int(mMuteState));
+ static_cast<int>(mMuteState), static_cast<int>(muteState));
mMuteState = muteState;
} else {
ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d", __func__, id(),
@@ -3554,6 +3554,8 @@
}
if (result == OK) {
+ ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
+ static_cast<int>(mMuteState), static_cast<int>(muteState));
mMuteState = muteState;
} else {
ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d",
diff --git a/services/audioflinger/afutils/NBAIO_Tee.cpp b/services/audioflinger/afutils/NBAIO_Tee.cpp
index 86fb128..cdc8e95 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.cpp
+++ b/services/audioflinger/afutils/NBAIO_Tee.cpp
@@ -514,6 +514,12 @@
return NO_ERROR; // return full path
}
+/* static */
+NBAIO_Tee::RunningTees& NBAIO_Tee::getRunningTees() {
+ [[clang::no_destroy]] static RunningTees runningTees;
+ return runningTees;
+}
+
} // namespace android
#endif // TEE_SINK
diff --git a/services/audioflinger/afutils/NBAIO_Tee.h b/services/audioflinger/afutils/NBAIO_Tee.h
index a5c544e..5ab1949 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.h
+++ b/services/audioflinger/afutils/NBAIO_Tee.h
@@ -310,10 +310,7 @@
};
// singleton
- static RunningTees &getRunningTees() {
- static RunningTees runningTees;
- return runningTees;
- }
+ static RunningTees& getRunningTees();
// The NBAIO TeeImpl may have lifetime longer than NBAIO_Tee if
// RunningTees::dump() is being called simultaneous to ~NBAIO_Tee().
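The NBAIO_Tee change above moves the singleton accessor out of the header and into the .cpp, marking the function-local static [[clang::no_destroy]] so the registry is never torn down at process exit and a late dump() cannot race its destructor. A small sketch of the same accessor pattern (Registry is a stand-in type; the attribute requires clang):

    // Sketch of an out-of-line, never-destroyed singleton accessor.
    #include <cstddef>
    #include <cstdio>
    #include <mutex>
    #include <vector>

    class Registry {
    public:
        void add(int id) { std::lock_guard<std::mutex> l(mMutex); mIds.push_back(id); }
        std::size_t size() { std::lock_guard<std::mutex> l(mMutex); return mIds.size(); }
    private:
        std::mutex mMutex;
        std::vector<int> mIds;
    };

    // Defined once in a .cpp: initialized on first use, never destroyed at exit, so
    // late callers (e.g. a dump thread racing process teardown) see a live object.
    Registry& getRegistry() {
        [[clang::no_destroy]] static Registry registry;
        return registry;
    }

    int main() {
        getRegistry().add(1);
        getRegistry().add(2);
        std::printf("registered: %zu\n", getRegistry().size());
        return 0;
    }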
diff --git a/services/audioflinger/afutils/Vibrator.cpp b/services/audioflinger/afutils/Vibrator.cpp
index ab15a09..7c99ca9 100644
--- a/services/audioflinger/afutils/Vibrator.cpp
+++ b/services/audioflinger/afutils/Vibrator.cpp
@@ -20,6 +20,7 @@
#include "Vibrator.h"
+#include <android/os/ExternalVibrationScale.h>
#include <android/os/IExternalVibratorService.h>
#include <binder/IServiceManager.h>
#include <utils/Log.h>
@@ -46,14 +47,15 @@
os::HapticScale onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration) {
if (externalVibration->getAudioAttributes().flags & AUDIO_FLAG_MUTE_HAPTIC) {
ALOGD("%s, mute haptic according to audio attributes flag", __func__);
- return os::HapticScale::MUTE;
+ return os::HapticScale::mute();
}
const sp<os::IExternalVibratorService> evs = getExternalVibratorService();
if (evs != nullptr) {
- int32_t ret;
+
+ os::ExternalVibrationScale ret;
binder::Status status = evs->onExternalVibrationStart(*externalVibration, &ret);
if (status.isOk()) {
- ALOGD("%s, start external vibration with intensity as %d", __func__, ret);
+ ALOGD("%s, start external vibration with intensity as %d", __func__, ret.scaleLevel);
return os::ExternalVibration::externalVibrationScaleToHapticScale(ret);
}
}
@@ -61,7 +63,7 @@
__func__,
evs == nullptr ? "external vibration service not found"
: "error when querying intensity");
- return os::HapticScale::MUTE;
+ return os::HapticScale::mute();
}
void onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration) {
diff --git a/services/audioflinger/datapath/AudioStreamIn.cpp b/services/audioflinger/datapath/AudioStreamIn.cpp
index 24f3bb9..76618f4 100644
--- a/services/audioflinger/datapath/AudioStreamIn.cpp
+++ b/services/audioflinger/datapath/AudioStreamIn.cpp
@@ -56,15 +56,15 @@
return status;
}
- // Adjust for standby using HAL rate frames.
- // Only apply this correction if the HAL is getting PCM frames.
- if (mHalFormatHasProportionalFrames) {
+ if (mHalFormatHasProportionalFrames &&
+ (flags & AUDIO_INPUT_FLAG_DIRECT) == AUDIO_INPUT_FLAG_DIRECT) {
+ // For DirectRecord reset timestamp to 0 on standby.
const uint64_t adjustedPosition = (halPosition <= mFramesReadAtStandby) ?
0 : (halPosition - mFramesReadAtStandby);
// Scale from HAL sample rate to application rate.
*frames = adjustedPosition / mRateMultiplier;
} else {
- // For compressed formats.
+ // For compressed formats and linear PCM.
*frames = halPosition;
}
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index 1830d15..9851f3a 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -99,15 +99,15 @@
return status;
}
- // Adjust for standby using HAL rate frames.
- // Only apply this correction if the HAL is getting PCM frames.
- if (mHalFormatHasProportionalFrames) {
+ if (mHalFormatHasProportionalFrames &&
+ (flags & AUDIO_OUTPUT_FLAG_DIRECT) == AUDIO_OUTPUT_FLAG_DIRECT) {
+ // For DirectTrack reset timestamp to 0 on standby.
const uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ?
0 : (halPosition - mFramesWrittenAtStandby);
// Scale from HAL sample rate to application rate.
*frames = adjustedPosition / mRateMultiplier;
} else {
- // For offloaded MP3 and other compressed formats.
+ // For offloaded MP3 and other compressed formats, and linear PCM.
*frames = halPosition;
}
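The AudioStreamIn/AudioStreamOut changes above restrict the standby correction to DIRECT streams with proportional PCM frames: those reset to 0 at standby and are rescaled from the HAL rate to the application rate, while compressed formats and non-direct PCM now report the HAL position unchanged. A stand-alone sketch of that arithmetic with example numbers:

    // Small sketch of the standby-frame adjustment applied only to DIRECT PCM streams.
    #include <cstdint>
    #include <cstdio>

    uint64_t observableFrames(uint64_t halPosition, uint64_t framesAtStandby,
                              uint32_t rateMultiplier, bool directProportionalPcm) {
        if (!directProportionalPcm) {
            return halPosition;          // compressed formats and non-direct PCM: as-is
        }
        // Reset to 0 at standby, then scale from the HAL rate to the application rate.
        const uint64_t adjusted =
                (halPosition <= framesAtStandby) ? 0 : halPosition - framesAtStandby;
        return adjusted / rateMultiplier;
    }

    int main() {
        // e.g. HAL at 96 kHz, app at 48 kHz (multiplier 2), 4800 frames read at standby.
        std::printf("%llu\n", (unsigned long long)observableFrames(14400, 4800, 2, true));   // 4800
        std::printf("%llu\n", (unsigned long long)observableFrames(14400, 4800, 2, false));  // 14400
        return 0;
    }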
diff --git a/services/audioflinger/fastpath/FastMixer.cpp b/services/audioflinger/fastpath/FastMixer.cpp
index e0a15c1..1d41b3f 100644
--- a/services/audioflinger/fastpath/FastMixer.cpp
+++ b/services/audioflinger/fastpath/FastMixer.cpp
@@ -178,8 +178,8 @@
(void *)(uintptr_t)mSinkChannelMask);
mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_ENABLED,
(void *)(uintptr_t)fastTrack->mHapticPlaybackEnabled);
- mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_INTENSITY,
- (void *)(uintptr_t)fastTrack->mHapticIntensity);
+ mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_SCALE,
+ (void *)(&(fastTrack->mHapticScale)));
mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_MAX_AMPLITUDE,
(void *)(&(fastTrack->mHapticMaxAmplitude)));
diff --git a/services/audioflinger/fastpath/FastMixerState.h b/services/audioflinger/fastpath/FastMixerState.h
index 8ab6d25..0a56f92 100644
--- a/services/audioflinger/fastpath/FastMixerState.h
+++ b/services/audioflinger/fastpath/FastMixerState.h
@@ -54,7 +54,7 @@
audio_format_t mFormat = AUDIO_FORMAT_INVALID; // track format
int mGeneration = 0; // increment when any field is assigned
bool mHapticPlaybackEnabled = false; // haptic playback is enabled or not
- os::HapticScale mHapticIntensity = os::HapticScale::MUTE; // intensity of haptic data
+ os::HapticScale mHapticScale = os::HapticScale::mute(); // scale of haptic data
float mHapticMaxAmplitude = NAN; // max amplitude allowed for haptic data
};
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 6797e3d..3b764d1 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -231,10 +231,12 @@
ALOGI("%s: could not find port id for device %s", __func__, adt.toString().c_str());
return AUDIO_PORT_HANDLE_NONE;
}
- const auto btDeviceIt = mBluetoothDevicesWithCsd.find(std::make_pair(address, type));
- if (btDeviceIt != mBluetoothDevicesWithCsd.end()) {
- if (!btDeviceIt->second) {
- ALOGI("%s: bt device %s does not support sound dose", __func__, adt.toString().c_str());
+
+ if (audio_is_ble_out_device(type) || audio_is_a2dp_device(type)) {
+ const auto btDeviceIt = mBluetoothDevicesWithCsd.find(std::make_pair(address, type));
+ if (btDeviceIt == mBluetoothDevicesWithCsd.end() || !btDeviceIt->second) {
+ ALOGI("%s: bt device %s does not support sound dose", __func__,
+ adt.toString().c_str());
return AUDIO_PORT_HANDLE_NONE;
}
}
@@ -282,7 +284,7 @@
auto id = soundDoseManager->getIdForAudioDevice(in_audioDevice);
if (id == AUDIO_PORT_HANDLE_NONE) {
ALOGI("%s: no mapped id for audio device with type %d and address %s",
- __func__, in_audioDevice.type.type,
+ __func__, static_cast<int>(in_audioDevice.type.type),
in_audioDevice.address.toString().c_str());
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
@@ -315,7 +317,7 @@
auto id = soundDoseManager->getIdForAudioDevice(in_audioDevice);
if (id == AUDIO_PORT_HANDLE_NONE) {
ALOGI("%s: no mapped id for audio device with type %d and address %s",
- __func__, in_audioDevice.type.type,
+ __func__, static_cast<int>(in_audioDevice.type.type),
in_audioDevice.address.toString().c_str());
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
diff --git a/services/audioflinger/sounddose/tests/Android.bp b/services/audioflinger/sounddose/tests/Android.bp
index 2a2addf..60e170d 100644
--- a/services/audioflinger/sounddose/tests/Android.bp
+++ b/services/audioflinger/sounddose/tests/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_base_license"
@@ -11,7 +12,7 @@
name: "sounddosemanager_tests",
srcs: [
- "sounddosemanager_tests.cpp"
+ "sounddosemanager_tests.cpp",
],
defaults: [
diff --git a/services/audioflinger/timing/tests/Android.bp b/services/audioflinger/timing/tests/Android.bp
index d1e5563..040a914 100644
--- a/services/audioflinger/timing/tests/Android.bp
+++ b/services/audioflinger/timing/tests/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_base_license"
@@ -13,7 +14,7 @@
host_supported: true,
srcs: [
- "mediasyncevent_tests.cpp"
+ "mediasyncevent_tests.cpp",
],
header_libs: [
@@ -38,7 +39,7 @@
host_supported: true,
srcs: [
- "monotonicframecounter_tests.cpp"
+ "monotonicframecounter_tests.cpp",
],
static_libs: [
@@ -54,26 +55,26 @@
}
cc_test {
- name: "synchronizedrecordstate_tests",
+ name: "synchronizedrecordstate_tests",
- host_supported: true,
+ host_supported: true,
- srcs: [
- "synchronizedrecordstate_tests.cpp"
- ],
+ srcs: [
+ "synchronizedrecordstate_tests.cpp",
+ ],
- header_libs: [
- "libaudioclient_headers",
- ],
+ header_libs: [
+ "libaudioclient_headers",
+ ],
- static_libs: [
- "liblog",
- "libutils", // RefBase
- ],
+ static_libs: [
+ "liblog",
+ "libutils", // RefBase
+ ],
- cflags: [
- "-Wall",
- "-Werror",
- "-Wextra",
- ],
- }
\ No newline at end of file
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+}
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
new file mode 100644
index 0000000..b3da333
--- /dev/null
+++ b/services/audioparameterparser/Android.bp
@@ -0,0 +1,69 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: [
+ "frameworks_av_services_audioparameterparser_license",
+ ],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+ name: "frameworks_av_services_audioparameterparser_license",
+ visibility: [":__subpackages__"],
+ license_kinds: [
+ "SPDX-license-identifier-Apache-2.0",
+ ],
+ license_text: [
+ "NOTICE",
+ ],
+}
+
+cc_defaults {
+ name: "android.hardware.audio.parameter_parser.example_defaults",
+ defaults: [
+ "latest_android_hardware_audio_core_ndk_shared",
+ ],
+
+ shared_libs: [
+ "av-audio-types-aidl-V1-ndk",
+ "libbase",
+ "libbinder_ndk",
+ ],
+
+ cflags: [
+ "-Wall",
+ "-Wextra",
+ "-Werror",
+ "-Wthread-safety",
+ ],
+}
+
+cc_binary {
+ name: "android.hardware.audio.parameter_parser.example_service",
+ system_ext_specific: true,
+ relative_install_path: "hw",
+
+ init_rc: ["android.hardware.audio.parameter_parser.example_service.rc"],
+
+ defaults: [
+ "android.hardware.audio.parameter_parser.example_defaults",
+ ],
+
+ srcs: [
+ "ParameterParser.cpp",
+ "main.cpp",
+ ],
+}
diff --git a/services/audioparameterparser/NOTICE b/services/audioparameterparser/NOTICE
new file mode 100644
index 0000000..44158cb
--- /dev/null
+++ b/services/audioparameterparser/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2024, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/services/audioparameterparser/ParameterParser.cpp b/services/audioparameterparser/ParameterParser.cpp
new file mode 100644
index 0000000..8d6a64f
--- /dev/null
+++ b/services/audioparameterparser/ParameterParser.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ParameterParser.h"
+
+#define LOG_TAG "Audio_ParameterParser"
+#include <android-base/logging.h>
+
+namespace vendor::audio::parserservice {
+
+using ::aidl::android::hardware::audio::core::VendorParameter;
+using ParameterScope = ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope;
+
+::ndk::ScopedAStatus ParameterParser::parseVendorParameterIds(ParameterScope in_scope,
+ const std::string& in_rawKeys,
+ std::vector<std::string>*) {
+ LOG(DEBUG) << __func__ << ": scope: " << toString(in_scope) << ", keys: " << in_rawKeys;
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::parseVendorParameters(ParameterScope in_scope,
+ const std::string& in_rawKeysAndValues,
+ std::vector<VendorParameter>*,
+ std::vector<VendorParameter>*) {
+ LOG(DEBUG) << __func__ << ": scope: " << toString(in_scope)
+ << ", keys/values: " << in_rawKeysAndValues;
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::parseBluetoothA2dpReconfigureOffload(
+ const std::string& in_rawValue, std::vector<VendorParameter>*) {
+ LOG(DEBUG) << __func__ << ": value: " << in_rawValue;
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::parseBluetoothLeReconfigureOffload(
+ const std::string& in_rawValue, std::vector<VendorParameter>*) {
+ LOG(DEBUG) << __func__ << ": value: " << in_rawValue;
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus ParameterParser::processVendorParameters(
+ ParameterScope in_scope, const std::vector<VendorParameter>& in_parameters, std::string*) {
+ LOG(DEBUG) << __func__ << ": scope: " << toString(in_scope)
+ << ", parameters: " << ::android::internal::ToString(in_parameters);
+ return ::ndk::ScopedAStatus::ok();
+}
+
+} // namespace vendor::audio::parserservice
diff --git a/services/audioparameterparser/ParameterParser.h b/services/audioparameterparser/ParameterParser.h
new file mode 100644
index 0000000..1e0e333
--- /dev/null
+++ b/services/audioparameterparser/ParameterParser.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
+
+namespace vendor::audio::parserservice {
+
+class ParameterParser : public ::aidl::android::media::audio::BnHalAdapterVendorExtension {
+ public:
+ ParameterParser() = default;
+
+ private:
+ ::ndk::ScopedAStatus parseVendorParameterIds(
+ ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope in_scope,
+ const std::string& in_rawKeys, std::vector<std::string>* _aidl_return) override;
+
+ ::ndk::ScopedAStatus parseVendorParameters(
+ ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope in_scope,
+ const std::string& in_rawKeysAndValues,
+ std::vector<::aidl::android::hardware::audio::core::VendorParameter>*
+ out_syncParameters,
+ std::vector<::aidl::android::hardware::audio::core::VendorParameter>*
+ out_asyncParameters) override;
+
+ ::ndk::ScopedAStatus parseBluetoothA2dpReconfigureOffload(
+ const std::string& in_rawValue,
+ std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
+ override;
+
+ ::ndk::ScopedAStatus parseBluetoothLeReconfigureOffload(
+ const std::string& in_rawValue,
+ std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
+ override;
+
+ ::ndk::ScopedAStatus processVendorParameters(
+ ::aidl::android::media::audio::IHalAdapterVendorExtension::ParameterScope in_scope,
+ const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
+ in_parameters,
+ std::string* _aidl_return) override;
+};
+
+} // namespace vendor::audio::parserservice
diff --git a/services/audioparameterparser/android.hardware.audio.parameter_parser.example_service.rc b/services/audioparameterparser/android.hardware.audio.parameter_parser.example_service.rc
new file mode 100644
index 0000000..b6aca5c
--- /dev/null
+++ b/services/audioparameterparser/android.hardware.audio.parameter_parser.example_service.rc
@@ -0,0 +1,6 @@
+service audio_parameter_parser_service /system_ext/bin/hw/android.hardware.audio.parameter_parser.example_service
+ class core
+ user audioserver
+ group media
+ ioprio rt 4
+ task_profiles ProcessCapacityHigh HighPerformance
diff --git a/services/audioparameterparser/main.cpp b/services/audioparameterparser/main.cpp
new file mode 100644
index 0000000..d22eb55
--- /dev/null
+++ b/services/audioparameterparser/main.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Audio_ParameterParser"
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+#include "ParameterParser.h"
+
+using vendor::audio::parserservice::ParameterParser;
+
+int main() {
+ // This is a debug implementation; always enable debug logging.
+ android::base::SetMinimumLogSeverity(::android::base::DEBUG);
+
+ auto parser = ndk::SharedRefBase::make<ParameterParser>();
+ const std::string parserFqn =
+ std::string().append(ParameterParser::descriptor).append("/default");
+ binder_status_t status =
+ AServiceManager_addService(parser->asBinder().get(), parserFqn.c_str());
+ if (status != STATUS_OK) {
+ LOG(ERROR) << "failed to register service for \"" << parserFqn << "\"";
+ }
+
+ ABinderProcess_joinThreadPool();
+ return EXIT_FAILURE; // should not reach
+}
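As an illustrative aside to the example service above (not part of this change), a client process could look up the registered instance through the NDK AIDL backend roughly as follows. Only the interface descriptor and the "/default" instance name come from the code above; the helper name getVendorExtension is hypothetical.

#include <string>
#include <aidl/android/media/audio/IHalAdapterVendorExtension.h>
#include <android/binder_manager.h>

using ::aidl::android::media::audio::IHalAdapterVendorExtension;

// Looks up the "<descriptor>/default" instance registered by main() above.
// Returns nullptr if the service is not (yet) registered.
std::shared_ptr<IHalAdapterVendorExtension> getVendorExtension() {
    const std::string instance =
            std::string(IHalAdapterVendorExtension::descriptor) + "/default";
    ndk::SpAIBinder binder(AServiceManager_checkService(instance.c_str()));
    return IHalAdapterVendorExtension::fromBinder(binder);
}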
diff --git a/services/audiopolicy/Android.bp b/services/audiopolicy/Android.bp
index e018dd3..66ba7e2 100644
--- a/services/audiopolicy/Android.bp
+++ b/services/audiopolicy/Android.bp
@@ -1,10 +1,11 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
- default_applicable_licenses: ["frameworks_av_license"],
+ default_applicable_licenses: ["Android-Apache-2.0"],
}
cc_library_headers {
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 9ececea..8f17ffc 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -166,7 +166,8 @@
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
input_type_t *inputType,
- audio_port_handle_t *portId) = 0;
+ audio_port_handle_t *portId,
+ uint32_t *virtualDeviceId) = 0;
// indicates to the audio policy manager that the input starts being used.
virtual status_t startInput(audio_port_handle_t portId) = 0;
// indicates to the audio policy manager that the input stops being used.
@@ -269,6 +270,7 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes) = 0;
virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes) = 0;
+ virtual status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) = 0;
virtual status_t updatePolicyMix(
const AudioMix& mix,
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index a2ebb8d..cf1a771 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -46,6 +46,9 @@
"include-filter": "com.google.android.gts.audio.AudioPolicyHostTest"
}
]
+ },
+ {
+ "name": "spatializer_tests"
}
]
}
diff --git a/services/audiopolicy/common/Android.bp b/services/audiopolicy/common/Android.bp
index 91701ad..a699b8b 100644
--- a/services/audiopolicy/common/Android.bp
+++ b/services/audiopolicy/common/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 8b76842..e8b04ce 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -35,6 +36,7 @@
"src/TypeConverter.cpp",
],
shared_libs: [
+ "android.media.audiopolicy-aconfig-cc",
"audioclient-types-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"libaudioclient_aidl_conversion",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 7c70877..00958aa 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -16,7 +16,6 @@
#pragma once
-#define __STDC_LIMIT_MACROS
#include <inttypes.h>
#include <sys/types.h>
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index f3a9518..688772c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -70,10 +70,17 @@
return mMixerBehaviors;
}
+ enum CompatibilityScore {
+ NO_MATCH = 0,
+ PARTIAL_MATCH = 1,
+ EXACT_MATCH = 2
+ };
+
/**
- * @brief isCompatibleProfile: This method is used for input and direct output,
+ * @brief getCompatibilityScore: This method is used for input and direct output,
* and is not used for other output.
- * Checks if the IO profile is compatible with specified parameters.
+ * Returns the compatibility score, which measures how compatible the IO profile is
+ * with the specified parameters.
* For input, flags is interpreted as audio_input_flags_t.
* TODO: merge audio_output_flags_t and audio_input_flags_t.
*
@@ -86,18 +93,18 @@
* @param updatedChannelMask if non-NULL, it is assigned the actual channel mask
* @param flags to be checked for compatibility
* @param exactMatchRequiredForInputFlags true if exact match is required on flags
- * @return true if the profile is compatible, false otherwise.
+ * @return how compatible the IO profile is with the given parameters.
*/
- bool isCompatibleProfile(const DeviceVector &devices,
- uint32_t samplingRate,
- uint32_t *updatedSamplingRate,
- audio_format_t format,
- audio_format_t *updatedFormat,
- audio_channel_mask_t channelMask,
- audio_channel_mask_t *updatedChannelMask,
- // FIXME parameter type
- uint32_t flags,
- bool exactMatchRequiredForInputFlags = false) const;
+ CompatibilityScore getCompatibilityScore(const DeviceVector &devices,
+ uint32_t samplingRate,
+ uint32_t *updatedSamplingRate,
+ audio_format_t format,
+ audio_format_t *updatedFormat,
+ audio_channel_mask_t channelMask,
+ audio_channel_mask_t *updatedChannelMask,
+ // FIXME parameter type
+ uint32_t flags,
+ bool exactMatchRequiredForInputFlags = false) const;
/**
* @brief areAllDevicesSupported: Checks if the given devices are supported by the IO profile.
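For illustration only (not part of this change): a caller scanning I/O profiles could rank them with the new score, preferring an exact match over a partial one. The loop and the surrounding variable names below are assumptions, not code from this patch.

// Hypothetical caller-side sketch: keep the best-scoring profile and stop
// early once an exact match is found.
sp<IOProfile> bestProfile;
IOProfile::CompatibilityScore bestScore = IOProfile::NO_MATCH;
for (const auto& profile : profiles) {
    IOProfile::CompatibilityScore score = profile->getCompatibilityScore(
            devices, samplingRate, nullptr /*updatedSamplingRate*/,
            format, nullptr /*updatedFormat*/,
            channelMask, nullptr /*updatedChannelMask*/, flags);
    if (score > bestScore) {
        bestScore = score;
        bestProfile = profile;
        if (bestScore == IOProfile::EXACT_MATCH) break;  // cannot improve further
    }
}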
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 6f71ac5..44f84b9 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -235,7 +235,8 @@
&deviceType,
String8(mDevice->address().c_str()),
source,
- flags);
+ static_cast<audio_input_flags_t>(
+ flags & mProfile->getFlags()));
LOG_ALWAYS_FATAL_IF(mDevice->type() != deviceType,
"%s openInput returned device %08x when given device %08x",
__FUNCTION__, mDevice->type(), deviceType);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index dc0f466..3430f4b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -15,7 +15,7 @@
*/
#define LOG_TAG "APM_AudioPolicyMix"
-// #define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
#include <algorithm>
#include <iterator>
@@ -28,6 +28,9 @@
#include "PolicyAudioPort.h"
#include "IOProfile.h"
#include <AudioOutputDescriptor.h>
+#include <android_media_audiopolicy.h>
+
+namespace audiopolicy_flags = android::media::audiopolicy;
namespace android {
namespace {
@@ -190,6 +193,12 @@
mix.mDeviceType, mix.mDeviceAddress.c_str());
return BAD_VALUE;
}
+ if (audiopolicy_flags::audio_mix_ownership()) {
+ if (mix.mToken == registeredMix->mToken) {
+ ALOGE("registerMix(): same mix already registered - skipping");
+ return BAD_VALUE;
+ }
+ }
}
if (!areMixCriteriaConsistent(mix.mCriteria)) {
ALOGE("registerMix(): Mix contains inconsistent criteria "
@@ -212,12 +221,21 @@
{
for (size_t i = 0; i < size(); i++) {
const sp<AudioPolicyMix>& registeredMix = itemAt(i);
- if (mix.mDeviceType == registeredMix->mDeviceType
+ if (audiopolicy_flags::audio_mix_ownership()) {
+ if (mix.mToken == registeredMix->mToken) {
+ ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
+ mix.mDeviceType, mix.mDeviceAddress.c_str());
+ removeAt(i);
+ return NO_ERROR;
+ }
+ } else {
+ if (mix.mDeviceType == registeredMix->mDeviceType
&& mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0) {
- ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
- mix.mDeviceType, mix.mDeviceAddress.c_str());
- removeAt(i);
- return NO_ERROR;
+ ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
+ mix.mDeviceType, mix.mDeviceAddress.c_str());
+ removeAt(i);
+ return NO_ERROR;
+ }
}
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index dd222de..d9fbd89 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -33,17 +33,17 @@
}
}
-bool IOProfile::isCompatibleProfile(const DeviceVector &devices,
- uint32_t samplingRate,
- uint32_t *updatedSamplingRate,
- audio_format_t format,
- audio_format_t *updatedFormat,
- audio_channel_mask_t channelMask,
- audio_channel_mask_t *updatedChannelMask,
- // FIXME type punning here
- uint32_t flags,
- bool exactMatchRequiredForInputFlags) const
-{
+IOProfile::CompatibilityScore IOProfile::getCompatibilityScore(
+ const android::DeviceVector &devices,
+ uint32_t samplingRate,
+ uint32_t *updatedSamplingRate,
+ audio_format_t format,
+ audio_format_t *updatedFormat,
+ audio_channel_mask_t channelMask,
+ audio_channel_mask_t *updatedChannelMask,
+ // FIXME type punning here
+ uint32_t flags,
+ bool exactMatchRequiredForInputFlags) const {
const bool isPlaybackThread =
getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
const bool isRecordThread =
@@ -51,13 +51,13 @@
ALOG_ASSERT(isPlaybackThread != isRecordThread);
if (!areAllDevicesSupported(devices) ||
!isCompatibleProfileForFlags(flags, exactMatchRequiredForInputFlags)) {
- return false;
+ return NO_MATCH;
}
if (!audio_is_valid_format(format) ||
(isPlaybackThread && (samplingRate == 0 || !audio_is_output_channel(channelMask))) ||
(isRecordThread && (!audio_is_input_channel(channelMask)))) {
- return false;
+ return NO_MATCH;
}
audio_format_t myUpdatedFormat = format;
@@ -69,32 +69,40 @@
.channel_mask = channelMask,
.format = format,
};
+ auto result = NO_MATCH;
if (isRecordThread)
{
if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
if (checkExactAudioProfile(&config) != NO_ERROR) {
- return false;
+ return result;
}
- } else if (checkExactAudioProfile(&config) != NO_ERROR && checkCompatibleAudioProfile(
- myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) != NO_ERROR) {
- return false;
+ result = EXACT_MATCH;
+ } else if (checkExactAudioProfile(&config) == NO_ERROR) {
+ result = EXACT_MATCH;
+ } else if (checkCompatibleAudioProfile(
+ myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) == NO_ERROR) {
+ result = PARTIAL_MATCH;
+ } else {
+ return result;
}
} else {
- if (checkExactAudioProfile(&config) != NO_ERROR) {
- return false;
+ if (checkExactAudioProfile(&config) == NO_ERROR) {
+ result = EXACT_MATCH;
+ } else {
+ return result;
}
}
- if (updatedSamplingRate != NULL) {
+ if (updatedSamplingRate != nullptr) {
*updatedSamplingRate = myUpdatedSamplingRate;
}
- if (updatedFormat != NULL) {
+ if (updatedFormat != nullptr) {
*updatedFormat = myUpdatedFormat;
}
- if (updatedChannelMask != NULL) {
+ if (updatedChannelMask != nullptr) {
*updatedChannelMask = myUpdatedChannelMask;
}
- return true;
+ return result;
}
bool IOProfile::areAllDevicesSupported(const DeviceVector &devices) const {
@@ -156,9 +164,9 @@
for (const auto sampleRate : profile->getSampleRates()) {
for (const auto channelMask : profile->getChannels()) {
const audio_config_base_t config = {
- .format = profile->getFormat(),
.sample_rate = sampleRate,
- .channel_mask = channelMask
+ .channel_mask = channelMask,
+ .format = profile->getFormat(),
};
for (const auto mixerBehavior : mMixerBehaviors) {
mixerAttributes->push_back({
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 86600f4..7d529df 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -18,6 +18,7 @@
}
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -31,41 +32,49 @@
vendor: true,
src: ":a2dp_in_audio_policy_configuration",
}
+
prebuilt_etc {
name: "a2dp_audio_policy_configuration.xml",
vendor: true,
src: ":a2dp_audio_policy_configuration",
}
+
prebuilt_etc {
name: "audio_policy_configuration.xml",
vendor: true,
src: ":audio_policy_configuration_generic",
}
+
prebuilt_etc {
name: "r_submix_audio_policy_configuration.xml",
vendor: true,
src: ":r_submix_audio_policy_configuration",
}
+
prebuilt_etc {
name: "audio_policy_volumes.xml",
vendor: true,
src: ":audio_policy_volumes",
}
+
prebuilt_etc {
name: "default_volume_tables.xml",
vendor: true,
src: ":default_volume_tables",
}
+
prebuilt_etc {
name: "surround_sound_configuration_5_0.xml",
vendor: true,
src: ":surround_sound_configuration_5_0",
}
+
prebuilt_etc {
name: "usb_audio_policy_configuration.xml",
vendor: true,
src: ":usb_audio_policy_configuration",
}
+
prebuilt_etc {
name: "primary_audio_policy_configuration.xml",
src: ":primary_audio_policy_configuration",
@@ -76,50 +85,62 @@
name: "a2dp_in_audio_policy_configuration",
srcs: ["a2dp_in_audio_policy_configuration.xml"],
}
+
filegroup {
name: "a2dp_audio_policy_configuration",
srcs: ["a2dp_audio_policy_configuration.xml"],
}
+
filegroup {
name: "primary_audio_policy_configuration",
srcs: ["primary_audio_policy_configuration.xml"],
}
+
filegroup {
name: "surround_sound_configuration_5_0",
srcs: ["surround_sound_configuration_5_0.xml"],
}
+
filegroup {
name: "default_volume_tables",
srcs: ["default_volume_tables.xml"],
}
+
filegroup {
name: "audio_policy_volumes",
srcs: ["audio_policy_volumes.xml"],
}
+
filegroup {
name: "audio_policy_configuration_generic",
srcs: ["audio_policy_configuration_generic.xml"],
}
+
filegroup {
name: "audio_policy_configuration_generic_configurable",
srcs: ["audio_policy_configuration_generic_configurable.xml"],
}
+
filegroup {
name: "usb_audio_policy_configuration",
srcs: ["usb_audio_policy_configuration.xml"],
}
+
filegroup {
name: "r_submix_audio_policy_configuration",
srcs: ["r_submix_audio_policy_configuration.xml"],
}
+
filegroup {
name: "bluetooth_audio_policy_configuration_7_0",
srcs: ["bluetooth_audio_policy_configuration_7_0.xml"],
}
+
filegroup {
name: "bluetooth_with_le_audio_policy_configuration_7_0",
srcs: ["bluetooth_with_le_audio_policy_configuration_7_0.xml"],
}
+
filegroup {
name: "hearing_aid_audio_policy_configuration_7_0",
srcs: ["hearing_aid_audio_policy_configuration_7_0.xml"],
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index 0034a04..a93c816 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -13,6 +13,7 @@
// limitations under the License.
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/common/src/VolumeCurve.cpp b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
index fccbc60..9411155 100644
--- a/services/audiopolicy/engine/common/src/VolumeCurve.cpp
+++ b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
@@ -69,7 +69,7 @@
return mCurvePoints[nbCurvePoints - 1].mAttenuationInMb / 100.0f;
}
if (indexInUiPosition == 0) {
- if (indexInUiPosition != mCurvePoints[0].mIndex) {
+ if ((size_t)volIdx != mCurvePoints[0].mIndex) {
return VOLUME_MIN_DB; // out of bounds
}
return mCurvePoints[0].mAttenuationInMb / 100.0f;
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index 12597de..0864e6a 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
index 5d1aa16..8c7b7db 100644
--- a/services/audiopolicy/engine/config/tests/Android.bp
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/config/tests/resources/Android.bp b/services/audiopolicy/engine/config/tests/resources/Android.bp
index 9cee978..99d62a3 100644
--- a/services/audiopolicy/engine/config/tests/resources/Android.bp
+++ b/services/audiopolicy/engine/config/tests/resources/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/interface/Android.bp b/services/audiopolicy/engine/interface/Android.bp
index 5dd5adb..b1f7666 100644
--- a/services/audiopolicy/engine/interface/Android.bp
+++ b/services/audiopolicy/engine/interface/Android.bp
@@ -13,6 +13,7 @@
// limitations under the License.
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index eb2e2f4..d59ab5a 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -40,7 +41,7 @@
"libaudiopolicyengineconfigurable_pfwwrapper",
],
- shared_libs: [
+ shared_libs: [
"libaudio_aidl_conversion_common_cpp",
"libaudiofoundation",
"libaudiopolicycomponents",
diff --git a/services/audiopolicy/engineconfigurable/config/Android.bp b/services/audiopolicy/engineconfigurable/config/Android.bp
index b3d1f97..8dd13e8 100644
--- a/services/audiopolicy/engineconfigurable/config/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/Android.bp
@@ -17,6 +17,7 @@
// Root soong_namespace for common components
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -30,10 +31,12 @@
vendor: true,
src: ":audio_policy_engine_criteria",
}
+
filegroup {
name: "audio_policy_engine_criterion_types_template",
srcs: ["example/common/audio_policy_engine_criterion_types.xml.in"],
}
+
filegroup {
name: "audio_policy_engine_criteria",
srcs: ["example/common/audio_policy_engine_criteria.xml"],
diff --git a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
index e46b60f..fb1a71c 100644
--- a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
@@ -23,6 +23,7 @@
}
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -42,16 +43,19 @@
":audio_policy_engine_volumes.xml",
],
}
+
prebuilt_etc {
name: "audio_policy_engine_product_strategies.xml",
vendor: true,
src: "audio_policy_engine_product_strategies.xml",
}
+
prebuilt_etc {
name: "audio_policy_engine_volumes.xml",
vendor: true,
src: ":audio_policy_engine_volumes",
}
+
prebuilt_etc {
name: "audio_policy_engine_criterion_types.xml",
vendor: true,
@@ -69,6 +73,7 @@
":audio_policy_configuration_files",
],
}
+
filegroup {
name: "audio_policy_configuration_files",
srcs: [
@@ -79,18 +84,22 @@
":primary_audio_policy_configuration",
],
}
+
filegroup {
- name : "audio_policy_configuration_top_file",
+ name: "audio_policy_configuration_top_file",
srcs: [":audio_policy_configuration_generic"],
}
+
filegroup {
name: "audio_policy_engine_configuration",
srcs: ["audio_policy_engine_configuration.xml"],
}
+
filegroup {
name: "audio_policy_engine_volumes",
srcs: ["audio_policy_engine_volumes.xml"],
}
+
filegroup {
name: "audio_policy_engine_configuration_files",
srcs: [
diff --git a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
index ad6eeb1..b9abb54 100644
--- a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
@@ -24,6 +24,7 @@
}
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -43,11 +44,13 @@
":audio_policy_engine_volumes.xml",
],
}
+
prebuilt_etc {
name: "audio_policy_engine_product_strategies.xml",
vendor: true,
src: "audio_policy_engine_product_strategies.xml",
}
+
prebuilt_etc {
name: "audio_policy_engine_criterion_types.xml",
vendor: true,
@@ -65,6 +68,7 @@
":audio_policy_configuration_files",
],
}
+
filegroup {
name: "audio_policy_configuration_files",
srcs: [
@@ -75,10 +79,12 @@
":primary_audio_policy_configuration",
],
}
+
filegroup {
- name : "audio_policy_configuration_top_file",
+ name: "audio_policy_configuration_top_file",
srcs: [":audio_policy_configuration_generic"],
}
+
filegroup {
name: "audio_policy_engine_configuration_files",
srcs: [
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
index 773a99a..67a6128 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
@@ -23,6 +23,7 @@
}
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -42,21 +43,25 @@
":audio_policy_engine_volumes.xml",
],
}
+
prebuilt_etc {
name: "audio_policy_engine_product_strategies.xml",
vendor: true,
src: "audio_policy_engine_product_strategies.xml",
}
+
prebuilt_etc {
name: "audio_policy_engine_stream_volumes.xml",
vendor: true,
src: ":audio_policy_engine_stream_volumes",
}
+
prebuilt_etc {
name: "audio_policy_engine_default_stream_volumes.xml",
vendor: true,
src: ":audio_policy_engine_default_stream_volumes",
}
+
prebuilt_etc {
name: "audio_policy_engine_criterion_types.xml",
vendor: true,
@@ -74,6 +79,7 @@
":audio_policy_configuration_files",
],
}
+
filegroup {
name: "audio_policy_configuration_files",
srcs: [
@@ -84,22 +90,27 @@
":primary_audio_policy_configuration",
],
}
+
filegroup {
- name : "audio_policy_configuration_top_file",
+ name: "audio_policy_configuration_top_file",
srcs: [":audio_policy_configuration_generic"],
}
+
filegroup {
name: "audio_policy_engine_configuration",
srcs: ["audio_policy_engine_configuration.xml"],
}
+
filegroup {
name: "audio_policy_engine_stream_volumes",
srcs: ["audio_policy_engine_stream_volumes.xml"],
}
+
filegroup {
name: "audio_policy_engine_default_stream_volumes",
srcs: ["audio_policy_engine_default_stream_volumes.xml"],
}
+
filegroup {
name: "audio_policy_engine_configuration_files",
srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
index ee62d5e..7fe111f 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
@@ -17,6 +17,7 @@
// Root soong_namespace for common components
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -31,18 +32,21 @@
src: ":PolicyClass",
sub_dir: "parameter-framework/Structure/Policy",
}
+
prebuilt_etc {
name: "PolicySubsystem.xml",
vendor: true,
src: ":PolicySubsystem",
sub_dir: "parameter-framework/Structure/Policy",
}
+
prebuilt_etc {
name: "PolicySubsystem-CommonTypes.xml",
vendor: true,
src: ":buildcommontypesstructure_gen",
sub_dir: "parameter-framework/Structure/Policy",
}
+
genrule {
name: "buildcommontypesstructure_gen",
defaults: ["buildcommontypesstructurerule"],
@@ -52,34 +56,42 @@
name: "product_strategies_structure_template",
srcs: ["examples/common/Structure/ProductStrategies.xml.in"],
}
+
filegroup {
name: "PolicySubsystem",
srcs: ["examples/common/Structure/PolicySubsystem.xml"],
}
+
filegroup {
name: "PolicySubsystem-no-strategy",
srcs: ["examples/common/Structure/PolicySubsystem-no-strategy.xml"],
}
+
filegroup {
name: "common_types_structure_template",
srcs: ["examples/common/Structure/PolicySubsystem-CommonTypes.xml.in"],
}
+
filegroup {
name: "PolicyClass",
srcs: ["examples/common/Structure/PolicyClass.xml"],
}
+
filegroup {
name: "volumes.pfw",
srcs: ["examples/Settings/volumes.pfw"],
}
+
filegroup {
name: "device_for_input_source.pfw",
srcs: ["examples/Settings/device_for_input_source.pfw"],
}
+
filegroup {
name: "ParameterFrameworkConfigurationPolicy.userdebug.xml",
srcs: ["examples/ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "ParameterFrameworkConfigurationPolicy.user.xml",
srcs: ["examples/ParameterFrameworkConfigurationPolicy.user.xml"],
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
index 7d2d293..38451f2 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
@@ -27,6 +27,7 @@
// Generate Audio Policy Parameter Framework Product Strategies Structure file from template
//
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -42,6 +43,7 @@
sub_dir: "parameter-framework/Structure/Policy",
required: ["libpolicy-subsystem"],
}
+
genrule {
name: "buildstrategiesstructure_gen",
defaults: ["buildstrategiesstructurerule"],
@@ -67,6 +69,7 @@
"PolicySubsystem-CommonTypes.xml",
],
}
+
genrule {
name: "domaingeneratorpolicyrule_gen",
enabled: false, // TODO: This module fails to build
@@ -78,6 +81,7 @@
":edd_files",
],
}
+
filegroup {
name: "edd_files",
srcs: [
@@ -86,11 +90,13 @@
"Settings/device_for_product_strategies.pfw",
],
}
+
// This is for Settings generation, must use socket port, so userdebug version is required
filegroup {
name: "audio_policy_pfw_toplevel",
srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "audio_policy_pfw_structure_files",
srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
index f825e5f..eae6ae2 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
@@ -28,6 +28,7 @@
// Generate Audio Policy Parameter Framework Product Strategies Structure file from template
//
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -43,6 +44,7 @@
sub_dir: "parameter-framework/Structure/Policy",
required: ["libpolicy-subsystem"],
}
+
genrule {
name: "buildstrategiesstructure_gen",
defaults: ["buildstrategiesstructurerule"],
@@ -68,6 +70,7 @@
"PolicySubsystem-CommonTypes.xml",
],
}
+
genrule {
name: "domaingeneratorpolicyrule_gen",
enabled: false, // TODO: This module fails to build
@@ -79,6 +82,7 @@
":edd_files",
],
}
+
filegroup {
name: "edd_files",
srcs: [
@@ -87,11 +91,13 @@
"Settings/device_for_product_strategies.pfw",
],
}
+
// This is for Settings generation, must use socket port, so userdebug version is required
filegroup {
name: "audio_policy_pfw_toplevel",
srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "audio_policy_pfw_structure_files",
srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
index 4a83cbc..4e8654b 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
@@ -27,6 +27,7 @@
// Generate Audio Policy Parameter Framework Product Strategies Structure file from template
//
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -42,6 +43,7 @@
sub_dir: "parameter-framework/Structure/Policy",
required: ["libpolicy-subsystem"],
}
+
genrule {
name: "buildstrategiesstructure_gen",
defaults: ["buildstrategiesstructurerule"],
@@ -67,6 +69,7 @@
"PolicySubsystem-CommonTypes.xml",
],
}
+
genrule {
name: "domaingeneratorpolicyrule_gen",
enabled: false, // TODO: This module fails to build
@@ -78,6 +81,7 @@
":edd_files",
],
}
+
filegroup {
name: "edd_files",
srcs: [
@@ -95,11 +99,13 @@
"Settings/device_for_product_strategy_patch.pfw",
],
}
+
// This is for Settings generation, must use socket port, so userdebug version is required
filegroup {
name: "audio_policy_pfw_toplevel",
srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "audio_policy_pfw_structure_files",
srcs: [
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
index 89ab892..e279a8f 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
@@ -24,6 +24,7 @@
}
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -57,10 +58,12 @@
":edd_files",
],
}
+
filegroup {
name: "audio_policy_pfw_toplevel",
srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "audio_policy_pfw_structure_files",
srcs: [
@@ -69,6 +72,7 @@
":buildcommontypesstructure_gen",
],
}
+
filegroup {
name: "edd_files",
srcs: [
@@ -76,6 +80,7 @@
":volumes.pfw",
],
}
+
prebuilt_etc {
name: "PolicySubsystem.xml",
vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
index 4880547..47b8b54 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
@@ -24,6 +24,7 @@
}
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -45,6 +46,7 @@
"PolicySubsystem-CommonTypes.xml",
],
}
+
genrule {
name: "domaingeneratorpolicyrule_gen",
enabled: false, // TODO: This module fails to build
@@ -56,10 +58,12 @@
":edd_files",
],
}
+
filegroup {
name: "audio_policy_pfw_toplevel",
srcs: [":ParameterFrameworkConfigurationPolicy.userdebug.xml"],
}
+
filegroup {
name: "audio_policy_pfw_structure_files",
srcs: [
@@ -68,6 +72,7 @@
":buildcommontypesstructure_gen",
],
}
+
filegroup {
name: "edd_files",
srcs: [
@@ -76,6 +81,7 @@
":device_for_input_source.pfw",
],
}
+
prebuilt_etc {
name: "PolicySubsystem.xml",
vendor: true,
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index f7159c5..aa2163e 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -40,6 +41,6 @@
"liblog",
"libutils",
"libmedia_helper",
- "libparameter"
+ "libparameter",
],
}
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index 3aec064..2f77372 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -13,6 +13,7 @@
// limitations under the License.
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -36,13 +37,13 @@
name: "buildpolicycriteriontypesrule",
tools: ["buildPolicyCriterionTypes"],
cmd: "cp $(locations :audio_policy_configuration_files) $(genDir)/. && " +
- "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
- "$(location buildPolicyCriterionTypes) " +
- " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
- " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
- "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
- "--criteriontypes $(location :audio_policy_engine_criterion_types_template) " +
- "--outputfile $(out)",
+ "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
+ "$(location buildPolicyCriterionTypes) " +
+ " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+ " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
+ "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
+ "--criteriontypes $(location :audio_policy_engine_criterion_types_template) " +
+ "--outputfile $(out)",
srcs: [
// The commented inputs must be provided to use this genrule_defaults
// @todo uncomment if 1428659 is merged":android_audio_base_header_file",
@@ -81,17 +82,17 @@
"domainGeneratorConnector",
],
cmd: "mkdir -p $(genDir)/Structure/Policy && " +
- "cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
- "cp $(location :audio_policy_pfw_toplevel) $(genDir)/top_level && " +
- "$(location domainGeneratorPolicy) " +
- "--validate " +
- "--domain-generator-tool $(location domainGeneratorConnector) " +
- "--toplevel-config $(genDir)/top_level " +
- "--criteria $(location :audio_policy_engine_criteria) " +
- "--criteriontypes $(location :audio_policy_engine_criterion_types) " +
- "--add-edds $(locations :edd_files) " +
- "--schemas-dir external/parameter-framework/upstream/schemas " +
- " > $(out)",
+ "cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
+ "cp $(location :audio_policy_pfw_toplevel) $(genDir)/top_level && " +
+ "$(location domainGeneratorPolicy) " +
+ "--validate " +
+ "--domain-generator-tool $(location domainGeneratorConnector) " +
+ "--toplevel-config $(genDir)/top_level " +
+ "--criteria $(location :audio_policy_engine_criteria) " +
+ "--criteriontypes $(location :audio_policy_engine_criterion_types) " +
+ "--add-edds $(locations :edd_files) " +
+ "--schemas-dir external/parameter-framework/upstream/schemas " +
+ " > $(out)",
srcs: [
// The commented inputs must be provided to use this genrule_defaults
// ":audio_policy_pfw_toplevel",
@@ -118,11 +119,11 @@
genrule_defaults {
name: "buildstrategiesstructurerule",
tools: ["buildStrategiesStructureFile"],
- cmd: "cp $(locations :audio_policy_engine_configuration_files) $(genDir) && ls -l $(genDir) &&"+
- "$(location buildStrategiesStructureFile) " +
- "--audiopolicyengineconfigurationfile $(genDir)/audio_policy_engine_configuration.xml "+
- "--productstrategiesstructurefile $(location :product_strategies_structure_template) " +
- "--outputfile $(out)",
+ cmd: "cp $(locations :audio_policy_engine_configuration_files) $(genDir) && ls -l $(genDir) &&" +
+ "$(location buildStrategiesStructureFile) " +
+ "--audiopolicyengineconfigurationfile $(genDir)/audio_policy_engine_configuration.xml " +
+ "--productstrategiesstructurefile $(location :product_strategies_structure_template) " +
+ "--outputfile $(out)",
srcs: [
// The commented inputs must be provided to use this genrule_defaults
// ":audio_policy_engine_configuration_files",
@@ -146,9 +147,9 @@
name: "buildcommontypesstructurerule",
tools: ["buildCommonTypesStructureFile"],
cmd: "$(location buildCommonTypesStructureFile) " +
- "--androidaudiobaseheader $(location :libaudio_system_audio_base) " +
- "--commontypesstructure $(location :common_types_structure_template) " +
- "--outputfile $(out)",
+ "--androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+ "--commontypesstructure $(location :common_types_structure_template) " +
+ "--outputfile $(out)",
srcs: [
":common_types_structure_template",
":libaudio_system_audio_base",
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 0ef0b82..a897880 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 7d4ccab..98adff0 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/enginedefault/config/example/Android.bp b/services/audiopolicy/enginedefault/config/example/Android.bp
index 59a704b..f305c39 100644
--- a/services/audiopolicy/enginedefault/config/example/Android.bp
+++ b/services/audiopolicy/enginedefault/config/example/Android.bp
@@ -20,6 +20,7 @@
}
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -38,16 +39,19 @@
":audio_policy_engine_product_strategies.xml",
],
}
+
prebuilt_etc {
name: "audio_policy_engine_product_strategies.xml",
vendor: true,
src: "phone/audio_policy_engine_product_strategies.xml",
}
+
prebuilt_etc {
name: "audio_policy_engine_stream_volumes.xml",
vendor: true,
src: "phone/audio_policy_engine_stream_volumes.xml",
}
+
prebuilt_etc {
name: "audio_policy_engine_default_stream_volumes.xml",
vendor: true,
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index fd240e3..fca02e4 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -17,6 +17,7 @@
******************************************************************************/
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
index 38a2cde..887817d 100644
--- a/services/audiopolicy/fuzzer/aidl/Android.bp
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -16,6 +16,10 @@
*
******************************************************************************/
+package {
+ default_team: "trendy_team_android_media_audio_framework",
+}
+
cc_defaults {
name: "audiopolicy_aidl_fuzzer_defaults",
shared_libs: [
@@ -40,6 +44,7 @@
"packagemanager_aidl-cpp",
],
static_libs: [
+ "libaudiomockhal",
"libfakeservicemanager",
"libmediaplayerservice",
],
@@ -69,6 +74,9 @@
srcs: ["audiopolicy_aidl_fuzzer.cpp"],
defaults: [
"audiopolicy_aidl_fuzzer_defaults",
+ "latest_android_hardware_audio_core_ndk_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_hardware_audio_effect_ndk_shared",
"service_fuzzer_defaults",
],
}
diff --git a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
index ca79c49..3d972dc 100644
--- a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
@@ -18,8 +18,12 @@
#include <AudioFlinger.h>
#include <android-base/logging.h>
#include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <android/media/IAudioPolicyService.h>
+#include <core-mock/ConfigMock.h>
+#include <core-mock/ModuleMock.h>
+#include <effect-mock/FactoryMock.h>
#include <fakeservicemanager/FakeServiceManager.h>
#include <fuzzbinder/libbinder_driver.h>
#include <fuzzbinder/random_binder.h>
@@ -55,14 +59,26 @@
});
gFakeServiceManager->clear();
- for (const char* service :
- {"activity", "sensor_privacy", "permission", "scheduling_policy",
- "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+ for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+ "batterystats", "media.metrics"}) {
if (!addService(String16(service), gFakeServiceManager, fdp)) {
return 0;
}
}
+ auto configService = ndk::SharedRefBase::make<ConfigMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
+ "android.hardware.audio.core.IConfig/default"));
+
+ auto factoryService = ndk::SharedRefBase::make<FactoryMock>();
+ CHECK_EQ(NO_ERROR,
+ AServiceManager_addService(factoryService.get()->asBinder().get(),
+ "android.hardware.audio.effect.IFactory/default"));
+
+ auto moduleService = ndk::SharedRefBase::make<ModuleMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(moduleService.get()->asBinder().get(),
+ "android.hardware.audio.core.IModule/default"));
+
const auto audioFlinger = sp<AudioFlinger>::make();
const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
@@ -79,8 +95,7 @@
false /* allowIsolated */,
IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
- fuzzService(media::IAudioPolicyService::asBinder(audioPolicyService),
- FuzzedDataProvider(data, size));
+ fuzzService(media::IAudioPolicyService::asBinder(audioPolicyService), std::move(fdp));
return 0;
}
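
Note: a minimal, self-contained sketch of the pattern the fuzzer change above relies on — installing fake implementations under the exact instance names the code under test will look up, so dependency resolution never reaches a real HAL. The registry, Service type, and ConfigMockSketch class are hypothetical stand-ins for FakeServiceManager / AServiceManager_addService, not the real binder API.

#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <utility>

// Hypothetical stand-ins for a binder service and a fake service manager.
struct Service {
    virtual ~Service() = default;
    virtual std::string describe() const = 0;
};

struct ConfigMockSketch : Service {
    std::string describe() const override { return "mock IConfig"; }
};

class FakeRegistry {
public:
    void add(const std::string& instance, std::shared_ptr<Service> s) {
        services_[instance] = std::move(s);
    }
    std::shared_ptr<Service> get(const std::string& instance) const {
        auto it = services_.find(instance);
        return it == services_.end() ? nullptr : it->second;
    }
private:
    std::map<std::string, std::shared_ptr<Service>> services_;
};

int main() {
    FakeRegistry registry;
    // Register the mock under the instance name the code under test asks for,
    // mirroring the "android.hardware.audio.core.IConfig/default" registration above.
    registry.add("android.hardware.audio.core.IConfig/default",
                 std::make_shared<ConfigMockSketch>());

    if (auto s = registry.get("android.hardware.audio.core.IConfig/default")) {
        std::cout << "resolved: " << s->describe() << "\n";
    }
    return 0;
}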
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 58fcb5c..6416a47 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -207,7 +207,8 @@
audio_port_handle_t *selectedDeviceId, audio_format_t format,
audio_channel_mask_t channelMask, int sampleRate,
audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
- audio_port_handle_t *portId = nullptr);
+ audio_port_handle_t *portId = nullptr,
+ uint32_t *virtualDeviceId = nullptr);
bool findDevicePort(audio_port_role_t role, audio_devices_t deviceType,
const std::string &address, audio_port_v7 *foundPort);
static audio_port_handle_t getDeviceIdFromPatch(const struct audio_patch *patch);
@@ -283,7 +284,7 @@
bool AudioPolicyManagerFuzzer::getInputForAttr(
const audio_attributes_t &attr, audio_unique_id_t riid, audio_port_handle_t *selectedDeviceId,
audio_format_t format, audio_channel_mask_t channelMask, int sampleRate,
- audio_input_flags_t flags, audio_port_handle_t *portId) {
+ audio_input_flags_t flags, audio_port_handle_t *portId, uint32_t *virtualDeviceId) {
audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
config.sample_rate = sampleRate;
@@ -298,7 +299,7 @@
attributionSource.uid = 0;
attributionSource.token = sp<BBinder>::make();
if (mManager->getInputForAttr(&attr, &input, riid, AUDIO_SESSION_NONE, attributionSource,
- &config, flags, selectedDeviceId, &inputType, portId) != OK) {
+ &config, flags, selectedDeviceId, &inputType, portId, virtualDeviceId) != OK) {
return false;
}
if (*portId == AUDIO_PORT_HANDLE_NONE || input == AUDIO_IO_HANDLE_NONE) {
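
Note: a short sketch of the out-parameter convention introduced for virtualDeviceId in the hunks above — existing callers pass nothing (nullptr default) and the callee writes through the pointer only when it is non-null. Function and parameter names here are illustrative, not the real getInputForAttr signature.

#include <cstdint>
#include <iostream>

bool openInput(int source, uint32_t* portId = nullptr, uint32_t* virtualDeviceId = nullptr) {
    const uint32_t chosenPort = 42;           // pretend routing decision
    const uint32_t chosenVirtualDevice = 7;   // pretend policy-mix device
    if (portId != nullptr) *portId = chosenPort;
    if (virtualDeviceId != nullptr) *virtualDeviceId = chosenVirtualDevice;
    return source >= 0;
}

int main() {
    uint32_t port = 0;
    openInput(/*source=*/1, &port);            // legacy caller, ignores the new field
    uint32_t vdi = 0;
    openInput(/*source=*/1, &port, &vdi);      // new caller, receives both
    std::cout << port << " " << vdi << "\n";
}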
diff --git a/services/audiopolicy/fuzzer/resources/Android.bp b/services/audiopolicy/fuzzer/resources/Android.bp
index 22ee256..2a2b83b 100644
--- a/services/audiopolicy/fuzzer/resources/Android.bp
+++ b/services/audiopolicy/fuzzer/resources/Android.bp
@@ -17,6 +17,7 @@
******************************************************************************/
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index 6a37b4e..2f46d48 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -44,7 +45,8 @@
"libaudioclient_aidl_conversion",
"audioclient-types-aidl-cpp",
// Flag support
- "com.android.media.audioserver-aconfig-cc"
+ "android.media.audiopolicy-aconfig-cc",
+ "com.android.media.audioserver-aconfig-cc",
],
header_libs: [
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 46e4d60..efcd420 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -42,6 +42,7 @@
#include <Serializer.h>
#include <android/media/audio/common/AudioPort.h>
#include <com_android_media_audio.h>
+#include <android_media_audiopolicy.h>
#include <com_android_media_audioserver.h>
#include <cutils/bitops.h>
#include <cutils/properties.h>
@@ -58,6 +59,9 @@
namespace android {
+
+namespace audio_flags = android::media::audiopolicy;
+
using android::media::audio::common::AudioDevice;
using android::media::audio::common::AudioDeviceAddress;
using android::media::audio::common::AudioPortDeviceExt;
@@ -125,7 +129,8 @@
device->toAudioPort(&devicePort);
if (status_t status = mpClientInterface->setDeviceConnectedState(&devicePort, state);
status != OK) {
- ALOGE("Error %d while setting connected state for device %s", state,
+ ALOGE("Error %d while setting connected state for device %s",
+ static_cast<int>(state),
device->getDeviceTypeAddr().toString(false).c_str());
}
}
@@ -1050,11 +1055,11 @@
sp<IOProfile> profile;
for (const auto& hwModule : hwModules) {
for (const auto& curProfile : hwModule->getOutputProfiles()) {
- if (!curProfile->isCompatibleProfile(devices,
+ if (curProfile->getCompatibilityScore(devices,
samplingRate, NULL /*updatedSamplingRate*/,
format, NULL /*updatedFormat*/,
channelMask, NULL /*updatedChannelMask*/,
- flags)) {
+ flags) == IOProfile::NO_MATCH) {
continue;
}
// reject profiles not corresponding to a device currently available
@@ -1632,10 +1637,13 @@
*flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_ULTRASOUND);
}
+ // Use the spatializer output if the content can be spatialized, no preferred mixer
+ // was specified and offload or direct playback is not explicitly requested.
*isSpatialized = false;
if (mSpatializerOutput != nullptr
&& canBeSpatializedInt(attr, config, devices.toTypeAddrVector())
- && prefMixerConfigInfo == nullptr) {
+ && prefMixerConfigInfo == nullptr
+ && ((*flags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0)) {
*isSpatialized = true;
return mSpatializerOutput->mIoHandle;
}
@@ -2689,7 +2697,8 @@
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
input_type_t *inputType,
- audio_port_handle_t *portId)
+ audio_port_handle_t *portId,
+ uint32_t *virtualDeviceId)
{
ALOGV("%s() source %d, sampling rate %d, format %#x, channel mask %#x, session %d, "
"flags %#x attributes=%s requested device ID %d",
@@ -2791,6 +2800,9 @@
} else {
*inputType = API_INPUT_MIX_EXT_POLICY_REROUTE;
}
+ if (virtualDeviceId) {
+ *virtualDeviceId = policyMix->mVirtualDeviceId;
+ }
} else {
if (explicitRoutingDevice != nullptr) {
device = explicitRoutingDevice;
@@ -2814,6 +2826,10 @@
// meaning it receives audio injected into the framework, so the recorder doesn't
// know about it and is therefore considered "legacy"
*inputType = API_INPUT_LEGACY;
+
+ if (virtualDeviceId) {
+ *virtualDeviceId = policyMix->mVirtualDeviceId;
+ }
} else if (audio_is_remote_submix_device(device->type())) {
*inputType = API_INPUT_MIX_CAPTURE;
} else if (device->type() == AUDIO_DEVICE_IN_TELEPHONY_RX) {
@@ -2845,6 +2861,11 @@
goto error;
}
+
+ if (policyMix != nullptr && virtualDeviceId != nullptr) {
+ *virtualDeviceId = policyMix->mVirtualDeviceId;
+ }
+
exit:
*selectedDeviceId = mAvailableInputDevices.contains(device) ?
@@ -3328,6 +3349,7 @@
// requested device or one of the devices selected by the engine for this stream
// - For default requested device (AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME), apply volume only if
// no specific device volume value exists for currently selected device.
+ // - Only apply the volume if the requested device is the desired device for volume control.
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
DeviceTypeSet curDevices = desc->devices().types();
@@ -3347,7 +3369,8 @@
if (device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME) {
curSrcDevices.insert(device);
applyVolume = (curSrcDevices.find(
- Volume::getDeviceForVolume(curDevices)) != curSrcDevices.end());
+ Volume::getDeviceForVolume(curDevices)) != curSrcDevices.end())
+ && Volume::getDeviceForVolume(curSrcDevices) == device;
} else {
applyVolume = !curves.hasVolumeIndexForDevice(curSrcDevice);
}
@@ -3668,6 +3691,7 @@
status_t res = NO_ERROR;
bool checkOutputs = false;
sp<HwModule> rSubmixModule;
+ Vector<AudioMix> registeredMixes;
// examine each mix's route type
for (size_t i = 0; i < mixes.size(); i++) {
AudioMix mix = mixes[i];
@@ -3791,11 +3815,19 @@
break;
} else {
checkOutputs = true;
+ registeredMixes.add(mix);
}
}
}
if (res != NO_ERROR) {
- unregisterPolicyMixes(mixes);
+ if (audio_flags::audio_mix_ownership()) {
+ // Only unregister mixes that were actually registered to not accidentally unregister
+ // mixes that already existed previously.
+ unregisterPolicyMixes(registeredMixes);
+ registeredMixes.clear();
+ } else {
+ unregisterPolicyMixes(mixes);
+ }
} else if (checkOutputs) {
checkForDeviceAndOutputChanges();
updateCallAndOutputRouting();
@@ -3829,15 +3861,22 @@
continue;
}
- for (auto device : {AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX}) {
+ for (auto device: {AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX}) {
if (getDeviceConnectionState(device, address.c_str()) ==
- AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
- res = setDeviceConnectionStateInt(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
- address.c_str(), "remote-submix",
- AUDIO_FORMAT_DEFAULT);
- if (res != OK) {
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
+ status_t currentRes =
+ setDeviceConnectionStateInt(device,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ address.c_str(),
+ "remote-submix",
+ AUDIO_FORMAT_DEFAULT);
+ if (!audio_flags::audio_mix_ownership()) {
+ res = currentRes;
+ }
+ if (currentRes != OK) {
ALOGE("Error making RemoteSubmix device unavailable for mix "
"with type %d, address %s", device, address.c_str());
+ res = INVALID_OPERATION;
}
}
}
@@ -3853,6 +3892,7 @@
}
}
}
+
if (res == NO_ERROR && checkOutputs) {
checkForDeviceAndOutputChanges();
updateCallAndOutputRouting();
@@ -3860,6 +3900,26 @@
return res;
}
+status_t AudioPolicyManager::getRegisteredPolicyMixes(std::vector<AudioMix>& _aidl_return) {
+ if (!audio_flags::audio_mix_test_api()) {
+ return INVALID_OPERATION;
+ }
+
+ _aidl_return.clear();
+ _aidl_return.reserve(mPolicyMixes.size());
+ for (const auto &policyMix: mPolicyMixes) {
+ _aidl_return.emplace_back(policyMix->mCriteria, policyMix->mMixType,
+ policyMix->mFormat, policyMix->mRouteFlags, policyMix->mDeviceAddress,
+ policyMix->mCbFlags);
+ _aidl_return.back().mDeviceType = policyMix->mDeviceType;
+ _aidl_return.back().mToken = policyMix->mToken;
+ _aidl_return.back().mVirtualDeviceId = policyMix->mVirtualDeviceId;
+ }
+
+ ALOGVV("%s() returning %zu registered mixes", __func__, _aidl_return.size());
+ return OK;
+}
+
status_t AudioPolicyManager::updatePolicyMix(
const AudioMix& mix,
const std::vector<AudioMixMatchCriterion>& updatedCriteria) {
@@ -4481,11 +4541,11 @@
outputDevices = getMsdAudioOutDevices();
}
for (const auto& curProfile : hwModule->getOutputProfiles()) {
- if (!curProfile->isCompatibleProfile(outputDevices,
+ if (curProfile->getCompatibilityScore(outputDevices,
config->sample_rate, nullptr /*updatedSamplingRate*/,
config->format, nullptr /*updatedFormat*/,
config->channel_mask, nullptr /*updatedChannelMask*/,
- flags)) {
+ flags) == IOProfile::NO_MATCH) {
continue;
}
// reject profiles not corresponding to a device currently available
@@ -4591,15 +4651,17 @@
for (const auto& hwModule : mHwModules) {
for (const auto& curProfile : hwModule->getOutputProfiles()) {
if (curProfile->hasDynamicAudioProfile()
- && curProfile->isCompatibleProfile(devices,
- mixerAttributes->config.sample_rate,
- nullptr /*updatedSamplingRate*/,
- mixerAttributes->config.format,
- nullptr /*updatedFormat*/,
- mixerAttributes->config.channel_mask,
- nullptr /*updatedChannelMask*/,
- flags,
- false /*exactMatchRequiredForInputFlags*/)) {
+ && curProfile->getCompatibilityScore(
+ devices,
+ mixerAttributes->config.sample_rate,
+ nullptr /*updatedSamplingRate*/,
+ mixerAttributes->config.format,
+ nullptr /*updatedFormat*/,
+ mixerAttributes->config.channel_mask,
+ nullptr /*updatedChannelMask*/,
+ flags,
+ false /*exactMatchRequiredForInputFlags*/)
+ != IOProfile::NO_MATCH) {
profile = curProfile;
break;
}
@@ -5004,14 +5066,15 @@
return BAD_VALUE;
}
- if (!outputDesc->mProfile->isCompatibleProfile(DeviceVector(devDesc),
- patch->sources[0].sample_rate,
- NULL, // updatedSamplingRate
- patch->sources[0].format,
- NULL, // updatedFormat
- patch->sources[0].channel_mask,
- NULL, // updatedChannelMask
- AUDIO_OUTPUT_FLAG_NONE /*FIXME*/)) {
+ if (outputDesc->mProfile->getCompatibilityScore(
+ DeviceVector(devDesc),
+ patch->sources[0].sample_rate,
+ nullptr, // updatedSamplingRate
+ patch->sources[0].format,
+ nullptr, // updatedFormat
+ patch->sources[0].channel_mask,
+ nullptr, // updatedChannelMask
+ AUDIO_OUTPUT_FLAG_NONE /*FIXME*/) == IOProfile::NO_MATCH) {
ALOGV("%s profile not supported for device %08x", __func__, devDesc->type());
return INVALID_OPERATION;
}
@@ -5059,17 +5122,18 @@
return BAD_VALUE;
}
- if (!inputDesc->mProfile->isCompatibleProfile(DeviceVector(device),
- patch->sinks[0].sample_rate,
- NULL, /*updatedSampleRate*/
- patch->sinks[0].format,
- NULL, /*updatedFormat*/
- patch->sinks[0].channel_mask,
- NULL, /*updatedChannelMask*/
- // FIXME for the parameter type,
- // and the NONE
- (audio_output_flags_t)
- AUDIO_INPUT_FLAG_NONE)) {
+ if (inputDesc->mProfile->getCompatibilityScore(
+ DeviceVector(device),
+ patch->sinks[0].sample_rate,
+ nullptr, /*updatedSampleRate*/
+ patch->sinks[0].format,
+ nullptr, /*updatedFormat*/
+ patch->sinks[0].channel_mask,
+ nullptr, /*updatedChannelMask*/
+ // FIXME for the parameter type,
+ // and the NONE
+ (audio_output_flags_t)
+ AUDIO_INPUT_FLAG_NONE) == IOProfile::NO_MATCH) {
return INVALID_OPERATION;
}
// TODO: reconfigure output format and channels here
@@ -6001,11 +6065,14 @@
// The caller can have the audio config criteria ignored by either passing a null ptr or
// the AUDIO_CONFIG_INITIALIZER value.
// If an audio config is specified, current policy is to only allow spatialization for
- // some positional channel masks and PCM format
+ // some positional channel masks and PCM format and for stereo if low latency performance
+ // mode is not requested.
if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
+ static const bool stereo_spatialization_enabled =
+ property_get_bool("ro.audio.stereo_spatialization_enabled", false);
const bool channel_mask_spatialized =
- com_android_media_audio_stereo_spatialization()
+ (stereo_spatialization_enabled && com_android_media_audio_stereo_spatialization())
? audio_channel_mask_contains_stereo(config->channel_mask)
: audio_is_channel_mask_spatialized(config->channel_mask);
if (!channel_mask_spatialized) {
@@ -6014,6 +6081,10 @@
if (!audio_is_linear_pcm(config->format)) {
return false;
}
+ if (config->channel_mask == AUDIO_CHANNEL_OUT_STEREO
+ && ((attr->flags & AUDIO_FLAG_LOW_LATENCY) != 0)) {
+ return false;
+ }
}
sp<IOProfile> profile =
@@ -7218,7 +7289,6 @@
DeviceVector devices;
for (const auto &productStrategy : mEngine->getOrderedProductStrategies()) {
StreamTypeVector streams = mEngine->getStreamTypesForProductStrategy(productStrategy);
- auto attr = mEngine->getAllAttributesForProductStrategy(productStrategy).front();
auto hasStreamActive = [&](auto stream) {
return hasStream(streams, stream) && isStreamActive(stream, 0);
};
@@ -7243,6 +7313,7 @@
mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc))) {
// Retrieval of devices for voice DL is done on primary output profile, cannot
// check the route (would force modifying configuration file for this profile)
+ auto attr = mEngine->getAllAttributesForProductStrategy(productStrategy).front();
devices = mEngine->getOutputDevicesForAttributes(attr, nullptr, fromCache);
break;
}
@@ -7684,9 +7755,6 @@
// Choose an input profile based on the requested capture parameters: select the first available
// profile supporting all requested parameters.
// The flags can be ignored if it doesn't contain a must match flag.
- //
- // TODO: perhaps isCompatibleProfile should return a "matching" score so we can return
- // the best matching profile, not the first one.
using underlying_input_flag_t = std::underlying_type_t<audio_input_flags_t>;
const underlying_input_flag_t mustMatchFlag = AUDIO_INPUT_FLAG_MMAP_NOIRQ |
@@ -7703,27 +7771,35 @@
for (const auto& profile : hwModule->getInputProfiles()) {
// profile->log();
//updatedFormat = format;
- if (profile->isCompatibleProfile(DeviceVector(device), samplingRate,
- &samplingRate /*updatedSamplingRate*/,
- format,
- &format, /*updatedFormat*/
- channelMask,
- &channelMask /*updatedChannelMask*/,
- // FIXME ugly cast
- (audio_output_flags_t) flags,
- true /*exactMatchRequiredForInputFlags*/)) {
+ if (profile->getCompatibilityScore(
+ DeviceVector(device),
+ samplingRate,
+ &updatedSamplingRate,
+ format,
+ &updatedFormat,
+ channelMask,
+ &updatedChannelMask,
+ // FIXME ugly cast
+ (audio_output_flags_t) flags,
+ true /*exactMatchRequiredForInputFlags*/) == IOProfile::EXACT_MATCH) {
+ samplingRate = updatedSamplingRate;
+ format = updatedFormat;
+ channelMask = updatedChannelMask;
return profile;
}
- if (firstInexact == nullptr && profile->isCompatibleProfile(DeviceVector(device),
- samplingRate,
- &updatedSamplingRate,
- format,
- &updatedFormat,
- channelMask,
- &updatedChannelMask,
- // FIXME ugly cast
- (audio_output_flags_t) flags,
- false /*exactMatchRequiredForInputFlags*/)) {
+ if (firstInexact == nullptr
+ && profile->getCompatibilityScore(
+ DeviceVector(device),
+ samplingRate,
+ &updatedSamplingRate,
+ format,
+ &updatedFormat,
+ channelMask,
+ &updatedChannelMask,
+ // FIXME ugly cast
+ (audio_output_flags_t) flags,
+ false /*exactMatchRequiredForInputFlags*/)
+ != IOProfile::NO_MATCH) {
firstInexact = profile;
}
}
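
Note: the AudioPolicyManager.cpp hunks above replace boolean isCompatibleProfile() checks with a getCompatibilityScore() comparison. The following is a minimal standalone model of that boolean-to-score refactor (prefer an exact match, otherwise fall back to the first compatible-but-inexact profile); the Profile struct and score values are simplified stand-ins, not the real IOProfile API.

#include <cstdio>
#include <vector>

enum class MatchScore { NO_MATCH, PARTIAL_MATCH, EXACT_MATCH };

struct Profile {
    int sampleRate;
    bool exactFlags;
    MatchScore getCompatibilityScore(int requestedRate) const {
        if (requestedRate == sampleRate) {
            return exactFlags ? MatchScore::EXACT_MATCH : MatchScore::PARTIAL_MATCH;
        }
        return MatchScore::NO_MATCH;
    }
};

const Profile* pickProfile(const std::vector<Profile>& profiles, int rate) {
    const Profile* firstInexact = nullptr;
    for (const auto& p : profiles) {
        const MatchScore score = p.getCompatibilityScore(rate);
        if (score == MatchScore::EXACT_MATCH) {
            return &p;                               // exact match wins immediately
        }
        if (score != MatchScore::NO_MATCH && firstInexact == nullptr) {
            firstInexact = &p;                       // remember the first usable fallback
        }
    }
    return firstInexact;
}

int main() {
    std::vector<Profile> profiles{{44100, false}, {48000, true}};
    const Profile* p = pickProfile(profiles, 48000);
    std::printf("picked rate %d\n", p ? p->sampleRate : -1);
    return 0;
}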
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index ea60c2b..0454e6e 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -141,7 +141,8 @@
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
input_type_t *inputType,
- audio_port_handle_t *portId);
+ audio_port_handle_t *portId,
+ uint32_t *virtualDeviceId);
// indicates to the audio policy manager that the input starts being used.
virtual status_t startInput(audio_port_handle_t portId);
@@ -292,6 +293,7 @@
virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes);
virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes);
+ virtual status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) override;
virtual status_t updatePolicyMix(
const AudioMix& mix,
const std::vector<AudioMixMatchCriterion>& updatedCriteria) override;
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index fb55225..c209b46 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -38,6 +39,7 @@
"libstagefright_foundation",
"libutils",
"libxml2",
+ "android.media.audiopolicy-aconfig-cc",
"audioclient-types-aidl-cpp",
"audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
@@ -52,7 +54,7 @@
static_libs: [
"libeffectsconfig",
"libaudiopolicycomponents",
- ]
+ ],
}
cc_library {
@@ -75,10 +77,9 @@
],
include_dirs: [
- "frameworks/av/services/audioflinger"
+ "frameworks/av/services/audioflinger",
],
-
static_libs: [
"framework-permission-aidl-cpp",
],
@@ -107,3 +108,9 @@
"framework-permission-aidl-cpp",
],
}
+
+cc_library_headers {
+ name: "libaudiopolicyservice_headers",
+ host_supported: true,
+ export_include_dirs: ["."],
+}
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 71edd57..d67ddb6 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -61,11 +61,6 @@
}
}
-void AudioPolicyEffects::setDefaultDeviceEffects() {
- mDefaultDeviceEffectFuture = std::async(
- std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
-}
-
status_t AudioPolicyEffects::addInputEffects(audio_io_handle_t input,
audio_source_t inputSource,
audio_session_t audioSession)
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index a9628c2..259b84a 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -116,10 +116,8 @@
// Remove the default stream effect from wherever it's attached.
status_t removeStreamDefaultEffect(audio_unique_id_t id) EXCLUDES_AudioPolicyEffects_Mutex;
- // Called by AudioPolicyService::onFirstRef() to load device effects
- // on a separate worker thread.
- // TODO(b/319515492) move this initialization after AudioPolicyService::onFirstRef().
- void setDefaultDeviceEffects();
+ // Initializes the Effects (AudioSystem must be ready as this creates audio client objects).
+ void initDefaultDeviceEffects() EXCLUDES(mDeviceEffectsMutex) EXCLUDES_EffectHandle_Mutex;
private:
@@ -201,11 +199,6 @@
};
- // Called on an async thread because it creates AudioEffects
- // which register with AudioFlinger and AudioPolicy.
- // We must therefore exclude the EffectHandle_Mutex.
- void initDefaultDeviceEffects() EXCLUDES(mDeviceEffectsMutex) EXCLUDES_EffectHandle_Mutex;
-
status_t loadAudioEffectConfig_ll(const sp<EffectsFactoryHalInterface>& effectsFactoryHal)
REQUIRES(mMutex, mDeviceEffectsMutex);
@@ -281,18 +274,6 @@
std::mutex mDeviceEffectsMutex;
std::map<std::string, std::unique_ptr<DeviceEffects>> mDeviceEffects
GUARDED_BY(mDeviceEffectsMutex);
-
- /**
- * Device Effect initialization must be asynchronous: the audio_policy service parses and init
- * effect on first reference. AudioFlinger will handle effect creation and register these
- * effect on audio_policy service.
- *
- * The future is associated with the std::async launched thread - no need to lock as
- * it is only set once on init. Due to the async nature, it is conceivable that
- * some device effects are not available immediately after AudioPolicyService::onFirstRef()
- * while the effects are being created.
- */
- std::future<void> mDefaultDeviceEffectFuture;
};
} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 6e1ecec..203c15e 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -20,6 +20,7 @@
#include "AudioPolicyService.h"
#include "AudioRecordClient.h"
#include "TypeConverter.h"
+#include <android_media_audiopolicy.h>
#include <media/AidlConversion.h>
#include <media/AudioPolicy.h>
#include <media/AudioValidator.h>
@@ -45,6 +46,7 @@
#define MAX_ITEMS_PER_LIST 1024
namespace android {
+namespace audiopolicy_flags = android::media::audiopolicy;
using binder::Status;
using aidl_utils::binderStatusFromStatusT;
using content::AttributionSourceState;
@@ -62,6 +64,8 @@
using media::audio::common::AudioUuid;
using media::audio::common::Int;
+constexpr int kDefaultVirtualDeviceId = 0;
+
const std::vector<audio_usage_t>& SYSTEM_USAGES = {
AUDIO_USAGE_CALL_ASSISTANT,
AUDIO_USAGE_EMERGENCY,
@@ -627,6 +631,8 @@
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
attributionSource)));
+ uint32_t virtualDeviceId = kDefaultVirtualDeviceId;
+
// check calling permissions.
// Capturing from the following sources does not require permission RECORD_AUDIO
// as the captured audio does not come from a microphone:
@@ -698,7 +704,8 @@
status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
attributionSource, &config,
flags, &selectedDeviceId,
- &inputType, &portId);
+ &inputType, &portId,
+ &virtualDeviceId);
}
audioPolicyEffects = mAudioPolicyEffects;
@@ -737,6 +744,14 @@
LOG_ALWAYS_FATAL("%s encountered an invalid input type %d",
__func__, (int)inputType);
}
+
+ if (audiopolicy_flags::record_audio_device_aware_permission()) {
+ // enforce device-aware RECORD_AUDIO permission
+ if (virtualDeviceId != kDefaultVirtualDeviceId &&
+ !recordingAllowed(attributionSource, virtualDeviceId, inputSource)) {
+ status = PERMISSION_DENIED;
+ }
+ }
}
if (status != NO_ERROR) {
@@ -752,6 +767,7 @@
sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
selectedDeviceId, attributionSource,
+ virtualDeviceId,
canCaptureOutput, canCaptureHotword,
mOutputCommandThread);
mAudioRecordClients.add(portId, client);
@@ -1810,6 +1826,23 @@
}
}
+Status
+AudioPolicyService::getRegisteredPolicyMixes(std::vector<::android::media::AudioMix>* mixesAidl) {
+ if (mAudioPolicyManager == nullptr) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+
+ std::vector<AudioMix> mixes;
+ int status = mAudioPolicyManager->getRegisteredPolicyMixes(mixes);
+
+ for (const auto& mix : mixes) {
+ media::AudioMix aidlMix = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_AudioMix(mix));
+ mixesAidl->push_back(aidlMix);
+ }
+
+ return binderStatusFromStatusT(status);
+}
+
Status AudioPolicyService::updatePolicyMixes(
const ::std::vector<::android::media::AudioMixUpdate>& updates) {
audio_utils::lock_guard _l(mMutex);
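
Note: a sketch of the device-aware permission gate added in AudioPolicyInterfaceImpl.cpp above — the stricter check runs only when the feature flag is on and the input actually resolved to a non-default virtual device. The flag predicate and permission query below are hypothetical stand-ins for the aconfig flag and recordingAllowed(), used only to make the sketch runnable.

#include <cstdint>
#include <iostream>

constexpr uint32_t kDefaultVirtualDeviceId = 0;

bool deviceAwarePermissionFlag() { return true; }                       // stand-in for the aconfig flag
bool recordingAllowedOnDevice(uint32_t virtualDeviceId) { return virtualDeviceId != 3; }

int checkRecordPermission(uint32_t virtualDeviceId) {
    if (deviceAwarePermissionFlag()
            && virtualDeviceId != kDefaultVirtualDeviceId
            && !recordingAllowedOnDevice(virtualDeviceId)) {
        return -1;  // PERMISSION_DENIED in the real code
    }
    return 0;       // NO_ERROR
}

int main() {
    std::cout << checkRecordPermission(0) << " "    // default device: unaffected
              << checkRecordPermission(7) << " "    // allowed virtual device
              << checkRecordPermission(3) << "\n";  // denied virtual device
}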
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 57e2718..f6492d4 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -18,9 +18,6 @@
//#define LOG_NDEBUG 0
#include "Configuration.h"
-#undef __STRICT_ANSI__
-#define __STDINT_LIMITS
-#define __STDC_LIMIT_MACROS
#include <stdint.h>
#include <sys/time.h>
#include <dlfcn.h>
@@ -166,7 +163,8 @@
BINDER_METHOD_ENTRY(setPreferredMixerAttributes) \
BINDER_METHOD_ENTRY(getPreferredMixerAttributes) \
BINDER_METHOD_ENTRY(clearPreferredMixerAttributes) \
-
+BINDER_METHOD_ENTRY(getRegisteredPolicyMixes) \
+ \
// singleton for Binder Method Statistics for IAudioPolicyService
static auto& getIAudioPolicyServiceStatistics() {
using Code = int;
@@ -312,9 +310,16 @@
}
}
AudioSystem::audioPolicyReady();
- // AudioFlinger will handle effect creation and register these effects on audio_policy
- // service. Hence, audio_policy service must be ready.
- audioPolicyEffects->setDefaultDeviceEffects();
+}
+
+void AudioPolicyService::onAudioSystemReady() {
+ sp<AudioPolicyEffects> audioPolicyEffects;
+ {
+ audio_utils::lock_guard _l(mMutex);
+
+ audioPolicyEffects = mAudioPolicyEffects;
+ }
+ audioPolicyEffects->initDefaultDeviceEffects();
}
void AudioPolicyService::unloadAudioPolicyManager()
@@ -1348,7 +1353,8 @@
case TRANSACTION_getDevicesForRoleAndCapturePreset:
case TRANSACTION_getSpatializer:
case TRANSACTION_setPreferredMixerAttributes:
- case TRANSACTION_clearPreferredMixerAttributes: {
+ case TRANSACTION_clearPreferredMixerAttributes:
+ case TRANSACTION_getRegisteredPolicyMixes: {
if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
__func__, code, IPCThreadState::self()->getCallingPid(),
@@ -2481,7 +2487,7 @@
while (command->mWaitStatus) {
nsecs_t timeOutNs = kAudioCommandTimeoutNs + milliseconds(delayMs);
if (command->mCond.wait_for(
- ul, std::chrono::nanoseconds(timeOutNs)) == std::cv_status::timeout) {
+ ul, std::chrono::nanoseconds(timeOutNs), getTid()) == std::cv_status::timeout) {
command->mStatus = TIMED_OUT;
command->mWaitStatus = false;
}
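
Note: a sketch of the initialization-order change above — device-effect setup moves from a detached std::async task launched during onFirstRef() to an explicit "audio system ready" hook that the owner calls once dependencies exist. Class and method names are illustrative only, not the real AudioPolicyService API.

#include <iostream>
#include <memory>

class EffectsSketch {
public:
    void initDefaultDeviceEffects() { std::cout << "creating device effects\n"; }
};

class ServiceSketch {
public:
    void onFirstRef() {
        // construction-time work only; nothing that needs the rest of the system
        effects_ = std::make_unique<EffectsSketch>();
    }
    void onAudioSystemReady() {
        // deterministic point where creating audio client objects is safe
        effects_->initDefaultDeviceEffects();
    }
private:
    std::unique_ptr<EffectsSketch> effects_;
};

int main() {
    ServiceSketch s;
    s.onFirstRef();
    s.onAudioSystemReady();
}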
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 9a8a056..7aa80cf 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -311,6 +311,8 @@
binder::Status clearPreferredMixerAttributes(const media::audio::common::AudioAttributes& attr,
int32_t portId,
int32_t uid) override;
+ binder::Status getRegisteredPolicyMixes(
+ std::vector <::android::media::AudioMix>* mixes) override;
status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
@@ -320,6 +322,9 @@
// RefBase
virtual void onFirstRef();
+ // Commence initialization when AudioSystem is ready.
+ void onAudioSystemReady();
+
//
// Helpers for the struct audio_policy_service_ops implementation.
// This is used by the audio policy manager for certain operations that
@@ -540,6 +545,10 @@
binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
bool enabled);
+ binder::Status onSensorPrivacyStateChanged(int, int, int) {
+ return binder::Status::ok();
+ }
+
private:
wp<AudioPolicyService> mService;
std::atomic_bool mSensorPrivacyEnabled = false;
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
index a89a84d..6d8b3cf 100644
--- a/services/audiopolicy/service/AudioRecordClient.cpp
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -18,9 +18,10 @@
#include "AudioRecordClient.h"
#include "AudioPolicyService.h"
+#include <android_media_audiopolicy.h>
namespace android::media::audiopolicy {
-
+namespace audiopolicy_flags = android::media::audiopolicy;
using android::AudioPolicyService;
namespace {
@@ -59,8 +60,10 @@
// static
sp<OpRecordAudioMonitor>
OpRecordAudioMonitor::createIfNeeded(
- const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
- wp<AudioPolicyService::AudioCommandThread> commandThread)
+ const AttributionSourceState &attributionSource,
+ const uint32_t virtualDeviceId,
+ const audio_attributes_t &attr,
+ wp<AudioPolicyService::AudioCommandThread> commandThread)
{
if (isAudioServerOrRootUid(attributionSource.uid)) {
ALOGV("not silencing record for audio or root source %s",
@@ -78,15 +81,19 @@
|| attributionSource.packageName.value().size() == 0) {
return nullptr;
}
- return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
+
+ return new OpRecordAudioMonitor(attributionSource, virtualDeviceId, attr,
+ getOpForSource(attr.source), commandThread);
}
OpRecordAudioMonitor::OpRecordAudioMonitor(
- const AttributionSourceState& attributionSource, int32_t appOp,
+ const AttributionSourceState &attributionSource,
+ const uint32_t virtualDeviceId, const audio_attributes_t &attr,
+ int32_t appOp,
wp<AudioPolicyService::AudioCommandThread> commandThread) :
- mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
- mCommandThread(commandThread)
-{
+ mHasOp(true), mAttributionSource(attributionSource),
+ mVirtualDeviceId(virtualDeviceId), mAttr(attr), mAppOp(appOp),
+ mCommandThread(commandThread) {
}
OpRecordAudioMonitor::~OpRecordAudioMonitor()
@@ -131,7 +138,12 @@
const int32_t mode = mAppOpsManager.checkOp(mAppOp,
mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
mAttributionSource.packageName.value_or(""))));
- const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+ bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+
+ if (audiopolicy_flags::record_audio_device_aware_permission()) {
+ const bool canRecord = recordingAllowed(mAttributionSource, mVirtualDeviceId, mAttr.source);
+ hasIt = hasIt && canRecord;
+ }
// verbose logging only log when appOp changed
ALOGI_IF(hasIt != mHasOp.load(),
"App op %d missing, %ssilencing record %s",
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
index d3be316..76aff41 100644
--- a/services/audiopolicy/service/AudioRecordClient.h
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -38,12 +38,16 @@
static sp<OpRecordAudioMonitor> createIfNeeded(
const AttributionSourceState& attributionSource,
+ uint32_t virtualDeviceId,
const audio_attributes_t& attr,
wp<AudioPolicyService::AudioCommandThread> commandThread);
private:
- OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
- wp<AudioPolicyService::AudioCommandThread> commandThread);
+ OpRecordAudioMonitor(const AttributionSourceState &attributionSource,
+ uint32_t virtualDeviceId,
+ const audio_attributes_t &attr,
+ int32_t appOp,
+ wp<AudioPolicyService::AudioCommandThread> commandThread);
void onFirstRef() override;
@@ -67,6 +71,8 @@
std::atomic_bool mHasOp;
const AttributionSourceState mAttributionSource;
+ const uint32_t mVirtualDeviceId;
+ const audio_attributes_t mAttr;
const int32_t mAppOp;
wp<AudioPolicyService::AudioCommandThread> mCommandThread;
};
@@ -81,15 +87,20 @@
const audio_session_t session, audio_port_handle_t portId,
const audio_port_handle_t deviceId,
const AttributionSourceState& attributionSource,
+ const uint32_t virtualDeviceId,
bool canCaptureOutput, bool canCaptureHotword,
wp<AudioPolicyService::AudioCommandThread> commandThread) :
AudioClient(attributes, io, attributionSource,
session, portId, deviceId), attributionSource(attributionSource),
+ virtualDeviceId(virtualDeviceId),
startTimeNs(0), canCaptureOutput(canCaptureOutput),
canCaptureHotword(canCaptureHotword), silenced(false),
mOpRecordAudioMonitor(
OpRecordAudioMonitor::createIfNeeded(attributionSource,
- attributes, commandThread)) {}
+ virtualDeviceId,
+ attributes, commandThread)) {
+
+ }
~AudioRecordClient() override = default;
bool hasOp() const {
@@ -97,6 +108,7 @@
}
const AttributionSourceState attributionSource; // attribution source of client
+ const uint32_t virtualDeviceId; // id of the virtual device associated with the audio device
nsecs_t startTimeNs;
const bool canCaptureOutput;
const bool canCaptureHotword;
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index ca3e0e0..233a484 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -31,6 +31,7 @@
#include <audio_utils/fixedfft.h>
#include <com_android_media_audio.h>
#include <cutils/bitops.h>
+#include <cutils/properties.h>
#include <hardware/sensors.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -49,12 +50,12 @@
using aidl_utils::statusTFromBinderStatus;
using android::content::AttributionSourceState;
using binder::Status;
+using internal::ToString;
using media::HeadTrackingMode;
using media::Pose3f;
using media::SensorPoseProvider;
using media::audio::common::HeadTracking;
using media::audio::common::Spatialization;
-using ::android::internal::ToString;
using namespace std::chrono_literals;
@@ -348,7 +349,8 @@
bool activeLevelFound = false;
for (const auto spatializationLevel : spatializationLevels) {
if (!aidl_utils::isValidEnum(spatializationLevel)) {
- ALOGW("%s: ignoring spatializationLevel:%d", __func__, (int)spatializationLevel);
+ ALOGW("%s: ignoring spatializationLevel:%s", __func__,
+ ToString(spatializationLevel).c_str());
continue;
}
if (spatializationLevel == Spatialization::Level::NONE) {
@@ -375,7 +377,8 @@
for (const auto spatializationMode : spatializationModes) {
if (!aidl_utils::isValidEnum(spatializationMode)) {
- ALOGW("%s: ignoring spatializationMode:%d", __func__, (int)spatializationMode);
+ ALOGW("%s: ignoring spatializationMode:%s", __func__,
+ ToString(spatializationMode).c_str());
continue;
}
// we don't detect duplicates.
@@ -394,8 +397,10 @@
return status;
}
for (const auto channelMask : channelMasks) {
+ static const bool stereo_spatialization_enabled =
+ property_get_bool("ro.audio.stereo_spatialization_enabled", false);
const bool channel_mask_spatialized =
- com_android_media_audio_stereo_spatialization()
+ (stereo_spatialization_enabled && com_android_media_audio_stereo_spatialization())
? audio_channel_mask_contains_stereo(channelMask)
: audio_is_channel_mask_spatialized(channelMask);
if (!channel_mask_spatialized) {
@@ -410,27 +415,26 @@
return BAD_VALUE;
}
- //TODO b/273373363: use AIDL enum when available
if (com::android::media::audio::dsa_over_bt_le_audio()
&& mSupportsHeadTracking) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
- std::vector<uint8_t> headtrackingConnectionModes;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
+ std::vector<HeadTracking::ConnectionMode> headtrackingConnectionModes;
status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION,
&headtrackingConnectionModes);
if (status == NO_ERROR) {
for (const auto htConnectionMode : headtrackingConnectionModes) {
- if (htConnectionMode < HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED ||
- htConnectionMode > HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL) {
- ALOGW("%s: ignoring HT connection mode:%d", __func__, (int)htConnectionMode);
+ if (htConnectionMode < HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED ||
+ htConnectionMode > HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL) {
+ ALOGW("%s: ignoring HT connection mode:%s", __func__,
+ ToString(htConnectionMode).c_str());
continue;
}
- mSupportedHeadtrackingConnectionModes.insert(
- static_cast<headtracking_connection_t> (htConnectionMode));
+ mSupportedHeadtrackingConnectionModes.insert(htConnectionMode);
}
ALOGW_IF(mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED)
- == mSupportedHeadtrackingConnectionModes.end(),
- "%s: HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED not reported", __func__);
+ HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED) ==
+ mSupportedHeadtrackingConnectionModes.end(),
+ "%s: Headtracking FRAMEWORK_PROCESSED not reported", __func__);
}
}
@@ -557,12 +561,12 @@
}
audio_utils::lock_guard lock(mMutex);
*level = mLevel;
- ALOGV("%s level %d", __func__, (int)*level);
+ ALOGV("%s level %s", __func__, ToString(*level).c_str());
return Status::ok();
}
Status Spatializer::isHeadTrackingSupported(bool *supports) {
- ALOGV("%s mSupportsHeadTracking %d", __func__, mSupportsHeadTracking);
+ ALOGV("%s mSupportsHeadTracking %s", __func__, ToString(mSupportsHeadTracking).c_str());
if (supports == nullptr) {
return binderStatusFromStatusT(BAD_VALUE);
}
@@ -857,7 +861,7 @@
}
void Spatializer::onActualModeChange(HeadTrackingMode mode) {
- std::string modeStr = media::toString(mode);
+ std::string modeStr = ToString(mode);
ALOGV("%s(%s)", __func__, modeStr.c_str());
sp<AMessage> msg = new AMessage(EngineCallbackHandler::kWhatOnActualModeChange, mHandler);
msg->setInt32(EngineCallbackHandler::kModeKey, static_cast<int>(mode));
@@ -865,7 +869,7 @@
}
void Spatializer::onActualModeChangeMsg(HeadTrackingMode mode) {
- ALOGV("%s(%d)", __func__, (int) mode);
+ ALOGV("%s(%s)", __func__, ToString(mode).c_str());
sp<media::ISpatializerHeadTrackingCallback> callback;
HeadTracking::Mode spatializerMode;
{
@@ -884,7 +888,7 @@
spatializerMode = HeadTracking::Mode::RELATIVE_SCREEN;
break;
default:
- LOG_ALWAYS_FATAL("Unknown mode: %d", mode);
+ LOG_ALWAYS_FATAL("Unknown mode: %s", ToString(mode).c_str());
}
}
mActualHeadTrackingMode = spatializerMode;
@@ -898,7 +902,7 @@
}
}
callback = mHeadTrackingCallback;
- mLocalLog.log("%s: updating mode to %s", __func__, media::toString(mode).c_str());
+ mLocalLog.log("%s: updating mode to %s", __func__, ToString(mode).c_str());
}
if (callback != nullptr) {
callback->onHeadTrackingModeChanged(spatializerMode);
@@ -1056,24 +1060,23 @@
}
}
-//TODO b/273373363: use AIDL enum when available
audio_latency_mode_t Spatializer::selectHeadtrackingConnectionMode_l() {
if (!com::android::media::audio::dsa_over_bt_le_audio()) {
return AUDIO_LATENCY_MODE_LOW;
}
// mSupportedLatencyModes is ordered according to system preferences loaded in
// mOrderedLowLatencyModes
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
audio_latency_mode_t requestedLatencyMode = mSupportedLatencyModes[0];
if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
if (mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL)
+ HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL)
!= mSupportedHeadtrackingConnectionModes.end()) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_TUNNEL;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL;
} else if (mSupportedHeadtrackingConnectionModes.find(
- HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW)
+ HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW)
!= mSupportedHeadtrackingConnectionModes.end()) {
- mHeadtrackingConnectionMode = HEADTRACKING_CONNECTION_DIRECT_TO_SENSOR_SW;
+ mHeadtrackingConnectionMode = HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW;
} else {
// if the engine does not support direct reading of IMU data, do not allow
// DYNAMIC_SPATIAL_AUDIO_HARDWARE mode and fallback to next mode
@@ -1093,7 +1096,7 @@
}
void Spatializer::checkSensorsState_l() {
- audio_latency_mode_t requestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
+ mRequestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
const bool supportsSetLatencyMode = !mSupportedLatencyModes.empty();
bool supportsLowLatencyMode;
if (com::android::media::audio::dsa_over_bt_le_audio()) {
@@ -1114,7 +1117,7 @@
&& mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
&& mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
if (supportsLowLatencyMode) {
- requestedLatencyMode = selectHeadtrackingConnectionMode_l();
+ mRequestedLatencyMode = selectHeadtrackingConnectionMode_l();
}
if (mEngine != nullptr) {
setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
@@ -1136,9 +1139,9 @@
}
if (mOutput != AUDIO_IO_HANDLE_NONE && supportsSetLatencyMode) {
const status_t status =
- AudioSystem::setRequestedLatencyMode(mOutput, requestedLatencyMode);
+ AudioSystem::setRequestedLatencyMode(mOutput, mRequestedLatencyMode);
ALOGD("%s: setRequestedLatencyMode for output thread(%d) to %s returned %d", __func__,
- mOutput, toString(requestedLatencyMode).c_str(), status);
+ mOutput, toString(mRequestedLatencyMode).c_str(), status);
}
}
@@ -1217,7 +1220,7 @@
base::StringAppendF(&ss, " %s", ToString(mode).c_str());
}
base::StringAppendF(&ss, "], Desired: %s, Actual %s\n",
- media::toString(mDesiredHeadTrackingMode).c_str(),
+ ToString(mDesiredHeadTrackingMode).c_str(),
ToString(mActualHeadTrackingMode).c_str());
base::StringAppendF(&ss, "%smSpatializationModes: [", prefixSpace.c_str());
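
Note: a small sketch of the double gate used for stereo spatialization in the Spatializer.cpp hunks above — the wider behavior is enabled only when both the build-time flag and a device-level property opt in, and the property read is cached in a function-local static (the property_get_bool pattern). getenv stands in for the property read and the flag predicate is hypothetical, purely so the sketch runs anywhere.

#include <cstdlib>
#include <cstring>
#include <iostream>

bool stereoSpatializationFlag() { return true; }  // stand-in for the aconfig/compile-time flag

bool stereoSpatializationEnabled() {
    static const bool propertyEnabled = [] {
        const char* v = std::getenv("RO_AUDIO_STEREO_SPATIALIZATION_ENABLED");
        return v != nullptr && std::strcmp(v, "true") == 0;
    }();
    return propertyEnabled && stereoSpatializationFlag();
}

int main() {
    std::cout << (stereoSpatializationEnabled() ? "stereo masks accepted\n"
                                                : "only spatialized masks accepted\n");
}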
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 24788dc..c5f159c 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -27,6 +27,7 @@
#include <audio_utils/SimpleLog.h>
#include <math.h>
#include <media/AudioEffect.h>
+#include <media/MediaMetricsItem.h>
#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <media/VectorRecorder.h>
#include <media/audiohal/EffectHalInterface.h>
@@ -153,6 +154,34 @@
return mLevel;
}
+ /** For test only */
+ std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+ getSupportedHeadtrackingConnectionModes() const {
+ return mSupportedHeadtrackingConnectionModes;
+ }
+
+ /** For test only */
+ media::audio::common::HeadTracking::ConnectionMode getHeadtrackingConnectionMode() const {
+ return mHeadtrackingConnectionMode;
+ }
+
+ /** For test only */
+ std::vector<audio_latency_mode_t> getSupportedLatencyModes() const {
+ audio_utils::lock_guard lock(mMutex);
+ return mSupportedLatencyModes;
+ }
+
+ /** For test only */
+ std::vector<audio_latency_mode_t> getOrderedLowLatencyModes() const {
+ return mOrderedLowLatencyModes;
+ }
+
+ /** For test only */
+ audio_latency_mode_t getRequestedLatencyMode() const {
+ audio_utils::lock_guard lock(mMutex);
+ return mRequestedLatencyMode;
+ }
+
/** Called by audio policy service when the special output mixer dedicated to spatialization
* is opened and the spatializer engine must be created.
*/
@@ -164,6 +193,12 @@
/** Returns the output stream the spatializer is attached to. */
audio_io_handle_t getOutput() const { audio_utils::lock_guard lock(mMutex); return mOutput; }
+ /** For test only */
+ void setOutput(audio_io_handle_t output) {
+ audio_utils::lock_guard lock(mMutex);
+ mOutput = output;
+ }
+
void updateActiveTracks(size_t numActiveTracks);
/** Gets the channel mask, sampling rate and format set for the spatializer input. */
@@ -188,6 +223,10 @@
// NO_INIT: Spatializer creation failed.
static void sendEmptyCreateSpatializerMetricWithStatus(status_t status);
+ /** Made public for test only */
+ void onSupportedLatencyModesChangedMsg(
+ audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
+
private:
Spatializer(effect_descriptor_t engineDescriptor,
SpatializerPolicyCallback *callback);
@@ -200,8 +239,6 @@
void onHeadToStagePoseMsg(const std::vector<float>& headToStage);
void onActualModeChangeMsg(media::HeadTrackingMode mode);
- void onSupportedLatencyModesChangedMsg(
- audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
static constexpr int kMaxEffectParamValues = 10;
/**
@@ -484,13 +521,17 @@
std::vector<media::audio::common::Spatialization::Mode> mSpatializationModes;
std::vector<audio_channel_mask_t> mChannelMasks;
bool mSupportsHeadTracking;
- /** List of supported headtracking connection modes reported by the spatializer.
+
+ /** List of supported head tracking connection modes reported by the spatializer.
* If the list is empty, the spatializer does not support any optional connection
- * mode and mode HEADTRACKING_CONNECTION_FRAMEWORK_PROCESSED is assumed.
+ * mode and mode HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED is assumed.
+ * This is set in the factory constructor and can be accessed without mutex.
*/
- std::unordered_set<headtracking_connection_t> mSupportedHeadtrackingConnectionModes;
+ std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+ mSupportedHeadtrackingConnectionModes;
/** Selected HT connection mode when several modes are supported by the spatializer */
- headtracking_connection_t mHeadtrackingConnectionMode;
+ media::audio::common::HeadTracking::ConnectionMode mHeadtrackingConnectionMode =
+ media::audio::common::HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED;
// Looper thread for mEngine callbacks
class EngineCallbackHandler;
@@ -502,6 +543,9 @@
std::vector<audio_latency_mode_t> mSupportedLatencyModes GUARDED_BY(mMutex);
/** preference order for low latency modes according to persist.bluetooth.hid.transport */
std::vector<audio_latency_mode_t> mOrderedLowLatencyModes;
+
+ audio_latency_mode_t mRequestedLatencyMode GUARDED_BY(mMutex) = AUDIO_LATENCY_MODE_FREE;
+
/** string to latency mode map used to parse bluetooth.core.le.dsa_transport_preference */
static const std::map<std::string, audio_latency_mode_t> sStringToLatencyModeMap;
static const std::vector<const char*> sHeadPoseKeys;
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index a4a0cd4..21ded60 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -33,11 +34,14 @@
"libutils",
"libcutils",
"libxml2",
+ "server_configurable_flags",
],
static_libs: [
+ "android.media.audiopolicy-aconfig-cc",
"audioclient-types-aidl-cpp",
"libaudiopolicycomponents",
+ "libflagtest",
"libgmock",
],
@@ -49,7 +53,7 @@
srcs: ["audiopolicymanager_tests.cpp"],
- data: [":audiopolicytest_configuration_files",],
+ data: [":audiopolicytest_configuration_files"],
cflags: [
"-Werror",
@@ -63,7 +67,6 @@
}
-
cc_test {
name: "audio_health_tests",
@@ -105,3 +108,40 @@
test_suites: ["device-tests"],
}
+
+cc_test {
+ name: "spatializer_tests",
+
+ defaults: [
+ "latest_android_media_audio_common_types_cpp_shared",
+ "libaudiopolicyservice_dependencies",
+ ],
+
+ require_root: true,
+
+ shared_libs: [
+ "libaudioclient",
+ "libaudiofoundation",
+ "libcutils",
+ "liblog",
+ ],
+
+ static_libs: [
+ "libaudiopolicyservice",
+ ],
+
+ header_libs: [
+ "libaudiohal_headers",
+ "libaudiopolicyservice_headers",
+ "libmediametrics_headers",
+ ],
+
+ srcs: ["spatializer_tests.cpp"],
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
+
+ test_suites: ["device-tests"],
+}
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 31ee252..aa7c9cd 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -31,6 +31,7 @@
using AudioPolicyManager::getConfig;
using AudioPolicyManager::initialize;
using AudioPolicyManager::getOutputs;
+ using AudioPolicyManager::getInputs;
using AudioPolicyManager::getAvailableOutputDevices;
using AudioPolicyManager::getAvailableInputDevices;
using AudioPolicyManager::setSurroundFormatEnabled;
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 74d3474..f9d5946 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -28,6 +28,8 @@
#include <android-base/file.h>
#include <android-base/properties.h>
#include <android/content/AttributionSourceState.h>
+#include <android_media_audiopolicy.h>
+#include <flag_macros.h>
#include <hardware/audio_effect.h>
#include <media/AudioPolicy.h>
#include <media/PatchBuilder.h>
@@ -43,6 +45,7 @@
using namespace android;
using testing::UnorderedElementsAre;
+using testing::IsEmpty;
using android::content::AttributionSourceState;
namespace {
@@ -92,6 +95,12 @@
return attributionSourceState;
}
+bool equals(const audio_config_base_t& config1, const audio_config_base_t& config2) {
+ return config1.format == config2.format
+ && config1.sample_rate == config2.sample_rate
+ && config1.channel_mask == config2.channel_mask;
+}
+
} // namespace
TEST(AudioPolicyConfigTest, DefaultConfigForTestsIsEmpty) {
@@ -195,7 +204,8 @@
audio_channel_mask_t channelMask,
int sampleRate,
audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
- audio_port_handle_t *portId = nullptr);
+ audio_port_handle_t *portId = nullptr,
+ uint32_t *virtualDeviceId = nullptr);
PatchCountCheck snapshotPatchCount() { return PatchCountCheck(mClient.get()); }
void getAudioPorts(audio_port_type_t type, audio_port_role_t role,
@@ -307,7 +317,8 @@
audio_channel_mask_t channelMask,
int sampleRate,
audio_input_flags_t flags,
- audio_port_handle_t *portId) {
+ audio_port_handle_t *portId,
+ uint32_t *virtualDeviceId) {
audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
config.sample_rate = sampleRate;
config.channel_mask = channelMask;
@@ -315,11 +326,12 @@
audio_port_handle_t localPortId;
if (!portId) portId = &localPortId;
*portId = AUDIO_PORT_HANDLE_NONE;
+ if (!virtualDeviceId) virtualDeviceId = 0;
AudioPolicyInterface::input_type_t inputType;
AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
ASSERT_EQ(OK, mManager->getInputForAttr(
&attr, input, riid, session, attributionSource, &config, flags,
- selectedDeviceId, &inputType, portId));
+ selectedDeviceId, &inputType, portId, virtualDeviceId));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
}
@@ -1266,6 +1278,54 @@
"", "", AUDIO_FORMAT_LDAC));
}
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
+ const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
+ mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+ mClient->addSupportedChannelMask(deviceChannelMask);
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+ AUDIO_SOURCE_VOICE_COMMUNICATION,AUDIO_FLAG_NONE, ""};
+ AudioPolicyInterface::input_type_t inputType;
+ audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+ AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
+ audio_config_base_t requestedConfig = {
+ .channel_mask = AUDIO_CHANNEL_IN_STEREO,
+ .format = AUDIO_FORMAT_PCM_16_BIT,
+ .sample_rate = 48000
+ };
+ audio_config_base_t config = requestedConfig;
+ audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+ uint32_t *virtualDeviceId = 0;
+ ASSERT_EQ(OK, mManager->getInputForAttr(
+ &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
+ AUDIO_INPUT_FLAG_NONE,
+ &selectedDeviceId, &inputType, &portId, virtualDeviceId));
+ ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
+ ASSERT_TRUE(equals(requestedConfig, config));
+
+ attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+ AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""};
+ requestedConfig.channel_mask = deviceChannelMask;
+ config = requestedConfig;
+ selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ input = AUDIO_PORT_HANDLE_NONE;
+ portId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_EQ(OK, mManager->getInputForAttr(
+ &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
+ AUDIO_INPUT_FLAG_NONE,
+ &selectedDeviceId, &inputType, &portId, virtualDeviceId));
+ ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
+ ASSERT_TRUE(equals(requestedConfig, config));
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+}
+
class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
protected:
void TearDown() override;
@@ -1273,6 +1333,12 @@
status_t addPolicyMix(int mixType, int mixFlag, audio_devices_t deviceType,
std::string mixAddress, const audio_config_t& audioConfig,
const std::vector<AudioMixMatchCriterion>& matchCriteria);
+
+ status_t addPolicyMix(const AudioMix& mix);
+
+ status_t removePolicyMixes(const Vector<AudioMix>& mixes);
+
+ std::vector<AudioMix> getRegisteredPolicyMixes();
void clearPolicyMix();
void addPolicyMixAndStartInputForLoopback(
int mixType, int mixFlag, audio_devices_t deviceType, std::string mixAddress,
@@ -1307,9 +1373,14 @@
AudioMix myAudioMix(matchCriteria, mixType, audioConfig, mixFlag,
String8(mixAddress.c_str()), 0);
myAudioMix.mDeviceType = deviceType;
+ myAudioMix.mToken = sp<BBinder>::make();
// Clear mAudioMix before add new one to make sure we don't add already exist mixes.
mAudioMixes.clear();
- mAudioMixes.add(myAudioMix);
+ return addPolicyMix(myAudioMix);
+}
+
+status_t AudioPolicyManagerTestDynamicPolicy::addPolicyMix(const AudioMix& mix) {
+ mAudioMixes.add(mix);
// As the policy mixes registration may fail at some case,
// caller need to check the returned status.
@@ -1317,6 +1388,20 @@
return ret;
}
+status_t AudioPolicyManagerTestDynamicPolicy::removePolicyMixes(const Vector<AudioMix>& mixes) {
+ status_t ret = mManager->unregisterPolicyMixes(mixes);
+ return ret;
+}
+
+std::vector<AudioMix> AudioPolicyManagerTestDynamicPolicy::getRegisteredPolicyMixes() {
+ std::vector<AudioMix> audioMixes;
+ if (mManager != nullptr) {
+ status_t ret = mManager->getRegisteredPolicyMixes(audioMixes);
+ EXPECT_EQ(NO_ERROR, ret);
+ }
+ return audioMixes;
+}
+
void AudioPolicyManagerTestDynamicPolicy::clearPolicyMix() {
if (mManager != nullptr) {
mManager->stopInput(mLoopbackInputPortId);
@@ -1470,6 +1555,139 @@
ASSERT_EQ(INVALID_OPERATION, ret);
}
+TEST_F_WITH_FLAGS(
+ AudioPolicyManagerTestDynamicPolicy,
+ RegisterInvalidMixesDoesNotImpactPriorMixes,
+ REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api),
+ ACONFIG_FLAG(android::media::audiopolicy, audio_mix_ownership))
+) {
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+
+ std::vector<AudioMixMatchCriterion> validMixMatchCriteria = {
+ createUidCriterion(/*uid=*/42),
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+ AudioMix validAudioMix(validMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+ MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+ validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+ mAudioMixes.clear();
+ status_t ret = addPolicyMix(validAudioMix);
+
+ ASSERT_EQ(NO_ERROR, ret);
+
+ std::vector<AudioMix> registeredMixes = getRegisteredPolicyMixes();
+ ASSERT_EQ(1, registeredMixes.size());
+
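+ // Criteria combining include and exclude UID rules form an invalid mix; registering it
+ // is expected to be rejected without affecting the previously registered mix.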
+ std::vector<AudioMixMatchCriterion> invalidMixMatchCriteria = {
+ createUidCriterion(/*uid=*/42),
+ createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+
+ AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+ MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+ invalidAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+ ret = addPolicyMix(invalidAudioMix);
+
+ ASSERT_EQ(INVALID_OPERATION, ret);
+
+ std::vector<AudioMix> remainingMixes = getRegisteredPolicyMixes();
+ ASSERT_EQ(registeredMixes.size(), remainingMixes.size());
+}
+
+TEST_F_WITH_FLAGS(
+ AudioPolicyManagerTestDynamicPolicy,
+ UnregisterInvalidMixesReturnsError,
+ REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api),
+ ACONFIG_FLAG(android::media::audiopolicy, audio_mix_ownership))
+) {
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+
+ std::vector<AudioMixMatchCriterion> validMixMatchCriteria = {
+ createUidCriterion(/*uid=*/42),
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+ AudioMix validAudioMix(validMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+ MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+ validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+ mAudioMixes.clear();
+ status_t ret = addPolicyMix(validAudioMix);
+
+ ASSERT_EQ(NO_ERROR, ret);
+
+ std::vector<AudioMix> registeredMixes = getRegisteredPolicyMixes();
+ ASSERT_EQ(1, registeredMixes.size());
+
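+ // Same invalid criteria as above: combining include and exclude UID rules makes the mix
+ // invalid, so unregistering a list that contains it must return an error.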
+ std::vector<AudioMixMatchCriterion> invalidMixMatchCriteria = {
+ createUidCriterion(/*uid=*/42),
+ createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+
+ AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+ MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+ invalidAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+ Vector<AudioMix> mixes;
+ mixes.add(invalidAudioMix);
+ mixes.add(validAudioMix);
+ ret = removePolicyMixes(mixes);
+
+ ASSERT_EQ(INVALID_OPERATION, ret);
+
+ std::vector<AudioMix> remainingMixes = getRegisteredPolicyMixes();
+ EXPECT_THAT(remainingMixes, IsEmpty());
+}
+
+TEST_F_WITH_FLAGS(
+ AudioPolicyManagerTestDynamicPolicy,
+ GetRegisteredPolicyMixes,
+ REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api))
+) {
+ std::vector<AudioMix> mixes = getRegisteredPolicyMixes();
+ EXPECT_THAT(mixes, IsEmpty());
+}
+
+TEST_F_WITH_FLAGS(AudioPolicyManagerTestDynamicPolicy,
+ AddPolicyMixAndVerifyGetRegisteredPolicyMixes,
+ REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api))
+) {
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+
+ std::vector<AudioMixMatchCriterion> mixMatchCriteria = {
+ createUidCriterion(/*uid=*/42),
+ createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+ status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
+ mixMatchCriteria);
+ ASSERT_EQ(NO_ERROR, ret);
+
+ std::vector<AudioMix> mixes = getRegisteredPolicyMixes();
+ ASSERT_EQ(mixes.size(), 1);
+
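+ // The returned mix must mirror the registered criteria, routing flags, mix type and format.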
+ const AudioMix& mix = mixes[0];
+ ASSERT_EQ(mix.mCriteria.size(), mixMatchCriteria.size());
+ for (uint32_t i = 0; i < mixMatchCriteria.size(); i++) {
+ EXPECT_EQ(mix.mCriteria[i].mRule, mixMatchCriteria[i].mRule);
+ EXPECT_EQ(mix.mCriteria[i].mValue.mUsage, mixMatchCriteria[i].mValue.mUsage);
+ }
+ EXPECT_EQ(mix.mDeviceType, AUDIO_DEVICE_OUT_REMOTE_SUBMIX);
+ EXPECT_EQ(mix.mRouteFlags, MIX_ROUTE_FLAG_LOOP_BACK);
+ EXPECT_EQ(mix.mMixType, MIX_TYPE_PLAYERS);
+ EXPECT_EQ(mix.mFormat.channel_mask, audioConfig.channel_mask);
+ EXPECT_EQ(mix.mFormat.format, audioConfig.format);
+ EXPECT_EQ(mix.mFormat.sample_rate, audioConfig.sample_rate);
+ EXPECT_EQ(mix.mFormat.frame_count, audioConfig.frame_count);
+}
+
class AudioPolicyManagerTestForHdmi
: public AudioPolicyManagerTestWithConfigurationFile,
public testing::WithParamInterface<audio_format_t> {
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index 5e71210..43e2e39 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 4efdf8a..1a299c6 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -65,6 +65,7 @@
samplingRates="48000"
channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
</mixPort>
+ <mixPort name="hifi_input" role="sink" />
</mixPorts>
<devicePorts>
<devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -111,6 +112,8 @@
sources="primary output,hifi_output,mmap_no_irq_out"/>
<route type="mix" sink="mixport_bus_input"
sources="BUS Device In"/>
+ <route type="mix" sink="hifi_input"
+ sources="USB Device In" />
</routes>
</module>
diff --git a/services/audiopolicy/tests/spatializer_tests.cpp b/services/audiopolicy/tests/spatializer_tests.cpp
new file mode 100644
index 0000000..73bef43
--- /dev/null
+++ b/services/audiopolicy/tests/spatializer_tests.cpp
@@ -0,0 +1,231 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Spatializer_Test"
+
+#include "Spatializer.h"
+
+#include <string>
+#include <unordered_set>
+
+#include <gtest/gtest.h>
+
+#include <android/media/audio/common/AudioLatencyMode.h>
+#include <android/media/audio/common/HeadTracking.h>
+#include <android/media/audio/common/Spatialization.h>
+#include <com_android_media_audio.h>
+#include <utils/Log.h>
+
+using namespace android;
+using media::audio::common::HeadTracking;
+using media::audio::common::Spatialization;
+
+class TestSpatializerPolicyCallback :
+ public SpatializerPolicyCallback {
+public:
+ void onCheckSpatializer() override {}
+};
+
+class SpatializerTest : public ::testing::Test {
+protected:
+ void SetUp() override {
+ const sp<EffectsFactoryHalInterface> effectsFactoryHal
+ = EffectsFactoryHalInterface::create();
+ mSpatializer = Spatializer::create(&mTestCallback, effectsFactoryHal);
+ if (mSpatializer == nullptr) {
+ GTEST_SKIP() << "Skipping Spatializer tests: no spatializer";
+ }
+ std::vector<Spatialization::Level> levels;
+ binder::Status status = mSpatializer->getSupportedLevels(&levels);
+ ASSERT_TRUE(status.isOk());
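+ // Enable the first supported spatialization level other than NONE so the
+ // spatializer is active for the test.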
+ for (auto level : levels) {
+ if (level != Spatialization::Level::NONE) {
+ mSpatializer->setLevel(level);
+ break;
+ }
+ }
+ mSpatializer->setOutput(sTestOutput);
+ }
+
+ void TearDown() override {
+ if (mSpatializer == nullptr) {
+ return;
+ }
+ mSpatializer->setLevel(Spatialization::Level::NONE);
+ mSpatializer->setOutput(AUDIO_IO_HANDLE_NONE);
+ mSpatializer->setDesiredHeadTrackingMode(HeadTracking::Mode::DISABLED);
+ mSpatializer->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+ mSpatializer->updateActiveTracks(0);
+ }
+
+ static constexpr audio_io_handle_t sTestOutput = 1977;
+ static constexpr int sTestSensorHandle = 1980;
+
+ const static inline std::vector<audio_latency_mode_t> sA2DPLatencyModes = {
+ AUDIO_LATENCY_MODE_LOW,
+ AUDIO_LATENCY_MODE_FREE
+ };
+ const static inline std::vector<audio_latency_mode_t> sBLELatencyModes = {
+ AUDIO_LATENCY_MODE_LOW,
+ AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+ AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE,
+ AUDIO_LATENCY_MODE_FREE
+ };
+
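+ // Enables head tracking when supported: selects the first non-DISABLED mode and
+ // attaches the test sensor. Returns false if head tracking is unavailable.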
+ bool setUpForHeadtracking() {
+ bool htSupported;
+ mSpatializer->isHeadTrackingSupported(&htSupported);
+ if (!htSupported) {
+ return false;
+ }
+
+ std::vector<HeadTracking::Mode> htModes;
+ mSpatializer->getSupportedHeadTrackingModes(&htModes);
+ for (auto htMode : htModes) {
+ if (htMode != HeadTracking::Mode::DISABLED) {
+ mSpatializer->setDesiredHeadTrackingMode(htMode);
+ break;
+ }
+ }
+
+ mSpatializer->setHeadSensor(sTestSensorHandle);
+ return true;
+ }
+
+ TestSpatializerPolicyCallback mTestCallback;
+ sp<Spatializer> mSpatializer;
+};
+
+TEST_F(SpatializerTest, SupportedA2dpLatencyTest) {
+ if (!setUpForHeadtracking()) {
+ GTEST_SKIP() << "Skipping SupportedA2dpLatencyTest: head tracking not supported";
+ }
+ std::vector<audio_latency_mode_t> latencies = sA2DPLatencyModes;
+ mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+ std::vector<audio_latency_mode_t> supportedLatencies =
+ mSpatializer->getSupportedLatencyModes();
+
+ ASSERT_TRUE(supportedLatencies == sA2DPLatencyModes);
+ // Free mode must always be the last of the ordered list
+ ASSERT_TRUE(supportedLatencies.back() == AUDIO_LATENCY_MODE_FREE);
+}
+
+TEST_F(SpatializerTest, SupportedBleLatencyTest) {
+ if (!setUpForHeadtracking()) {
+ GTEST_SKIP() << "Skipping SupportedBleLatencyTest: head tracking not supported";
+ }
+ if (!com::android::media::audio::dsa_over_bt_le_audio()) {
+ GTEST_SKIP() << "Skipping SupportedBleLatencyTest: DSA over LE not enabled";
+ }
+ std::vector<audio_latency_mode_t> latencies = sBLELatencyModes;
+ mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+ std::vector<audio_latency_mode_t> supportedLatencies =
+ mSpatializer->getSupportedLatencyModes();
+
+ ASSERT_TRUE(supportedLatencies.back() == AUDIO_LATENCY_MODE_FREE);
+ ASSERT_TRUE(std::find(supportedLatencies.begin(), supportedLatencies.end(),
+ AUDIO_LATENCY_MODE_LOW) != supportedLatencies.end());
+
+ std::vector<audio_latency_mode_t> orderedLowLatencyModes =
+ mSpatializer->getOrderedLowLatencyModes();
+
+ std::vector<audio_latency_mode_t> supportedLowLatencyModes;
+ // remove free mode at the end of the supported list to only retain low latency modes
+ std::copy(supportedLatencies.begin(),
+ supportedLatencies.begin() + supportedLatencies.size() - 1,
+ std::back_inserter(supportedLowLatencyModes));
+
+ // Verify that supported low latency modes are always in ordered latency modes list and
+ // in the same order
+ std::vector<audio_latency_mode_t>::iterator lastIt = orderedLowLatencyModes.begin();
+ for (auto latency : supportedLowLatencyModes) {
+ auto it = std::find(orderedLowLatencyModes.begin(), orderedLowLatencyModes.end(), latency);
+ ASSERT_NE(it, orderedLowLatencyModes.end());
+ ASSERT_LE(lastIt, it);
+ lastIt = it;
+ }
+}
+
+TEST_F(SpatializerTest, RequestedA2dpLatencyTest) {
+ if (!setUpForHeadtracking()) {
+ GTEST_SKIP() << "Skipping RequestedA2dpLatencyTest: head tracking not supported";
+ }
+
+ std::vector<audio_latency_mode_t> latencies = sA2DPLatencyModes;
+ mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+ // requested latency mode must be free if no spatialized tracks are active
+ audio_latency_mode_t requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+
+ // requested latency mode must be low if at least one spatialized track is active
+ mSpatializer->updateActiveTracks(1);
+ requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_LOW);
+
+ // requested latency mode must be free after stopping the last spatialized track
+ mSpatializer->updateActiveTracks(0);
+ requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+}
+
+TEST_F(SpatializerTest, RequestedBleLatencyTest) {
+ if (!setUpForHeadtracking()) {
+ GTEST_SKIP() << "Skipping RequestedBleLatencyTest: head tracking not supported";
+ }
+ if (!com::android::media::audio::dsa_over_bt_le_audio()) {
+ GTEST_SKIP() << "Skipping RequestedBleLatencyTest: DSA over LE not enabled";
+ }
+
+ mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput,
+ { AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+ AUDIO_LATENCY_MODE_FREE });
+
+ // requested latency mode must be free if no spatialized tracks are active
+ audio_latency_mode_t requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+
+ // requested latency mode must be low software if at least one spatialized track is active
+ // and the only supported low latency mode is low software
+ mSpatializer->updateActiveTracks(1);
+ requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
+
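+ // With both software and hardware low latency modes supported, the requested mode
+ // depends on the head tracking sensor connection mode reported by the spatializer.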
+ mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput,
+ { AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+ AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE,
+ AUDIO_LATENCY_MODE_FREE });
+
+ requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ HeadTracking::ConnectionMode connectionMode = mSpatializer->getHeadtrackingConnectionMode();
+
+ // If the low hardware latency mode is used, the spatializer must use one of the direct
+ // sensor connection modes (tunneled or software).
+ // Otherwise, the low software latency mode must be used.
+ if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
+ ASSERT_TRUE(connectionMode == HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL
+ || connectionMode == HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW);
+ } else {
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
+ }
+
+ // requested latency mode must be free after stopping the last spatialized track
+ mSpatializer->updateActiveTracks(0);
+ requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+}
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 575e0fb..c9e9090 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -112,6 +112,7 @@
"libcameraservice_device_independent",
"libdynamic_depth",
"libprocessinfoservice_aidl",
+ "libvirtualdevicebuildflags",
"media_permission-aidl-cpp",
],
}
@@ -185,6 +186,7 @@
"aidl/AidlCameraServiceListener.cpp",
"aidl/AidlUtils.cpp",
"aidl/DeathPipe.cpp",
+ "utils/AttributionAndPermissionUtils.cpp",
"utils/CameraServiceProxyWrapper.cpp",
"utils/CameraThreadState.cpp",
"utils/CameraTraces.cpp",
@@ -261,7 +263,7 @@
"liblog",
"libutils",
"libxml2",
- "camera_platform_flags_c_lib"
+ "camera_platform_flags_c_lib",
],
include_dirs: [
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 3f43af5..78c14b2 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -38,7 +38,6 @@
#include <aidl/AidlCameraService.h>
#include <android-base/macros.h>
#include <android-base/parseint.h>
-#include <android/permission/PermissionChecker.h>
#include <binder/ActivityManager.h>
#include <binder/AppOpsManager.h>
#include <binder/IPCThreadState.h>
@@ -129,18 +128,16 @@
// ----------------------------------------------------------------------------
-static const std::string sDumpPermission("android.permission.DUMP");
-static const std::string sManageCameraPermission("android.permission.MANAGE_CAMERA");
-static const std::string sCameraPermission("android.permission.CAMERA");
-static const std::string sSystemCameraPermission("android.permission.SYSTEM_CAMERA");
-static const std::string sCameraHeadlessSystemUserPermission(
- "android.permission.CAMERA_HEADLESS_SYSTEM_USER");
-static const std::string
- sCameraSendSystemEventsPermission("android.permission.CAMERA_SEND_SYSTEM_EVENTS");
-static const std::string sCameraOpenCloseListenerPermission(
- "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
-static const std::string
- sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+// Permission strings (references to AttributionAndPermissionUtils for brevity)
+static const std::string &sDumpPermission =
+ AttributionAndPermissionUtils::sDumpPermission;
+static const std::string &sManageCameraPermission =
+ AttributionAndPermissionUtils::sManageCameraPermission;
+static const std::string &sCameraSendSystemEventsPermission =
+ AttributionAndPermissionUtils::sCameraSendSystemEventsPermission;
+static const std::string &sCameraInjectExternalCameraPermission =
+ AttributionAndPermissionUtils::sCameraInjectExternalCameraPermission;
+
// Constant integer for FGS Logging, used to denote the API type for logger
static const int LOG_FGS_CAMERA_API = 1;
const char *sFileName = "lastOpenSessionDumpFile";
@@ -156,7 +153,11 @@
static std::set<std::string> sServiceErrorEventSet;
CameraService::CameraService(
- std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils) :
+ AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils == nullptr ?
+ std::make_shared<AttributionAndPermissionUtils>()
+ : attributionAndPermissionUtils),
mCameraServiceProxyWrapper(cameraServiceProxyWrapper == nullptr ?
std::make_shared<CameraServiceProxyWrapper>() : cameraServiceProxyWrapper),
mEventLog(DEFAULT_EVENT_LOG_LENGTH),
@@ -165,6 +166,7 @@
mSoundRef(0), mInitialized(false),
mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE) {
ALOGI("CameraService started (pid=%d)", getpid());
+ mAttributionAndPermissionUtils->setCameraService(this);
mServiceLockWrapper = std::make_shared<WaitableMutexWrapper>(&mServiceLock);
mMemFd = memfd_create(sFileName, MFD_ALLOW_SEALING);
if (mMemFd == -1) {
@@ -213,7 +215,7 @@
mUidPolicy = new UidPolicy(this);
mUidPolicy->registerSelf();
- mSensorPrivacyPolicy = new SensorPrivacyPolicy(this);
+ mSensorPrivacyPolicy = new SensorPrivacyPolicy(this, mAttributionAndPermissionUtils);
mSensorPrivacyPolicy->registerSelf();
mInjectionStatusListener = new InjectionStatusListener(this);
@@ -706,34 +708,7 @@
broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
}
-static bool isAutomotiveDevice() {
- // Checks the property ro.hardware.type and returns true if it is
- // automotive.
- char value[PROPERTY_VALUE_MAX] = {0};
- property_get("ro.hardware.type", value, "");
- return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
-}
-
-static bool isHeadlessSystemUserMode() {
- // Checks if the device is running in headless system user mode
- // by checking the property ro.fw.mu.headless_system_user.
- char value[PROPERTY_VALUE_MAX] = {0};
- property_get("ro.fw.mu.headless_system_user", value, "");
- return strncmp(value, "true", PROPERTY_VALUE_MAX) == 0;
-}
-
-static bool isAutomotivePrivilegedClient(int32_t uid) {
- // Returns false if this is not an automotive device type.
- if (!isAutomotiveDevice())
- return false;
-
- // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
- // privileged client uid used for safety critical use cases such as
- // rear view and surround view.
- return uid == AID_AUTOMOTIVE_EVS;
-}
-
-bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const{
+bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const {
// Returns false if this is not an automotive device type.
if (!isAutomotiveDevice())
return false;
@@ -778,48 +753,6 @@
return true;
}
-bool CameraService::checkPermission(const std::string& cameraId, const std::string& permission,
- const AttributionSourceState& attributionSource, const std::string& message,
- int32_t attributedOpCode) const{
- if (isAutomotivePrivilegedClient(attributionSource.uid)) {
- // If cameraId is empty, then it means that this check is not used for the
- // purpose of accessing a specific camera, hence grant permission just
- // based on uid to the automotive privileged client.
- if (cameraId.empty())
- return true;
- // If this call is used for accessing a specific camera then cam_id must be provided.
- // In that case, only pre-grants the permission for accessing the exterior system only
- // camera.
- return isAutomotiveExteriorSystemCamera(cameraId);
- }
-
- permission::PermissionChecker permissionChecker;
- return permissionChecker.checkPermissionForPreflight(toString16(permission), attributionSource,
- toString16(message), attributedOpCode)
- != permission::PermissionChecker::PERMISSION_HARD_DENIED;
-}
-
-bool CameraService::hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid,
- int callingUid) const{
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- bool checkPermissionForSystemCamera = checkPermission(cameraId,
- sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
- bool checkPermissionForCamera = checkPermission(cameraId,
- sCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
- return checkPermissionForSystemCamera && checkPermissionForCamera;
-}
-
-bool CameraService::hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
- int callingPid, int callingUid) const{
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- return checkPermission(cameraId, sCameraHeadlessSystemUserPermission, attributionSource,
- std::string(), AppOpsManager::OP_NONE);
-}
-
Status CameraService::getNumberOfCameras(int32_t type, int32_t* numCameras) {
ATRACE_CALL();
Mutex::Autolock l(mServiceLock);
@@ -906,13 +839,6 @@
"request for system only device %s: ", cameraId.c_str());
}
- // Check for camera permissions
- if (!hasCameraPermissions()) {
- return STATUS_ERROR(ERROR_PERMISSION_DENIED,
- "android.permission.CAMERA needed to call"
- "createDefaultRequest");
- }
-
CameraMetadata metadata;
status_t err = mCameraProviderManager->createDefaultRequest(cameraId, tempId, &metadata);
if (err == OK) {
@@ -961,13 +887,6 @@
cameraId.c_str());
}
- // Check for camera permissions
- if (!hasCameraPermissions()) {
- return STATUS_ERROR(ERROR_PERMISSION_DENIED,
- "android.permission.CAMERA needed to call"
- "isSessionConfigurationWithParametersSupported");
- }
-
*supported = false;
status_t ret = mCameraProviderManager->isSessionConfigurationSupported(cameraId.c_str(),
sessionConfiguration, /*mOverrideForPerfClass*/false, /*checkSessionParams*/true,
@@ -998,6 +917,61 @@
return res;
}
+Status CameraService::getSessionCharacteristics(const std::string& unresolvedCameraId,
+ int targetSdkVersion, bool overrideToPortrait,
+ const SessionConfiguration& sessionConfiguration,
+ /*out*/ CameraMetadata* outMetadata) {
+ ATRACE_CALL();
+
+ if (!mInitialized) {
+ ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
+ logServiceError("Camera subsystem is not available", ERROR_DISCONNECTED);
+ return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
+ }
+
+ const std::string cameraId =
+ resolveCameraId(unresolvedCameraId, CameraThreadState::getCallingUid());
+
+ if (outMetadata == nullptr) {
+ std::string msg =
+ fmt::sprintf("Camera %s: Invalid 'outMetadata' input!", unresolvedCameraId.c_str());
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+
+ bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+ mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+
+ status_t ret = mCameraProviderManager->getSessionCharacteristics(
+ cameraId, sessionConfiguration, overrideForPerfClass, overrideToPortrait, outMetadata);
+
+ // TODO(b/303645857): Remove fingerprintable metadata if the caller process does not have
+ // camera access permission.
+
+ Status res = Status::ok();
+ switch (ret) {
+ case OK:
+ // Expected, no handling needed.
+ break;
+ case INVALID_OPERATION: {
+ std::string msg = fmt::sprintf(
+ "Camera %s: Session characteristics query not supported!",
+ cameraId.c_str());
+ ALOGD("%s: %s", __FUNCTION__, msg.c_str());
+ res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+ }
+ break;
+ default: {
+ std::string msg = fmt::sprintf("Camera %s: Error: %s (%d)", cameraId.c_str(),
+ strerror(-ret), ret);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ }
+
+ return res;
+}
+
Status CameraService::parseCameraIdRemapping(
const hardware::CameraIdRemapping& cameraIdRemapping,
/* out */ TCameraIdRemapping* cameraIdRemappingMap) {
@@ -1221,13 +1195,9 @@
const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
auto callingPid = CameraThreadState::getCallingPid();
auto callingUid = CameraThreadState::getCallingUid();
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- bool checkPermissionForSystemCamera = checkPermission(std::to_string(cameraIdInt),
- sSystemCameraPermission, attributionSource, std::string(),
- AppOpsManager::OP_NONE);
- if (checkPermissionForSystemCamera || getpid() == callingPid) {
+ bool systemCameraPermissions = hasPermissionsForSystemCamera(std::to_string(cameraIdInt),
+ callingPid, callingUid, /* checkCameraPermissions= */ false);
+ if (systemCameraPermissions || getpid() == callingPid) {
deviceIds = &mNormalDeviceIds;
}
if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(deviceIds->size())) {
@@ -1300,11 +1270,7 @@
// If it's not calling from cameraserver, check the permission only if
// android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
// it would've already been checked in shouldRejectSystemCameraConnection.
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission,
- attributionSource, std::string(), AppOpsManager::OP_NONE);
+ bool checkPermissionForCamera = hasPermissionsForCamera(cameraId, callingPid, callingUid);
if ((callingPid != getpid()) &&
(deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
!checkPermissionForCamera) {
@@ -1337,6 +1303,7 @@
int32_t* torchStrength) {
ATRACE_CALL();
Mutex::Autolock l(mServiceLock);
+
const std::string cameraId = resolveCameraId(
unresolvedCameraId, CameraThreadState::getCallingUid());
if (!mInitialized) {
@@ -1490,7 +1457,7 @@
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
- packageName, featureId, cameraId,
+ cameraService->mAttributionAndPermissionUtils, packageName, featureId, cameraId,
api1CameraId, facing, sensorOrientation,
clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
forceSlowJpegMode);
@@ -1500,7 +1467,8 @@
sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
*client = new CameraDeviceClient(cameraService, tmp,
- cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
+ cameraService->mCameraServiceProxyWrapper,
+ cameraService->mAttributionAndPermissionUtils, packageName, systemNativeClient,
featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
overrideForPerfClass, overrideToPortrait, originalCameraId);
ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
@@ -1664,38 +1632,6 @@
return STATUS_ERROR(ERROR_INVALID_OPERATION, "Unable to initialize legacy parameters");
}
-// Can camera service trust the caller based on the calling UID?
-static bool isTrustedCallingUid(uid_t uid) {
- switch (uid) {
- case AID_MEDIA: // mediaserver
- case AID_CAMERASERVER: // cameraserver
- case AID_RADIO: // telephony
- return true;
- default:
- return false;
- }
-}
-
-static status_t getUidForPackage(const std::string &packageName, int userId, /*inout*/uid_t& uid,
- int err) {
- PermissionController pc;
- uid = pc.getPackageUid(toString16(packageName), 0);
- if (uid <= 0) {
- ALOGE("Unknown package: '%s'", packageName.c_str());
- dprintf(err, "Unknown package: '%s'\n", packageName.c_str());
- return BAD_VALUE;
- }
-
- if (userId < 0) {
- ALOGE("Invalid user: %d", userId);
- dprintf(err, "Invalid user: %d\n", userId);
- return BAD_VALUE;
- }
-
- uid = multiuser_get_uid(userId, uid);
- return NO_ERROR;
-}
-
Status CameraService::validateConnectLocked(const std::string& cameraId,
const std::string& clientName8, /*inout*/int& clientUid, /*inout*/int& clientPid,
/*out*/int& originalClientPid) const {
@@ -1747,8 +1683,6 @@
Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
const std::string& clientName, int& clientUid, int& clientPid,
/*out*/int& originalClientPid) const {
- AttributionSourceState attributionSource{};
-
int callingPid = CameraThreadState::getCallingPid();
int callingUid = CameraThreadState::getCallingUid();
@@ -1762,7 +1696,7 @@
"Untrusted caller (calling PID %d, UID %d) trying to "
"forward camera access to camera %s for client %s (PID %d, UID %d)",
callingPid, callingUid, cameraId.c_str(),
- clientName.c_str(), clientUid, clientPid);
+ clientName.c_str(), clientPid, clientUid);
}
// Check if we can trust clientPid
@@ -1775,7 +1709,7 @@
"Untrusted caller (calling PID %d, UID %d) trying to "
"forward camera access to camera %s for client %s (PID %d, UID %d)",
callingPid, callingUid, cameraId.c_str(),
- clientName.c_str(), clientUid, clientPid);
+ clientName.c_str(), clientPid, clientUid);
}
if (shouldRejectSystemCameraConnection(cameraId)) {
@@ -1795,17 +1729,14 @@
// If it's not calling from cameraserver, check the permission if the
// device isn't a system only camera (shouldRejectSystemCameraConnection already checks for
// android.permission.SYSTEM_CAMERA for system only camera devices).
- attributionSource.pid = clientPid;
- attributionSource.uid = clientUid;
- attributionSource.packageName = clientName;
- bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission, attributionSource,
- std::string(), AppOpsManager::OP_NONE);
+ bool checkPermissionForCamera =
+ hasPermissionsForCamera(cameraId, clientPid, clientUid, clientName);
if (callingPid != getpid() &&
(deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
"Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" without camera permission",
- clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+ clientName.c_str(), clientPid, clientUid, cameraId.c_str());
}
// Make sure the UID is in an active state to use the camera
@@ -1816,7 +1747,7 @@
return STATUS_ERROR_FMT(ERROR_DISABLED,
"Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" from background ("
"calling UID %d proc state %" PRId32 ")",
- clientName.c_str(), clientUid, clientPid, cameraId.c_str(),
+ clientName.c_str(), clientPid, clientUid, cameraId.c_str(),
callingUid, procState);
}
@@ -1829,7 +1760,7 @@
ALOGE("Access Denial: cannot use the camera when sensor privacy is enabled");
return STATUS_ERROR_FMT(ERROR_DISABLED,
"Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" when sensor privacy "
- "is enabled", clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+ "is enabled", clientName.c_str(), clientPid, clientUid, cameraId.c_str());
}
// Only use passed in clientPid to check permission. Use calling PID as the client PID that's
@@ -1855,13 +1786,14 @@
// If the System User tries to access the camera when the device is running in
// headless system user mode, ensure that client has the required permission
// CAMERA_HEADLESS_SYSTEM_USER.
- if (isHeadlessSystemUserMode() && (clientUserId == USER_SYSTEM) &&
- !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
+ if (isHeadlessSystemUserMode()
+ && (clientUserId == USER_SYSTEM)
+ && !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
"Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" as Headless System \
User without camera headless system user permission",
- clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+ clientName.c_str(), clientPid, clientUid, cameraId.c_str());
}
}
@@ -2222,7 +2154,7 @@
}
// (1) Cameraserver trying to connect, accept.
- if (CameraThreadState::getCallingPid() == getpid()) {
+ if (isCallerCameraServerNotDelegating()) {
return false;
}
// (2)
@@ -2259,16 +2191,17 @@
sp<CameraDeviceClient> client = nullptr;
std::string clientPackageNameAdj = clientPackageName;
int callingPid = CameraThreadState::getCallingPid();
+ int callingUid = CameraThreadState::getCallingUid();
bool systemNativeClient = false;
if (doesClientHaveSystemUid() && (clientPackageNameAdj.size() == 0)) {
- std::string systemClient =
- fmt::sprintf("client.pid<%d>", CameraThreadState::getCallingPid());
+ std::string systemClient = fmt::sprintf("client.pid<%d>", callingPid);
clientPackageNameAdj = systemClient;
systemNativeClient = true;
}
+
const std::string cameraId = resolveCameraId(
unresolvedCameraId,
- CameraThreadState::getCallingUid(),
+ callingUid,
clientPackageNameAdj);
if (oomScoreOffset < 0) {
@@ -2281,7 +2214,6 @@
}
userid_t clientUserId = multiuser_get_user_id(clientUid);
- int callingUid = CameraThreadState::getCallingUid();
if (clientUid == USE_CALLING_UID) {
clientUserId = multiuser_get_user_id(callingUid);
}
@@ -2298,8 +2230,8 @@
// enforce system camera permissions
if (oomScoreOffset > 0
&& !hasPermissionsForSystemCamera(cameraId, callingPid,
- CameraThreadState::getCallingUid())
- && !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
+ callingUid)
+ && !isTrustedCallingUid(callingUid)) {
std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
"camera id %s without SYSTEM_CAMERA permissions",
clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
@@ -2339,12 +2271,41 @@
const auto& mActivityManager = getActivityManager();
if (mActivityManager) {
mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
- CameraThreadState::getCallingUid(),
- CameraThreadState::getCallingPid());
+ callingUid,
+ callingPid);
}
return ret;
}
+bool CameraService::isCameraPrivacyEnabled(const String16& packageName, const std::string& cam_id,
+ int callingPid, int callingUid) {
+ if (!isAutomotiveDevice()) {
+ return mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+ }
+
+ // Automotive privileged client AID_AUTOMOTIVE_EVS using exterior system camera for
+ // safety-critical use cases cannot be disabled and are exempt from camera privacy policy.
+ if ((isAutomotivePrivilegedClient(callingUid) && isAutomotiveExteriorSystemCamera(cam_id))) {
+ ALOGI("Camera privacy cannot be enabled for automotive privileged client %d "
+ "using camera %s", callingUid, cam_id.c_str());
+ return false;
+ }
+
+ if (mSensorPrivacyPolicy->isCameraPrivacyEnabled(packageName)) {
+ return true;
+ } else if (mSensorPrivacyPolicy->getCameraPrivacyState() == SensorPrivacyManager::DISABLED) {
+ return false;
+ } else if (mSensorPrivacyPolicy->getCameraPrivacyState()
+ == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
+ if (hasPermissionsForCameraPrivacyAllowlist(callingPid, callingUid)) {
+ return false;
+ } else {
+ return true;
+ }
+ }
+ return false;
+}
+
std::string CameraService::getPackageNameFromUid(int clientUid) {
std::string packageName("");
@@ -2556,73 +2517,100 @@
// Enable/disable camera service watchdog
client->setCameraServiceWatchdog(mCameraServiceWatchdogEnabled);
- // Set rotate-and-crop override behavior
- if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
- client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
- } else if (overrideToPortrait && portraitRotation != 0) {
- uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
- switch (portraitRotation) {
- case 90:
- rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
- break;
- case 180:
- rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_180;
- break;
- case 270:
- rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_270;
- break;
- default:
- ALOGE("Unexpected portrait rotation: %d", portraitRotation);
- break;
+ CameraMetadata chars;
+ bool rotateAndCropSupported = true;
+ err = mCameraProviderManager->getCameraCharacteristics(cameraId, overrideForPerfClass,
+ &chars, overrideToPortrait);
+ if (err == OK) {
+ auto availableRotateCropEntry = chars.find(
+ ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
+ if (availableRotateCropEntry.count <= 1) {
+ rotateAndCropSupported = false;
}
- client->setRotateAndCropOverride(rotateAndCropMode);
} else {
- client->setRotateAndCropOverride(
- mCameraServiceProxyWrapper->getRotateAndCropOverride(
- clientPackageName, facing, multiuser_get_user_id(clientUid)));
+ ALOGE("%s: Unable to query static metadata for camera %s: %s (%d)", __FUNCTION__,
+ cameraId.c_str(), strerror(-err), err);
}
- // Set autoframing override behaviour
- if (mOverrideAutoframingMode != ANDROID_CONTROL_AUTOFRAMING_AUTO) {
- client->setAutoframingOverride(mOverrideAutoframingMode);
- } else {
- client->setAutoframingOverride(
- mCameraServiceProxyWrapper->getAutoframingOverride(
- clientPackageName));
+ if (rotateAndCropSupported) {
+ // Set rotate-and-crop override behavior
+ if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+ client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
+ } else if (overrideToPortrait && portraitRotation != 0) {
+ uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
+ switch (portraitRotation) {
+ case 90:
+ rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
+ break;
+ case 180:
+ rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_180;
+ break;
+ case 270:
+ rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_270;
+ break;
+ default:
+ ALOGE("Unexpected portrait rotation: %d", portraitRotation);
+ break;
+ }
+ client->setRotateAndCropOverride(rotateAndCropMode);
+ } else {
+ client->setRotateAndCropOverride(
+ mCameraServiceProxyWrapper->getRotateAndCropOverride(
+ clientPackageName, facing, multiuser_get_user_id(clientUid)));
+ }
}
- // Automotive privileged client AID_AUTOMOTIVE_EVS using exterior system camera for use
- // cases such as rear view and surround view cannot be disabled and are exempt from camera
- // privacy policy.
- if ((!isAutomotivePrivilegedClient(packageUid) ||
- !isAutomotiveExteriorSystemCamera(cameraId))) {
+ bool autoframingSupported = true;
+ auto availableAutoframingEntry = chars.find(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE);
+ if ((availableAutoframingEntry.count == 1) && (availableAutoframingEntry.data.u8[0] ==
+ ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE)) {
+ autoframingSupported = false;
+ }
+
+ if (autoframingSupported) {
+ // Set autoframing override behaviour
+ if (mOverrideAutoframingMode != ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+ client->setAutoframingOverride(mOverrideAutoframingMode);
+ } else {
+ client->setAutoframingOverride(
+ mCameraServiceProxyWrapper->getAutoframingOverride(
+ clientPackageName));
+ }
+ }
+
+ bool isCameraPrivacyEnabled;
+ if (flags::camera_privacy_allowlist()) {
// Set camera muting behavior.
- bool isCameraPrivacyEnabled =
+ isCameraPrivacyEnabled = this->isCameraPrivacyEnabled(
+ toString16(client->getPackageName()), cameraId, packagePid, packageUid);
+ } else {
+ isCameraPrivacyEnabled =
mSensorPrivacyPolicy->isCameraPrivacyEnabled();
- if (client->supportsCameraMute()) {
- client->setCameraMute(
- mOverrideCameraMuteMode || isCameraPrivacyEnabled);
- } else if (isCameraPrivacyEnabled) {
- // no camera mute supported, but privacy is on! => disconnect
- ALOGI("Camera mute not supported for package: %s, camera id: %s",
- client->getPackageName().c_str(), cameraId.c_str());
- // Do not hold mServiceLock while disconnecting clients, but
- // retain the condition blocking other clients from connecting
- // in mServiceLockWrapper if held.
- mServiceLock.unlock();
- // Clear caller identity temporarily so client disconnect PID
- // checks work correctly
- int64_t token = CameraThreadState::clearCallingIdentity();
- // Note AppOp to trigger the "Unblock" dialog
- client->noteAppOp();
- client->disconnect();
- CameraThreadState::restoreCallingIdentity(token);
- // Reacquire mServiceLock
- mServiceLock.lock();
+ }
- return STATUS_ERROR_FMT(ERROR_DISABLED,
- "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
- }
+ if (client->supportsCameraMute()) {
+ client->setCameraMute(
+ mOverrideCameraMuteMode || isCameraPrivacyEnabled);
+ } else if (isCameraPrivacyEnabled) {
+ // no camera mute supported, but privacy is on! => disconnect
+ ALOGI("Camera mute not supported for package: %s, camera id: %s",
+ client->getPackageName().c_str(), cameraId.c_str());
+ // Do not hold mServiceLock while disconnecting clients, but
+ // retain the condition blocking other clients from connecting
+ // in mServiceLockWrapper if held.
+ mServiceLock.unlock();
+ // Clear caller identity temporarily so client disconnect PID
+ // checks work correctly
+ int64_t token = CameraThreadState::clearCallingIdentity();
+ // Note AppOp to trigger the "Unblock" dialog
+ client->noteAppOp();
+ client->disconnect();
+ CameraThreadState::restoreCallingIdentity(token);
+ // Reacquire mServiceLock
+ mServiceLock.lock();
+
+ return STATUS_ERROR_FMT(ERROR_DISABLED,
+ "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
}
if (shimUpdateOnly) {
@@ -3212,22 +3200,6 @@
return Status::ok();
}
-bool CameraService::hasCameraPermissions() const {
- int callingPid = CameraThreadState::getCallingPid();
- int callingUid = CameraThreadState::getCallingUid();
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- bool res = checkPermission(std::string(), sCameraPermission,
- attributionSource, std::string(), AppOpsManager::OP_NONE);
-
- bool hasPermission = ((callingPid == getpid()) || res);
- if (!hasPermission) {
- ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
- }
- return hasPermission;
-}
-
Status CameraService::isConcurrentSessionConfigurationSupported(
const std::vector<CameraIdAndSessionConfiguration>& cameraIdsAndSessionConfigurations,
int targetSdkVersion, /*out*/bool* isSupported) {
@@ -3243,7 +3215,11 @@
}
// Check for camera permissions
- if (!hasCameraPermissions()) {
+ int callingPid = CameraThreadState::getCallingPid();
+ int callingUid = CameraThreadState::getCallingUid();
+ bool hasCameraPermission = ((callingPid == getpid()) ||
+ hasPermissionsForCamera(callingPid, callingUid));
+ if (!hasCameraPermission) {
return STATUS_ERROR(ERROR_PERMISSION_DENIED,
"android.permission.CAMERA needed to call"
"isConcurrentSessionConfigurationSupported");
@@ -3287,15 +3263,9 @@
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Null listener given to addListener");
}
- auto clientUid = CameraThreadState::getCallingUid();
auto clientPid = CameraThreadState::getCallingPid();
- AttributionSourceState attributionSource{};
- attributionSource.uid = clientUid;
- attributionSource.pid = clientPid;
-
- bool openCloseCallbackAllowed = checkPermission(std::string(),
- sCameraOpenCloseListenerPermission, attributionSource, std::string(),
- AppOpsManager::OP_NONE);
+ auto clientUid = CameraThreadState::getCallingUid();
+ bool openCloseCallbackAllowed = hasPermissionsForOpenCloseListener(clientPid, clientUid);
Mutex::Autolock lock(mServiceLock);
@@ -3938,6 +3908,7 @@
CameraService::Client::Client(const sp<CameraService>& cameraService,
const sp<ICameraClient>& cameraClient,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName, bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraIdStr,
@@ -3946,6 +3917,7 @@
int servicePid, bool overrideToPortrait) :
CameraService::BasicClient(cameraService,
IInterface::asBinder(cameraClient),
+ attributionAndPermissionUtils,
clientPackageName, systemNativeClient, clientFeatureId,
cameraIdStr, cameraFacing, sensorOrientation,
clientPid, clientUid,
@@ -3976,10 +3948,12 @@
CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
const sp<IBinder>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName, bool nativeClient,
const std::optional<std::string>& clientFeatureId, const std::string& cameraIdStr,
int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
int servicePid, bool overrideToPortrait):
+ AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
mDestructionStarted(false),
mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
@@ -4143,8 +4117,15 @@
// return MODE_IGNORED. Do not treat such case as error.
bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid,
mClientPackageName);
- bool isCameraPrivacyEnabled =
+
+ bool isCameraPrivacyEnabled;
+ if (flags::camera_privacy_allowlist()) {
+ isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
+ toString16(mClientPackageName), std::string(), mClientPid, mClientUid);
+ } else {
+ isCameraPrivacyEnabled =
sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+ }
// We don't want to return EACCESS if the CameraPrivacy is enabled.
// We prefer to successfully open the camera and perform camera muting
// or blocking in connectHelper as handleAppOpMode can be called before the
@@ -4170,8 +4151,15 @@
if (mAppOpsManager != nullptr) {
// Notify app ops that the camera is not available
mOpsCallback = new OpsCallback(this);
- mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+
+ if (flags::watch_foreground_changes()) {
+ mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+ toString16(mClientPackageName),
+ AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
+ } else {
+ mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
toString16(mClientPackageName), mOpsCallback);
+ }
// Just check for camera access here on open - delay startOp until
// camera frames start streaming in startCameraStreamingOps
@@ -4331,20 +4319,42 @@
block();
} else if (res == AppOpsManager::MODE_IGNORED) {
bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
- bool isCameraPrivacyEnabled =
+
+ // Uid may be active, but not visible to the user (e.g. PROCESS_STATE_FOREGROUND_SERVICE).
+ // If not visible, but still active, then we want to block instead of muting the camera.
+ int32_t procState = sCameraService->mUidPolicy->getProcState(mClientUid);
+ bool isUidVisible = (procState <= ActivityManager::PROCESS_STATE_BOUND_TOP);
+
+ bool isCameraPrivacyEnabled;
+ if (flags::camera_privacy_allowlist()) {
+ isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
+ toString16(mClientPackageName), std::string(), mClientPid, mClientUid);
+ } else {
+ isCameraPrivacyEnabled =
sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
- ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d",
- mCameraIdStr.c_str(), mClientPackageName.c_str(),
- mUidIsTrusted, isUidActive);
- // If the calling Uid is trusted (a native service), or the client Uid is active (WAR for
- // b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such cases as
- // error.
+ }
+
+ ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d"
+ " isUidVisible %d, isCameraPrivacyEnabled %d", mCameraIdStr.c_str(),
+ mClientPackageName.c_str(), mUidIsTrusted, isUidActive, isUidVisible,
+ isCameraPrivacyEnabled);
+ // If the calling Uid is trusted (a native service), or the client Uid is active / visible
+ // (WAR for b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such
+ // cases as errors.
if (!mUidIsTrusted) {
- if (isUidActive && isCameraPrivacyEnabled && supportsCameraMute()) {
- setCameraMute(true);
- } else if (!isUidActive
- || (isCameraPrivacyEnabled && !supportsCameraMute())) {
- block();
+ if (flags::watch_foreground_changes()) {
+ if (isUidVisible && isCameraPrivacyEnabled && supportsCameraMute()) {
+ setCameraMute(true);
+ } else {
+ block();
+ }
+ } else {
+ if (isUidActive && isCameraPrivacyEnabled && supportsCameraMute()) {
+ setCameraMute(true);
+ } else if (!isUidActive
+ || (isCameraPrivacyEnabled && !supportsCameraMute())) {
+ block();
+ }
}
}
} else if (res == AppOpsManager::MODE_ALLOWED) {
@@ -4715,7 +4725,15 @@
}
hasCameraPrivacyFeature(); // Called so the result is cached
mSpm.addSensorPrivacyListener(this);
+ if (isAutomotiveDevice()) {
+ mSpm.addToggleSensorPrivacyListener(this);
+ }
mSensorPrivacyEnabled = mSpm.isSensorPrivacyEnabled();
+ if (flags::camera_privacy_allowlist()) {
+ mCameraPrivacyState = mSpm.getToggleSensorPrivacyState(
+ SensorPrivacyManager::TOGGLE_TYPE_SOFTWARE,
+ SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
+ }
status_t res = mSpm.linkToDeath(this);
if (res == OK) {
mRegistered = true;
@@ -4747,6 +4765,9 @@
void CameraService::SensorPrivacyPolicy::unregisterSelf() {
Mutex::Autolock _l(mSensorPrivacyLock);
mSpm.removeSensorPrivacyListener(this);
+ if (isAutomotiveDevice()) {
+ mSpm.removeToggleSensorPrivacyListener(this);
+ }
mSpm.unlinkToDeath(this);
mRegistered = false;
ALOGV("SensorPrivacyPolicy: Unregistered with SensorPrivacyManager");
@@ -4761,6 +4782,15 @@
return mSensorPrivacyEnabled;
}
+int CameraService::SensorPrivacyPolicy::getCameraPrivacyState() {
+ if (!mRegistered) {
+ registerWithSensorPrivacyManager();
+ }
+
+ Mutex::Autolock _l(mSensorPrivacyLock);
+ return mCameraPrivacyState;
+}
+
bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled() {
if (!hasCameraPrivacyFeature()) {
return false;
@@ -4768,18 +4798,51 @@
return mSpm.isToggleSensorPrivacyEnabled(SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
}
+bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(const String16& packageName) {
+ if (!hasCameraPrivacyFeature()) {
+ return false;
+ }
+ return mSpm.isCameraPrivacyEnabled(packageName);
+}
+
binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyChanged(
- int toggleType __unused, int sensor __unused, bool enabled) {
+ int toggleType, int sensor, bool enabled) {
+ if ((toggleType == SensorPrivacyManager::TOGGLE_TYPE_UNKNOWN)
+ && (sensor == SensorPrivacyManager::TOGGLE_SENSOR_UNKNOWN)) {
+ {
+ Mutex::Autolock _l(mSensorPrivacyLock);
+ mSensorPrivacyEnabled = enabled;
+ }
+ // if sensor privacy is enabled then block all clients from accessing the camera
+ if (enabled) {
+ sp<CameraService> service = mService.promote();
+ if (service != nullptr) {
+ service->blockAllClients();
+ }
+ }
+ }
+ return binder::Status::ok();
+}
+
+binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyStateChanged(
+ int, int sensor, int state) {
+ if (!flags::camera_privacy_allowlist()
+ || (sensor != SensorPrivacyManager::TOGGLE_SENSOR_CAMERA)) {
+ return binder::Status::ok();
+ }
{
Mutex::Autolock _l(mSensorPrivacyLock);
- mSensorPrivacyEnabled = enabled;
+ mCameraPrivacyState = state;
+ }
+ sp<CameraService> service = mService.promote();
+ if (!service) {
+ return binder::Status::ok();
}
// if sensor privacy is enabled then block all clients from accessing the camera
- if (enabled) {
- sp<CameraService> service = mService.promote();
- if (service != nullptr) {
- service->blockAllClients();
- }
+ if (state == SensorPrivacyManager::ENABLED) {
+ service->blockAllClients();
+ } else if (state == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
+ service->blockPrivacyEnabledClients();
}
return binder::Status::ok();
}
@@ -5457,8 +5520,7 @@
for (auto& listener : mListenerList) {
bool isVendorListener = listener->isVendorListener();
if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
- listener->getListenerPid(), listener->getListenerUid()) ||
- isVendorListener) {
+ listener->getListenerPid(), listener->getListenerUid())) {
ALOGV("Skipping discovery callback for system-only camera device %s",
cameraId.c_str());
continue;
@@ -5651,6 +5713,23 @@
}
}
+void CameraService::blockPrivacyEnabledClients() {
+ const auto clients = mActiveClientManager.getAll();
+ for (auto& current : clients) {
+ if (current != nullptr) {
+ const auto basicClient = current->getValue();
+ if (basicClient.get() != nullptr) {
+ std::string pkgName = basicClient->getPackageName();
+ bool cameraPrivacyEnabled =
+ mSensorPrivacyPolicy->isCameraPrivacyEnabled(toString16(pkgName));
+ if (cameraPrivacyEnabled) {
+ basicClient->block();
+ }
+ }
+ }
+ }
+}
+
// NOTE: This is a remote API - make sure all args are validated
status_t CameraService::shellCommand(int in, int out, int err, const Vector<String16>& args) {
if (!checkCallingPermission(toString16(sManageCameraPermission), nullptr, nullptr)) {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 1487013..1a887a1 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -53,6 +53,7 @@
#include "utils/ClientManager.h"
#include "utils/IPCTransport.h"
#include "utils/CameraServiceProxyWrapper.h"
+#include "utils/AttributionAndPermissionUtils.h"
#include <set>
#include <string>
@@ -77,7 +78,8 @@
public virtual ::android::hardware::BnCameraService,
public virtual IBinder::DeathRecipient,
public virtual CameraProviderManager::StatusListener,
- public virtual IServiceManager::LocalRegistrationCallback
+ public virtual IServiceManager::LocalRegistrationCallback,
+ public AttributionAndPermissionUtilsEncapsulator
{
friend class BinderService<CameraService>;
friend class CameraOfflineSessionClient;
@@ -119,7 +121,9 @@
// Non-null arguments for cameraServiceProxyWrapper should be provided for
// testing purposes only.
CameraService(std::shared_ptr<CameraServiceProxyWrapper>
- cameraServiceProxyWrapper = nullptr);
+ cameraServiceProxyWrapper = nullptr,
+ std::shared_ptr<AttributionAndPermissionUtils>
+ attributionAndPermissionUtils = nullptr);
virtual ~CameraService();
/////////////////////////////////////////////////////////////////////
@@ -248,6 +252,11 @@
/*out*/
bool* supported);
+ virtual binder::Status getSessionCharacteristics(
+ const std::string& cameraId, int targetSdkVersion, bool overrideToPortrait,
+ const SessionConfiguration& sessionConfiguration,
+ /*out*/ CameraMetadata* outMetadata);
+
// Extra permissions checks
virtual status_t onTransact(uint32_t code, const Parcel& data,
Parcel* reply, uint32_t flags);
@@ -309,10 +318,20 @@
// Shared utilities
static binder::Status filterGetInfoErrorCode(status_t err);
+ /**
+ * Returns true if the device is an automotive device and cameraId refers to a
+ * system-only camera whose AUTOMOTIVE_LOCATION characteristic is one of
+ * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT, AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
+ * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
+ */
+ bool isAutomotiveExteriorSystemCamera(const std::string& cameraId) const;
+
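A hedged sketch of how such a check can read the AUTOMOTIVE_LOCATION characteristic; it is illustrative only, and the helper name is an assumption rather than the implementation from this change:

    // Sketch only: classify a camera as exterior-mounted from its static info.
    static bool isExteriorAutomotiveLocation(const CameraMetadata& info) {
        camera_metadata_ro_entry entry = info.find(ANDROID_AUTOMOTIVE_LOCATION);
        if (entry.count != 1) {
            return false;
        }
        switch (entry.data.u8[0]) {
            case ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_LEFT:
            case ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT:
            case ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_FRONT:
            case ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_REAR:
                return true;
            default:
                return false;
        }
    }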
/////////////////////////////////////////////////////////////////////
// CameraClient functionality
- class BasicClient : public virtual RefBase {
+ class BasicClient :
+ public virtual RefBase,
+ public AttributionAndPermissionUtilsEncapsulator {
friend class CameraService;
public:
virtual status_t initialize(sp<CameraProviderManager> manager,
@@ -423,6 +442,7 @@
protected:
BasicClient(const sp<CameraService>& cameraService,
const sp<IBinder>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool nativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -536,6 +556,7 @@
// Interface used by CameraService
Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -639,13 +660,6 @@
int32_t updateAudioRestrictionLocked();
private:
- /**
- * Returns true if the device is an automotive device and cameraId is system
- * only camera which has characteristic AUTOMOTIVE_LOCATION value as either
- * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT,AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
- * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
- */
- bool isAutomotiveExteriorSystemCamera(const std::string& cameraId) const;
// TODO: b/263304156 update this to make use of a death callback for more
// robust/fault tolerant logging
@@ -661,26 +675,6 @@
return activityManager;
}
- /**
- * Pre-grants the permission if the attribution source uid is for an automotive
- * privileged client. Otherwise uses system service permission checker to check
- * for the appropriate permission. If this function is called for accessing a specific
- * camera,then the cameraID must not be empty. CameraId is used only in case of automotive
- * privileged client so that permission is pre-granted only to access system camera device
- * which is located outside of the vehicle body frame because camera located inside the vehicle
- * cabin would need user permission.
- */
- bool checkPermission(const std::string& cameraId, const std::string& permission,
- const content::AttributionSourceState& attributionSource, const std::string& message,
- int32_t attributedOpCode) const;
-
- bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid)
- const;
-
- bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
- int callingUid) const;
-
- bool hasCameraPermissions() const;
/**
* Typesafe version of device status, containing both the HAL-layer and the service interface-
* layer values.
@@ -865,19 +859,27 @@
// prevented from accessing the camera.
class SensorPrivacyPolicy : public hardware::BnSensorPrivacyListener,
public virtual IBinder::DeathRecipient,
- public virtual IServiceManager::LocalRegistrationCallback {
+ public virtual IServiceManager::LocalRegistrationCallback,
+ public AttributionAndPermissionUtilsEncapsulator {
public:
- explicit SensorPrivacyPolicy(wp<CameraService> service)
- : mService(service), mSensorPrivacyEnabled(false), mRegistered(false) {}
+ explicit SensorPrivacyPolicy(wp<CameraService> service,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
+ : AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
+ mService(service),
+ mSensorPrivacyEnabled(false),
+ mCameraPrivacyState(SensorPrivacyManager::DISABLED), mRegistered(false) {}
void registerSelf();
void unregisterSelf();
bool isSensorPrivacyEnabled();
bool isCameraPrivacyEnabled();
+ int getCameraPrivacyState();
+ bool isCameraPrivacyEnabled(const String16& packageName);
binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
bool enabled);
+ binder::Status onSensorPrivacyStateChanged(int toggleType, int sensor, int state);
// Implementation of IServiceManager::LocalRegistrationCallback
virtual void onServiceRegistration(const String16& name,
@@ -890,6 +892,7 @@
wp<CameraService> mService;
Mutex mSensorPrivacyLock;
bool mSensorPrivacyEnabled;
+ int mCameraPrivacyState;
bool mRegistered;
bool hasCameraPrivacyFeature();
@@ -926,6 +929,9 @@
const std::string& clientName, /*inout*/int& clientUid, /*inout*/int& clientPid,
/*out*/int& originalClientPid) const;
+ bool isCameraPrivacyEnabled(const String16& packageName, const std::string& cameraId,
+ int clientPid, int clientUid);
+
// Handle active client evictions, and update service state.
// Only call with with mServiceLock held.
status_t handleEvictionsLocked(const std::string& cameraId, int clientPid,
@@ -1385,6 +1391,9 @@
// Blocks all active clients.
void blockAllClients();
+ // Blocks active clients for which the camera privacy toggle is enabled.
+ void blockPrivacyEnabledClients();
+
// Overrides the UID state as if it is idle
status_t handleSetUidState(const Vector<String16>& args, int err);
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index f2d1414..14e5fad 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -17,12 +17,13 @@
#define LOG_TAG "AidlUtils"
#include <aidl/AidlUtils.h>
+#include <aidl/ExtensionMetadataTags.h>
#include <aidl/VndkVersionMetadataTags.h>
#include <aidlcommonsupport/NativeHandle.h>
+#include <camera/StringUtils.h>
#include <device3/Camera3StreamInterface.h>
#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
#include <mediautils/AImageReaderUtils.h>
-#include <camera/StringUtils.h>
namespace android::hardware::cameraservice::utils::conversion::aidl {
@@ -333,4 +334,47 @@
return OK;
}
+bool areExtensionKeysSupported(const CameraMetadata& metadata) {
+ auto requestKeys = metadata.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ if (requestKeys.count == 0) {
+ ALOGE("%s: No ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS entries!", __FUNCTION__);
+ return false;
+ }
+
+ auto resultKeys = metadata.find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
+ if (resultKeys.count == 0) {
+ ALOGE("%s: No ANDROID_REQUEST_AVAILABLE_RESULT_KEYS entries!", __FUNCTION__);
+ return false;
+ }
+
+ for (const auto& extensionKey : extension_metadata_keys) {
+ if (std::find(requestKeys.data.i32, requestKeys.data.i32 + requestKeys.count, extensionKey)
+ != requestKeys.data.i32 + requestKeys.count) {
+ return true;
+ }
+
+ if (std::find(resultKeys.data.i32, resultKeys.data.i32 + resultKeys.count, extensionKey)
+ != resultKeys.data.i32 + resultKeys.count) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+status_t filterExtensionKeys(CameraMetadata* metadata /*out*/) {
+ if (metadata == nullptr) {
+ return BAD_VALUE;
+ }
+
+ for (const auto& key : extension_metadata_keys) {
+ status_t res = metadata->erase(key);
+ if (res != OK) {
+ ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
+ return res;
+ }
+ }
+ return OK;
+}
+
} // namespace android::hardware::cameraservice::utils::conversion::aidl
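Together, areExtensionKeysSupported() and filterExtensionKeys() let callers forward extension tags only to HALs that advertise them. A hedged usage sketch (variable names are illustrative, not from this patch):

    // Sketch only: probe the static characteristics once, then strip extension
    // tags from per-request settings when the HAL does not list them.
    bool supportsExtensions = areExtensionKeysSupported(staticInfo);
    if (!supportsExtensions) {
        status_t res = filterExtensionKeys(&requestSettings);
        if (res != OK) {
            ALOGE("Failed to filter extension keys: %s (%d)", strerror(-res), res);
        }
    }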
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.h b/services/camera/libcameraservice/aidl/AidlUtils.h
index c89d7ff..562aa70 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.h
+++ b/services/camera/libcameraservice/aidl/AidlUtils.h
@@ -122,6 +122,9 @@
status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic = true);
+bool areExtensionKeysSupported(const CameraMetadata& metadata);
+
+status_t filterExtensionKeys(CameraMetadata* metadata /*out*/);
} // namespace android::hardware::cameraservice::utils::conversion::aidl
#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
diff --git a/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
new file mode 100644
index 0000000..86af36c
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <vector>
+#pragma once
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from extensions_camera_metadata_tags.mako. To be included in libcameraservice
+ * only by aidl/AidlUtils.cpp.
+ */
+
+/**
+ * Extension-related metadata keys. Used to detect whether a camera HAL advertises
+ * extension support, and to filter these keys out of capture requests when it does not.
+ */
+std::vector<camera_metadata_tag> extension_metadata_keys{
+ ANDROID_EXTENSION_STRENGTH,
+ ANDROID_EXTENSION_CURRENT_TYPE,
+ ANDROID_EFV_PADDING_ZOOM_FACTOR,
+ ANDROID_EFV_AUTO_ZOOM,
+ ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
+ ANDROID_EFV_STABILIZATION_MODE,
+ ANDROID_EFV_TRANSLATE_VIEWPORT,
+ ANDROID_EFV_ROTATE_VIEWPORT,
+ ANDROID_EFV_PADDING_REGION,
+ ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
+ ANDROID_EFV_TARGET_COORDINATES,
+};
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index e403b97..7965474 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -78,6 +78,7 @@
ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
ANDROID_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE,
+ ANDROID_EFV_PADDING_ZOOM_FACTOR_RANGE,
ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL,
ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL,
ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL,
@@ -112,6 +113,15 @@
ANDROID_CONTROL_LOW_LIGHT_BOOST_STATE,
ANDROID_CONTROL_SETTINGS_OVERRIDE,
ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
+ ANDROID_EFV_AUTO_ZOOM,
+ ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
+ ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
+ ANDROID_EFV_PADDING_REGION,
+ ANDROID_EFV_PADDING_ZOOM_FACTOR,
+ ANDROID_EFV_ROTATE_VIEWPORT,
+ ANDROID_EFV_STABILIZATION_MODE,
+ ANDROID_EFV_TARGET_COORDINATES,
+ ANDROID_EFV_TRANSLATE_VIEWPORT,
ANDROID_EXTENSION_CURRENT_TYPE,
ANDROID_EXTENSION_STRENGTH,
ANDROID_FLASH_STRENGTH_LEVEL,
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index caa6424..19e2999 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -56,6 +56,7 @@
Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraDeviceId,
@@ -68,7 +69,8 @@
bool overrideForPerfClass,
bool overrideToPortrait,
bool forceSlowJpegMode):
- Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
+ Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
+ attributionAndPermissionUtils, clientPackageName,
false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 2cb7af0..2654a25 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -103,6 +103,7 @@
Camera2Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraDeviceId,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 3488629..59828fb 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -61,6 +61,7 @@
CameraDeviceClientBase::CameraDeviceClientBase(
const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -74,6 +75,7 @@
bool overrideToPortrait) :
BasicClient(cameraService,
IInterface::asBinder(remoteCallback),
+ attributionAndPermissionUtils,
clientPackageName,
systemNativeClient,
clientFeatureId,
@@ -92,6 +94,7 @@
CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -104,7 +107,8 @@
bool overrideForPerfClass,
bool overrideToPortrait,
const std::string& originalCameraId) :
- Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
+ Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
+ attributionAndPermissionUtils, clientPackageName,
systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
overrideToPortrait),
@@ -777,60 +781,6 @@
return res;
}
-binder::Status CameraDeviceClient::getSessionCharacteristics(
- const SessionConfiguration& sessionConfiguration,
- /*out*/
- hardware::camera2::impl::CameraMetadataNative* sessionCharacteristics) {
- ATRACE_CALL();
- binder::Status res;
- status_t ret = OK;
- if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
-
- Mutex::Autolock icl(mBinderSerializationLock);
-
- if (!mDevice.get()) {
- return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
- }
-
- auto operatingMode = sessionConfiguration.getOperatingMode();
- res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
- mCameraIdStr);
- if (!res.isOk()) {
- return res;
- }
-
- camera3::metadataGetter getMetadata = [this](const std::string &id,
- bool /*overrideForPerfClass*/) {
- return mDevice->infoPhysical(id);};
- ret = mProviderManager->getSessionCharacteristics(mCameraIdStr.c_str(),
- sessionConfiguration, mOverrideForPerfClass, getMetadata,
- sessionCharacteristics);
-
- switch (ret) {
- case OK:
- // Expected, do nothing.
- break;
- case INVALID_OPERATION: {
- std::string msg = fmt::sprintf(
- "Camera %s: Session characteristics query not supported!",
- mCameraIdStr.c_str());
- ALOGD("%s: %s", __FUNCTION__, msg.c_str());
- res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
- }
-
- break;
- default: {
- std::string msg = fmt::sprintf( "Camera %s: Error: %s (%d)", mCameraIdStr.c_str(),
- strerror(-ret), ret);
- ALOGE("%s: %s", __FUNCTION__, msg.c_str());
- res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
- msg.c_str());
- }
- }
-
- return res;
-}
-
binder::Status CameraDeviceClient::deleteStream(int streamId) {
ATRACE_CALL();
ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId);
@@ -942,6 +892,11 @@
Mutex::Autolock icl(mBinderSerializationLock);
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
+
const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
outputConfiguration.getGraphicBufferProducers();
size_t numBufferProducers = bufferProducers.size();
@@ -958,7 +913,7 @@
bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
- outputConfiguration.getSurfaceType());
+ outputConfiguration.getSurfaceType(), /*isConfigurationComplete*/true);
if (!res.isOk()) {
return res;
}
@@ -1001,7 +956,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
if (!res.isOk())
return res;
@@ -1114,6 +1069,10 @@
if (!mDevice.get()) {
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
// Infer the surface info for deferred surface stream creation.
width = outputConfiguration.getWidth();
@@ -1306,6 +1265,10 @@
if (!mDevice.get()) {
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
outputConfiguration.getGraphicBufferProducers();
@@ -1373,7 +1336,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
if (!res.isOk())
return res;
@@ -1686,6 +1649,11 @@
Mutex::Autolock icl(mBinderSerializationLock);
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
+
const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
outputConfiguration.getGraphicBufferProducers();
const std::string &physicalId = outputConfiguration.getPhysicalCameraId();
@@ -1751,7 +1719,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
if (!res.isOk())
return res;
@@ -1954,9 +1922,9 @@
sp<CameraOfflineSessionClient> offlineClient;
if (offlineSession.get() != nullptr) {
offlineClient = new CameraOfflineSessionClient(sCameraService,
- offlineSession, offlineCompositeStreamMap, cameraCb, mClientPackageName,
- mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation, mClientPid, mClientUid,
- mServicePid);
+ offlineSession, offlineCompositeStreamMap, cameraCb, mAttributionAndPermissionUtils,
+ mClientPackageName, mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation,
+ mClientPid, mClientUid, mServicePid);
ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
}
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index c2f7f56..d93eaff 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -50,6 +50,7 @@
protected:
CameraDeviceClientBase(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -109,11 +110,6 @@
/*out*/
bool* streamStatus) override;
- virtual binder::Status getSessionCharacteristics(
- const SessionConfiguration& sessionConfiguration,
- /*out*/
- hardware::camera2::impl::CameraMetadataNative* sessionCharacteristics) override;
-
// Returns -EBUSY if device is not idle or in error state
virtual binder::Status deleteStream(int streamId) override;
@@ -186,6 +182,7 @@
CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool clientPackageOverride,
const std::optional<std::string>& clientFeatureId,
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 804498f..c6f3e06 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -47,6 +47,7 @@
sp<CameraOfflineSessionBase> session,
const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
const sp<ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraIdStr, int cameraFacing, int sensorOrientation,
@@ -54,6 +55,7 @@
CameraService::BasicClient(
cameraService,
IInterface::asBinder(remoteCallback),
+ attributionAndPermissionUtils,
// (v)ndk doesn't have offline session support
clientPackageName, /*overridePackageName*/false, clientFeatureId,
cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index a126f61..3a78937 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -50,6 +50,7 @@
const sp<CameraService>& cameraService,
const sp<TCamCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -63,9 +64,9 @@
bool overrideForPerfClass,
bool overrideToPortrait,
bool legacyClient):
- TClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
- clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
- clientUid, servicePid, overrideToPortrait),
+ TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientPackageName,
+ systemNativeClient, clientFeatureId, cameraId, api1CameraId, cameraFacing,
+ sensorOrientation, clientPid, clientUid, servicePid, overrideToPortrait),
mSharedCameraCallbacks(remoteCallback),
mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
mDeviceActive(false), mApi1CameraId(api1CameraId)
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 2bb90d9..b8a6d8b 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -21,6 +21,7 @@
#include "camera/CameraMetadata.h"
#include "camera/CaptureResult.h"
#include "utils/CameraServiceProxyWrapper.h"
+#include "utils/AttributionAndPermissionUtils.h"
#include "CameraServiceWatchdog.h"
namespace android {
@@ -51,6 +52,7 @@
Camera2ClientBase(const sp<CameraService>& cameraService,
const sp<TCamCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 98c1a79..15e2755 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -33,6 +33,7 @@
#include <future>
#include <inttypes.h>
#include <android_companion_virtualdevice_flags.h>
+#include <android_companion_virtualdevice_build_flags.h>
#include <android/binder_manager.h>
#include <android/hidl/manager/1.2/IServiceManager.h>
#include <hidl/ServiceManagement.h>
@@ -139,7 +140,7 @@
}
std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-CameraProviderManager::AidlServiceInteractionProxyImpl::getAidlService(
+CameraProviderManager::AidlServiceInteractionProxyImpl::getService(
const std::string& serviceName) {
using aidl::android::hardware::camera::provider::ICameraProvider;
@@ -151,15 +152,31 @@
}
if (binder == nullptr) {
- ALOGD("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
- serviceName.c_str());
+ ALOGE("%s: AIDL Camera provider HAL '%s' is not actually available, despite waiting "
+ "indefinitely?", __FUNCTION__, serviceName.c_str());
return nullptr;
}
std::shared_ptr<ICameraProvider> interface =
ICameraProvider::fromBinder(ndk::SpAIBinder(binder));
return interface;
-};
+}
+
+std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+CameraProviderManager::AidlServiceInteractionProxyImpl::tryGetService(
+ const std::string& serviceName) {
+ using aidl::android::hardware::camera::provider::ICameraProvider;
+
+ std::shared_ptr<ICameraProvider> interface = ICameraProvider::fromBinder(
+ ndk::SpAIBinder(AServiceManager_checkService(serviceName.c_str())));
+ if (interface == nullptr) {
+ ALOGD("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
+ serviceName.c_str());
+ return nullptr;
+ }
+
+ return interface;
+}
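getService() and tryGetService() differ only in which NDK service-manager entry point they wrap. A minimal sketch of the distinction, assuming the standard <android/binder_manager.h> calls:

    // Sketch only: blocking vs. non-blocking service lookup.
    // AServiceManager_waitForService() starts lazy services and blocks until ready;
    // AServiceManager_checkService() returns null unless the service is already up.
    ndk::SpAIBinder blockingLookup(const std::string& name) {
        return ndk::SpAIBinder(AServiceManager_waitForService(name.c_str()));
    }
    ndk::SpAIBinder nonBlockingLookup(const std::string& name) {
        return ndk::SpAIBinder(AServiceManager_checkService(name.c_str()));
    }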
static std::string getFullAidlProviderName(const std::string instance) {
std::string aidlHalServiceDescriptor =
@@ -442,19 +459,31 @@
return OK;
}
-status_t CameraProviderManager::getSessionCharacteristics(const std::string& id,
- const SessionConfiguration &configuration, bool overrideForPerfClass,
- metadataGetter getMetadata,
- CameraMetadata* sessionCharacteristics /*out*/) const {
+status_t CameraProviderManager::getSessionCharacteristics(
+ const std::string& id, const SessionConfiguration& configuration, bool overrideForPerfClass,
+ bool overrideToPortrait, CameraMetadata* sessionCharacteristics /*out*/) const {
if (!flags::feature_combination_query()) {
return INVALID_OPERATION;
}
+
std::lock_guard<std::mutex> lock(mInterfaceMutex);
auto deviceInfo = findDeviceInfoLocked(id);
if (deviceInfo == nullptr) {
return NAME_NOT_FOUND;
}
+ metadataGetter getMetadata = [this, overrideToPortrait](const std::string& id,
+ bool overrideForPerfClass) {
+ CameraMetadata metadata;
+ status_t ret = this->getCameraCharacteristicsLocked(id, overrideForPerfClass, &metadata,
+ overrideToPortrait);
+ if (ret != OK) {
+ ALOGE("%s: Could not get CameraCharacteristics for device %s", __FUNCTION__,
+ id.c_str());
+ }
+ return metadata;
+ };
+
return deviceInfo->getSessionCharacteristics(configuration,
overrideForPerfClass, getMetadata, sessionCharacteristics);
}
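Since the metadataGetter is now built inside CameraProviderManager, callers pass only identifiers and flags. A hedged sketch of the resulting call; the variable names and flag values are illustrative:

    // Sketch only: querying session characteristics with the new signature.
    CameraMetadata sessionCharacteristics;
    status_t res = providerManager->getSessionCharacteristics(
            cameraId, sessionConfiguration,
            /*overrideForPerfClass*/ false,
            /*overrideToPortrait*/ false,
            &sessionCharacteristics);
    if (res != OK) {
        ALOGE("Session characteristics query failed: %s (%d)", strerror(-res), res);
    }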
@@ -2096,8 +2125,14 @@
const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
using aidl::android::hardware::camera::provider::ICameraProvider;
- std::shared_ptr<ICameraProvider> interface =
- mAidlServiceProxy->getAidlService(providerName.c_str());
+ std::shared_ptr<ICameraProvider> interface;
+ if (flags::delay_lazy_hal_instantiation()) {
+ // Only get remote instance if already running. Lazy Providers will be
+ // woken up later.
+ interface = mAidlServiceProxy->tryGetService(providerName);
+ } else {
+ interface = mAidlServiceProxy->getService(providerName);
+ }
if (interface == nullptr) {
ALOGW("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
@@ -3244,7 +3279,8 @@
}
bool CameraProviderManager::isVirtualCameraHalEnabled() {
- return vd_flags::virtual_camera_service_discovery();
+ return vd_flags::virtual_camera_service_discovery() &&
+ vd_flags::virtual_camera_service_build_flag();
}
} // namespace android
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 53a2102..5ff3fcd 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -178,9 +178,15 @@
// Proxy to inject fake services in test.
class AidlServiceInteractionProxy {
public:
- // Returns the Aidl service with the given serviceName
+ // Returns the AIDL service with the given serviceName. Will wait indefinitely
+ // for the service to come up if not running.
virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
- getAidlService(const std::string& serviceName) = 0;
+ getService(const std::string& serviceName) = 0;
+
+ // Attempts to get an already running AIDL service of the given serviceName.
+ // Returns nullptr immediately if service is not running.
+ virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+ tryGetService(const std::string& serviceName) = 0;
virtual ~AidlServiceInteractionProxy() = default;
};
@@ -190,7 +196,10 @@
class AidlServiceInteractionProxyImpl : public AidlServiceInteractionProxy {
public:
virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
- getAidlService(const std::string& serviceName) override;
+ getService(const std::string& serviceName) override;
+
+ virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+ tryGetService(const std::string& serviceName) override;
};
/**
@@ -321,7 +330,8 @@
*/
status_t getSessionCharacteristics(const std::string& id,
const SessionConfiguration &configuration,
- bool overrideForPerfClass, camera3::metadataGetter getMetadata,
+ bool overrideForPerfClass,
+ bool overrideToPortrait,
CameraMetadata* sessionCharacteristics /*out*/) const;
/**
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 921ee43..a721d28 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -297,9 +297,10 @@
}
ALOGV("Camera provider actually needs restart, calling getService(%s)", mProviderName.c_str());
- interface = mManager->mAidlServiceProxy->getAidlService(mProviderName.c_str());
+ interface = mManager->mAidlServiceProxy->getService(mProviderName);
+
if (interface == nullptr) {
- ALOGD("%s: %s service not started", __FUNCTION__, mProviderName.c_str());
+ ALOGE("%s: %s service not started", __FUNCTION__, mProviderName.c_str());
return nullptr;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index c0a0544..9792089 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -78,6 +78,7 @@
using namespace android::camera3;
using namespace android::camera3::SessionConfigurationUtils;
using namespace android::hardware::camera;
+using namespace android::hardware::cameraservice::utils::conversion::aidl;
namespace flags = com::android::internal::camera::flags;
namespace android {
@@ -254,6 +255,8 @@
return res;
}
+ mSupportsExtensionKeys = areExtensionKeysSupported(mDeviceInfo);
+
return OK;
}
@@ -2531,10 +2534,13 @@
config.streams = streams.editArray();
config.hal_buffer_managed_streams = mHalBufManagedStreamIds;
+ config.use_hal_buf_manager = mUseHalBufManager;
// Do the HAL configuration; will potentially touch stream
- // max_buffers, usage, and priv fields, as well as data_space and format
- // fields for IMPLEMENTATION_DEFINED formats.
+ // max_buffers, usage, and priv fields, the data_space and format fields for
+ // IMPLEMENTATION_DEFINED formats, as well as HAL-buffer-managed streams and
+ // use_hal_buf_manager (in case the aconfig flag session_hal_buf_manager is
+ // not enabled but the HAL supports a session-specific HAL buffer manager).
int64_t logId = mCameraServiceProxyWrapper->getCurrentLogIdForCamera(mId);
const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
@@ -2554,6 +2560,8 @@
strerror(-res), res);
return res;
}
+ // The HAL buffer manager behavior may have been changed by the
+ // configureStreams call.
mUseHalBufManager = config.use_hal_buf_manager;
if (flags::session_hal_buf_manager()) {
bool prevSessionHalBufManager = (mHalBufManagedStreamIds.size() != 0);
@@ -3887,13 +3895,22 @@
for (it = captureRequest->mSettingsList.begin();
it != captureRequest->mSettingsList.end(); it++) {
- res = hardware::cameraservice::utils::conversion::aidl::filterVndkKeys(
- mVndkVersion, it->metadata, false /*isStatic*/);
+ res = filterVndkKeys(mVndkVersion, it->metadata, false /*isStatic*/);
if (res != OK) {
SET_ERR("RequestThread: Failed during VNDK filter of capture requests "
"%d: %s (%d)", halRequest->frame_number, strerror(-res), res);
return INVALID_OPERATION;
}
+
+ if (!parent->mSupportsExtensionKeys) {
+ res = filterExtensionKeys(&it->metadata);
+ if (res != OK) {
+ SET_ERR("RequestThread: Failed during extension filter of capture "
+ "requests %d: %s (%d)", halRequest->frame_number,
+ strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+ }
}
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 498ef55..fa063b8 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -1060,7 +1060,7 @@
static const nsecs_t kRequestTimeout = 50e6; // 50 ms
// TODO: does this need to be adjusted for long exposure requests?
- static const nsecs_t kRequestSubmitTimeout = 200e6; // 200 ms
+ static const nsecs_t kRequestSubmitTimeout = 500e6; // 500 ms
// Used to prepare a batch of requests.
struct NextRequest {
@@ -1523,6 +1523,9 @@
// AE_TARGET_FPS_RANGE
bool mIsFixedFps = false;
+ // Whether the HAL advertises extension-related metadata keys (if not, they are filtered from requests)
+ bool mSupportsExtensionKeys = false;
+
// Injection camera related methods.
class Camera3DeviceInjectionMethods : public virtual RefBase {
public:
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index f98636b..2ce04e8 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -235,65 +235,6 @@
return OK;
}
-status_t Camera3OutputStream::getBuffersLocked(std::vector<OutstandingBuffer>* outBuffers) {
- status_t res;
-
- if ((res = getBufferPreconditionCheckLocked()) != OK) {
- return res;
- }
-
- if (mUseBufferManager) {
- ALOGE("%s: stream %d is managed by buffer manager and does not support batch operation",
- __FUNCTION__, mId);
- return INVALID_OPERATION;
- }
-
- sp<Surface> consumer = mConsumer;
- /**
- * Release the lock briefly to avoid deadlock for below scenario:
- * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
- * This thread acquired StreamingProcessor lock and try to lock Camera3Stream lock.
- * Thread 2: Camera3Stream::returnBuffer->StreamingProcessor::onFrameAvailable().
- * This thread acquired Camera3Stream lock and bufferQueue lock, and try to lock
- * StreamingProcessor lock.
- * Thread 3: Camera3Stream::getBuffer(). This thread acquired Camera3Stream lock
- * and try to lock bufferQueue lock.
- * Then there is circular locking dependency.
- */
- mLock.unlock();
-
- size_t numBuffersRequested = outBuffers->size();
- std::vector<Surface::BatchBuffer> buffers(numBuffersRequested);
-
- nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
- res = consumer->dequeueBuffers(&buffers);
- nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
- mDequeueBufferLatency.add(dequeueStart, dequeueEnd);
-
- mLock.lock();
-
- if (res != OK) {
- if (shouldLogError(res, mState)) {
- ALOGE("%s: Stream %d: Can't dequeue %zu output buffers: %s (%d)",
- __FUNCTION__, mId, numBuffersRequested, strerror(-res), res);
- }
- checkRetAndSetAbandonedLocked(res);
- return res;
- }
- checkRemovedBuffersLocked();
-
- /**
- * FenceFD now owned by HAL except in case of error,
- * in which case we reassign it to acquire_fence
- */
- for (size_t i = 0; i < numBuffersRequested; i++) {
- handoutBufferLocked(*(outBuffers->at(i).outBuffer),
- &(buffers[i].buffer->handle), /*acquireFence*/buffers[i].fenceFd,
- /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
- }
- return OK;
-}
-
status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
ANativeWindowBuffer* buffer, int anwReleaseFence,
const std::vector<size_t>&) {
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 65791a9..da0ed87 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -388,8 +388,6 @@
virtual status_t getBufferLocked(camera_stream_buffer *buffer,
const std::vector<size_t>& surface_ids);
- virtual status_t getBuffersLocked(/*out*/std::vector<OutstandingBuffer>* buffers) override;
-
virtual status_t returnBufferLocked(
const camera_stream_buffer &buffer,
nsecs_t timestamp, nsecs_t readoutTimestamp,
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 5d5c54c..89e08a1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -499,7 +499,8 @@
states.inflightIntf.onInflightEntryRemovedLocked(duration);
}
-void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
+void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx,
+ std::vector<BufferToReturn> *returnableBuffers) {
InFlightRequestMap& inflightMap = states.inflightMap;
const InFlightRequest &request = inflightMap.valueAt(idx);
const uint32_t frameNumber = inflightMap.keyAt(idx);
@@ -537,12 +538,13 @@
assert(request.requestStatus != OK ||
request.pendingOutputBuffers.size() == 0);
- returnOutputBuffers(
+ collectReturnableOutputBuffers(
states.useHalBufManager, states.halBufManagedStreamIds,
states.listener,
request.pendingOutputBuffers.array(),
request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
/*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
+ /*out*/ returnableBuffers,
/*timestampIncreasing*/true,
request.outputSurfaces, request.resultExtras,
request.errorBufStrategy, request.transform);
@@ -637,6 +639,7 @@
// in-flight request and they will be returned when the shutter timestamp
// arrives. Update the in-flight status and remove the in-flight entry if
// all result data and shutter timestamp have been received.
+ std::vector<BufferToReturn> returnableBuffers{};
nsecs_t shutterTimestamp = 0;
{
std::lock_guard<std::mutex> l(states.inflightLock);
@@ -798,10 +801,11 @@
request.pendingOutputBuffers.appendArray(result->output_buffers,
result->num_output_buffers);
if (shutterTimestamp != 0) {
- returnAndRemovePendingOutputBuffers(
+ collectAndRemovePendingOutputBuffers(
states.useHalBufManager, states.halBufManagedStreamIds,
states.listener,
- request, states.sessionStatsBuilder);
+ request, states.sessionStatsBuilder,
+ /*out*/ &returnableBuffers);
}
if (result->result != NULL && !isPartialResult) {
@@ -826,9 +830,18 @@
request.physicalMetadatas);
}
}
- removeInFlightRequestIfReadyLocked(states, idx);
+ removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+ if (!flags::return_buffers_outside_locks()) {
+ finishReturningOutputBuffers(returnableBuffers,
+ states.listener, states.sessionStatsBuilder);
+ }
} // scope for states.inFlightLock
+ if (flags::return_buffers_outside_locks()) {
+ finishReturningOutputBuffers(returnableBuffers,
+ states.listener, states.sessionStatsBuilder);
+ }
+
if (result->input_buffer != NULL) {
if (hasInputBufferInRequest) {
Camera3Stream *stream =
@@ -849,17 +862,17 @@
}
}
-void returnOutputBuffers(
+void collectReturnableOutputBuffers(
bool useHalBufManager,
const std::set<int32_t> &halBufferManagedStreams,
sp<NotificationListener> listener,
const camera_stream_buffer_t *outputBuffers, size_t numBuffers,
nsecs_t timestamp, nsecs_t readoutTimestamp, bool requested,
nsecs_t requestTimeNs, SessionStatsBuilder& sessionStatsBuilder,
+ /*out*/ std::vector<BufferToReturn> *returnableBuffers,
bool timestampIncreasing, const SurfaceMap& outputSurfaces,
- const CaptureResultExtras &inResultExtras,
+ const CaptureResultExtras &resultExtras,
ERROR_BUF_STRATEGY errorBufStrategy, int32_t transform) {
-
for (size_t i = 0; i < numBuffers; i++)
{
Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
@@ -869,7 +882,7 @@
if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR &&
errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
if (listener != nullptr) {
- CaptureResultExtras extras = inResultExtras;
+ CaptureResultExtras extras = resultExtras;
extras.errorStreamId = streamId;
listener->notifyError(
hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
@@ -894,22 +907,35 @@
}
const auto& it = outputSurfaces.find(streamId);
- status_t res = OK;
// Do not return the buffer if the buffer status is error, and the error
// buffer strategy is CACHE.
if (outputBuffers[i].status != CAMERA_BUFFER_STATUS_ERROR ||
errorBufStrategy != ERROR_BUF_CACHE) {
if (it != outputSurfaces.end()) {
- res = stream->returnBuffer(
+ returnableBuffers->emplace_back(stream,
outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
- it->second, inResultExtras.frameNumber, transform);
+ it->second, resultExtras,
+ transform, requested ? requestTimeNs : 0);
} else {
- res = stream->returnBuffer(
+ returnableBuffers->emplace_back(stream,
outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
- std::vector<size_t> (), inResultExtras.frameNumber, transform);
+ std::vector<size_t> (), resultExtras,
+ transform, requested ? requestTimeNs : 0 );
}
}
+ }
+}
+
+void finishReturningOutputBuffers(const std::vector<BufferToReturn> &returnableBuffers,
+ sp<NotificationListener> listener, SessionStatsBuilder& sessionStatsBuilder) {
+ for (auto& b : returnableBuffers) {
+ const int streamId = b.stream->getId();
+
+ status_t res = b.stream->returnBuffer(b.buffer, b.timestamp,
+ b.readoutTimestamp, b.timestampIncreasing,
+ b.surfaceIds, b.resultExtras.frameNumber, b.transform);
+
// Note: stream may be deallocated at this point, if this buffer was
// the last reference to it.
bool dropped = false;
@@ -920,51 +946,55 @@
ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
dropped = true;
} else {
- if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR || timestamp == 0) {
+ if (b.buffer.status == CAMERA_BUFFER_STATUS_ERROR || b.timestamp == 0) {
dropped = true;
}
}
- if (requested) {
+ if (b.requestTimeNs > 0) {
nsecs_t bufferTimeNs = systemTime();
- int32_t captureLatencyMs = ns2ms(bufferTimeNs - requestTimeNs);
+ int32_t captureLatencyMs = ns2ms(bufferTimeNs - b.requestTimeNs);
sessionStatsBuilder.incCounter(streamId, dropped, captureLatencyMs);
}
// Long processing consumers can cause returnBuffer timeout for shared stream
// If that happens, cancel the buffer and send a buffer error to client
- if (it != outputSurfaces.end() && res == TIMED_OUT &&
- outputBuffers[i].status == CAMERA_BUFFER_STATUS_OK) {
+ if (b.surfaceIds.size() > 0 && res == TIMED_OUT &&
+ b.buffer.status == CAMERA_BUFFER_STATUS_OK) {
// cancel the buffer
- camera_stream_buffer_t sb = outputBuffers[i];
+ camera_stream_buffer_t sb = b.buffer;
sb.status = CAMERA_BUFFER_STATUS_ERROR;
- stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
- timestampIncreasing, std::vector<size_t> (),
- inResultExtras.frameNumber, transform);
+ b.stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
+ b.timestampIncreasing, std::vector<size_t> (),
+ b.resultExtras.frameNumber, b.transform);
if (listener != nullptr) {
- CaptureResultExtras extras = inResultExtras;
+ CaptureResultExtras extras = b.resultExtras;
extras.errorStreamId = streamId;
listener->notifyError(
hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
extras);
}
}
+
}
}
-void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
+void collectAndRemovePendingOutputBuffers(bool useHalBufManager,
const std::set<int32_t> &halBufferManagedStreams,
sp<NotificationListener> listener, InFlightRequest& request,
- SessionStatsBuilder& sessionStatsBuilder) {
+ SessionStatsBuilder& sessionStatsBuilder,
+ std::vector<BufferToReturn> *returnableBuffers) {
bool timestampIncreasing =
!((request.zslCapture && request.stillCapture) || request.hasInputBuffer);
nsecs_t readoutTimestamp = request.resultExtras.hasReadoutTimestamp ?
request.resultExtras.readoutTimestamp : 0;
- returnOutputBuffers(useHalBufManager, halBufferManagedStreams, listener,
+ collectReturnableOutputBuffers(useHalBufManager, halBufferManagedStreams, listener,
request.pendingOutputBuffers.array(),
request.pendingOutputBuffers.size(),
request.shutterTimestamp, readoutTimestamp,
- /*requested*/true, request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
+ /*requested*/true, request.requestTimeNs, sessionStatsBuilder,
+ /*out*/ returnableBuffers,
+ timestampIncreasing,
request.outputSurfaces, request.resultExtras,
request.errorBufStrategy, request.transform);
@@ -984,6 +1014,9 @@
ATRACE_CALL();
ssize_t idx;
+ std::vector<BufferToReturn> returnableBuffers{};
+ CaptureResultExtras pendingNotificationResultExtras{};
+
// Set timestamp for the request in the in-flight tracking
// and get the request ID to send upstream
{
@@ -1050,9 +1083,13 @@
states.lastCompletedReprocessFrameNumber;
r.resultExtras.lastCompletedZslFrameNumber =
states.lastCompletedZslFrameNumber;
- states.listener->notifyShutter(r.resultExtras, msg.timestamp);
+ if (flags::return_buffers_outside_locks()) {
+ pendingNotificationResultExtras = r.resultExtras;
+ } else {
+ states.listener->notifyShutter(r.resultExtras, msg.timestamp);
+ }
}
- // send pending result and buffers
+ // send pending result and buffers; this queues them up for delivery later
const auto& cameraIdsWithZoom = getCameraIdsWithZoomLocked(
inflightMap, r.pendingMetadata, r.cameraIdsWithZoom);
sendCaptureResult(states,
@@ -1061,17 +1098,35 @@
r.hasInputBuffer, r.zslCapture && r.stillCapture,
r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
}
- returnAndRemovePendingOutputBuffers(
+ collectAndRemovePendingOutputBuffers(
states.useHalBufManager, states.halBufManagedStreamIds,
- states.listener, r, states.sessionStatsBuilder);
+ states.listener, r, states.sessionStatsBuilder, &returnableBuffers);
- removeInFlightRequestIfReadyLocked(states, idx);
+ if (!flags::return_buffers_outside_locks()) {
+ finishReturningOutputBuffers(returnableBuffers,
+ states.listener, states.sessionStatsBuilder);
+ }
+
+ removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+
}
}
if (idx < 0) {
SET_ERR("Shutter notification for non-existent frame number %d",
msg.frame_number);
}
+ // Call notifyShutter outside of in-flight mutex
+ if (flags::return_buffers_outside_locks() && pendingNotificationResultExtras.isValid()) {
+ states.listener->notifyShutter(pendingNotificationResultExtras, msg.timestamp);
+ }
+
+ // With no locks held, finish returning buffers to streams, which may take a while since
+ // binder calls are involved
+ if (flags::return_buffers_outside_locks()) {
+ finishReturningOutputBuffers(returnableBuffers,
+ states.listener, states.sessionStatsBuilder);
+ }
+
}
void notifyError(CaptureOutputStates& states, const camera_error_msg_t &msg) {
@@ -1117,6 +1172,8 @@
break;
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
+ {
+ std::vector<BufferToReturn> returnableBuffers{};
{
std::lock_guard<std::mutex> l(states.inflightLock);
ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
@@ -1153,7 +1210,12 @@
// Check whether the buffers returned. If they returned,
// remove inflight request.
- removeInFlightRequestIfReadyLocked(states, idx);
+ removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+ if (!flags::return_buffers_outside_locks()) {
+ finishReturningOutputBuffers(returnableBuffers,
+ states.listener, states.sessionStatsBuilder);
+ }
+
}
} else {
resultExtras.frameNumber = msg.frame_number;
@@ -1162,6 +1224,12 @@
resultExtras.frameNumber);
}
}
+
+ if (flags::return_buffers_outside_locks()) {
+ finishReturningOutputBuffers(returnableBuffers,
+ states.listener, states.sessionStatsBuilder);
+ }
+
resultExtras.errorStreamId = streamId;
if (states.listener != nullptr) {
states.listener->notifyError(errorCode, resultExtras);
@@ -1170,6 +1238,7 @@
states.cameraId.c_str(), __FUNCTION__);
}
break;
+ }
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
// Do not depend on HAL ERROR_CAMERA_BUFFER to send buffer error
// callback to the app. Rather, use STATUS_ERROR of image buffers.
@@ -1199,18 +1268,24 @@
void flushInflightRequests(FlushInflightReqStates& states) {
ATRACE_CALL();
+ std::vector<BufferToReturn> returnableBuffers{};
{ // First return buffers cached in inFlightMap
std::lock_guard<std::mutex> l(states.inflightLock);
for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
const InFlightRequest &request = states.inflightMap.valueAt(idx);
- returnOutputBuffers(
+ collectReturnableOutputBuffers(
states.useHalBufManager, states.halBufManagedStreamIds,
states.listener,
request.pendingOutputBuffers.array(),
request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
/*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
+ /*out*/ &returnableBuffers,
/*timestampIncreasing*/true, request.outputSurfaces, request.resultExtras,
request.errorBufStrategy);
+ if (!flags::return_buffers_outside_locks()) {
+ finishReturningOutputBuffers(returnableBuffers,
+ states.listener, states.sessionStatsBuilder);
+ }
ALOGW("%s: Frame %d | Timestamp: %" PRId64 ", metadata"
" arrived: %s, buffers left: %d.\n", __FUNCTION__,
states.inflightMap.keyAt(idx), request.shutterTimestamp,
@@ -1221,6 +1296,10 @@
states.inflightMap.clear();
states.inflightIntf.onInflightMapFlushedLocked();
}
+ if (flags::return_buffers_outside_locks()) {
+ finishReturningOutputBuffers(returnableBuffers,
+ states.listener, states.sessionStatsBuilder);
+ }
// Then return all inflight buffers not returned by HAL
std::vector<std::pair<int32_t, int32_t>> inflightKeys;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index d155fa2..75864d7 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -44,16 +44,50 @@
* Helper methods shared between Camera3Device/Camera3OfflineSession for HAL callbacks
*/
- // helper function to return the output buffers to output streams. The
- // function also optionally calls notify(ERROR_BUFFER).
- void returnOutputBuffers(
+ struct BufferToReturn {
+ Camera3StreamInterface *stream;
+ camera_stream_buffer_t buffer;
+ nsecs_t timestamp;
+ nsecs_t readoutTimestamp;
+ bool timestampIncreasing;
+ std::vector<size_t> surfaceIds;
+ const CaptureResultExtras resultExtras;
+ int32_t transform;
+ nsecs_t requestTimeNs;
+
+ BufferToReturn(Camera3StreamInterface *stream,
+ camera_stream_buffer_t buffer,
+ nsecs_t timestamp, nsecs_t readoutTimestamp,
+ bool timestampIncreasing, std::vector<size_t> surfaceIds,
+ const CaptureResultExtras &resultExtras,
+ int32_t transform, nsecs_t requestTimeNs):
+ stream(stream),
+ buffer(buffer),
+ timestamp(timestamp),
+ readoutTimestamp(readoutTimestamp),
+ timestampIncreasing(timestampIncreasing),
+ surfaceIds(surfaceIds),
+ resultExtras(resultExtras),
+ transform(transform),
+ requestTimeNs(requestTimeNs) {}
+ };
+
+ // Helper function to collect the output buffers that are ready to be returned
+ // to output streams; it also optionally calls notify(ERROR_BUFFER). The buffers
+ // to hand back to streams are placed in returnableBuffers. Does not make any
+ // two-way binder calls, so it is suitable for use while critical locks are held.
+ void collectReturnableOutputBuffers(
bool useHalBufManager,
const std::set<int32_t> &halBufferManagedStreams,
sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
const camera_stream_buffer_t *outputBuffers,
size_t numBuffers, nsecs_t timestamp,
nsecs_t readoutTimestamp, bool requested, nsecs_t requestTimeNs,
- SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing = true,
+ SessionStatsBuilder& sessionStatsBuilder,
+ /*out*/ std::vector<BufferToReturn> *returnableBuffers,
+ bool timestampIncreasing = true,
// The following arguments are only meant for surface sharing use case
const SurfaceMap& outputSurfaces = SurfaceMap{},
// Used to send buffer error callback when failing to return buffer
@@ -61,14 +95,24 @@
ERROR_BUF_STRATEGY errorBufStrategy = ERROR_BUF_RETURN,
int32_t transform = -1);
- // helper function to return the output buffers to output streams, and
- // remove the returned buffers from the inflight request's pending buffers
- // vector.
- void returnAndRemovePendingOutputBuffers(
+ // helper function to collect the output buffers ready to be
+ // returned to output streams, and to remove these buffers from
+ // the inflight request's pending buffers vector. Does not make
+ // any two-way binder calls, so it is suitable for use while critical
+ // locks are held.
+ void collectAndRemovePendingOutputBuffers(
bool useHalBufManager,
const std::set<int32_t> &halBufferManagedStreams,
sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
- InFlightRequest& request, SessionStatsBuilder& sessionStatsBuilder);
+ InFlightRequest& request, SessionStatsBuilder& sessionStatsBuilder,
+ /*out*/ std::vector<BufferToReturn> *returnableBuffers);
+
+ // Actually returns filled output buffers to their consumers, using the list
+ // provided by collectReturnableOutputBuffers / collectAndRemovePendingOutputBuffers.
+ // Makes two-way binder calls to applications, so do not hold any critical locks
+ // when calling.
+ void finishReturningOutputBuffers(const std::vector<BufferToReturn> &returnableBuffers,
+ sp<NotificationListener> listener, SessionStatsBuilder& sessionStatsBuilder);
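The collect/finish split exists so that buffer returns, which can involve two-way binder calls into the application, happen only after the in-flight locks are dropped. A hedged sketch of the calling pattern, mirroring the flag-guarded call sites in Camera3OutputUtils.cpp:

    // Sketch only: two-phase buffer return (collect under the lock, return after).
    std::vector<BufferToReturn> returnableBuffers;
    {
        std::lock_guard<std::mutex> l(states.inflightLock);
        collectAndRemovePendingOutputBuffers(
                states.useHalBufManager, states.halBufManagedStreamIds,
                states.listener, request, states.sessionStatsBuilder,
                /*out*/ &returnableBuffers);
    }   // inflightLock released here
    // Safe to block on binder calls to stream consumers now.
    finishReturningOutputBuffers(returnableBuffers, states.listener,
            states.sessionStatsBuilder);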
// Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
// callbacks
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
index 2c30b15..aca7a67 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
@@ -345,10 +345,15 @@
continue;
}
streamBuffer.stream = stream->asHalStream();
- returnOutputBuffers(states.useHalBufManager, states.halBufManagedStreamIds,
+ std::vector<BufferToReturn> returnableBuffers{};
+ collectReturnableOutputBuffers(states.useHalBufManager, states.halBufManagedStreamIds,
/*listener*/nullptr, &streamBuffer, /*size*/1, /*timestamp*/ 0,
/*readoutTimestamp*/0, /*requested*/false, /*requestTimeNs*/0,
+ states.sessionStatsBuilder,
+ /*out*/&returnableBuffers);
+ finishReturningOutputBuffers(returnableBuffers, /*listener*/ nullptr,
states.sessionStatsBuilder);
+
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 701c472..79a767a 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -969,11 +969,6 @@
return INVALID_OPERATION;
}
-status_t Camera3Stream::getBuffersLocked(std::vector<OutstandingBuffer>*) {
- ALOGE("%s: This type of stream does not support output", __FUNCTION__);
- return INVALID_OPERATION;
-}
-
status_t Camera3Stream::returnBufferLocked(const camera_stream_buffer &,
nsecs_t, nsecs_t, int32_t, const std::vector<size_t>&) {
ALOGE("%s: This type of stream does not support output", __FUNCTION__);
@@ -1047,92 +1042,6 @@
mBufferFreedListener = listener;
}
-status_t Camera3Stream::getBuffers(std::vector<OutstandingBuffer>* buffers,
- nsecs_t waitBufferTimeout) {
- ATRACE_CALL();
- Mutex::Autolock l(mLock);
- status_t res = OK;
-
- if (buffers == nullptr) {
- ALOGI("%s: buffers must not be null!", __FUNCTION__);
- return BAD_VALUE;
- }
-
- size_t numBuffersRequested = buffers->size();
- if (numBuffersRequested == 0) {
- ALOGE("%s: 0 buffers are requested!", __FUNCTION__);
- return BAD_VALUE;
- }
-
- // This function should be only called when the stream is configured already.
- if (mState != STATE_CONFIGURED) {
- ALOGE("%s: Stream %d: Can't get buffers if stream is not in CONFIGURED state %d",
- __FUNCTION__, mId, mState);
- if (mState == STATE_ABANDONED) {
- return DEAD_OBJECT;
- } else {
- return INVALID_OPERATION;
- }
- }
-
- size_t numOutstandingBuffers = getHandoutOutputBufferCountLocked();
- size_t numCachedBuffers = getCachedOutputBufferCountLocked();
- size_t maxNumCachedBuffers = getMaxCachedOutputBuffersLocked();
- // Wait for new buffer returned back if we are running into the limit. There
- // are 2 limits:
- // 1. The number of HAL buffers is greater than max_buffers
- // 2. The number of HAL buffers + cached buffers is greater than max_buffers
- // + maxCachedBuffers
- while (numOutstandingBuffers + numBuffersRequested > camera_stream::max_buffers ||
- numOutstandingBuffers + numCachedBuffers + numBuffersRequested >
- camera_stream::max_buffers + maxNumCachedBuffers) {
- ALOGV("%s: Already dequeued %zu(+%zu) output buffers and requesting %zu "
- "(max is %d(+%zu)), waiting.", __FUNCTION__, numOutstandingBuffers,
- numCachedBuffers, numBuffersRequested, camera_stream::max_buffers,
- maxNumCachedBuffers);
- nsecs_t waitStart = systemTime(SYSTEM_TIME_MONOTONIC);
- if (waitBufferTimeout < kWaitForBufferDuration) {
- waitBufferTimeout = kWaitForBufferDuration;
- }
- res = mOutputBufferReturnedSignal.waitRelative(mLock, waitBufferTimeout);
- nsecs_t waitEnd = systemTime(SYSTEM_TIME_MONOTONIC);
- mBufferLimitLatency.add(waitStart, waitEnd);
- if (res != OK) {
- if (res == TIMED_OUT) {
- ALOGE("%s: wait for output buffer return timed out after %lldms (max_buffers %d)",
- __FUNCTION__, waitBufferTimeout / 1000000LL,
- camera_stream::max_buffers);
- }
- return res;
- }
- size_t updatedNumOutstandingBuffers = getHandoutOutputBufferCountLocked();
- size_t updatedNumCachedBuffers = getCachedOutputBufferCountLocked();
- if (updatedNumOutstandingBuffers >= numOutstandingBuffers &&
- updatedNumCachedBuffers == numCachedBuffers) {
- ALOGE("%s: outstanding buffer count goes from %zu to %zu, "
- "getBuffer(s) call must not run in parallel!", __FUNCTION__,
- numOutstandingBuffers, updatedNumOutstandingBuffers);
- return INVALID_OPERATION;
- }
- numOutstandingBuffers = updatedNumOutstandingBuffers;
- numCachedBuffers = updatedNumCachedBuffers;
- }
-
- res = getBuffersLocked(buffers);
- if (res == OK) {
- for (auto& outstandingBuffer : *buffers) {
- camera_stream_buffer* buffer = outstandingBuffer.outBuffer;
- fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true);
- if (buffer->buffer) {
- Mutex::Autolock l(mOutstandingBuffersLock);
- mOutstandingBuffers.push_back(*buffer->buffer);
- }
- }
- }
-
- return res;
-}
-
void Camera3Stream::queueHDRMetadata(buffer_handle_t buffer, sp<ANativeWindow>& anw,
int64_t dynamicRangeProfile) {
auto& mapper = GraphicBufferMapper::get();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index f06ccf3..0df09cd 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -343,12 +343,6 @@
const std::vector<size_t>& surface_ids = std::vector<size_t>());
/**
- * Similar to getBuffer() except this method fills multiple buffers.
- */
- status_t getBuffers(std::vector<OutstandingBuffer>* buffers,
- nsecs_t waitBufferTimeout);
-
- /**
* Return a buffer to the stream after use by the HAL.
*
* Multiple surfaces could share the same HAL stream, but a request may
@@ -535,8 +529,6 @@
nsecs_t timestamp, nsecs_t readoutTimestamp, int32_t transform,
const std::vector<size_t>& surface_ids = std::vector<size_t>());
- virtual status_t getBuffersLocked(std::vector<OutstandingBuffer>*);
-
virtual status_t getInputBufferLocked(camera_stream_buffer *buffer, Size* size);
virtual status_t returnInputBufferLocked(
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 7fa6273..26fa04f 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -395,11 +395,6 @@
*/
std::vector<size_t> surface_ids;
};
- /**
- * Similar to getBuffer() except this method fills multiple buffers.
- */
- virtual status_t getBuffers(std::vector<OutstandingBuffer>* buffers,
- nsecs_t waitBufferTimeout) = 0;
/**
* Return a buffer to the stream after use by the HAL.
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 13c500f..e8ef692 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -913,7 +913,6 @@
std::set<int> activeStreams;
camera::device::StreamConfiguration requestedConfiguration;
requestedConfiguration.streams.resize(config->num_streams);
- config->use_hal_buf_manager = mUseHalBufManager;
for (size_t i = 0; i < config->num_streams; i++) {
camera::device::Stream &dst = requestedConfiguration.streams[i];
camera3::camera_stream_t *src = config->streams[i];
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
index 3fc070b..d9c8e57 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
@@ -319,10 +319,15 @@
sb.acquire_fence = -1;
sb.status = CAMERA_BUFFER_STATUS_ERROR;
}
- returnOutputBuffers(states.useHalBufManager,states.halBufManagedStreamIds, nullptr,
- streamBuffers.data(), numAllocatedBuffers, 0,
- 0, false,
- 0, states.sessionStatsBuilder);
+ std::vector<BufferToReturn> returnableBuffers{};
+ collectReturnableOutputBuffers(states.useHalBufManager, states.halBufManagedStreamIds,
+ /*listener*/ nullptr,
+ streamBuffers.data(), numAllocatedBuffers, /*timestamp*/ 0,
+ /*readoutTimestamp*/ 0, /*requested*/ false,
+ /*requestTimeNs*/ 0, states.sessionStatsBuilder,
+ /*out*/ &returnableBuffers);
+ finishReturningOutputBuffers(returnableBuffers, /*listener*/ nullptr,
+ states.sessionStatsBuilder);
for (auto buf : newBuffers) {
states.bufferRecordsIntf.removeOneBufferCache(streamId, buf);
}
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index a53d26d..939126c 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -372,28 +372,22 @@
};
/**
- * Simple test version of the interaction proxy, to use to inject onRegistered calls to the
+ * Simple test version of HidlServiceInteractionProxy, to use to inject onRegistered calls to the
* CameraProviderManager
*/
-struct TestInteractionProxy : public CameraProviderManager::HidlServiceInteractionProxy,
- public CameraProviderManager::AidlServiceInteractionProxy {
+struct TestHidlInteractionProxy : public CameraProviderManager::HidlServiceInteractionProxy {
sp<hidl::manager::V1_0::IServiceNotification> mManagerNotificationInterface;
sp<TestICameraProvider> mTestCameraProvider;
- std::shared_ptr<TestAidlICameraProvider> mTestAidlCameraProvider;
- TestInteractionProxy() {}
+ TestHidlInteractionProxy() {}
void setProvider(sp<TestICameraProvider> provider) {
mTestCameraProvider = provider;
}
- void setAidlProvider(std::shared_ptr<TestAidlICameraProvider> provider) {
- mTestAidlCameraProvider = provider;
- }
-
std::vector<std::string> mLastRequestedServiceNames;
- virtual ~TestInteractionProxy() {}
+ virtual ~TestHidlInteractionProxy() {}
virtual bool registerForNotifications(
[[maybe_unused]] const std::string &serviceName,
@@ -430,9 +424,47 @@
hardware::hidl_vec<hardware::hidl_string> ret = {"test/0"};
return ret;
}
+};
+
+/**
+ * Simple test version of AidlServiceInteractionProxy, to use to inject onRegistered calls to the
+ * CameraProviderManager
+ */
+struct TestAidlInteractionProxy : public CameraProviderManager::AidlServiceInteractionProxy {
+ std::shared_ptr<TestAidlICameraProvider> mTestAidlCameraProvider;
+
+ TestAidlInteractionProxy() {}
+
+ void setProvider(std::shared_ptr<TestAidlICameraProvider> provider) {
+ mTestAidlCameraProvider = provider;
+ }
+
+ std::vector<std::string> mLastRequestedServiceNames;
+
+ virtual ~TestAidlInteractionProxy() {}
virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
- getAidlService(const std::string&) {
+ getService(const std::string& serviceName) override {
+ if (!flags::delay_lazy_hal_instantiation()) {
+ return mTestAidlCameraProvider;
+ }
+
+ // If no provider has been given, fail; in reality, getService would
+ // block for HALs that don't start correctly, so we should never use
+ // getService when we don't have a valid HAL running
+ if (mTestAidlCameraProvider == nullptr) {
+ ADD_FAILURE() << __FUNCTION__ << " called with no valid provider;"
+ << " would block indefinitely";
+ // Real getService would block, but that's bad in unit tests. So
+ // just record an error and return nullptr
+ return nullptr;
+ }
+ mLastRequestedServiceNames.push_back(serviceName);
+ return mTestAidlCameraProvider;
+ }
+
+ virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+ tryGetService(const std::string&) override {
return mTestAidlCameraProvider;
}
};
@@ -462,7 +494,7 @@
status_t res;
sp<CameraProviderManager> providerManager = new CameraProviderManager();
sp<TestStatusListener> statusListener = new TestStatusListener();
- TestInteractionProxy serviceProxy;
+ TestHidlInteractionProxy serviceProxy;
android::hardware::hidl_vec<uint8_t> chars;
CameraMetadata meta;
@@ -510,7 +542,7 @@
status_t res;
sp<CameraProviderManager> providerManager = new CameraProviderManager();
sp<TestStatusListener> statusListener = new TestStatusListener();
- TestInteractionProxy serviceProxy;
+ TestHidlInteractionProxy serviceProxy;
sp<TestICameraProvider> provider = new TestICameraProvider(deviceNames,
vendorSection);
serviceProxy.setProvider(provider);
@@ -560,7 +592,7 @@
sp<CameraProviderManager> providerManager = new CameraProviderManager();
sp<TestStatusListener> statusListener = new TestStatusListener();
- TestInteractionProxy serviceProxy;
+ TestHidlInteractionProxy serviceProxy;
sp<TestICameraProvider> provider = new TestICameraProvider(deviceNames,
vendorSection);
@@ -696,7 +728,7 @@
status_t res;
sp<CameraProviderManager> providerManager = new CameraProviderManager();
sp<TestStatusListener> statusListener = new TestStatusListener();
- TestInteractionProxy serviceProxy;
+ TestHidlInteractionProxy serviceProxy;
sp<TestICameraProvider> provider = new TestICameraProvider(deviceNames,
vendorSection);
serviceProxy.setProvider(provider);
@@ -730,7 +762,7 @@
sp<CameraProviderManager> providerManager = new CameraProviderManager();
sp<TestStatusListener> statusListener = new TestStatusListener();
- TestInteractionProxy serviceProxy;
+ TestHidlInteractionProxy serviceProxy;
sp<TestICameraProvider> provider = new TestICameraProvider(deviceNames,
vendorSection);
@@ -779,7 +811,7 @@
sp<CameraProviderManager> providerManager = new CameraProviderManager();
sp<TestStatusListener> statusListener = new TestStatusListener();
- TestInteractionProxy serviceProxy;
+ TestHidlInteractionProxy serviceProxy;
sp<TestICameraProvider> provider = new TestICameraProvider(deviceNames,
vendorSection);
@@ -821,7 +853,7 @@
sp<CameraProviderManager> providerManager = new CameraProviderManager();
sp<TestStatusListener> statusListener = new TestStatusListener();
- TestInteractionProxy serviceProxy;
+ TestHidlInteractionProxy serviceProxy;
android::hardware::hidl_vec<uint8_t> chars;
CameraMetadata meta;
@@ -857,9 +889,11 @@
REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(vd_flags, virtual_camera_service_discovery))) {
sp<CameraProviderManager> providerManager = new CameraProviderManager();
sp<TestStatusListener> statusListener = new TestStatusListener();
- TestInteractionProxy serviceProxy;
+ TestAidlInteractionProxy aidlServiceProxy;
+ TestHidlInteractionProxy hidlServiceProxy;
- status_t res = providerManager->initialize(statusListener, &serviceProxy, &serviceProxy);
+ status_t res = providerManager->initialize(statusListener,
+ &hidlServiceProxy, &aidlServiceProxy);
ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
std::vector<std::string> cameraList = {"device@1.1/virtual/123"};
@@ -868,7 +902,7 @@
ndk::SharedRefBase::make<TestAidlICameraProvider>(cameraList);
ndk::SpAIBinder spBinder = aidlProvider->asBinder();
AIBinder* aiBinder = spBinder.get();
- serviceProxy.setAidlProvider(aidlProvider);
+ aidlServiceProxy.setProvider(aidlProvider);
providerManager->onServiceRegistration(
String16("android.hardware.camera.provider.ICameraProvider/virtual/0"),
AIBinder_toPlatformBinder(aiBinder));
@@ -883,15 +917,17 @@
REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(vd_flags, virtual_camera_service_discovery))) {
sp<CameraProviderManager> providerManager = new CameraProviderManager();
sp<TestStatusListener> statusListener = new TestStatusListener();
- TestInteractionProxy serviceProxy;
+ TestAidlInteractionProxy aidlServiceProxy;
+ TestHidlInteractionProxy hidlServiceProxy;
std::vector<std::string> cameraList = {"device@1.1/virtual/123"};
std::shared_ptr<TestAidlICameraProvider> aidlProvider =
ndk::SharedRefBase::make<TestAidlICameraProvider>(cameraList);
- serviceProxy.setAidlProvider(aidlProvider);
+ aidlServiceProxy.setProvider(aidlProvider);
- status_t res = providerManager->initialize(statusListener, &serviceProxy, &serviceProxy);
+ status_t res = providerManager->initialize(statusListener,
+ &hidlServiceProxy, &aidlServiceProxy);
ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
std::unordered_map<std::string, std::set<std::string>> unavailableDeviceIds;
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
new file mode 100644
index 0000000..e63b30b
--- /dev/null
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AttributionAndPermissionUtils.h"
+
+#include <binder/AppOpsManager.h>
+#include <binder/PermissionController.h>
+#include <cutils/properties.h>
+#include <private/android_filesystem_config.h>
+
+#include "CameraService.h"
+#include "CameraThreadState.h"
+
+namespace android {
+
+const std::string AttributionAndPermissionUtils::sDumpPermission("android.permission.DUMP");
+const std::string AttributionAndPermissionUtils::sManageCameraPermission(
+ "android.permission.MANAGE_CAMERA");
+const std::string AttributionAndPermissionUtils::sCameraPermission(
+ "android.permission.CAMERA");
+const std::string AttributionAndPermissionUtils::sSystemCameraPermission(
+ "android.permission.SYSTEM_CAMERA");
+const std::string AttributionAndPermissionUtils::sCameraHeadlessSystemUserPermission(
+ "android.permission.CAMERA_HEADLESS_SYSTEM_USER");
+const std::string AttributionAndPermissionUtils::sCameraPrivacyAllowlistPermission(
+ "android.permission.CAMERA_PRIVACY_ALLOWLIST");
+const std::string AttributionAndPermissionUtils::sCameraSendSystemEventsPermission(
+ "android.permission.CAMERA_SEND_SYSTEM_EVENTS");
+const std::string AttributionAndPermissionUtils::sCameraOpenCloseListenerPermission(
+ "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
+const std::string AttributionAndPermissionUtils::sCameraInjectExternalCameraPermission(
+ "android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+
+bool AttributionAndPermissionUtils::checkAutomotivePrivilegedClient(const std::string &cameraId,
+ const AttributionSourceState &attributionSource) {
+ if (isAutomotivePrivilegedClient(attributionSource.uid)) {
+ // If cameraId is empty, then it means that this check is not used for the
+ // purpose of accessing a specific camera, hence grant permission just
+ // based on uid to the automotive privileged client.
+ if (cameraId.empty())
+ return true;
+
+ auto cameraService = mCameraService.promote();
+ if (cameraService == nullptr) {
+ ALOGE("%s: CameraService unavailable.", __FUNCTION__);
+ return false;
+ }
+
+ // If this call is used for accessing a specific camera, then cameraId must be provided.
+ // In that case, the permission is pre-granted only for accessing the automotive
+ // exterior system camera.
+ return cameraService->isAutomotiveExteriorSystemCamera(cameraId);
+ }
+
+ return false;
+}
+
+bool AttributionAndPermissionUtils::checkPermissionForPreflight(const std::string &cameraId,
+ const std::string &permission, const AttributionSourceState &attributionSource,
+ const std::string& message, int32_t attributedOpCode) {
+ if (checkAutomotivePrivilegedClient(cameraId, attributionSource)) {
+ return true;
+ }
+
+ PermissionChecker permissionChecker;
+ return permissionChecker.checkPermissionForPreflight(toString16(permission), attributionSource,
+ toString16(message), attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
+}
+
+// Can camera service trust the caller based on the calling UID?
+bool AttributionAndPermissionUtils::isTrustedCallingUid(uid_t uid) {
+ switch (uid) {
+ case AID_MEDIA: // mediaserver
+ case AID_CAMERASERVER: // cameraserver
+ case AID_RADIO: // telephony
+ return true;
+ default:
+ return false;
+ }
+}
+
+bool AttributionAndPermissionUtils::isAutomotiveDevice() {
+ // Checks the property ro.hardware.type and returns true if it is
+ // automotive.
+ char value[PROPERTY_VALUE_MAX] = {0};
+ property_get("ro.hardware.type", value, "");
+ return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
+}
+
+bool AttributionAndPermissionUtils::isHeadlessSystemUserMode() {
+ // Checks if the device is running in headless system user mode
+ // by checking the property ro.fw.mu.headless_system_user.
+ char value[PROPERTY_VALUE_MAX] = {0};
+ property_get("ro.fw.mu.headless_system_user", value, "");
+ return strncmp(value, "true", PROPERTY_VALUE_MAX) == 0;
+}
+
+bool AttributionAndPermissionUtils::isAutomotivePrivilegedClient(int32_t uid) {
+ // Returns false if this is not an automotive device type.
+ if (!isAutomotiveDevice())
+ return false;
+
+ // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
+ // privileged client uid used for safety critical use cases such as
+ // rear view and surround view.
+ return uid == AID_AUTOMOTIVE_EVS;
+}
+
+status_t AttributionAndPermissionUtils::getUidForPackage(const std::string &packageName,
+ int userId, /*inout*/uid_t& uid, int err) {
+ PermissionController pc;
+ uid = pc.getPackageUid(toString16(packageName), 0);
+ if (uid <= 0) {
+ ALOGE("Unknown package: '%s'", packageName.c_str());
+ dprintf(err, "Unknown package: '%s'\n", packageName.c_str());
+ return BAD_VALUE;
+ }
+
+ if (userId < 0) {
+ ALOGE("Invalid user: %d", userId);
+ dprintf(err, "Invalid user: %d\n", userId);
+ return BAD_VALUE;
+ }
+
+ uid = multiuser_get_uid(userId, uid);
+ return NO_ERROR;
+}
+
+bool AttributionAndPermissionUtils::isCallerCameraServerNotDelegating() {
+ return CameraThreadState::getCallingPid() == getpid();
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCamera(const std::string& cameraId,
+ const AttributionSourceState& attributionSource) {
+ return checkPermissionForPreflight(cameraId, sCameraPermission,
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForSystemCamera(const std::string& cameraId,
+ const AttributionSourceState& attributionSource, bool checkCameraPermissions) {
+ bool systemCameraPermission = checkPermissionForPreflight(cameraId,
+ sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
+ return systemCameraPermission && (!checkCameraPermissions
+ || hasPermissionsForCamera(cameraId, attributionSource));
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCameraHeadlessSystemUser(
+ const std::string& cameraId, const AttributionSourceState& attributionSource) {
+ return checkPermissionForPreflight(cameraId, sCameraHeadlessSystemUserPermission,
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCameraPrivacyAllowlist(
+ const AttributionSourceState& attributionSource) {
+ return checkPermissionForPreflight(std::string(), sCameraPrivacyAllowlistPermission,
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForOpenCloseListener(
+ const AttributionSourceState& attributionSource) {
+ return checkPermissionForPreflight(std::string(), sCameraOpenCloseListenerPermission,
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+} // namespace android
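As a usage illustration of the composed check above: SYSTEM_CAMERA is always required, and CAMERA is additionally required unless the caller opts out. A minimal sketch, where utils is an assumed std::shared_ptr<AttributionAndPermissionUtils> and callingPid, callingUid, and cameraId are placeholders:

AttributionSourceState attributionSource{};
attributionSource.pid = callingPid;
attributionSource.uid = callingUid;

// Requires both SYSTEM_CAMERA and CAMERA.
bool fullAccess =
        utils->hasPermissionsForSystemCamera(cameraId, attributionSource);

// Requires only SYSTEM_CAMERA, e.g. when CAMERA was already verified separately.
bool systemOnly = utils->hasPermissionsForSystemCamera(
        cameraId, attributionSource, /*checkCameraPermissions*/ false);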
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
new file mode 100644
index 0000000..7f4a1bd
--- /dev/null
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -0,0 +1,203 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
+#define ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
+
+#include <android/content/AttributionSourceState.h>
+#include <android/permission/PermissionChecker.h>
+#include <binder/BinderService.h>
+
+namespace android {
+
+class CameraService;
+
+using content::AttributionSourceState;
+using permission::PermissionChecker;
+
+/**
+ * Utility class consolidating methods/data for verifying permissions and the identity of the
+ * caller.
+ */
+class AttributionAndPermissionUtils {
+public:
+ AttributionAndPermissionUtils() { }
+ virtual ~AttributionAndPermissionUtils() {}
+
+ void setCameraService(wp<CameraService> cameraService) {
+ mCameraService = cameraService;
+ }
+
+ /**
+ * Pre-grants the permission if the attribution source uid is for an automotive
+ * privileged client. Otherwise uses system service permission checker to check
+ * for the appropriate permission. If this function is called for accessing a specific
+ * camera, then the cameraId must not be empty. The cameraId is used only for automotive
+ * privileged clients, so that the permission is pre-granted only for system cameras
+ * located outside the vehicle body frame; cameras located inside the vehicle cabin
+ * still require user permission.
+ */
+ virtual bool checkPermissionForPreflight(const std::string &cameraId,
+ const std::string &permission, const AttributionSourceState& attributionSource,
+ const std::string& message, int32_t attributedOpCode);
+
+ // Can camera service trust the caller based on the calling UID?
+ virtual bool isTrustedCallingUid(uid_t uid);
+
+ virtual bool isAutomotiveDevice();
+ virtual bool isHeadlessSystemUserMode();
+
+ /**
+ * Returns true if the client has uid AID_AUTOMOTIVE_EVS and the device is an automotive device.
+ */
+ virtual bool isAutomotivePrivilegedClient(int32_t uid);
+
+ virtual status_t getUidForPackage(const std::string &packageName, int userId,
+ /*inout*/uid_t& uid, int err);
+ virtual bool isCallerCameraServerNotDelegating();
+
+ // Utils for checking specific permissions
+ virtual bool hasPermissionsForCamera(const std::string& cameraId,
+ const AttributionSourceState& attributionSource);
+ virtual bool hasPermissionsForSystemCamera(const std::string& cameraId,
+ const AttributionSourceState& attributionSource, bool checkCameraPermissions = true);
+ virtual bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
+ const AttributionSourceState& attributionSource);
+ virtual bool hasPermissionsForCameraPrivacyAllowlist(
+ const AttributionSourceState& attributionSource);
+ virtual bool hasPermissionsForOpenCloseListener(
+ const AttributionSourceState& attributionSource);
+
+ static const std::string sDumpPermission;
+ static const std::string sManageCameraPermission;
+ static const std::string sCameraPermission;
+ static const std::string sSystemCameraPermission;
+ static const std::string sCameraHeadlessSystemUserPermission;
+ static const std::string sCameraPrivacyAllowlistPermission;
+ static const std::string sCameraSendSystemEventsPermission;
+ static const std::string sCameraOpenCloseListenerPermission;
+ static const std::string sCameraInjectExternalCameraPermission;
+
+protected:
+ wp<CameraService> mCameraService;
+
+ bool checkAutomotivePrivilegedClient(const std::string &cameraId,
+ const AttributionSourceState &attributionSource);
+};
+
+/**
+ * Class to be inherited by classes encapsulating AttributionAndPermissionUtils. Provides an
+ * additional utility layer above AttributionAndPermissionUtils calls, and avoids verbosity
+ * in the encapsulating class's methods.
+ */
+class AttributionAndPermissionUtilsEncapsulator {
+protected:
+ std::shared_ptr<AttributionAndPermissionUtils> mAttributionAndPermissionUtils;
+
+public:
+ AttributionAndPermissionUtilsEncapsulator(
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
+ : mAttributionAndPermissionUtils(attributionAndPermissionUtils) { }
+
+ static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ return attributionSource;
+ }
+
+ static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+ const std::string& packageName) {
+ AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
+ attributionSource.packageName = packageName;
+ return attributionSource;
+ }
+
+ bool hasPermissionsForCamera(int callingPid, int callingUid) const {
+ return hasPermissionsForCamera(std::string(), callingPid, callingUid);
+ }
+
+ bool hasPermissionsForCamera(int callingPid, int callingUid,
+ const std::string& packageName) const {
+ return hasPermissionsForCamera(std::string(), callingPid, callingUid, packageName);
+ }
+
+ bool hasPermissionsForCamera(const std::string& cameraId, int callingPid,
+ int callingUid) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid);
+ return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+ }
+
+ bool hasPermissionsForCamera(const std::string& cameraId, int callingPid, int callingUid,
+ const std::string& packageName) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid, packageName);
+ return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+ }
+
+ bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid,
+ bool checkCameraPermissions = true) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid);
+ return mAttributionAndPermissionUtils->hasPermissionsForSystemCamera(
+ cameraId, attributionSource, checkCameraPermissions);
+ }
+
+ bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
+ int callingUid) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid);
+ return mAttributionAndPermissionUtils->hasPermissionsForCameraHeadlessSystemUser(
+ cameraId, attributionSource);
+ }
+
+ bool hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid);
+ return mAttributionAndPermissionUtils->hasPermissionsForCameraPrivacyAllowlist(
+ attributionSource);
+ }
+
+ bool hasPermissionsForOpenCloseListener(int callingPid, int callingUid) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid);
+ return mAttributionAndPermissionUtils->hasPermissionsForOpenCloseListener(
+ attributionSource);
+ }
+
+ bool isAutomotiveDevice() const {
+ return mAttributionAndPermissionUtils->isAutomotiveDevice();
+ }
+
+ bool isAutomotivePrivilegedClient(int32_t uid) const {
+ return mAttributionAndPermissionUtils->isAutomotivePrivilegedClient(uid);
+ }
+
+ bool isTrustedCallingUid(uid_t uid) const {
+ return mAttributionAndPermissionUtils->isTrustedCallingUid(uid);
+ }
+
+ bool isHeadlessSystemUserMode() const {
+ return mAttributionAndPermissionUtils->isHeadlessSystemUserMode();
+ }
+
+ status_t getUidForPackage(const std::string &packageName, int userId,
+ /*inout*/uid_t& uid, int err) const {
+ return mAttributionAndPermissionUtils->getUidForPackage(packageName, userId, uid, err);
+ }
+
+ bool isCallerCameraServerNotDelegating() const {
+ return mAttributionAndPermissionUtils->isCallerCameraServerNotDelegating();
+ }
+};
+
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
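To illustrate the intended use of the encapsulator, a sketch with a hypothetical client class; only the names defined in the header above are real:

// Editor's sketch: HypotheticalCameraClient is illustrative, not part of the patch.
class HypotheticalCameraClient : public AttributionAndPermissionUtilsEncapsulator {
  public:
    explicit HypotheticalCameraClient(
            std::shared_ptr<AttributionAndPermissionUtils> utils)
        : AttributionAndPermissionUtilsEncapsulator(utils) {}

    bool callerMayOpenCamera(const std::string& cameraId, int pid, int uid) const {
        // Builds the AttributionSourceState internally and delegates to
        // AttributionAndPermissionUtils::hasPermissionsForCamera.
        return hasPermissionsForCamera(cameraId, pid, uid);
    }
};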
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 11ef9b7..23aed6e 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -432,7 +432,7 @@
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
int64_t streamUseCase, int timestampBase, int mirrorMode,
- int32_t colorSpace) {
+ int32_t colorSpace, bool respectSurfaceSize) {
// bufferProducer must be non-null
if (gbp == nullptr) {
std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
@@ -529,8 +529,10 @@
// we can use the default stream configuration map
foundInMaxRes = true;
}
- // Round dimensions to the nearest dimensions available for this format
- if (flexibleConsumer && isPublicFormat(format) &&
+ // Round dimensions to the nearest dimensions available for this format.
+ // Only do the rounding if the client doesn't ask to respect the surface
+ // size.
+ if (flexibleConsumer && isPublicFormat(format) && !respectSurfaceSize &&
!SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
/*out*/&height)) {
@@ -753,6 +755,7 @@
const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
it.getGraphicBufferProducers();
bool deferredConsumer = it.isDeferred();
+ bool isConfigurationComplete = it.isComplete();
const std::string &physicalCameraId = it.getPhysicalCameraId();
int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
@@ -768,7 +771,8 @@
int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
OutputStreamInfo streamInfo;
- res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
+ res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType(),
+ isConfigurationComplete);
if (!res.isOk()) {
return res;
}
@@ -781,15 +785,38 @@
int64_t streamUseCase = it.getStreamUseCase();
int timestampBase = it.getTimestampBase();
int mirrorMode = it.getMirrorMode();
- if (deferredConsumer) {
+ // Handle a deferred consumer, or a not-yet-completed configuration
+ // with no buffer producers attached yet.
+ if (deferredConsumer || (!isConfigurationComplete && numBufferProducers == 0)) {
streamInfo.width = it.getWidth();
streamInfo.height = it.getHeight();
- streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
auto surfaceType = it.getSurfaceType();
- streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
- if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
- streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
+ switch (surfaceType) {
+ case OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE:
+ streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
+ streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ break;
+ case OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW:
+ streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE
+ | GraphicBuffer::USAGE_HW_COMPOSER;
+ streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ break;
+ case OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER:
+ case OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC:
+ streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_VIDEO_ENCODER;
+ streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ break;
+ case OutputConfiguration::SURFACE_TYPE_IMAGE_READER:
+ streamInfo.consumerUsage = it.getUsage();
+ streamInfo.format = it.getFormat();
+ streamInfo.dataSpace = (android_dataspace)it.getDataspace();
+ break;
+ default:
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "Invalid surface type.");
}
streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
@@ -815,7 +842,8 @@
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace,
+ /*respectSurfaceSize*/true);
if (!res.isOk())
return res;
@@ -912,22 +940,37 @@
}
binder::Status checkSurfaceType(size_t numBufferProducers,
- bool deferredConsumer, int surfaceType) {
+ bool deferredConsumer, int surfaceType, bool isConfigurationComplete) {
if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
__FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
- } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
+ } else if ((numBufferProducers == 0) && (!deferredConsumer) && isConfigurationComplete) {
ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
}
- bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
- (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
-
- if (deferredConsumer && !validSurfaceType) {
- ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
+ if (deferredConsumer) {
+ bool validSurfaceType = (
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
+ if (!validSurfaceType) {
+ std::string msg = fmt::sprintf("Deferred target surface has invalid "
+ "surfaceType = %d.", surfaceType);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ } else if (!isConfigurationComplete && numBufferProducers == 0) {
+ bool validSurfaceType = (
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER) ||
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC) ||
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_IMAGE_READER));
+ if (!validSurfaceType) {
+ std::string msg = fmt::sprintf("OutputConfiguration target surface has invalid "
+ "surfaceType = %d.", surfaceType);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
}
return binder::Status::ok();
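Illustrative calls, not taken from the patch, showing how the new isConfigurationComplete argument relaxes the consumer-count check for configurations whose Surfaces will be attached later:

// With no buffer producers and no deferred consumer, an incomplete
// ImageReader configuration is now accepted...
binder::Status okForLater = checkSurfaceType(
        /*numBufferProducers*/ 0, /*deferredConsumer*/ false,
        OutputConfiguration::SURFACE_TYPE_IMAGE_READER,
        /*isConfigurationComplete*/ false);

// ...while a complete configuration with no producers is still rejected
// with ERROR_ILLEGAL_ARGUMENT ("No valid consumers.").
binder::Status rejected = checkSurfaceType(
        /*numBufferProducers*/ 0, /*deferredConsumer*/ false,
        OutputConfiguration::SURFACE_TYPE_IMAGE_READER,
        /*isConfigurationComplete*/ true);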
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 0545cea..a226829 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -112,7 +112,7 @@
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
int64_t streamUseCase, int timestampBase, int mirrorMode,
- int32_t colorSpace);
+ int32_t colorSpace, bool respectSurfaceSize);
//check if format is 10-bit output compatible
bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
@@ -143,10 +143,10 @@
const std::string &logicalCameraId);
binder::Status checkSurfaceType(size_t numBufferProducers,
-bool deferredConsumer, int surfaceType);
+ bool deferredConsumer, int surfaceType, bool isConfigurationComplete);
binder::Status checkOperatingMode(int operatingMode,
-const CameraMetadata &staticInfo, const std::string &cameraId);
+ const CameraMetadata &staticInfo, const std::string &cameraId);
binder::Status
convertToHALStreamCombination(
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index cb4e10f..90530f6 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_xr_framework",
// See: http://go/android-license-faq
default_applicable_licenses: ["Android-Apache-2.0"],
}
@@ -11,6 +12,7 @@
"libbinder",
"libbinder_ndk",
"libcamera_metadata",
+ "libexif",
"liblog",
"libfmq",
"libgui",
@@ -46,7 +48,7 @@
name: "libvirtualcamera_utils",
srcs: [
"util/JpegUtil.cc",
- "util/MetadataBuilder.cc",
+ "util/MetadataUtil.cc",
"util/Util.cc",
"util/TestPatternHelper.cc",
"util/EglDisplayContext.cc",
@@ -54,7 +56,7 @@
"util/EglProgram.cc",
"util/EglSurfaceTexture.cc",
"util/EglUtil.cc",
- "util/Permissions.cc"
+ "util/Permissions.cc",
],
defaults: [
"libvirtualcamera_defaults",
diff --git a/services/camera/virtualcamera/TEST_MAPPING b/services/camera/virtualcamera/TEST_MAPPING
index 25fca73..e976704 100644
--- a/services/camera/virtualcamera/TEST_MAPPING
+++ b/services/camera/virtualcamera/TEST_MAPPING
@@ -9,7 +9,8 @@
{
"exclude-annotation": "androidx.test.filters.FlakyTest"
}
- ]
+ ],
+ "keywords": ["primary-device"]
}
]
}
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index 768dffb..a5301e5 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -23,8 +23,10 @@
#include <chrono>
#include <cstdint>
#include <iterator>
+#include <numeric>
#include <optional>
#include <string>
+#include <vector>
#include "VirtualCameraSession.h"
#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
@@ -36,7 +38,7 @@
#include "android/binder_status.h"
#include "log/log.h"
#include "system/camera_metadata.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
#include "util/Util.h"
namespace android {
@@ -70,29 +72,78 @@
constexpr int32_t kMaxJpegSize = 3 * 1024 * 1024 /*3MiB*/;
+constexpr int32_t kMinFps = 15;
+
+constexpr std::chrono::nanoseconds kMaxFrameDuration =
+ std::chrono::duration_cast<std::chrono::nanoseconds>(1e9ns / kMinFps);
+
+constexpr uint8_t kPipelineMaxDepth = 2;
+
constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};
-const std::array<int32_t, 3> kOutputFormats{
- ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
- ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
- ANDROID_SCALER_AVAILABLE_FORMATS_BLOB};
+const std::array<Resolution, 5> kStandardJpegThumbnailSizes{
+ Resolution(176, 144), Resolution(240, 144), Resolution(256, 144),
+ Resolution(240, 160), Resolution(240, 180)};
-struct Resolution {
- Resolution(const int w, const int h) : width(w), height(h) {
- }
+const std::array<PixelFormat, 3> kOutputFormats{
+ PixelFormat::IMPLEMENTATION_DEFINED, PixelFormat::YCBCR_420_888,
+ PixelFormat::BLOB};
- bool operator<(const Resolution& other) const {
- return width * height < other.width * other.height;
- }
-
- bool operator==(const Resolution& other) const {
- return width == other.width && height == other.height;
- }
-
- const int width;
- const int height;
+// The resolutions below will be used to extend the set of supported output resolutions.
+// All resolutions with lower pixel count and same aspect ratio as some supported
+// input resolution will be added to the set of supported output resolutions.
+const std::array<Resolution, 10> kOutputResolutions{
+ Resolution(320, 240), Resolution(640, 360), Resolution(640, 480),
+ Resolution(720, 480), Resolution(720, 576), Resolution(800, 600),
+ Resolution(1024, 576), Resolution(1280, 720), Resolution(1280, 960),
+ Resolution(1280, 1080),
};
+std::vector<Resolution> getSupportedJpegThumbnailSizes(
+ const std::vector<SupportedStreamConfiguration>& configs) {
+ auto isSupportedByAnyInputConfig =
+ [&configs](const Resolution thumbnailResolution) {
+ return std::any_of(
+ configs.begin(), configs.end(),
+ [thumbnailResolution](const SupportedStreamConfiguration& config) {
+ return isApproximatellySameAspectRatio(
+ thumbnailResolution, Resolution(config.width, config.height));
+ });
+ };
+
+ std::vector<Resolution> supportedThumbnailSizes({Resolution(0, 0)});
+ std::copy_if(kStandardJpegThumbnailSizes.begin(),
+ kStandardJpegThumbnailSizes.end(),
+ std::back_insert_iterator(supportedThumbnailSizes),
+ isSupportedByAnyInputConfig);
+ return supportedThumbnailSizes;
+}
+
+bool isSupportedOutputFormat(const PixelFormat pixelFormat) {
+ return std::find(kOutputFormats.begin(), kOutputFormats.end(), pixelFormat) !=
+ kOutputFormats.end();
+}
+
+std::vector<FpsRange> fpsRangesForInputConfig(
+ const std::vector<SupportedStreamConfiguration>& configs) {
+ std::set<FpsRange> availableRanges;
+
+ for (const SupportedStreamConfiguration& config : configs) {
+ availableRanges.insert({.minFps = kMinFps, .maxFps = config.maxFps});
+ availableRanges.insert({.minFps = config.maxFps, .maxFps = config.maxFps});
+ }
+
+ if (std::any_of(configs.begin(), configs.end(),
+ [](const SupportedStreamConfiguration& config) {
+ return config.maxFps >= 30;
+ })) {
+ availableRanges.insert({.minFps = kMinFps, .maxFps = 30});
+ availableRanges.insert({.minFps = 30, .maxFps = 30});
+ }
+
+ return std::vector<FpsRange>(availableRanges.begin(), availableRanges.end());
+}
+
std::optional<Resolution> getMaxResolution(
const std::vector<SupportedStreamConfiguration>& configs) {
auto itMax = std::max_element(configs.begin(), configs.end(),
@@ -127,6 +178,36 @@
}
}
+ std::map<Resolution, int> additionalResolutionToMaxFpsMap;
+ // Add additional resolutions we can support by downscaling input streams with
+ // the same aspect ratio.
+ for (const Resolution& outputResolution : kOutputResolutions) {
+ for (const auto& [resolution, maxFps] : resolutionToMaxFpsMap) {
+ if (resolutionToMaxFpsMap.find(outputResolution) !=
+ resolutionToMaxFpsMap.end()) {
+ // Resolution is already in the map, skip it.
+ continue;
+ }
+
+ if (outputResolution < resolution &&
+ isApproximatellySameAspectRatio(outputResolution, resolution)) {
+ // Lower resolution with the same aspect ratio; we can achieve this by
+ // downscaling, so add it to the map.
+ ALOGD(
+ "Extending set of output resolutions with %dx%d which has same "
+ "aspect ratio as supported input %dx%d.",
+ outputResolution.width, outputResolution.height, resolution.width,
+ resolution.height);
+ additionalResolutionToMaxFpsMap[outputResolution] = maxFps;
+ break;
+ }
+ }
+ }
+
+ // Add all resolutions we can achieve by downscaling to the map.
+ resolutionToMaxFpsMap.insert(additionalResolutionToMaxFpsMap.begin(),
+ additionalResolutionToMaxFpsMap.end());
+
return resolutionToMaxFpsMap;
}
@@ -151,25 +232,113 @@
.setFlashAvailable(false)
.setLensFacing(
static_cast<camera_metadata_enum_android_lens_facing>(lensFacing))
+ .setAvailableFocalLengths({VirtualCameraDevice::kFocalLength})
.setSensorOrientation(static_cast<int32_t>(sensorOrientation))
+ .setSensorReadoutTimestamp(
+ ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED)
+ .setSensorTimestampSource(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN)
+ .setSensorPhysicalSize(36.0, 24.0)
+ .setAvailableAberrationCorrectionModes(
+ {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF})
+ .setAvailableNoiseReductionModes({ANDROID_NOISE_REDUCTION_MODE_OFF})
.setAvailableFaceDetectModes({ANDROID_STATISTICS_FACE_DETECT_MODE_OFF})
+ .setAvailableTestPatternModes({ANDROID_SENSOR_TEST_PATTERN_MODE_OFF})
.setAvailableMaxDigitalZoom(1.0)
.setControlAvailableModes({ANDROID_CONTROL_MODE_AUTO})
.setControlAfAvailableModes({ANDROID_CONTROL_AF_MODE_OFF})
- .setControlAeAvailableFpsRange(10, 30)
+ .setControlAvailableSceneModes({ANDROID_CONTROL_SCENE_MODE_DISABLED})
+ .setControlAvailableEffects({ANDROID_CONTROL_EFFECT_MODE_OFF})
+ .setControlAvailableVideoStabilizationModes(
+ {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF})
+ .setControlAeAvailableModes({ANDROID_CONTROL_AE_MODE_ON})
+ .setControlAeAvailableAntibandingModes(
+ {ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO})
+ .setControlAeAvailableFpsRanges(
+ fpsRangesForInputConfig(supportedInputConfig))
.setControlMaxRegions(0, 0, 0)
.setControlAfRegions({kDefaultEmptyControlRegion})
.setControlAeRegions({kDefaultEmptyControlRegion})
.setControlAwbRegions({kDefaultEmptyControlRegion})
- .setControlAeCompensationRange(0, 1)
+ .setControlAeCompensationRange(0, 0)
.setControlAeCompensationStep(camera_metadata_rational_t{0, 1})
+ .setControlAwbLockAvailable(false)
+ .setControlAeLockAvailable(false)
+ .setControlAvailableAwbModes({ANDROID_CONTROL_AWB_MODE_AUTO})
.setControlZoomRatioRange(/*min=*/1.0, /*max=*/1.0)
+ .setCroppingType(ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY)
+ .setJpegAvailableThumbnailSizes(
+ getSupportedJpegThumbnailSizes(supportedInputConfig))
.setMaxJpegSize(kMaxJpegSize)
- .setAvailableRequestKeys({ANDROID_CONTROL_AF_MODE})
- .setAvailableResultKeys({ANDROID_CONTROL_AF_MODE})
+ .setMaxFaceCount(0)
+ .setMaxFrameDuration(kMaxFrameDuration)
+ .setMaxNumberOutputStreams(
+ VirtualCameraDevice::kMaxNumberOfRawStreams,
+ VirtualCameraDevice::kMaxNumberOfProcessedStreams,
+ VirtualCameraDevice::kMaxNumberOfStallStreams)
+ .setRequestPartialResultCount(1)
+ .setPipelineMaxDepth(kPipelineMaxDepth)
+ .setSyncMaxLatency(ANDROID_SYNC_MAX_LATENCY_UNKNOWN)
+ .setAvailableRequestKeys({ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ ANDROID_CONTROL_CAPTURE_INTENT,
+ ANDROID_CONTROL_AE_MODE,
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ ANDROID_CONTROL_AF_TRIGGER,
+ ANDROID_CONTROL_AF_MODE,
+ ANDROID_CONTROL_AWB_MODE,
+ ANDROID_SCALER_CROP_REGION,
+ ANDROID_CONTROL_EFFECT_MODE,
+ ANDROID_CONTROL_MODE,
+ ANDROID_CONTROL_SCENE_MODE,
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ ANDROID_CONTROL_ZOOM_RATIO,
+ ANDROID_FLASH_MODE,
+ ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+ ANDROID_JPEG_ORIENTATION,
+ ANDROID_JPEG_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_SIZE,
+ ANDROID_NOISE_REDUCTION_MODE,
+ ANDROID_STATISTICS_FACE_DETECT_MODE})
+ .setAvailableResultKeys({
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ ANDROID_CONTROL_AE_LOCK,
+ ANDROID_CONTROL_AE_MODE,
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ ANDROID_CONTROL_AE_STATE,
+ ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ ANDROID_CONTROL_AF_MODE,
+ ANDROID_CONTROL_AF_STATE,
+ ANDROID_CONTROL_AF_TRIGGER,
+ ANDROID_CONTROL_AWB_LOCK,
+ ANDROID_CONTROL_AWB_MODE,
+ ANDROID_CONTROL_AWB_STATE,
+ ANDROID_CONTROL_CAPTURE_INTENT,
+ ANDROID_CONTROL_EFFECT_MODE,
+ ANDROID_CONTROL_MODE,
+ ANDROID_CONTROL_SCENE_MODE,
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ ANDROID_STATISTICS_FACE_DETECT_MODE,
+ ANDROID_FLASH_MODE,
+ ANDROID_FLASH_STATE,
+ ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+ ANDROID_JPEG_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_QUALITY,
+ ANDROID_LENS_FOCAL_LENGTH,
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+ ANDROID_NOISE_REDUCTION_MODE,
+ ANDROID_REQUEST_PIPELINE_DEPTH,
+ ANDROID_SENSOR_TIMESTAMP,
+ ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
+ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+ ANDROID_STATISTICS_SCENE_FLICKER,
+ })
.setAvailableCapabilities(
- {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE})
- .setAvailableCharacteristicKeys();
+ {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE});
// Active array size must correspond to largest supported input resolution.
std::optional<Resolution> maxResolution =
@@ -179,6 +348,7 @@
}
builder.setSensorActiveArraySize(0, 0, maxResolution->width,
maxResolution->height);
+ builder.setSensorPixelArraySize(maxResolution->width, maxResolution->height);
std::vector<MetadataBuilder::StreamConfiguration> outputConfigurations;
@@ -189,7 +359,7 @@
getResolutionToMaxFpsMap(supportedInputConfig);
// Add configurations for all unique input resolutions and output formats.
- for (int32_t format : kOutputFormats) {
+ for (const PixelFormat format : kOutputFormats) {
std::transform(
resolutionToMaxFpsMap.begin(), resolutionToMaxFpsMap.end(),
std::back_inserter(outputConfigurations), [format](const auto& entry) {
@@ -198,7 +368,7 @@
return MetadataBuilder::StreamConfiguration{
.width = resolution.width,
.height = resolution.height,
- .format = format,
+ .format = static_cast<int32_t>(format),
.minFrameDuration = std::chrono::nanoseconds(1s) / maxFps,
.minStallDuration = 0s};
});
@@ -207,7 +377,7 @@
ALOGV("Adding %zu output configurations", outputConfigurations.size());
builder.setAvailableOutputStreamConfigurations(outputConfigurations);
- auto metadata = builder.build();
+ auto metadata = builder.setAvailableCharacteristicKeys().build();
if (metadata == nullptr) {
ALOGE("Failed to build metadata!");
return CameraMetadata();
@@ -288,6 +458,24 @@
return false;
}
+ const std::vector<Stream>& streams = streamConfiguration.streams;
+
+ Resolution firstStreamResolution(streams[0].width, streams[0].height);
+ auto isSameAspectRatioAsFirst = [firstStreamResolution](const Stream& stream) {
+ return isApproximatellySameAspectRatio(
+ firstStreamResolution, Resolution(stream.width, stream.height));
+ };
+ if (!std::all_of(streams.begin(), streams.end(), isSameAspectRatioAsFirst)) {
+ ALOGW(
+ "%s: Requested streams do not have same aspect ratio. Different aspect "
+ "ratios are currently "
+ "not supported by virtual camera. Stream configuration: %s",
+ __func__, streamConfiguration.toString().c_str());
+ return false;
+ }
+
+ int numberOfProcessedStreams = 0;
+ int numberOfStallStreams = 0;
for (const Stream& stream : streamConfiguration.streams) {
ALOGV("%s: Configuration queried: %s", __func__, stream.toString().c_str());
@@ -296,18 +484,25 @@
return false;
}
- // TODO(b/301023410) remove hardcoded format checks, verify against configuration.
if (stream.rotation != StreamRotation::ROTATION_0 ||
- (stream.format != PixelFormat::IMPLEMENTATION_DEFINED &&
- stream.format != PixelFormat::YCBCR_420_888 &&
- stream.format != PixelFormat::BLOB)) {
+ !isSupportedOutputFormat(stream.format)) {
ALOGV("Unsupported output stream type");
return false;
}
+ if (stream.format == PixelFormat::BLOB) {
+ numberOfStallStreams++;
+ } else {
+ numberOfProcessedStreams++;
+ }
+
+ Resolution requestedResolution(stream.width, stream.height);
auto matchesSupportedInputConfig =
- [&stream](const SupportedStreamConfiguration& config) {
- return stream.width == config.width && stream.height == config.height;
+ [requestedResolution](const SupportedStreamConfiguration& config) {
+ Resolution supportedInputResolution(config.width, config.height);
+ return requestedResolution <= supportedInputResolution &&
+ isApproximatellySameAspectRatio(requestedResolution,
+ supportedInputResolution);
};
if (std::none_of(mSupportedInputConfigurations.begin(),
mSupportedInputConfigurations.end(),
@@ -316,6 +511,19 @@
return false;
}
}
+
+ if (numberOfProcessedStreams > kMaxNumberOfProcessedStreams) {
+ ALOGE("%s: %d processed streams exceeds the supported maximum of %d",
+ __func__, numberOfProcessedStreams, kMaxNumberOfProcessedStreams);
+ return false;
+ }
+
+ if (numberOfStallStreams > kMaxNumberOfStallStreams) {
+ ALOGE("%s: %d stall streams exceeds the supported maximum of %d", __func__,
+ numberOfStallStreams, kMaxNumberOfStallStreams);
+ return false;
+ }
+
return true;
}
@@ -370,6 +578,24 @@
return std::string(kDevicePathPrefix) + std::to_string(mCameraId);
}
+const std::vector<SupportedStreamConfiguration>&
+VirtualCameraDevice::getInputConfigs() const {
+ return mSupportedInputConfigurations;
+}
+
+Resolution VirtualCameraDevice::getMaxInputResolution() const {
+ std::optional<Resolution> maxResolution =
+ getMaxResolution(mSupportedInputConfigurations);
+ if (!maxResolution.has_value()) {
+ ALOGE(
+ "%s: Cannot determine sensor size for virtual camera - input "
+ "configurations empty?",
+ __func__);
+ return Resolution(0, 0);
+ }
+ return maxResolution.value();
+}
+
std::shared_ptr<VirtualCameraDevice> VirtualCameraDevice::sharedFromThis() {
// SharedRefBase which BnCameraDevice inherits from breaks
// std::enable_shared_from_this. This is recommended replacement for
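As a worked illustration of the downscaling rule in the VirtualCameraDevice.cc comments above, here is a standalone sketch rather than the actual helpers; the 0.05 aspect-ratio tolerance is an assumption and not necessarily the threshold used by isApproximatellySameAspectRatio:

// Editor's sketch: for a single supported 1920x1080 input, only candidate
// output resolutions that are smaller and roughly 16:9 would be added.
#include <cmath>
#include <cstdio>
#include <utility>
#include <vector>

int main() {
    const std::pair<int, int> input{1920, 1080};  // example supported input
    const std::vector<std::pair<int, int>> candidates{
            {320, 240}, {640, 360}, {1280, 720}, {1280, 960}};
    for (const auto& [w, h] : candidates) {
        const bool smaller = w * h < input.first * input.second;
        const bool sameAspect =
                std::fabs(static_cast<double>(w) / h -
                          static_cast<double>(input.first) / input.second) < 0.05;
        if (smaller && sameAspect) {
            std::printf("would add %dx%d\n", w, h);  // prints 640x360 and 1280x720
        }
    }
    return 0;
}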
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index 10d52af..0aebf6e 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -24,6 +24,8 @@
#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
#include "aidl/android/hardware/camera/device/BnCameraDevice.h"
+#include "system/camera_metadata.h"
+#include "util/Util.h"
namespace android {
namespace companion {
@@ -94,6 +96,41 @@
uint32_t getCameraId() const { return mCameraId; }
+ const std::vector<
+ aidl::android::companion::virtualcamera::SupportedStreamConfiguration>&
+ getInputConfigs() const;
+
+ // Returns largest supported input resolution.
+ Resolution getMaxInputResolution() const;
+
+ // Maximal number of RAW streams - virtual camera doesn't support RAW streams.
+ static constexpr int32_t kMaxNumberOfRawStreams = 0;
+
+ // Maximal number of non-jpeg streams configured concurrently in a single
+ // session. This should be at least 3 and can be increased at the potential
+ // cost of more CPU/GPU load if there are many concurrent streams.
+ static constexpr int32_t kMaxNumberOfProcessedStreams = 3;
+
+ // Maximal number of stalling streams (currently only jpeg for virtual
+ // camera). Can be increased at the potential cost of more GPU/CPU load.
+ static constexpr int32_t kMaxNumberOfStallStreams = 1;
+
+ // Focal length for full frame sensor.
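+ // (~43 mm is the diagonal of a 36x24 mm full-frame sensor, commonly treated
+ // as the "normal" focal length.)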
+ static constexpr float kFocalLength = 43.0;
+
+ // Default JPEG compression quality.
+ static constexpr uint8_t kDefaultJpegQuality = 80;
+
+ // Default JPEG orientation.
+ static constexpr uint8_t kDefaultJpegOrientation = 0;
+
+ // Default Make and Model for Exif
+ static constexpr char kDefaultMakeAndModel[] = "Android Virtual Camera";
+
+ static constexpr camera_metadata_enum_android_control_capture_intent_t
+ kDefaultCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+
private:
std::shared_ptr<VirtualCameraDevice> sharedFromThis();
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.cc b/services/camera/virtualcamera/VirtualCameraProvider.cc
index e4a68f5..eed3e85 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.cc
+++ b/services/camera/virtualcamera/VirtualCameraProvider.cc
@@ -42,10 +42,6 @@
using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination;
using ::aidl::android::hardware::camera::provider::ICameraProviderCallback;
-// TODO(b/301023410) Make camera id range configurable / dynamic
-// based on already registered devices.
-std::atomic_int VirtualCameraProvider::sNextId{42};
-
ndk::ScopedAStatus VirtualCameraProvider::setCallback(
const std::shared_ptr<ICameraProviderCallback>& in_callback) {
ALOGV("%s", __func__);
@@ -154,9 +150,15 @@
}
std::shared_ptr<VirtualCameraDevice> VirtualCameraProvider::createCamera(
- const VirtualCameraConfiguration& configuration) {
+ const VirtualCameraConfiguration& configuration, const int cameraId) {
+ if (cameraId < 0) {
+ ALOGE("%s: Cannot create camera with negative id. cameraId: %d", __func__,
+ cameraId);
+ return nullptr;
+ }
+
auto camera =
- ndk::SharedRefBase::make<VirtualCameraDevice>(sNextId++, configuration);
+ ndk::SharedRefBase::make<VirtualCameraDevice>(cameraId, configuration);
std::shared_ptr<ICameraProviderCallback> callback;
{
const std::lock_guard<std::mutex> lock(mLock);
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.h b/services/camera/virtualcamera/VirtualCameraProvider.h
index 11d3123..c1283a0 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.h
+++ b/services/camera/virtualcamera/VirtualCameraProvider.h
@@ -76,7 +76,8 @@
// Returns nullptr if creation was not successful.
std::shared_ptr<VirtualCameraDevice> createCamera(
const aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
- configuration);
+ configuration,
+ int cameraId);
std::shared_ptr<VirtualCameraDevice> getCamera(const std::string& name);
@@ -91,9 +92,6 @@
std::map<std::string, std::shared_ptr<VirtualCameraDevice>> mCameras
GUARDED_BY(mLock);
-
- // Numerical id to assign to next created camera.
- static std::atomic_int sNextId;
};
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 25fe61b..f68efe2 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -18,17 +18,22 @@
#include "VirtualCameraRenderThread.h"
#include <chrono>
-#include <cstddef>
#include <cstdint>
+#include <cstring>
#include <future>
#include <memory>
#include <mutex>
#include <thread>
+#include <vector>
+#include "Exif.h"
#include "GLES/gl.h"
+#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraBlob.h"
+#include "aidl/android/hardware/camera/device/CameraBlobId.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureResult.h"
#include "aidl/android/hardware/camera/device/ErrorCode.h"
@@ -39,10 +44,12 @@
#include "android-base/thread_annotations.h"
#include "android/binder_auto_utils.h"
#include "android/hardware_buffer.h"
+#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
+#include "ui/Rect.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
#include "util/TestPatternHelper.h"
#include "util/Util.h"
#include "utils/Errors.h"
@@ -53,6 +60,8 @@
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferStatus;
+using ::aidl::android::hardware::camera::device::CameraBlob;
+using ::aidl::android::hardware::camera::device::CameraBlobId;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CaptureResult;
using ::aidl::android::hardware::camera::device::ErrorCode;
@@ -65,16 +74,87 @@
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::ScopedLockAssertion;
+using ::android::hardware::camera::common::helper::ExifUtils;
+
namespace {
using namespace std::chrono_literals;
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
+// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+// This roughly corresponds to frame latency; we set it to the
+// documented minimum of 2.
+static constexpr uint8_t kPipelineDepth = 2;
+
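+// The thumbnail is embedded in the EXIF APP1 segment, whose size is capped at
+// roughly 64 KiB, so 32 KiB leaves headroom for the remaining EXIF data.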
+static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024; // 32 KiB
+
CameraMetadata createCaptureResultMetadata(
- const std::chrono::nanoseconds timestamp) {
- std::unique_ptr<CameraMetadata> metadata =
- MetadataBuilder().setSensorTimestamp(timestamp).build();
+ const std::chrono::nanoseconds timestamp,
+ const RequestSettings& requestSettings,
+ const Resolution reportedSensorSize) {
+ // All of the keys used in the response need to be referenced in
+ // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
+ // in VirtualCameraDevice.cc).
+ MetadataBuilder builder =
+ MetadataBuilder()
+ .setAberrationCorrectionMode(
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+ .setControlAeAvailableAntibandingModes(
+ {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
+ .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
+ .setControlAeExposureCompensation(0)
+ .setControlAeLockAvailable(false)
+ .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
+ .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
+ .setControlAePrecaptureTrigger(
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+ .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
+ .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+ .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+ .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
+ .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+ .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
+ .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
+ .setControlCaptureIntent(requestSettings.captureIntent)
+ .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+ .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+ .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
+ .setControlVideoStabilizationMode(
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
+ .setCropRegion(0, 0, reportedSensorSize.width,
+ reportedSensorSize.height)
+ .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
+ .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+ .setFlashMode(ANDROID_FLASH_MODE_OFF)
+ .setFocalLength(VirtualCameraDevice::kFocalLength)
+ .setJpegQuality(requestSettings.jpegQuality)
+ .setJpegOrientation(requestSettings.jpegOrientation)
+ .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
+ requestSettings.thumbnailResolution.height)
+ .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
+ .setLensOpticalStabilizationMode(
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
+ .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
+ .setPipelineDepth(kPipelineDepth)
+ .setSensorTimestamp(timestamp)
+ .setStatisticsHotPixelMapMode(
+ ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
+ .setStatisticsLensShadingMapMode(
+ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
+ .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
+
+ if (requestSettings.fpsRange.has_value()) {
+ builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
+ }
+
+ if (requestSettings.gpsCoordinates.has_value()) {
+ const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
+ builder.setJpegGpsCoordinates(coordinates);
+ }
+
+ std::unique_ptr<CameraMetadata> metadata = builder.build();
+
if (metadata == nullptr) {
ALOGE("%s: Failed to build capture result metadata", __func__);
return CameraMetadata();
@@ -150,6 +230,46 @@
}
}
+std::vector<uint8_t> createExif(
+ Resolution imageSize, const CameraMetadata resultMetadata,
+ const std::vector<uint8_t>& compressedThumbnail = {}) {
+ std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
+ exifUtils->initialize();
+
+ // Make a copy of the metadata in order to convert it to the HAL metadata
+ // format (as opposed to the AIDL class) and use the setFromMetadata method
+ // from ExifUtils.
+ camera_metadata_t* rawSettings =
+ clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
+ if (rawSettings != nullptr) {
+ android::hardware::camera::common::helper::CameraMetadata halMetadata(
+ rawSettings);
+ exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
+ }
+ exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
+ exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
+ exifUtils->setFlash(0);
+
+ std::vector<uint8_t> app1Data;
+
+ size_t thumbnailDataSize = compressedThumbnail.size();
+ const void* thumbnailData =
+ thumbnailDataSize > 0
+ ? reinterpret_cast<const void*>(compressedThumbnail.data())
+ : nullptr;
+
+ if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
+ ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
+ return app1Data;
+ }
+
+ const uint8_t* data = exifUtils->getApp1Buffer();
+ const size_t size = exifUtils->getApp1Length();
+
+ app1Data.insert(app1Data.end(), data, data + size);
+ return app1Data;
+}
+
} // namespace
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
@@ -170,12 +290,12 @@
}
VirtualCameraRenderThread::VirtualCameraRenderThread(
- VirtualCameraSessionContext& sessionContext, const int mWidth,
- const int mHeight,
+ VirtualCameraSessionContext& sessionContext,
+ const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
: mCameraDeviceCallback(cameraDeviceCallback),
- mInputSurfaceWidth(mWidth),
- mInputSurfaceHeight(mHeight),
+ mInputSurfaceSize(inputSurfaceSize),
+ mReportedSensorSize(reportedSensorSize),
mTestMode(testMode),
mSessionContext(sessionContext) {
}
@@ -188,8 +308,11 @@
}
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
- int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
- : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
+ int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+ const RequestSettings& requestSettings)
+ : mFrameNumber(frameNumber),
+ mBuffers(requestBuffers),
+ mRequestSettings(requestSettings) {
}
int ProcessCaptureRequestTask::getFrameNumber() const {
@@ -201,6 +324,10 @@
return mBuffers;
}
+const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
+ return mRequestSettings;
+}
+
void VirtualCameraRenderThread::enqueueTask(
std::unique_ptr<ProcessCaptureRequestTask> task) {
std::lock_guard<std::mutex> lock(mLock);
@@ -263,14 +390,20 @@
std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
EglTextureProgram::TextureFormat::RGBA);
- mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
- mInputSurfaceHeight);
+ mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
+ mInputSurfaceSize.width, mInputSurfaceSize.height);
mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
processCaptureRequest(*task);
}
+ // Destroy EGL utilities still on the render thread.
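+ // (They hold EGL/GL objects that should be released while this thread's
+ // EGL context is still current.)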
+ mEglSurfaceTexture.reset();
+ mEglTextureRgbProgram.reset();
+ mEglTextureYuvProgram.reset();
+ mEglDisplayContext.reset();
+
ALOGV("Render thread exiting");
}
@@ -287,7 +420,8 @@
captureResult.partialResult = 1;
captureResult.inputBuffer.streamId = -1;
captureResult.physicalCameraMetadata.resize(0);
- captureResult.result = createCaptureResultMetadata(timestamp);
+ captureResult.result = createCaptureResultMetadata(
+ timestamp, request.getRequestSettings(), mReportedSensorSize);
const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
captureResult.outputBuffers.resize(buffers.size());
@@ -316,9 +450,10 @@
}
auto status = streamConfig->format == PixelFormat::BLOB
- ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
- reqBuffer.getBufferId(),
- reqBuffer.getFence())
+ ? renderIntoBlobStreamBuffer(
+ reqBuffer.getStreamId(), reqBuffer.getBufferId(),
+ captureResult.result, request.getRequestSettings(),
+ reqBuffer.getFence())
: renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
reqBuffer.getBufferId(),
reqBuffer.getFence());
@@ -398,9 +533,56 @@
}
}
+std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
+ const Resolution resolution, const int quality) {
+ if (resolution.width == 0 || resolution.height == 0) {
+ ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
+ return {};
+ }
+
+ ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
+ resolution.width, resolution.height, quality);
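+ // For YUV 4:2:0 JPEG the minimum coded unit is 16x16 px, so round the
+ // temporary buffer up to a multiple of 16 in each dimension; only the
+ // thumbnail-sized viewport is rendered and compressed.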
+ Resolution bufferSize = roundTo2DctSize(resolution);
+ std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
+ mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
+ if (framebuffer == nullptr) {
+ ALOGE(
+ "Failed to allocate temporary framebuffer for JPEG thumbnail "
+ "compression");
+ return {};
+ }
+
+ // TODO(b/324383963) Add support for letterboxing if the thumbnail size
+ // doesn't correspond to the input texture aspect ratio.
+ if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
+ Rect(resolution.width, resolution.height))
+ .isOk()) {
+ ALOGE(
+ "Failed to render input texture into temporary framebuffer for JPEG "
+ "thumbnail");
+ return {};
+ }
+
+ std::vector<uint8_t> compressedThumbnail;
+ compressedThumbnail.resize(kJpegThumbnailBufferSize);
+ ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
+ resolution.height);
+ std::optional<size_t> compressedSize =
+ compressJpeg(resolution.width, resolution.height, quality,
+ framebuffer->getHardwareBuffer(), {},
+ compressedThumbnail.size(), compressedThumbnail.data());
+ if (!compressedSize.has_value()) {
+ ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
+ return {};
+ }
+ compressedThumbnail.resize(compressedSize.value());
+ return compressedThumbnail;
+}
+
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
- const int streamId, const int bufferId, sp<Fence> fence) {
- ALOGV("%s", __func__);
+ const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
+ const RequestSettings& requestSettings, sp<Fence> fence) {
std::shared_ptr<AHardwareBuffer> hwBuffer =
mSessionContext.fetchHardwareBuffer(streamId, bufferId);
if (hwBuffer == nullptr) {
@@ -415,74 +597,64 @@
return cameraStatus(Status::INTERNAL_ERROR);
}
+ ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
+ stream->width, stream->height, requestSettings.jpegQuality);
+
// Let's create YUV framebuffer and render the surface into this.
// This will take care about rescaling as well as potential format conversion.
+ // The buffer dimensions need to be rounded up to a multiple of the JPEG DCT
+ // size; however, we pass a viewport matching the stream size so the image
+ // is only rendered into the area corresponding to the stream size.
+ Resolution bufferSize =
+ roundTo2DctSize(Resolution(stream->width, stream->height));
std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
- mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
+ mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
if (framebuffer == nullptr) {
ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
return cameraStatus(Status::INTERNAL_ERROR);
}
// Render into temporary framebuffer.
- ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
+ ndk::ScopedAStatus status = renderIntoEglFramebuffer(
+ *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
if (!status.isOk()) {
ALOGE("Failed to render input texture into temporary framebuffer");
return status;
}
- AHardwareBuffer_Planes planes_info;
-
- int32_t rawFence = fence != nullptr ? fence->get() : -1;
- int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
- AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
- rawFence, nullptr, &planes_info);
- if (result != OK) {
- ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
+ PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
+ fence);
+ if (planesLock.getStatus() != OK) {
return cameraStatus(Status::INTERNAL_ERROR);
}
- std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
- GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
+ std::vector<uint8_t> app1ExifData =
+ createExif(Resolution(stream->width, stream->height), resultMetadata,
+ createThumbnail(requestSettings.thumbnailResolution,
+ requestSettings.thumbnailJpegQuality));
+ std::optional<size_t> compressedSize = compressJpeg(
+ stream->width, stream->height, requestSettings.jpegQuality,
+ framebuffer->getHardwareBuffer(), app1ExifData,
+ stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);
- bool compressionSuccess = true;
- if (gBuffer != nullptr) {
- android_ycbcr ycbcr;
- if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
- // This should never happen since we're allocating the temporary buffer
- // with YUV420 layout above.
- ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
- gBuffer->getPixelFormat());
- AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
- return cameraStatus(Status::INTERNAL_ERROR);
- }
-
- status_t status =
- gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
- ALOGV("Locked buffers");
- if (status != NO_ERROR) {
- AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
- ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
- return cameraStatus(Status::INTERNAL_ERROR);
- }
-
- compressionSuccess =
- compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
- stream->bufferSize, planes_info.planes[0].data);
-
- status_t res = gBuffer->unlock();
- if (res != NO_ERROR) {
- ALOGE("Failed to unlock graphic buffer: %d", res);
- }
- } else {
- compressionSuccess =
- compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
- planes_info.planes[0].data);
+ if (!compressedSize.has_value()) {
+ ALOGE("%s: Failed to compress JPEG image", __func__);
+ return cameraStatus(Status::INTERNAL_ERROR);
}
- AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
- ALOGV("Unlocked buffers");
- return compressionSuccess ? ndk::ScopedAStatus::ok()
- : cameraStatus(Status::INTERNAL_ERROR);
+
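+ // The camera framework expects a CameraBlob transport header at the very end
+ // of the BLOB buffer so it can recover the actual JPEG size.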
+ CameraBlob cameraBlob{
+ .blobId = CameraBlobId::JPEG,
+ .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
+
+ memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
+ (stream->bufferSize - sizeof(cameraBlob)),
+ &cameraBlob, sizeof(cameraBlob));
+
+ ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
+ __func__, compressedSize.value());
+
+ return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
@@ -518,7 +690,7 @@
}
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
- EglFrameBuffer& framebuffer, sp<Fence> fence) {
+ EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
ALOGV("%s", __func__);
// Wait for fence to clear.
if (fence != nullptr && fence->isValid()) {
@@ -532,6 +704,11 @@
mEglDisplayContext->makeCurrent();
framebuffer.beforeDraw();
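+ // Restrict rendering to the requested viewport; the framebuffer may be
+ // larger than the image area (e.g. when rounded up for JPEG MCU alignment).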
+ Rect viewportRect =
+ viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
+ glViewport(viewportRect.leftTop().x, viewportRect.leftTop().y,
+ viewportRect.getWidth(), viewportRect.getHeight());
+
sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
if (textureBuffer == nullptr) {
// If there's no current buffer, nothing was written to the surface and
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index b3aaed8..90757a0 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -17,18 +17,24 @@
#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
+#include <cstdint>
#include <deque>
#include <future>
#include <memory>
#include <thread>
+#include <vector>
+#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "android/binder_auto_utils.h"
#include "util/EglDisplayContext.h"
#include "util/EglFramebuffer.h"
#include "util/EglProgram.h"
#include "util/EglSurfaceTexture.h"
+#include "util/MetadataUtil.h"
+#include "util/Util.h"
namespace android {
namespace companion {
@@ -49,11 +55,23 @@
const sp<Fence> mFence;
};
+struct RequestSettings {
+ int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+ int jpegOrientation = VirtualCameraDevice::kDefaultJpegOrientation;
+ Resolution thumbnailResolution = Resolution(0, 0);
+ int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+ std::optional<FpsRange> fpsRange;
+ camera_metadata_enum_android_control_capture_intent_t captureIntent =
+ VirtualCameraDevice::kDefaultCaptureIntent;
+ std::optional<GpsCoordinates> gpsCoordinates;
+};
+
// Represents single capture request to fill set of buffers.
class ProcessCaptureRequestTask {
public:
ProcessCaptureRequestTask(
- int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers);
+ int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+ const RequestSettings& requestSettings = {});
// Returns frame number corresponding to the request.
int getFrameNumber() const;
@@ -65,9 +83,12 @@
// so it cannot be accessed outside of its lifetime.
const std::vector<CaptureRequestBuffer>& getBuffers() const;
+ const RequestSettings& getRequestSettings() const;
+
private:
const int mFrameNumber;
const std::vector<CaptureRequestBuffer> mBuffers;
+ const RequestSettings mRequestSettings;
};
// Wraps dedicated rendering thread and rendering business with corresponding
@@ -77,14 +98,14 @@
// Create VirtualCameraRenderThread instance:
// * sessionContext - VirtualCameraSessionContext reference for shared access
// to mapped buffers.
- // * inputWidth - requested width of input surface ("virtual camera sensor")
- // * inputHeight - requested height of input surface ("virtual camera sensor")
+ // * inputSurfaceSize - requested size of input surface.
+ // * reportedSensorSize - reported static sensor size of virtual camera.
// * cameraDeviceCallback - callback for corresponding camera instance
// * testMode - when set to true, test pattern is rendered to input surface
// before each capture request is processed to simulate client input.
VirtualCameraRenderThread(
- VirtualCameraSessionContext& sessionContext, int inputWidth,
- int inputHeight,
+ VirtualCameraSessionContext& sessionContext, Resolution inputSurfaceSize,
+ Resolution reportedSensorSize,
std::shared_ptr<
::aidl::android::hardware::camera::device::ICameraDeviceCallback>
cameraDeviceCallback,
@@ -122,13 +143,21 @@
// TODO(b/301023410) - Refactor the actual rendering logic off this class for
// easier testability.
+ // Create a thumbnail with the specified size for the current image.
+ // The compressed thumbnail size is limited to 32 KiB.
+ // Returns a vector with the compressed thumbnail on success,
+ // or an empty vector otherwise.
+ std::vector<uint8_t> createThumbnail(Resolution resolution, int quality);
+
// Render current image to the BLOB buffer.
// If fence is specified, this function will block until the fence is cleared
// before writing to the buffer.
// Always called on render thread.
- ndk::ScopedAStatus renderIntoBlobStreamBuffer(const int streamId,
- const int bufferId,
- sp<Fence> fence = nullptr);
+ ndk::ScopedAStatus renderIntoBlobStreamBuffer(
+ const int streamId, const int bufferId,
+ const ::aidl::android::hardware::camera::device::CameraMetadata&
+ resultMetadata,
+ const RequestSettings& requestSettings, sp<Fence> fence = nullptr);
// Render current image to the YCbCr buffer.
// If fence is specified, this function will block until the fence is cleared
@@ -141,16 +170,17 @@
// If fence is specified, this function will block until the fence is cleared
// before writing to the buffer.
// Always called on the render thread.
- ndk::ScopedAStatus renderIntoEglFramebuffer(EglFrameBuffer& framebuffer,
- sp<Fence> fence = nullptr);
+ ndk::ScopedAStatus renderIntoEglFramebuffer(
+ EglFrameBuffer& framebuffer, sp<Fence> fence = nullptr,
+ std::optional<Rect> viewport = std::nullopt);
// Camera callback
const std::shared_ptr<
::aidl::android::hardware::camera::device::ICameraDeviceCallback>
mCameraDeviceCallback;
- const int mInputSurfaceWidth;
- const int mInputSurfaceHeight;
+ const Resolution mInputSurfaceSize;
+ const Resolution mReportedSensorSize;
const int mTestMode;
VirtualCameraSessionContext& mSessionContext;
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index 1144997..18961c0 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -46,6 +46,10 @@
using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
+// TODO(b/301023410) Make camera id range configurable / dynamic
+// based on already registered devices.
+std::atomic_int VirtualCameraService::sNextId{1000};
+
namespace {
constexpr int kVgaWidth = 640;
@@ -110,6 +114,13 @@
ndk::ScopedAStatus VirtualCameraService::registerCamera(
const ::ndk::SpAIBinder& token,
const VirtualCameraConfiguration& configuration, bool* _aidl_return) {
+ return registerCamera(token, configuration, sNextId++, _aidl_return);
+}
+
+ndk::ScopedAStatus VirtualCameraService::registerCamera(
+ const ::ndk::SpAIBinder& token,
+ const VirtualCameraConfiguration& configuration, const int cameraId,
+ bool* _aidl_return) {
if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
getpid(), getuid(), kCreateVirtualDevicePermission);
@@ -141,7 +152,7 @@
}
std::shared_ptr<VirtualCameraDevice> camera =
- mVirtualCameraProvider->createCamera(configuration);
+ mVirtualCameraProvider->createCamera(configuration, cameraId);
if (camera == nullptr) {
ALOGE("Failed to create camera for binder token 0x%" PRIxPTR,
reinterpret_cast<uintptr_t>(token.get()));
@@ -175,11 +186,12 @@
mVirtualCameraProvider->removeCamera(it->second);
+ mTokenToCameraName.erase(it);
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus VirtualCameraService::getCameraId(
- const ::ndk::SpAIBinder& token, int32_t* _aidl_return) {
+ const ::ndk::SpAIBinder& token, int32_t* _aidl_return) {
if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
getpid(), getuid(), kCreateVirtualDevicePermission);
@@ -188,7 +200,7 @@
if (_aidl_return == nullptr) {
return ndk::ScopedAStatus::fromServiceSpecificError(
- Status::EX_ILLEGAL_ARGUMENT);
+ Status::EX_ILLEGAL_ARGUMENT);
}
auto camera = getCamera(token);
@@ -235,7 +247,15 @@
}
const char* const cmd = args[0];
if (strcmp(kEnableTestCameraCmd, cmd) == 0) {
- enableTestCameraCmd(in, err);
+ int cameraId = 0;
+ if (numArgs > 1 && args[1] != nullptr) {
+ cameraId = atoi(args[1]);
+ }
+ if (cameraId == 0) {
+ cameraId = sNextId++;
+ }
+
+ enableTestCameraCmd(in, err, cameraId);
} else if (strcmp(kDisableTestCameraCmd, cmd) == 0) {
disableTestCameraCmd(in);
} else {
@@ -246,7 +266,8 @@
return STATUS_OK;
}
-void VirtualCameraService::enableTestCameraCmd(const int out, const int err) {
+void VirtualCameraService::enableTestCameraCmd(const int out, const int err,
+ const int cameraId) {
if (mTestCameraToken != nullptr) {
dprintf(out, "Test camera is already enabled (%s).",
getCamera(mTestCameraToken)->getCameraName().c_str());
@@ -263,7 +284,7 @@
Format::YUV_420_888,
.maxFps = kMaxFps});
configuration.lensFacing = LensFacing::EXTERNAL;
- registerCamera(mTestCameraToken, configuration, &ret);
+ registerCamera(mTestCameraToken, configuration, cameraId, &ret);
if (ret) {
dprintf(out, "Successfully registered test camera %s",
getCamera(mTestCameraToken)->getCameraName().c_str());
diff --git a/services/camera/virtualcamera/VirtualCameraService.h b/services/camera/virtualcamera/VirtualCameraService.h
index d573986..d447fc7 100644
--- a/services/camera/virtualcamera/VirtualCameraService.h
+++ b/services/camera/virtualcamera/VirtualCameraService.h
@@ -45,6 +45,13 @@
configuration,
bool* _aidl_return) override EXCLUDES(mLock);
+ // Register camera corresponding to the binder token.
+ ndk::ScopedAStatus registerCamera(
+ const ::ndk::SpAIBinder& token,
+ const ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
+ configuration,
+ int cameraId, bool* _aidl_return) EXCLUDES(mLock);
+
// Unregisters camera corresponding to the binder token.
ndk::ScopedAStatus unregisterCamera(const ::ndk::SpAIBinder& token) override
EXCLUDES(mLock);
@@ -64,7 +71,7 @@
private:
// Create and enable test camera instance if there's none.
- void enableTestCameraCmd(int out, int err);
+ void enableTestCameraCmd(int out, int err, int cameraId);
// Disable and destroy test camera instance if there's one.
void disableTestCameraCmd(int out);
@@ -84,6 +91,9 @@
// Local binder token for test camera instance, or nullptr if there's none.
::ndk::SpAIBinder mTestCameraToken;
+
+ // Numerical id to assign to next created camera.
+ static std::atomic_int sNextId;
};
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index 03d63b8..05fa5ae 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -21,12 +21,14 @@
#include <algorithm>
#include <atomic>
#include <chrono>
+#include <cmath>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <map>
#include <memory>
#include <mutex>
+#include <numeric>
#include <optional>
#include <tuple>
#include <unordered_set>
@@ -38,13 +40,17 @@
#include "VirtualCameraDevice.h"
#include "VirtualCameraRenderThread.h"
#include "VirtualCameraStream.h"
+#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferCache.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureRequest.h"
#include "aidl/android/hardware/camera/device/HalStream.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
+#include "aidl/android/hardware/camera/device/RequestTemplate.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
+#include "aidl/android/hardware/camera/device/Stream.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "aidl/android/hardware/camera/device/StreamConfiguration.h"
#include "aidl/android/hardware/camera/device/StreamRotation.h"
@@ -59,7 +65,7 @@
#include "util/EglFramebuffer.h"
#include "util/EglProgram.h"
#include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
#include "util/TestPatternHelper.h"
#include "util/Util.h"
@@ -69,6 +75,7 @@
using ::aidl::android::companion::virtualcamera::Format;
using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
+using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferCache;
using ::aidl::android::hardware::camera::device::CameraMetadata;
@@ -97,36 +104,75 @@
// Size of request/result metadata fast message queue.
// Setting to 0 to always disables FMQ.
-static constexpr size_t kMetadataMsgQueueSize = 0;
+constexpr size_t kMetadataMsgQueueSize = 0;
// Maximum number of buffers to use per single stream.
-static constexpr size_t kMaxStreamBuffers = 2;
+constexpr size_t kMaxStreamBuffers = 2;
-CameraMetadata createDefaultRequestSettings(RequestTemplate type) {
- hardware::camera::common::V1_0::helper::CameraMetadata metadataHelper;
+// Thumbnail size (0,0) corresponds to a disabled thumbnail.
+const Resolution kDefaultJpegThumbnailSize(0, 0);
- camera_metadata_enum_android_control_capture_intent_t intent =
- ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
+ const RequestTemplate type) {
switch (type) {
case RequestTemplate::PREVIEW:
- intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
- break;
+ return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
case RequestTemplate::STILL_CAPTURE:
- intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
- break;
+ return ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
case RequestTemplate::VIDEO_RECORD:
- intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
- break;
+ return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
case RequestTemplate::VIDEO_SNAPSHOT:
- intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
- break;
+ return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
default:
- // Leave default.
- break;
+ // Return PREVIEW by default
+ return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
}
+}
- auto metadata = MetadataBuilder().setControlCaptureIntent(intent).build();
- return (metadata != nullptr) ? std::move(*metadata) : CameraMetadata();
+int getMaxFps(const std::vector<SupportedStreamConfiguration>& configs) {
+ return std::transform_reduce(
+ configs.begin(), configs.end(), 0,
+ [](const int a, const int b) { return std::max(a, b); },
+ [](const SupportedStreamConfiguration& config) { return config.maxFps; });
+}
+
+CameraMetadata createDefaultRequestSettings(
+ const RequestTemplate type,
+ const std::vector<SupportedStreamConfiguration>& inputConfigs) {
+ int maxFps = getMaxFps(inputConfigs);
+ auto metadata =
+ MetadataBuilder()
+ .setAberrationCorrectionMode(
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+ .setControlCaptureIntent(requestTemplateToIntent(type))
+ .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+ .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
+ .setControlAeExposureCompensation(0)
+ .setControlAeTargetFpsRange(FpsRange{maxFps, maxFps})
+ .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
+ .setControlAePrecaptureTrigger(
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+ .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+ .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+ .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+ .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+ .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
+ .setFlashMode(ANDROID_FLASH_MODE_OFF)
+ .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+ .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
+ .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
+ .setJpegThumbnailSize(0, 0)
+ .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
+ .build();
+ if (metadata == nullptr) {
+ ALOGE("%s: Failed to construct metadata for default request type %s",
+ __func__, toString(type).c_str());
+ return CameraMetadata();
+ } else {
+ ALOGV("%s: Successfully created metadata for request type %s", __func__,
+ toString(type).c_str());
+ }
+ return *metadata;
}
HalStream getHalStream(const Stream& stream) {
@@ -158,6 +204,70 @@
}));
}
+Resolution resolutionFromStream(const Stream& stream) {
+ return Resolution(stream.width, stream.height);
+}
+
+Resolution resolutionFromInputConfig(
+ const SupportedStreamConfiguration& inputConfig) {
+ return Resolution(inputConfig.width, inputConfig.height);
+}
+
+std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
+ const std::vector<Stream>& requestedStreams,
+ const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
+ Stream maxResolutionStream = getHighestResolutionStream(requestedStreams);
+ Resolution maxResolution = resolutionFromStream(maxResolutionStream);
+
+ // Find the best fitting input configuration to satisfy all requested
+ // streams: best fitting => same or higher resolution than the largest
+ // requested stream, same aspect ratio, and the lowest pixel count difference.
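+ // For example, with requested streams of 1280x720 and 640x360 and supported
+ // inputs of 640x360, 1280x720 and 1920x1080, the 1280x720 input is picked:
+ // same 16:9 aspect ratio, no upscaling, smallest pixel count difference.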
+ auto isBetterInputConfig = [maxResolution](
+ const SupportedStreamConfiguration& configA,
+ const SupportedStreamConfiguration& configB) {
+ int maxResPixelCount = maxResolution.width * maxResolution.height;
+ int pixelCountDiffA =
+ std::abs((configA.width * configA.height) - maxResPixelCount);
+ int pixelCountDiffB =
+ std::abs((configB.width * configB.height) - maxResPixelCount);
+
+ return pixelCountDiffA < pixelCountDiffB;
+ };
+
+ std::optional<SupportedStreamConfiguration> bestConfig;
+ for (const SupportedStreamConfiguration& inputConfig : supportedInputConfigs) {
+ Resolution inputConfigResolution = resolutionFromInputConfig(inputConfig);
+ if (inputConfigResolution < maxResolution ||
+ !isApproximatellySameAspectRatio(inputConfigResolution, maxResolution)) {
+ // We don't want to upscale from a lower resolution or use a different
+ // aspect ratio, so skip this configuration.
+ continue;
+ }
+
+ if (!bestConfig.has_value() ||
+ isBetterInputConfig(inputConfig, bestConfig.value())) {
+ bestConfig = inputConfig;
+ }
+ }
+
+ return bestConfig;
+}
+
+RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
+ return RequestSettings{
+ .jpegQuality = getJpegQuality(metadata).value_or(
+ VirtualCameraDevice::kDefaultJpegQuality),
+ .jpegOrientation = getJpegOrientation(metadata),
+ .thumbnailResolution =
+ getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
+ .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
+ VirtualCameraDevice::kDefaultJpegQuality),
+ .fpsRange = getFpsRange(metadata),
+ .captureIntent = getCaptureIntent(metadata).value_or(
+ ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
+ .gpsCoordinates = getGpsCoordinates(metadata)};
+}
+
} // namespace
VirtualCameraSession::VirtualCameraSession(
@@ -223,15 +333,13 @@
halStreams.clear();
halStreams.resize(in_requestedConfiguration.streams.size());
- sp<Surface> inputSurface = nullptr;
- int inputWidth;
- int inputHeight;
-
if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
ALOGE("%s: Requested stream configuration is not supported", __func__);
return cameraStatus(Status::ILLEGAL_ARGUMENT);
}
+ sp<Surface> inputSurface = nullptr;
+ std::optional<SupportedStreamConfiguration> inputConfig;
{
std::lock_guard<std::mutex> lock(mLock);
for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
@@ -241,14 +349,21 @@
}
}
- Stream maxResStream = getHighestResolutionStream(streams);
- inputWidth = maxResStream.width;
- inputHeight = maxResStream.height;
+ inputConfig = pickInputConfigurationForStreams(
+ streams, virtualCamera->getInputConfigs());
+ if (!inputConfig.has_value()) {
+ ALOGE(
+ "%s: Failed to pick any input configuration for stream configuration "
+ "request: %s",
+ __func__, in_requestedConfiguration.toString().c_str());
+ return cameraStatus(Status::ILLEGAL_ARGUMENT);
+ }
if (mRenderThread == nullptr) {
// If there's no client callback, start camera in test mode.
const bool testMode = mVirtualCameraClientCallback == nullptr;
mRenderThread = std::make_unique<VirtualCameraRenderThread>(
- mSessionContext, inputWidth, inputHeight, mCameraDeviceCallback,
+ mSessionContext, resolutionFromInputConfig(*inputConfig),
+ virtualCamera->getMaxInputResolution(), mCameraDeviceCallback,
testMode);
mRenderThread->start();
inputSurface = mRenderThread->getInputSurface();
@@ -261,10 +376,9 @@
// create single texture.
mVirtualCameraClientCallback->onStreamConfigured(
/*streamId=*/0, aidl::android::view::Surface(inputSurface.get()),
- inputWidth, inputHeight, Format::YUV_420_888);
+ inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
}
- mFirstRequest.store(true);
return ndk::ScopedAStatus::ok();
}
@@ -272,12 +386,22 @@
RequestTemplate in_type, CameraMetadata* _aidl_return) {
ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));
+ std::shared_ptr<VirtualCameraDevice> camera = mCameraDevice.lock();
+ if (camera == nullptr) {
+ ALOGW(
+ "%s: constructDefaultRequestSettings called on already unregistered "
+ "camera",
+ __func__);
+ return cameraStatus(Status::CAMERA_DISCONNECTED);
+ }
+
switch (in_type) {
case RequestTemplate::PREVIEW:
case RequestTemplate::STILL_CAPTURE:
case RequestTemplate::VIDEO_RECORD:
case RequestTemplate::VIDEO_SNAPSHOT: {
- *_aidl_return = createDefaultRequestSettings(in_type);
+ *_aidl_return =
+ createDefaultRequestSettings(in_type, camera->getInputConfigs());
return ndk::ScopedAStatus::ok();
}
case RequestTemplate::MANUAL:
@@ -396,13 +520,25 @@
const CaptureRequest& request) {
ALOGD("%s: request: %s", __func__, request.toString().c_str());
- if (mFirstRequest.exchange(false) && request.settings.metadata.empty()) {
- return cameraStatus(Status::ILLEGAL_ARGUMENT);
- }
-
std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
+ RequestSettings requestSettings;
{
std::lock_guard<std::mutex> lock(mLock);
+
+ // If the metadata is empty, the last received metadata applies; if it's
+ // non-empty, update it.
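+ // (Per the camera HAL contract, a capture request may leave settings empty
+ // to reuse the most recently provided settings.)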
+ if (!request.settings.metadata.empty()) {
+ mCurrentRequestMetadata = request.settings;
+ }
+
+ // We don't have any metadata for this request - this means none was
+ // received in the first request, which is an error state.
+ if (mCurrentRequestMetadata.metadata.empty()) {
+ return cameraStatus(Status::ILLEGAL_ARGUMENT);
+ }
+
+ requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);
+
cameraCallback = mCameraDeviceCallback;
}
@@ -436,7 +572,7 @@
return cameraStatus(Status::INTERNAL_ERROR);
}
mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
- request.frameNumber, taskBuffers));
+ request.frameNumber, taskBuffers, requestSettings));
}
if (mVirtualCameraClientCallback != nullptr) {
diff --git a/services/camera/virtualcamera/VirtualCameraSession.h b/services/camera/virtualcamera/VirtualCameraSession.h
index 82a7a34..556314f 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.h
+++ b/services/camera/virtualcamera/VirtualCameraSession.h
@@ -25,6 +25,7 @@
#include "VirtualCameraSessionContext.h"
#include "aidl/android/companion/virtualcamera/IVirtualCameraCallback.h"
#include "aidl/android/hardware/camera/device/BnCameraDeviceSession.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "utils/Mutex.h"
@@ -138,7 +139,8 @@
int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue;
- std::atomic_bool mFirstRequest{true};
+ aidl::android::hardware::camera::device::CameraMetadata mCurrentRequestMetadata
+ GUARDED_BY(mLock);
std::unique_ptr<VirtualCameraRenderThread> mRenderThread GUARDED_BY(mLock);
};
diff --git a/services/camera/virtualcamera/aidl/Android.bp b/services/camera/virtualcamera/aidl/Android.bp
index a9c2195..b3fe3ad 100644
--- a/services/camera/virtualcamera/aidl/Android.bp
+++ b/services/camera/virtualcamera/aidl/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_xr_framework",
// See: http://go/android-license-faq
default_applicable_licenses: ["Android-Apache-2.0"],
}
@@ -23,6 +24,9 @@
cpp: {
enabled: false,
},
+ rust: {
+ enabled: false,
+ },
ndk: {
enabled: true,
additional_shared_libraries: [
@@ -33,6 +37,6 @@
java: {
enabled: true,
platform_apis: true,
- }
+ },
},
}
diff --git a/services/camera/virtualcamera/flags/Android.bp b/services/camera/virtualcamera/flags/Android.bp
new file mode 100644
index 0000000..5fa8852
--- /dev/null
+++ b/services/camera/virtualcamera/flags/Android.bp
@@ -0,0 +1,37 @@
+package {
+ default_team: "trendy_team_xr_framework",
+}
+
+soong_config_module_type {
+ name: "virtual_device_build_flags_cc_defaults",
+ module_type: "cc_defaults",
+ config_namespace: "vdm",
+ bool_variables: [
+ "virtual_camera_service_enabled",
+ ],
+ properties: [
+ "cflags",
+ ],
+}
+
+soong_config_bool_variable {
+ name: "virtual_camera_service_enabled",
+}
+
+virtual_device_build_flags_cc_defaults {
+ name: "virtual_device_build_flags_defaults",
+ soong_config_variables: {
+ virtual_camera_service_enabled: {
+ cflags: ["-DVIRTUAL_CAMERA_SERVICE_ENABLED=1"],
+ },
+ },
+}
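+
+// Modules that include these defaults are built with
+// -DVIRTUAL_CAMERA_SERVICE_ENABLED=1 only when the vdm soong config namespace
+// sets virtual_camera_service_enabled to true; otherwise the macro is left
+// undefined.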
+
+cc_library_static {
+ name: "libvirtualdevicebuildflags",
+ srcs: [
+ "android_companion_virtualdevice_build_flags.cc",
+ ],
+ export_include_dirs: ["."],
+ defaults: ["virtual_device_build_flags_defaults"],
+}
diff --git a/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc
new file mode 100644
index 0000000..5525bc9
--- /dev/null
+++ b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace android {
+namespace companion {
+namespace virtualdevice {
+namespace flags {
+
+bool virtual_camera_service_build_flag() {
+#if VIRTUAL_CAMERA_SERVICE_ENABLED
+ return true;
+#else
+ return false;
+#endif
+}
+
+} // namespace flags
+} // namespace virtualdevice
+} // namespace companion
+} // namespace android
diff --git a/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h
new file mode 100644
index 0000000..718ce9b
--- /dev/null
+++ b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace android {
+namespace companion {
+namespace virtualdevice {
+namespace flags {
+
+// Returns true if the virtual camera service is enabled
+// in the build.
+//
+// TODO(b/309090563) - Deprecate in favor of an auto-generated library to
+// query build flags once available.
+bool virtual_camera_service_build_flag();
+
+} // namespace flags
+} // namespace virtualdevice
+} // namespace companion
+} // namespace android
diff --git a/services/camera/virtualcamera/fuzzer/Android.bp b/services/camera/virtualcamera/fuzzer/Android.bp
index 71e8f50..6a72167 100644
--- a/services/camera/virtualcamera/fuzzer/Android.bp
+++ b/services/camera/virtualcamera/fuzzer/Android.bp
@@ -15,7 +15,8 @@
* limitations under the License.
*
*****************************************************************************/
- package {
+package {
+ default_team: "trendy_team_xr_framework",
// See: http://go/android-license-faq
default_applicable_licenses: ["Android-Apache-2.0"],
}
diff --git a/services/camera/virtualcamera/tests/Android.bp b/services/camera/virtualcamera/tests/Android.bp
index bc46ba0..543cc10 100644
--- a/services/camera/virtualcamera/tests/Android.bp
+++ b/services/camera/virtualcamera/tests/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_xr_framework",
// See: http://go/android-license-faq
default_applicable_licenses: ["Android-Apache-2.0"],
}
@@ -14,11 +15,14 @@
"libgtest",
"libgmock",
],
- srcs: ["EglUtilTest.cc",
- "VirtualCameraDeviceTest.cc",
- "VirtualCameraProviderTest.cc",
- "VirtualCameraRenderThreadTest.cc",
- "VirtualCameraServiceTest.cc",
- "VirtualCameraSessionTest.cc"],
+ srcs: [
+ "EglUtilTest.cc",
+ "JpegUtilTest.cc",
+ "VirtualCameraDeviceTest.cc",
+ "VirtualCameraProviderTest.cc",
+ "VirtualCameraRenderThreadTest.cc",
+ "VirtualCameraServiceTest.cc",
+ "VirtualCameraSessionTest.cc",
+ ],
test_suites: ["device-tests"],
}
diff --git a/services/camera/virtualcamera/tests/JpegUtilTest.cc b/services/camera/virtualcamera/tests/JpegUtilTest.cc
new file mode 100644
index 0000000..e6481f0
--- /dev/null
+++ b/services/camera/virtualcamera/tests/JpegUtilTest.cc
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sys/types.h>
+
+#include "system/graphics.h"
+#define LOG_TAG "JpegUtilTest"
+
+#include <array>
+#include <cstdint>
+#include <cstring>
+
+#include "android/hardware_buffer.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "jpeglib.h"
+#include "util/JpegUtil.h"
+#include "util/Util.h"
+#include "utils/Errors.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+namespace {
+
+using testing::Eq;
+using testing::Gt;
+using testing::Optional;
+using testing::VariantWith;
+
+constexpr int kOutputBufferSize = 1024 * 1024; // 1 MiB.
+constexpr int kJpegQuality = 80;
+
+// Create black YUV420 buffer for testing purposes.
+std::shared_ptr<AHardwareBuffer> createHardwareBufferForTest(const int width,
+ const int height) {
+ const AHardwareBuffer_Desc desc{.width = static_cast<uint32_t>(width),
+ .height = static_cast<uint32_t>(height),
+ .layers = 1,
+ .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
+ .usage = AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+ .stride = 0,
+ .rfu0 = 0,
+ .rfu1 = 0};
+
+ AHardwareBuffer* hwBufferPtr;
+ int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
+ if (status != NO_ERROR) {
+ ALOGE(
+ "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
+ __func__, status);
+ return nullptr;
+ }
+
+ std::shared_ptr<AHardwareBuffer> hwBuffer(hwBufferPtr,
+ AHardwareBuffer_release);
+
+ YCbCrLockGuard yCbCrLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN);
+ const android_ycbcr& ycbr = (*yCbCrLock);
+
+ uint8_t* y = reinterpret_cast<uint8_t*>(ycbr.y);
+ for (int r = 0; r < height; r++) {
+ memset(y + r * ycbr.ystride, 0x00, width);
+ }
+
+ uint8_t* cb = reinterpret_cast<uint8_t*>(ycbr.cb);
+ uint8_t* cr = reinterpret_cast<uint8_t*>(ycbr.cr);
+ for (int r = 0; r < height / 2; r++) {
+ for (int c = 0; c < width / 2; c++) {
+ cb[r * ycbr.cstride + c * ycbr.chroma_step] = 0xff / 2;
+ cr[r * ycbr.cstride + c * ycbr.chroma_step] = 0xff / 2;
+ }
+ }
+
+ return hwBuffer;
+}
+
+// Decodes the JPEG header; returns the image resolution on success or an
+// error message on failure.
+std::variant<std::string, Resolution> verifyHeaderAndGetResolution(
+ const uint8_t* data, int size) {
+ struct jpeg_decompress_struct ctx;
+ struct jpeg_error_mgr jerr;
+
+ struct DecompressionError {
+ bool success = true;
+ std::string error;
+ } result;
+
+ ctx.client_data = &result;
+
+ ctx.err = jpeg_std_error(&jerr);
+ ctx.err->error_exit = [](j_common_ptr cinfo) {
+ reinterpret_cast<DecompressionError*>(cinfo->client_data)->success = false;
+ };
+ ctx.err->output_message = [](j_common_ptr cinfo) {
+ char buffer[JMSG_LENGTH_MAX];
+ (*cinfo->err->format_message)(cinfo, buffer);
+ reinterpret_cast<DecompressionError*>(cinfo->client_data)->error = buffer;
+ ALOGE("libjpeg error: %s", buffer);
+ };
+
+ jpeg_create_decompress(&ctx);
+ jpeg_mem_src(&ctx, data, size);
+ jpeg_read_header(&ctx, /*require_image=*/true);
+
+ if (!result.success) {
+ jpeg_destroy_decompress(&ctx);
+ return result.error;
+ }
+
+ Resolution resolution(ctx.image_width, ctx.image_height);
+ jpeg_destroy_decompress(&ctx);
+ return resolution;
+}
+
+TEST(JpegUtil, roundToDctSize) {
+ EXPECT_THAT(roundTo2DctSize(Resolution(640, 480)), Eq(Resolution(640, 480)));
+ EXPECT_THAT(roundTo2DctSize(Resolution(5, 5)), Eq(Resolution(16, 16)));
+ EXPECT_THAT(roundTo2DctSize(Resolution(32, 32)), Eq(Resolution(32, 32)));
+ EXPECT_THAT(roundTo2DctSize(Resolution(33, 32)), Eq(Resolution(48, 32)));
+ EXPECT_THAT(roundTo2DctSize(Resolution(32, 33)), Eq(Resolution(32, 48)));
+}
+
+class JpegUtilTest : public ::testing::Test {
+ public:
+ void SetUp() override {
+ std::fill(mOutputBuffer.begin(), mOutputBuffer.end(), 0);
+ }
+
+ protected:
+ std::optional<size_t> compress(int imageWidth, int imageHeight,
+ std::shared_ptr<AHardwareBuffer> inBuffer) {
+ return compressJpeg(imageWidth, imageHeight, kJpegQuality, inBuffer,
+ /*app1ExifData=*/{}, mOutputBuffer.size(),
+ mOutputBuffer.data());
+ }
+
+ std::array<uint8_t, kOutputBufferSize> mOutputBuffer;
+};
+
+TEST_F(JpegUtilTest, compressImageSizeAlignedWithDctSucceeds) {
+ std::shared_ptr<AHardwareBuffer> inBuffer =
+ createHardwareBufferForTest(640, 480);
+
+ std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+ EXPECT_THAT(compressedSize, Optional(Gt(0)));
+ EXPECT_THAT(verifyHeaderAndGetResolution(mOutputBuffer.data(),
+ compressedSize.value()),
+ VariantWith<Resolution>(Resolution(640, 480)));
+}
+
+TEST_F(JpegUtilTest, compressImageSizeNotAlignedWithDctSucceeds) {
+ std::shared_ptr<AHardwareBuffer> inBuffer =
+ createHardwareBufferForTest(640, 480);
+
+ std::optional<size_t> compressedSize = compress(630, 470, inBuffer);
+
+ EXPECT_THAT(compressedSize, Optional(Gt(0)));
+ EXPECT_THAT(verifyHeaderAndGetResolution(mOutputBuffer.data(),
+ compressedSize.value()),
+ VariantWith<Resolution>(Resolution(630, 470)));
+}
+
+TEST_F(JpegUtilTest, compressImageWithBufferNotAlignedWithDctFails) {
+ std::shared_ptr<AHardwareBuffer> inBuffer =
+ createHardwareBufferForTest(641, 480);
+
+ std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+ EXPECT_THAT(compressedSize, Eq(std::nullopt));
+}
+
+TEST_F(JpegUtilTest, compressImageWithBufferTooSmallFails) {
+ std::shared_ptr<AHardwareBuffer> inBuffer =
+ createHardwareBufferForTest(634, 464);
+
+ std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+ EXPECT_THAT(compressedSize, Eq(std::nullopt));
+}
+
+} // namespace
+} // namespace virtualcamera
+} // namespace companion
+} // namespace android
diff --git a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
index 027ecb7..ad9d83b 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <algorithm>
+#include <iterator>
#include <memory>
#include "VirtualCameraDevice.h"
@@ -22,11 +24,14 @@
#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/StreamConfiguration.h"
+#include "aidl/android/hardware/graphics/common/PixelFormat.h"
#include "android/binder_interface_utils.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "log/log_main.h"
#include "system/camera_metadata.h"
+#include "util/MetadataUtil.h"
+#include "util/Util.h"
#include "utils/Errors.h"
namespace android {
@@ -44,22 +49,42 @@
using ::aidl::android::hardware::camera::device::StreamConfiguration;
using ::aidl::android::hardware::camera::device::StreamType;
using ::aidl::android::hardware::graphics::common::PixelFormat;
+using ::testing::ElementsAre;
using ::testing::UnorderedElementsAreArray;
using metadata_stream_t =
camera_metadata_enum_android_scaler_available_stream_configurations_t;
constexpr int kCameraId = 42;
+constexpr int kQvgaWidth = 320;
+constexpr int kQvgaHeight = 240;
+constexpr int k360pWidth = 640;
+constexpr int k360pHeight = 360;
constexpr int kVgaWidth = 640;
constexpr int kVgaHeight = 480;
constexpr int kHdWidth = 1280;
constexpr int kHdHeight = 720;
constexpr int kMaxFps = 30;
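+
+// Output stream templates reused by the stream combination tests below.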
+const Stream kVgaYUV420Stream = Stream{
+ .streamType = StreamType::OUTPUT,
+ .width = kVgaWidth,
+ .height = kVgaHeight,
+ .format = PixelFormat::YCBCR_420_888,
+};
+
+const Stream kVgaJpegStream = Stream{
+ .streamType = StreamType::OUTPUT,
+ .width = kVgaWidth,
+ .height = kVgaHeight,
+ .format = PixelFormat::BLOB,
+};
+
struct AvailableStreamConfiguration {
const int width;
const int height;
const int pixelFormat;
- const metadata_stream_t streamConfiguration;
+ const metadata_stream_t streamConfiguration =
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
};
bool operator==(const AvailableStreamConfiguration& a,
@@ -105,10 +130,11 @@
std::vector<AvailableStreamConfiguration> expectedAvailableStreamConfigs;
};
-class VirtualCameraDeviceTest
+class VirtualCameraDeviceCharacteristicsTest
: public testing::TestWithParam<VirtualCameraConfigTestParam> {};
-TEST_P(VirtualCameraDeviceTest, cameraCharacteristicsForInputFormat) {
+TEST_P(VirtualCameraDeviceCharacteristicsTest,
+ cameraCharacteristicsForInputFormat) {
const VirtualCameraConfigTestParam& param = GetParam();
std::shared_ptr<VirtualCameraDevice> camera =
ndk::SharedRefBase::make<VirtualCameraDevice>(kCameraId,
@@ -137,7 +163,7 @@
}
INSTANTIATE_TEST_SUITE_P(
- cameraCharacteristicsForInputFormat, VirtualCameraDeviceTest,
+    cameraCharacteristicsForInputFormat, VirtualCameraDeviceCharacteristicsTest,
testing::Values(
VirtualCameraConfigTestParam{
.inputConfig =
@@ -152,24 +178,33 @@
.lensFacing = LensFacing::FRONT},
.expectedAvailableStreamConfigs =
{AvailableStreamConfiguration{
- .width = kVgaWidth,
- .height = kVgaHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+ AvailableStreamConfiguration{
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+ AvailableStreamConfiguration{
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
AvailableStreamConfiguration{
.width = kVgaWidth,
.height = kVgaHeight,
.pixelFormat =
- ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
AvailableStreamConfiguration{
.width = kVgaWidth,
.height = kVgaHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}}},
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+ AvailableStreamConfiguration{
+ .width = kVgaWidth,
+ .height = kVgaHeight,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB}}},
VirtualCameraConfigTestParam{
.inputConfig =
VirtualCameraConfiguration{
@@ -189,43 +224,141 @@
.lensFacing = LensFacing::BACK},
.expectedAvailableStreamConfigs = {
AvailableStreamConfiguration{
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+ AvailableStreamConfiguration{
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+ AvailableStreamConfiguration{
+ .width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+ AvailableStreamConfiguration{
+                .width = k360pWidth,
+                .height = k360pHeight,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+ AvailableStreamConfiguration{
+                .width = k360pWidth,
+                .height = k360pHeight,
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+ AvailableStreamConfiguration{
+                .width = k360pWidth,
+                .height = k360pHeight,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+ AvailableStreamConfiguration{
.width = kVgaWidth,
.height = kVgaHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
AvailableStreamConfiguration{
.width = kVgaWidth,
.height = kVgaHeight,
.pixelFormat =
- ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
AvailableStreamConfiguration{
.width = kVgaWidth,
.height = kVgaHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+ AvailableStreamConfiguration{
+ .width = 1024,
+ .height = 576,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+ AvailableStreamConfiguration{
+ .width = 1024,
+ .height = 576,
+ .pixelFormat =
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+ AvailableStreamConfiguration{
+ .width = 1024,
+ .height = 576,
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
AvailableStreamConfiguration{
.width = kHdWidth,
.height = kHdHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
AvailableStreamConfiguration{
.width = kHdWidth,
.height = kHdHeight,
.pixelFormat =
- ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+ ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
AvailableStreamConfiguration{
.width = kHdWidth,
.height = kHdHeight,
- .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
- .streamConfiguration =
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}}}));
+ .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB}}}));
+
+class VirtualCameraDeviceTest : public ::testing::Test {
+ public:
+ void SetUp() override {
+ mCamera = ndk::SharedRefBase::make<VirtualCameraDevice>(
+ kCameraId, VirtualCameraConfiguration{
+ .supportedStreamConfigs = {SupportedStreamConfiguration{
+ .width = kVgaWidth,
+ .height = kVgaHeight,
+ .pixelFormat = Format::YUV_420_888,
+ .maxFps = kMaxFps}},
+ .virtualCameraCallback = nullptr,
+ .sensorOrientation = SensorOrientation::ORIENTATION_0,
+ .lensFacing = LensFacing::FRONT});
+ }
+
+ protected:
+ std::shared_ptr<VirtualCameraDevice> mCamera;
+};
+
+TEST_F(VirtualCameraDeviceTest, configureMaximalNumberOfNonStallStreamsSucceeds) {
+ StreamConfiguration config;
+ std::fill_n(std::back_insert_iterator(config.streams),
+ VirtualCameraDevice::kMaxNumberOfProcessedStreams,
+ kVgaYUV420Stream);
+
+ bool aidl_ret;
+ ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+ EXPECT_TRUE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureTooManyNonStallStreamsFails) {
+ StreamConfiguration config;
+ std::fill_n(std::back_insert_iterator(config.streams),
+ VirtualCameraDevice::kMaxNumberOfProcessedStreams + 1,
+ kVgaYUV420Stream);
+
+ bool aidl_ret;
+ ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+ EXPECT_FALSE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureMaximalNumberOfStallStreamsSucceeds) {
+ StreamConfiguration config;
+ std::fill_n(std::back_insert_iterator(config.streams),
+ VirtualCameraDevice::kMaxNumberOfStallStreams, kVgaJpegStream);
+
+ bool aidl_ret;
+ ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+ EXPECT_TRUE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureTooManyStallStreamsFails) {
+ StreamConfiguration config;
+ std::fill_n(std::back_insert_iterator(config.streams),
+ VirtualCameraDevice::kMaxNumberOfStallStreams + 1, kVgaJpegStream);
+
+ bool aidl_ret;
+ ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+ EXPECT_FALSE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, thumbnailSizeWithCompatibleAspectRatio) {
+ CameraMetadata metadata;
+ ASSERT_TRUE(mCamera->getCameraCharacteristics(&metadata).isOk());
+
+  // The camera is configured with VGA input, so we expect a 240 x 180 thumbnail
+  // size in the characteristics, since it has the same aspect ratio.
+ EXPECT_THAT(getJpegAvailableThumbnailSizes(metadata),
+ ElementsAre(Resolution(0, 0), Resolution(240, 180)));
+}
} // namespace
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
index ab647a4..cd64ca5 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
@@ -53,6 +53,7 @@
constexpr int kVgaWidth = 640;
constexpr int kVgaHeight = 480;
constexpr int kMaxFps = 30;
+constexpr int kCameraId = 9999;
constexpr char kVirtualCameraNameRegex[] =
"device@[0-9]+\\.[0-9]+/virtual/[0-9]+";
@@ -118,7 +119,7 @@
ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
std::shared_ptr<VirtualCameraDevice> camera =
- mCameraProvider->createCamera(mInputConfig);
+ mCameraProvider->createCamera(mInputConfig, kCameraId);
EXPECT_THAT(camera, Not(IsNull()));
EXPECT_THAT(camera->getCameraName(), MatchesRegex(kVirtualCameraNameRegex));
@@ -136,7 +137,7 @@
.WillOnce(Return(ndk::ScopedAStatus::ok()));
std::shared_ptr<VirtualCameraDevice> camera =
- mCameraProvider->createCamera(mInputConfig);
+ mCameraProvider->createCamera(mInputConfig, kCameraId);
ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
// Created camera should be in the list of cameras.
@@ -148,7 +149,7 @@
TEST_F(VirtualCameraProviderTest, RemoveCamera) {
ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
std::shared_ptr<VirtualCameraDevice> camera =
- mCameraProvider->createCamera(mInputConfig);
+ mCameraProvider->createCamera(mInputConfig, kCameraId);
EXPECT_CALL(*mMockCameraProviderCallback,
cameraDeviceStatusChange(Eq(camera->getCameraName()),
@@ -165,7 +166,7 @@
TEST_F(VirtualCameraProviderTest, RemoveNonExistingCamera) {
ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
std::shared_ptr<VirtualCameraDevice> camera =
- mCameraProvider->createCamera(mInputConfig);
+ mCameraProvider->createCamera(mInputConfig, kCameraId);
// Removing non-existing camera should fail.
const std::string cameraName = "DefinitelyNoTCamera";
diff --git a/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc b/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
index 5f899b8..ddcb789 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
@@ -33,6 +33,7 @@
#include "android/binder_auto_utils.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
+#include "util/Util.h"
namespace android {
namespace companion {
@@ -62,6 +63,7 @@
constexpr int kInputWidth = 640;
constexpr int kInputHeight = 480;
+const Resolution kInputResolution(kInputWidth, kInputHeight);
Matcher<StreamBuffer> IsStreamBufferWithStatus(const int streamId,
const int bufferId,
@@ -102,7 +104,8 @@
mMockCameraDeviceCallback =
ndk::SharedRefBase::make<MockCameraDeviceCallback>();
mRenderThread = std::make_unique<VirtualCameraRenderThread>(
- *mSessionContext, kInputWidth, kInputHeight, mMockCameraDeviceCallback);
+ *mSessionContext, kInputResolution,
+        /*reportedSensorSize=*/kInputResolution, mMockCameraDeviceCallback);
}
protected:
diff --git a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
index d4d00a2..f729f73 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
@@ -142,6 +142,14 @@
Eq(NO_ERROR));
}
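+  // Invokes the service's shell command handler with the given command and an
+  // optional camera id argument, discarding all output via mDevNullFd.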
+  void execute_shell_command(const std::string cmd,
+                             const std::string cameraId = "") {
+    // Only pass the camera id as a second argument when one was provided.
+    std::array<const char*, 2> args{cmd.data(), cameraId.data()};
+    const size_t numArgs = cameraId.empty() ? 1 : 2;
+    ASSERT_THAT(
+        mCameraService->handleShellCommand(mDevNullFd, mDevNullFd, mDevNullFd,
+                                           args.data(), numArgs),
+        Eq(NO_ERROR));
+  }
+
std::vector<std::string> getCameraIds() {
std::vector<std::string> cameraIds;
EXPECT_TRUE(mCameraProvider->getCameraIdList(&cameraIds).isOk());
@@ -238,17 +246,6 @@
EXPECT_THAT(getCameraIds(), IsEmpty());
}
-TEST_F(VirtualCameraServiceTest, ConfigurationWithUnalignedResolutionFails) {
- bool aidlRet;
- VirtualCameraConfiguration config =
- createConfiguration(641, 481, Format::YUV_420_888, kMaxFps);
-
- ASSERT_FALSE(
- mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
- EXPECT_FALSE(aidlRet);
- EXPECT_THAT(getCameraIds(), IsEmpty());
-}
-
TEST_F(VirtualCameraServiceTest, ConfigurationWithNegativeResolutionFails) {
bool aidlRet;
VirtualCameraConfiguration config =
@@ -381,6 +378,18 @@
EXPECT_THAT(cameraIdsAfterDisable, IsEmpty());
}
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithId) {
+ execute_shell_command("enable_test_camera", "12345");
+
+ std::vector<std::string> cameraIdsAfterEnable = getCameraIds();
+ EXPECT_THAT(cameraIdsAfterEnable, SizeIs(1));
+
+ execute_shell_command("disable_test_camera");
+
+ std::vector<std::string> cameraIdsAfterDisable = getCameraIds();
+ EXPECT_THAT(cameraIdsAfterDisable, IsEmpty());
+}
+
} // namespace
} // namespace virtualcamera
} // namespace companion
diff --git a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
index 446c679..5f313a0 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
@@ -30,13 +30,15 @@
#include "android/binder_interface_utils.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
namespace android {
namespace companion {
namespace virtualcamera {
namespace {
+constexpr int kQvgaWidth = 320;
+constexpr int kQvgaHeight = 240;
constexpr int kVgaWidth = 640;
constexpr int kVgaHeight = 480;
constexpr int kSvgaWidth = 800;
@@ -104,32 +106,13 @@
MOCK_METHOD(ndk::ScopedAStatus, onStreamClosed, (int), (override));
};
-class VirtualCameraSessionTest : public ::testing::Test {
+class VirtualCameraSessionTestBase : public ::testing::Test {
public:
- void SetUp() override {
+ virtual void SetUp() override {
mMockCameraDeviceCallback =
ndk::SharedRefBase::make<MockCameraDeviceCallback>();
mMockVirtualCameraClientCallback =
ndk::SharedRefBase::make<MockVirtualCameraCallback>();
- mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
- kCameraId,
- VirtualCameraConfiguration{
- .supportedStreamConfigs = {SupportedStreamConfiguration{
- .width = kVgaWidth,
- .height = kVgaHeight,
- .pixelFormat = Format::YUV_420_888,
- .maxFps = kMaxFps},
- SupportedStreamConfiguration{
- .width = kSvgaWidth,
- .height = kSvgaHeight,
- .pixelFormat = Format::YUV_420_888,
- .maxFps = kMaxFps}},
- .virtualCameraCallback = nullptr,
- .sensorOrientation = SensorOrientation::ORIENTATION_0,
- .lensFacing = LensFacing::FRONT});
- mVirtualCameraSession = ndk::SharedRefBase::make<VirtualCameraSession>(
- mVirtualCameraDevice, mMockCameraDeviceCallback,
- mMockVirtualCameraClientCallback);
// Explicitly defining default actions below to prevent gmock from
// default-constructing ndk::ScopedAStatus, because default-constructed
@@ -155,6 +138,35 @@
protected:
std::shared_ptr<MockCameraDeviceCallback> mMockCameraDeviceCallback;
std::shared_ptr<MockVirtualCameraCallback> mMockVirtualCameraClientCallback;
+};
+
+class VirtualCameraSessionTest : public VirtualCameraSessionTestBase {
+ public:
+ void SetUp() override {
+ VirtualCameraSessionTestBase::SetUp();
+
+ mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
+ kCameraId,
+ VirtualCameraConfiguration{
+ .supportedStreamConfigs = {SupportedStreamConfiguration{
+ .width = kVgaWidth,
+ .height = kVgaHeight,
+ .pixelFormat = Format::YUV_420_888,
+ .maxFps = kMaxFps},
+ SupportedStreamConfiguration{
+ .width = kSvgaWidth,
+ .height = kSvgaHeight,
+ .pixelFormat = Format::YUV_420_888,
+ .maxFps = kMaxFps}},
+ .virtualCameraCallback = mMockVirtualCameraClientCallback,
+ .sensorOrientation = SensorOrientation::ORIENTATION_0,
+ .lensFacing = LensFacing::FRONT});
+ mVirtualCameraSession = ndk::SharedRefBase::make<VirtualCameraSession>(
+ mVirtualCameraDevice, mMockCameraDeviceCallback,
+ mMockVirtualCameraClientCallback);
+ }
+
+ protected:
std::shared_ptr<VirtualCameraDevice> mVirtualCameraDevice;
std::shared_ptr<VirtualCameraSession> mVirtualCameraSession;
};
@@ -272,6 +284,97 @@
Eq(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT)));
}
+TEST_F(VirtualCameraSessionTest, ConfigureWithDifferentAspectRatioFails) {
+ StreamConfiguration streamConfiguration;
+ streamConfiguration.streams = {
+ createStream(kStreamId, kVgaWidth, kVgaHeight, PixelFormat::YCBCR_420_888),
+ createStream(kSecondStreamId, kVgaHeight, kVgaWidth,
+ PixelFormat::YCBCR_420_888)};
+
+ std::vector<HalStream> halStreams;
+
+  // Expect the configuration attempt to return the ILLEGAL_ARGUMENT
+  // service-specific code.
+ EXPECT_THAT(
+ mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+ .getServiceSpecificError(),
+ Eq(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT)));
+}
+
+class VirtualCameraSessionInputChoiceTest : public VirtualCameraSessionTestBase {
+ public:
+ std::shared_ptr<VirtualCameraSession> createSession(
+ const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
+ mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
+ kCameraId, VirtualCameraConfiguration{
+ .supportedStreamConfigs = supportedInputConfigs,
+ .virtualCameraCallback = mMockVirtualCameraClientCallback,
+ .sensorOrientation = SensorOrientation::ORIENTATION_0,
+ .lensFacing = LensFacing::FRONT});
+ return ndk::SharedRefBase::make<VirtualCameraSession>(
+ mVirtualCameraDevice, mMockCameraDeviceCallback,
+ mMockVirtualCameraClientCallback);
+ }
+
+ protected:
+ std::shared_ptr<VirtualCameraDevice> mVirtualCameraDevice;
+};
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+ configureChoosesCorrectInputStreamForDownsampledOutput) {
+ // Create camera configured to support SVGA YUV input and RGB QVGA input.
+ auto virtualCameraSession = createSession(
+ {SupportedStreamConfiguration{.width = kSvgaWidth,
+ .height = kSvgaHeight,
+ .pixelFormat = Format::YUV_420_888,
+ .maxFps = kMaxFps},
+ SupportedStreamConfiguration{.width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat = Format::RGBA_8888,
+ .maxFps = kMaxFps}});
+
+ // Configure VGA stream. Expect SVGA input to be chosen to downscale from.
+ StreamConfiguration streamConfiguration;
+ streamConfiguration.streams = {createStream(
+ kStreamId, kVgaWidth, kVgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+ std::vector<HalStream> halStreams;
+
+  // Expect the SVGA input stream to be reported to the client callback and the
+  // configuration to succeed.
+ EXPECT_CALL(*mMockVirtualCameraClientCallback,
+ onStreamConfigured(kStreamId, _, kSvgaWidth, kSvgaHeight,
+ Format::YUV_420_888));
+ EXPECT_TRUE(
+ virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+ .isOk());
+}
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+ configureChoosesCorrectInputStreamForMatchingResolution) {
+ // Create camera configured to support SVGA YUV input and RGB QVGA input.
+ auto virtualCameraSession = createSession(
+ {SupportedStreamConfiguration{.width = kSvgaWidth,
+ .height = kSvgaHeight,
+ .pixelFormat = Format::YUV_420_888,
+ .maxFps = kMaxFps},
+ SupportedStreamConfiguration{.width = kQvgaWidth,
+ .height = kQvgaHeight,
+ .pixelFormat = Format::RGBA_8888,
+ .maxFps = kMaxFps}});
+
+  // Configure QVGA stream. Expect the QVGA input with matching resolution to
+  // be chosen.
+ StreamConfiguration streamConfiguration;
+ streamConfiguration.streams = {createStream(
+ kStreamId, kQvgaWidth, kQvgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+ std::vector<HalStream> halStreams;
+
+  // Expect the QVGA RGBA input stream to be reported to the client callback
+  // and the configuration to succeed.
+ EXPECT_CALL(*mMockVirtualCameraClientCallback,
+ onStreamConfigured(kStreamId, _, kQvgaWidth, kQvgaHeight,
+ Format::RGBA_8888));
+ EXPECT_TRUE(
+ virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+ .isOk());
+}
+
} // namespace
} // namespace virtualcamera
} // namespace companion
diff --git a/services/camera/virtualcamera/util/JpegUtil.cc b/services/camera/virtualcamera/util/JpegUtil.cc
index 2b19c13..b034584 100644
--- a/services/camera/virtualcamera/util/JpegUtil.cc
+++ b/services/camera/virtualcamera/util/JpegUtil.cc
@@ -14,19 +14,20 @@
* limitations under the License.
*/
// #define LOG_NDEBUG 0
+#include "system/graphics.h"
#define LOG_TAG "JpegUtil"
-#include "JpegUtil.h"
-
#include <cstddef>
#include <cstdint>
-#include <memory>
+#include <optional>
#include <vector>
+#include "JpegUtil.h"
#include "android/hardware_buffer.h"
#include "jpeglib.h"
#include "log/log.h"
#include "ui/GraphicBuffer.h"
#include "ui/GraphicBufferMapper.h"
+#include "util/Util.h"
#include "utils/Errors.h"
namespace android {
@@ -34,11 +35,11 @@
namespace virtualcamera {
namespace {
-constexpr int kJpegQuality = 80;
+constexpr int k2DCTSIZE = 2 * DCTSIZE;
class LibJpegContext {
public:
- LibJpegContext(int width, int height, const size_t outBufferSize,
+ LibJpegContext(int width, int height, int quality, const size_t outBufferSize,
void* outBuffer)
: mWidth(width),
mHeight(height),
@@ -76,7 +77,7 @@
jpeg_set_defaults(&mCompressStruct);
// Set quality and colorspace.
- jpeg_set_quality(&mCompressStruct, kJpegQuality, 1);
+ jpeg_set_quality(&mCompressStruct, quality, 1);
jpeg_set_colorspace(&mCompressStruct, JCS_YCbCr);
  // Configure RAW input mode - this lets libjpeg know we're providing raw,
@@ -94,11 +95,63 @@
mCompressStruct.comp_info[2].v_samp_factor = 1;
}
- bool compress(const android_ycbcr& ycbr) {
+ LibJpegContext& setApp1Data(const uint8_t* app1Data, const size_t size) {
+ mApp1Data = app1Data;
+ mApp1DataSize = size;
+ return *this;
+ }
+
+ std::optional<size_t> compress(std::shared_ptr<AHardwareBuffer> inBuffer) {
+ GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inBuffer.get());
+
+ if (gBuffer == nullptr) {
+ ALOGE("%s: Input graphic buffer is nullptr", __func__);
+ return std::nullopt;
+ }
+
+ if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ // This should never happen since we're allocating the temporary buffer
+ // with YUV420 layout above.
+ ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
+ gBuffer->getPixelFormat());
+ return std::nullopt;
+ }
+
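+    // Lock the YUV planes of the input buffer for CPU reads for the duration
+    // of this scope.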
+ YCbCrLockGuard yCbCrLock(inBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
+ if (yCbCrLock.getStatus() != OK) {
+ ALOGE("%s: Failed to lock the input buffer: %s", __func__,
+ statusToString(yCbCrLock.getStatus()).c_str());
+ return std::nullopt;
+ }
+ const android_ycbcr& ycbr = *yCbCrLock;
+
+ const int inBufferWidth = gBuffer->getWidth();
+ const int inBufferHeight = gBuffer->getHeight();
+
+ if (inBufferWidth % k2DCTSIZE || (inBufferHeight % k2DCTSIZE)) {
+ ALOGE(
+ "%s: Compressing YUV420 buffer with size %dx%d not aligned with 2 * "
+ "DCTSIZE (%d) is not currently supported.",
+        __func__, inBufferWidth, inBufferHeight, k2DCTSIZE);
+ return std::nullopt;
+ }
+
+ if (inBufferWidth < mWidth || inBufferHeight < mHeight) {
+ ALOGE(
+ "%s: Input buffer has smaller size (%dx%d) than image to be "
+ "compressed (%dx%d)",
+ __func__, inBufferWidth, inBufferHeight, mWidth, mHeight);
+ return std::nullopt;
+ }
+
+ // Chroma planes have 1/2 resolution of the original image.
+ const int cHeight = inBufferHeight / 2;
+ const int cWidth = inBufferWidth / 2;
+
// Prepare arrays of pointers to scanlines of each plane.
- std::vector<JSAMPROW> yLines(mHeight);
- std::vector<JSAMPROW> cbLines(mHeight / 2);
- std::vector<JSAMPROW> crLines(mHeight / 2);
+ std::vector<JSAMPROW> yLines(inBufferHeight);
+ std::vector<JSAMPROW> cbLines(cHeight);
+ std::vector<JSAMPROW> crLines(cHeight);
uint8_t* y = static_cast<uint8_t*>(ycbr.y);
uint8_t* cb = static_cast<uint8_t*>(ycbr.cb);
@@ -107,42 +160,34 @@
// Since UV samples might be interleaved (semiplanar) we need to copy
// them to separate planes, since libjpeg doesn't directly
// support processing semiplanar YUV.
- const int c_samples = (mWidth / 2) * (mHeight / 2);
- std::vector<uint8_t> cb_plane(c_samples);
- std::vector<uint8_t> cr_plane(c_samples);
+ const int cSamples = cWidth * cHeight;
+ std::vector<uint8_t> cb_plane(cSamples);
+ std::vector<uint8_t> cr_plane(cSamples);
// TODO(b/301023410) - Use libyuv or ARM SIMD for "unzipping" the data.
- for (int i = 0; i < c_samples; ++i) {
- cb_plane[i] = *cb;
- cr_plane[i] = *cr;
- cb += ycbr.chroma_step;
- cr += ycbr.chroma_step;
+ int out_idx = 0;
+ for (int i = 0; i < cHeight; ++i) {
+ for (int j = 0; j < cWidth; ++j) {
+ cb_plane[out_idx] = cb[j * ycbr.chroma_step];
+ cr_plane[out_idx] = cr[j * ycbr.chroma_step];
+ out_idx++;
+ }
+ cb += ycbr.cstride;
+ cr += ycbr.cstride;
}
// Collect pointers to individual scanline of each plane.
- for (int i = 0; i < mHeight; ++i) {
+ for (int i = 0; i < inBufferHeight; ++i) {
yLines[i] = y + i * ycbr.ystride;
}
- for (int i = 0; i < (mHeight / 2); ++i) {
- cbLines[i] = cb_plane.data() + i * (mWidth / 2);
- crLines[i] = cr_plane.data() + i * (mWidth / 2);
+ for (int i = 0; i < cHeight; ++i) {
+ cbLines[i] = cb_plane.data() + i * cWidth;
+ crLines[i] = cr_plane.data() + i * cWidth;
}
return compress(yLines, cbLines, crLines);
}
- bool compressBlackImage() {
- // We only really need to prepare one scanline for Y and one shared scanline
- // for Cb & Cr.
- std::vector<uint8_t> yLine(mWidth, 0);
- std::vector<uint8_t> chromaLine(mWidth / 2, 0xff / 2);
-
- std::vector<JSAMPROW> yLines(mHeight, yLine.data());
- std::vector<JSAMPROW> cLines(mHeight / 2, chromaLine.data());
-
- return compress(yLines, cLines, cLines);
- }
-
private:
void setSuccess(const boolean success) {
mSuccess = success;
@@ -165,11 +210,18 @@
// Takes vector of pointers to Y / Cb / Cr scanlines as an input. Length of
// each vector needs to correspond to height of corresponding plane.
//
- // Returns true if compression is successful, false otherwise.
- bool compress(std::vector<JSAMPROW>& yLines, std::vector<JSAMPROW>& cbLines,
- std::vector<JSAMPROW>& crLines) {
+ // Returns size of compressed image in bytes on success, empty optional otherwise.
+ std::optional<size_t> compress(std::vector<JSAMPROW>& yLines,
+ std::vector<JSAMPROW>& cbLines,
+ std::vector<JSAMPROW>& crLines) {
jpeg_start_compress(&mCompressStruct, TRUE);
+ if (mApp1Data != nullptr && mApp1DataSize > 0) {
+ ALOGV("%s: Writing exif, size %zu B", __func__, mApp1DataSize);
+ jpeg_write_marker(&mCompressStruct, JPEG_APP0 + 1,
+ static_cast<const JOCTET*>(mApp1Data), mApp1DataSize);
+ }
+
while (mCompressStruct.next_scanline < mCompressStruct.image_height) {
const uint32_t batchSize = DCTSIZE * 2;
const uint32_t nl = mCompressStruct.next_scanline;
@@ -181,11 +233,11 @@
ALOGE("%s: compressed %u lines, expected %u (total %u/%u)",
__FUNCTION__, done, batchSize, mCompressStruct.next_scanline,
mCompressStruct.image_height);
- return false;
+ return std::nullopt;
}
}
jpeg_finish_compress(&mCompressStruct);
- return mSuccess;
+ return mEncodedSize;
}
// === libjpeg callbacks below ===
@@ -217,6 +269,10 @@
jpeg_error_mgr mErrorMgr;
jpeg_destination_mgr mDestinationMgr;
+ // APP1 data.
+ const uint8_t* mApp1Data = nullptr;
+ size_t mApp1DataSize = 0;
+
// Dimensions of the input image.
int mWidth;
int mHeight;
@@ -233,17 +289,28 @@
boolean mSuccess = true;
};
-} // namespace
-
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
- size_t outBufferSize, void* outBuffer) {
- return LibJpegContext(width, height, outBufferSize, outBuffer).compress(ycbcr);
+int roundTo2DCTMultiple(const int n) {
+ const int mod = n % k2DCTSIZE;
+ return mod == 0 ? n : n + (k2DCTSIZE - mod);
}
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
- void* outBuffer) {
- return LibJpegContext(width, height, outBufferSize, outBuffer)
- .compressBlackImage();
+} // namespace
+
+std::optional<size_t> compressJpeg(const int width, const int height,
+ const int quality,
+ std::shared_ptr<AHardwareBuffer> inBuffer,
+ const std::vector<uint8_t>& app1ExifData,
+ size_t outBufferSize, void* outBuffer) {
+ LibJpegContext context(width, height, quality, outBufferSize, outBuffer);
+ if (!app1ExifData.empty()) {
+ context.setApp1Data(app1ExifData.data(), app1ExifData.size());
+ }
+ return context.compress(inBuffer);
+}
+
+Resolution roundTo2DctSize(const Resolution resolution) {
+ return Resolution(roundTo2DCTMultiple(resolution.width),
+ roundTo2DCTMultiple(resolution.height));
}
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index c44d0a8..184dd56 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -17,24 +17,36 @@
#ifndef ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
#define ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
-#include <memory>
+#include <optional>
#include "android/hardware_buffer.h"
-#include "system/graphics.h"
+#include "util/Util.h"
namespace android {
namespace companion {
namespace virtualcamera {
// Jpeg-compress image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
- size_t outBufferSize, void* outBuffer);
+// * width - width of the image, can be less than width of inBuffer.
+// * height - height of the image, can be less than height of inBuffer.
+// * quality - 0-100, higher number corresponds to higher quality.
+// * inBuffer - input buffer; the dimensions of the buffer must be aligned to
+//     2*DCTSIZE (16) to include the necessary padding in case the width and
+//     height of the image are not aligned with 2*DCTSIZE.
+// * app1ExifData - vector containing data to be included in APP1
+// segment. Can be empty.
+// * outBufferSize - capacity of the output buffer.
+// * outBuffer - output buffer to write compressed data into.
+// Returns size of compressed data if the compression was successful,
+// empty optional otherwise.
+std::optional<size_t> compressJpeg(int width, int height, int quality,
+ std::shared_ptr<AHardwareBuffer> inBuffer,
+ const std::vector<uint8_t>& app1ExifData,
+ size_t outBufferSize, void* outBuffer);
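+//
+// Minimal usage sketch (illustrative only; the input AHardwareBuffer `hwBuffer`
+// and its allocation are assumed to exist elsewhere):
+//
+//   std::vector<uint8_t> out(1024 * 1024);
+//   std::optional<size_t> jpegSize =
+//       compressJpeg(/*width=*/640, /*height=*/480, /*quality=*/80, hwBuffer,
+//                    /*app1ExifData=*/{}, out.size(), out.data());
+//   // On success, the first *jpegSize bytes of `out` hold the JPEG data.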
-// Jpeg-compress all-black image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
- void* outBuffer);
+// Round the resolution up to the closest resolution where both width and
+// height are divisible by 2*DCTSIZE (16).
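+// For example, 641x479 rounds up to 656x480 (with DCTSIZE == 8).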
+Resolution roundTo2DctSize(Resolution resolution);
} // namespace virtualcamera
} // namespace companion
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.cc b/services/camera/virtualcamera/util/MetadataBuilder.cc
deleted file mode 100644
index 70e22be..0000000
--- a/services/camera/virtualcamera/util/MetadataBuilder.cc
+++ /dev/null
@@ -1,338 +0,0 @@
-/*
- * Copyright 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "MetadataBuilder"
-
-#include "MetadataBuilder.h"
-
-#include <algorithm>
-#include <cstdint>
-#include <iterator>
-#include <memory>
-#include <utility>
-#include <variant>
-#include <vector>
-
-#include "CameraMetadata.h"
-#include "aidl/android/hardware/camera/device/CameraMetadata.h"
-#include "log/log.h"
-#include "system/camera_metadata.h"
-#include "utils/Errors.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-namespace {
-
-using ::android::hardware::camera::common::helper::CameraMetadata;
-
-template <typename To, typename From>
-std::vector<To> convertTo(const std::vector<From>& from) {
- std::vector<To> to;
- to.reserve(from.size());
- std::transform(from.begin(), from.end(), std::back_inserter(to),
- [](const From& x) { return static_cast<To>(x); });
- return to;
-}
-
-} // namespace
-
-MetadataBuilder& MetadataBuilder::setSupportedHardwareLevel(
- camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
- mEntryMap[ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL] =
- std::vector<uint8_t>({static_cast<uint8_t>(hwLevel)});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setFlashAvailable(bool flashAvailable) {
- const uint8_t metadataVal = flashAvailable
- ? ANDROID_FLASH_INFO_AVAILABLE_TRUE
- : ANDROID_FLASH_INFO_AVAILABLE_FALSE;
- mEntryMap[ANDROID_FLASH_INFO_AVAILABLE] = std::vector<uint8_t>({metadataVal});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setLensFacing(
- camera_metadata_enum_android_lens_facing lensFacing) {
- mEntryMap[ANDROID_LENS_FACING] =
- std::vector<uint8_t>({static_cast<uint8_t>(lensFacing)});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorOrientation(int32_t sensorOrientation) {
- mEntryMap[ANDROID_SENSOR_ORIENTATION] =
- std::vector<int32_t>({sensorOrientation});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorTimestamp(
- std::chrono::nanoseconds timestamp) {
- mEntryMap[ANDROID_SENSOR_TIMESTAMP] =
- std::vector<int64_t>({timestamp.count()});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableFaceDetectModes(
- const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
- faceDetectModes) {
- mEntryMap[ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES] =
- convertTo<uint8_t>(faceDetectModes);
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAvailableModes(
- const std::vector<camera_metadata_enum_android_control_mode_t>&
- availableModes) {
- mEntryMap[ANDROID_CONTROL_AVAILABLE_MODES] =
- convertTo<uint8_t>(availableModes);
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfAvailableModes(
- const std::vector<camera_metadata_enum_android_control_af_mode_t>&
- availableModes) {
- mEntryMap[ANDROID_CONTROL_AF_AVAILABLE_MODES] =
- convertTo<uint8_t>(availableModes);
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfMode(
- const camera_metadata_enum_android_control_af_mode_t mode) {
- mEntryMap[ANDROID_CONTROL_AF_MODE] =
- std::vector<uint8_t>({static_cast<uint8_t>(mode)});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeAvailableFpsRange(
- const int32_t minFps, const int32_t maxFps) {
- mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES] =
- std::vector<int32_t>({minFps, maxFps});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlMaxRegions(int32_t maxAeRegions,
- int32_t maxAwbRegions,
- int32_t maxAfRegions) {
- mEntryMap[ANDROID_CONTROL_MAX_REGIONS] =
- std::vector<int32_t>({maxAeRegions, maxAwbRegions, maxAfRegions});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeRegions(
- const std::vector<ControlRegion>& aeRegions) {
- std::vector<int32_t> regions;
- regions.reserve(5 * aeRegions.size());
- for (const ControlRegion& region : aeRegions) {
- regions.push_back(region.x0);
- regions.push_back(region.y0);
- regions.push_back(region.x1);
- regions.push_back(region.y1);
- regions.push_back(region.weight);
- }
- mEntryMap[ANDROID_CONTROL_AE_REGIONS] = std::move(regions);
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfRegions(
- const std::vector<ControlRegion>& afRegions) {
- std::vector<int32_t> regions;
- regions.reserve(5 * afRegions.size());
- for (const ControlRegion& region : afRegions) {
- regions.push_back(region.x0);
- regions.push_back(region.y0);
- regions.push_back(region.x1);
- regions.push_back(region.y1);
- regions.push_back(region.weight);
- }
- mEntryMap[ANDROID_CONTROL_AF_REGIONS] = std::move(regions);
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAwbRegions(
- const std::vector<ControlRegion>& awbRegions) {
- std::vector<int32_t> regions;
- regions.reserve(5 * awbRegions.size());
- for (const ControlRegion& region : awbRegions) {
- regions.push_back(region.x0);
- regions.push_back(region.y0);
- regions.push_back(region.x1);
- regions.push_back(region.y1);
- regions.push_back(region.weight);
- }
- mEntryMap[ANDROID_CONTROL_AWB_REGIONS] = std::move(regions);
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlCaptureIntent(
- const camera_metadata_enum_android_control_capture_intent_t intent) {
- mEntryMap[ANDROID_CONTROL_CAPTURE_INTENT] =
- std::vector<uint8_t>({static_cast<uint8_t>(intent)});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setMaxJpegSize(const int32_t size) {
- mEntryMap[ANDROID_JPEG_MAX_SIZE] = std::vector<int32_t>({size});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableOutputStreamConfigurations(
- const std::vector<StreamConfiguration>& streamConfigurations) {
- std::vector<int32_t> metadataStreamConfigs;
- std::vector<int64_t> metadataMinFrameDurations;
- std::vector<int64_t> metadataStallDurations;
- metadataStreamConfigs.reserve(streamConfigurations.size());
- metadataMinFrameDurations.reserve(streamConfigurations.size());
- metadataStallDurations.reserve(streamConfigurations.size());
-
- for (const auto& config : streamConfigurations) {
- metadataStreamConfigs.push_back(config.format);
- metadataStreamConfigs.push_back(config.width);
- metadataStreamConfigs.push_back(config.height);
- metadataStreamConfigs.push_back(
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
-
- metadataMinFrameDurations.push_back(config.format);
- metadataMinFrameDurations.push_back(config.width);
- metadataMinFrameDurations.push_back(config.height);
- metadataMinFrameDurations.push_back(config.minFrameDuration.count());
-
- metadataStallDurations.push_back(config.format);
- metadataStallDurations.push_back(config.width);
- metadataStallDurations.push_back(config.height);
- metadataStallDurations.push_back(config.minStallDuration.count());
- }
-
- mEntryMap[ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS] =
- metadataStreamConfigs;
- mEntryMap[ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS] =
- metadataMinFrameDurations;
- mEntryMap[ANDROID_SCALER_AVAILABLE_STALL_DURATIONS] = metadataStallDurations;
-
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableMaxDigitalZoom(const float maxZoom) {
- mEntryMap[ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM] =
- std::vector<float>(maxZoom);
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlZoomRatioRange(const float min,
- const float max) {
- mEntryMap[ANDROID_CONTROL_ZOOM_RATIO_RANGE] = std::vector<float>({min, max});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorActiveArraySize(int x0, int y0,
- int x1, int y1) {
- mEntryMap[ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE] =
- std::vector<int32_t>({x0, y0, x1, y1});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeCompensationRange(int32_t min,
- int32_t max) {
- mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_RANGE] =
- std::vector<int32_t>({min, max});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeCompensationStep(
- const camera_metadata_rational step) {
- mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_STEP] =
- std::vector<camera_metadata_rational>({step});
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableRequestKeys(
- const std::vector<int32_t>& keys) {
- mEntryMap[ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS] = keys;
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableResultKeys(
- const std::vector<int32_t>& keys) {
- mEntryMap[ANDROID_REQUEST_AVAILABLE_RESULT_KEYS] = keys;
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCapabilities(
- const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
- capabilities) {
- mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
- convertTo<uint8_t>(capabilities);
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys(
- const std::vector<camera_metadata_tag_t>& keys) {
- mEntryMap[ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS] =
- convertTo<int32_t>(keys);
- return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys() {
- std::vector<camera_metadata_tag_t> availableKeys;
- availableKeys.reserve(mEntryMap.size());
- for (const auto& [key, _] : mEntryMap) {
- if (key != ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS) {
- availableKeys.push_back(key);
- }
- }
- setAvailableCharacteristicKeys(availableKeys);
- return *this;
-}
-
-std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
-MetadataBuilder::build() const {
- CameraMetadata metadataHelper;
- for (const auto& entry : mEntryMap) {
- status_t ret = std::visit(
- [&](auto&& arg) {
- return metadataHelper.update(entry.first, arg.data(), arg.size());
- },
- entry.second);
- if (ret != NO_ERROR) {
- ALOGE("Failed to update metadata with key %d - %s: %s", entry.first,
- get_camera_metadata_tag_name(entry.first),
- ::android::statusToString(ret).c_str());
- return nullptr;
- }
- }
-
- const camera_metadata_t* metadata = metadataHelper.getAndLock();
- if (metadata == nullptr) {
- ALOGE(
- "Failure when constructing metadata -> CameraMetadata helper returned "
- "nullptr");
- return nullptr;
- }
-
- auto aidlMetadata =
- std::make_unique<aidl::android::hardware::camera::device::CameraMetadata>();
- const uint8_t* data_ptr = reinterpret_cast<const uint8_t*>(metadata);
- aidlMetadata->metadata.assign(data_ptr,
- data_ptr + get_camera_metadata_size(metadata));
- metadataHelper.unlock(metadata);
-
- return aidlMetadata;
-}
-
-} // namespace virtualcamera
-} // namespace companion
-} // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.h b/services/camera/virtualcamera/util/MetadataBuilder.h
deleted file mode 100644
index 46f4c43..0000000
--- a/services/camera/virtualcamera/util/MetadataBuilder.h
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * Copyright 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
-#define ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
-
-#include <chrono>
-#include <cstdint>
-#include <map>
-#include <memory>
-#include <variant>
-#include <vector>
-
-#include "aidl/android/hardware/camera/device/CameraMetadata.h"
-#include "system/camera_metadata.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-// Convenience builder for the
-// aidl::android::hardware::camera::device::CameraMetadata.
-//
-// Calling the same builder setter multiple will overwrite the value.
-// This class is not thread-safe.
-class MetadataBuilder {
- public:
- struct StreamConfiguration {
- int32_t width = 0;
- int32_t height = 0;
- int32_t format = 0;
- // Minimal frame duration - corresponds to maximal FPS for given format.
- // See ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS in CameraMetadataTag.aidl.
- std::chrono::nanoseconds minFrameDuration{0};
- // Minimal stall duration.
- // See ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
- std::chrono::nanoseconds minStallDuration{0};
- };
-
- struct ControlRegion {
- int32_t x0 = 0;
- int32_t y0 = 0;
- int32_t x1 = 0;
- int32_t y1 = 0;
- int32_t weight = 0;
- };
-
- MetadataBuilder() = default;
- ~MetadataBuilder() = default;
-
- // See ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL in CameraMetadataTag.aidl.
- MetadataBuilder& setSupportedHardwareLevel(
- camera_metadata_enum_android_info_supported_hardware_level_t hwLevel);
-
- // Whether this camera device has a flash unit
- // See ANDROID_FLASH_INFO_AVAILABLE in CameraMetadataTag.aidl.
- MetadataBuilder& setFlashAvailable(bool flashAvailable);
-
- // See ANDROID_LENS_FACING in CameraMetadataTag.aidl.
- MetadataBuilder& setLensFacing(
- camera_metadata_enum_android_lens_facing lensFacing);
-
- // See ANDROID_SENSOR_ORIENTATION in CameraMetadataTag.aidl.
- MetadataBuilder& setSensorOrientation(int32_t sensorOrientation);
-
- // Time at start of exposure of first row of the image
- // sensor active array, in nanoseconds.
- //
- // See ANDROID_SENSOR_TIMESTAMP in CameraMetadataTag.aidl.
- MetadataBuilder& setSensorTimestamp(std::chrono::nanoseconds timestamp);
-
- // See ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE in CameraMetadataTag.aidl.
- MetadataBuilder& setSensorActiveArraySize(int x0, int y0, int x1, int y1);
-
- // See ANDROID_STATISTICS_FACE_DETECT_MODE in CameraMetadataTag.aidl.
- MetadataBuilder& setAvailableFaceDetectModes(
- const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
- faceDetectMode);
-
- // Sets available stream configurations along with corresponding minimal frame
- // durations (corresponding to max fps) and stall durations.
- //
- // See ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
- // ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS and
- // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
- MetadataBuilder& setAvailableOutputStreamConfigurations(
- const std::vector<StreamConfiguration>& streamConfigurations);
-
- // See ANDROID_CONTROL_AVAILABLE_MODES in CameraMetadataTag.aidl.
- MetadataBuilder& setControlAvailableModes(
- const std::vector<camera_metadata_enum_android_control_mode_t>&
- availableModes);
-
- // See ANDROID_CONTROL_AE_COMPENSATION_RANGE in CameraMetadataTag.aidl.
- MetadataBuilder& setControlAeCompensationRange(int32_t min, int32_t max);
-
- // See ANDROID_CONTROL_AE_COMPENSATION_STEP in CameraMetadataTag.aidl.
- MetadataBuilder& setControlAeCompensationStep(camera_metadata_rational step);
-
- // See ANDROID_CONTROL_AF_AVAILABLE_MODES in CameraMetadataTag.aidl.
- MetadataBuilder& setControlAfAvailableModes(
- const std::vector<camera_metadata_enum_android_control_af_mode_t>&
- availableModes);
-
- // See ANDROID_CONTROL_AF_MODE in CameraMetadataTag.aidl.
- MetadataBuilder& setControlAfMode(
- const camera_metadata_enum_android_control_af_mode_t mode);
-
- // See ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES in CameraMetadataTag.aidl.
- MetadataBuilder& setControlAeAvailableFpsRange(int32_t min, int32_t max);
-
- // See ANDROID_CONTROL_CAPTURE_INTENT in CameraMetadataTag.aidl.
- MetadataBuilder& setControlCaptureIntent(
- camera_metadata_enum_android_control_capture_intent_t intent);
-
- // See ANDROID_CONTROL_MAX_REGIONS in CameraMetadataTag.aidl.
- MetadataBuilder& setControlMaxRegions(int32_t maxAeRegions,
- int32_t maxAwbRegions,
- int32_t maxAfRegions);
-
- // See ANDROID_CONTROL_AE_REGIONS in CameraMetadataTag.aidl.
- MetadataBuilder& setControlAeRegions(
- const std::vector<ControlRegion>& aeRegions);
-
- // See ANDROID_CONTROL_AWB_REGIONS in CameraMetadataTag.aidl.
- MetadataBuilder& setControlAwbRegions(
- const std::vector<ControlRegion>& awbRegions);
-
- // See ANDROID_CONTROL_AF_REGIONS in CameraMetadataTag.aidl.
- MetadataBuilder& setControlAfRegions(
- const std::vector<ControlRegion>& afRegions);
-
- // The size of the compressed JPEG image, in bytes.
- //
- // See ANDROID_JPEG_SIZE in CameraMetadataTag.aidl.
- MetadataBuilder& setMaxJpegSize(int32_t size);
-
- // See ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM in CameraMetadataTag.aidl.
- MetadataBuilder& setAvailableMaxDigitalZoom(const float maxZoom);
-
- // See ANDROID_CONTROL_ZOOM_RATIO_RANGE in CameraMetadataTag.aidl.
- MetadataBuilder& setControlZoomRatioRange(float min, float max);
-
- // A list of all keys that the camera device has available to use with
- // CaptureRequest.
- //
- // See ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
- MetadataBuilder& setAvailableRequestKeys(const std::vector<int32_t>& keys);
-
- // A list of all keys that the camera device has available to use with
- // CaptureResult.
- //
- // See ANDROID_RESULT_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
- MetadataBuilder& setAvailableResultKeys(const std::vector<int32_t>& keys);
-
- // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
- MetadataBuilder& setAvailableCapabilities(
- const std::vector<
- camera_metadata_enum_android_request_available_capabilities_t>&
- capabilities);
-
- // A list of all keys that the camera device has available to use.
- //
- // See ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS in CameraMetadataTag.aidl.
- MetadataBuilder& setAvailableCharacteristicKeys(
- const std::vector<camera_metadata_tag_t>& keys);
-
- // Extends metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
- // containing all previously set tags.
- MetadataBuilder& setAvailableCharacteristicKeys();
-
- // Build CameraMetadata instance.
- //
- // Returns nullptr in case something went wrong.
- std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
- build() const;
-
- private:
- // Maps metadata tags to vectors of values for the given tag.
- std::map<camera_metadata_tag_t,
- std::variant<std::vector<int64_t>, std::vector<int32_t>,
- std::vector<uint8_t>, std::vector<float>,
- std::vector<camera_metadata_rational_t>>>
- mEntryMap;
-};
-
-} // namespace virtualcamera
-} // namespace companion
-} // namespace android
-
-#endif // ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
diff --git a/services/camera/virtualcamera/util/MetadataUtil.cc b/services/camera/virtualcamera/util/MetadataUtil.cc
new file mode 100644
index 0000000..c7dc80e
--- /dev/null
+++ b/services/camera/virtualcamera/util/MetadataUtil.cc
@@ -0,0 +1,908 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MetadataUtil"
+
+#include "MetadataUtil.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <iterator>
+#include <memory>
+#include <optional>
+#include <string>
+#include <utility>
+#include <variant>
+#include <vector>
+
+#include "CameraMetadata.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "log/log.h"
+#include "system/camera_metadata.h"
+#include "util/Util.h"
+#include "utils/Errors.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+namespace {
+
+using ::android::hardware::camera::common::helper::CameraMetadata;
+
+template <typename To, typename From>
+std::vector<To> convertTo(const std::vector<From>& from) {
+ std::vector<To> to;
+ to.reserve(from.size());
+ std::transform(from.begin(), from.end(), std::back_inserter(to),
+ [](const From& x) { return static_cast<To>(x); });
+ return to;
+}
+
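+// Wraps a single value in a one-element vector of the requested type; camera
+// metadata entries are stored as arrays even for scalar tags.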
+template <typename To, typename From>
+std::vector<To> asVectorOf(const From from) {
+ return std::vector<To>({static_cast<To>(from)});
+}
+
+} // namespace
+
+MetadataBuilder& MetadataBuilder::setSupportedHardwareLevel(
+ const camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
+ mEntryMap[ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL] =
+ asVectorOf<uint8_t>(hwLevel);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashAvailable(bool flashAvailable) {
+ const uint8_t metadataVal = flashAvailable
+ ? ANDROID_FLASH_INFO_AVAILABLE_TRUE
+ : ANDROID_FLASH_INFO_AVAILABLE_FALSE;
+ mEntryMap[ANDROID_FLASH_INFO_AVAILABLE] = asVectorOf<uint8_t>(metadataVal);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashState(
+ const camera_metadata_enum_android_flash_state_t flashState) {
+ mEntryMap[ANDROID_FLASH_STATE] = asVectorOf<uint8_t>(flashState);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashMode(
+ const camera_metadata_enum_android_flash_mode_t flashMode) {
+ mEntryMap[ANDROID_FLASH_MODE] = asVectorOf<uint8_t>(flashMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setLensFacing(
+ const camera_metadata_enum_android_lens_facing lensFacing) {
+ mEntryMap[ANDROID_LENS_FACING] = asVectorOf<uint8_t>(lensFacing);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorReadoutTimestamp(
+ const camera_metadata_enum_android_sensor_readout_timestamp_t
+ sensorReadoutTimestamp) {
+ mEntryMap[ANDROID_SENSOR_READOUT_TIMESTAMP] =
+ asVectorOf<uint8_t>(sensorReadoutTimestamp);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableFocalLengths(
+ const std::vector<float>& focalLengths) {
+ mEntryMap[ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS] = focalLengths;
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFocalLength(float focalLength) {
+ mEntryMap[ANDROID_LENS_FOCAL_LENGTH] = asVectorOf<float>(focalLength);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorOrientation(int32_t sensorOrientation) {
+ mEntryMap[ANDROID_SENSOR_ORIENTATION] = asVectorOf<int32_t>(sensorOrientation);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorTimestampSource(
+ const camera_metadata_enum_android_sensor_info_timestamp_source_t
+ timestampSource) {
+ mEntryMap[ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE] =
+ asVectorOf<uint8_t>(timestampSource);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorTimestamp(
+ std::chrono::nanoseconds timestamp) {
+ mEntryMap[ANDROID_SENSOR_TIMESTAMP] = asVectorOf<int64_t>(timestamp.count());
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableFaceDetectModes(
+ const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
+ faceDetectModes) {
+ mEntryMap[ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES] =
+ convertTo<uint8_t>(faceDetectModes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableTestPatternModes(
+ const std::vector<camera_metadata_enum_android_sensor_test_pattern_mode>&
+ testPatternModes) {
+ mEntryMap[ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES] =
+ convertTo<int32_t>(testPatternModes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFaceDetectMode(
+ const camera_metadata_enum_android_statistics_face_detect_mode_t
+ faceDetectMode) {
+ mEntryMap[ANDROID_STATISTICS_FACE_DETECT_MODE] =
+ asVectorOf<uint8_t>(faceDetectMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableModes(
+ const std::vector<camera_metadata_enum_android_control_mode_t>&
+ availableModes) {
+ mEntryMap[ANDROID_CONTROL_AVAILABLE_MODES] =
+ convertTo<uint8_t>(availableModes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlMode(
+ const camera_metadata_enum_android_control_mode_t mode) {
+ mEntryMap[ANDROID_CONTROL_MODE] = asVectorOf<uint8_t>(mode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableSceneModes(
+ const std::vector<camera_metadata_enum_android_control_scene_mode>&
+ availableSceneModes) {
+ mEntryMap[ANDROID_CONTROL_AVAILABLE_SCENE_MODES] =
+ convertTo<uint8_t>(availableSceneModes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlSceneMode(
+ const camera_metadata_enum_android_control_scene_mode sceneMode) {
+ mEntryMap[ANDROID_CONTROL_SCENE_MODE] = asVectorOf<uint8_t>(sceneMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableEffects(
+ const std::vector<camera_metadata_enum_android_control_effect_mode>&
+ availableEffects) {
+ mEntryMap[ANDROID_CONTROL_AVAILABLE_EFFECTS] =
+ convertTo<uint8_t>(availableEffects);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlEffectMode(
+ const camera_metadata_enum_android_control_effect_mode_t effectMode) {
+ mEntryMap[ANDROID_CONTROL_EFFECT_MODE] = asVectorOf<uint8_t>(effectMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableVideoStabilizationModes(
+ const std::vector<
+ camera_metadata_enum_android_control_video_stabilization_mode_t>&
+ videoStabilizationModes) {
+ mEntryMap[ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES] =
+ convertTo<uint8_t>(videoStabilizationModes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlVideoStabilizationMode(
+ const camera_metadata_enum_android_control_video_stabilization_mode
+ stabilizationMode) {
+ mEntryMap[ANDROID_CONTROL_VIDEO_STABILIZATION_MODE] =
+ asVectorOf<uint8_t>(stabilizationMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfAvailableModes(
+ const std::vector<camera_metadata_enum_android_control_af_mode_t>&
+ availableModes) {
+ mEntryMap[ANDROID_CONTROL_AF_AVAILABLE_MODES] =
+ convertTo<uint8_t>(availableModes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfMode(
+ const camera_metadata_enum_android_control_af_mode_t mode) {
+ mEntryMap[ANDROID_CONTROL_AF_MODE] = asVectorOf<uint8_t>(mode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfState(
+ const camera_metadata_enum_android_control_af_state afState) {
+ mEntryMap[ANDROID_CONTROL_AF_STATE] = asVectorOf<uint8_t>(afState);
+ return *this;
+}
+
+// See ANDROID_CONTROL_AF_TRIGGER in CameraMetadataTag.aidl.
+MetadataBuilder& MetadataBuilder::setControlAfTrigger(
+ const camera_metadata_enum_android_control_af_trigger_t trigger) {
+ mEntryMap[ANDROID_CONTROL_AF_TRIGGER] = asVectorOf<uint8_t>(trigger);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableFpsRanges(
+ const std::vector<FpsRange>& fpsRanges) {
+ std::vector<int32_t> ranges;
+ ranges.reserve(2 * fpsRanges.size());
+ for (const FpsRange fpsRange : fpsRanges) {
+ ranges.push_back(fpsRange.minFps);
+ ranges.push_back(fpsRange.maxFps);
+ }
+ mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES] = std::move(ranges);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeTargetFpsRange(
+ const FpsRange fpsRange) {
+ mEntryMap[ANDROID_CONTROL_AE_TARGET_FPS_RANGE] =
+ std::vector<int32_t>({fpsRange.minFps, fpsRange.maxFps});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeMode(
+ const camera_metadata_enum_android_control_ae_mode_t mode) {
+ mEntryMap[ANDROID_CONTROL_AE_MODE] = asVectorOf<uint8_t>(mode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableModes(
+ const std::vector<camera_metadata_enum_android_control_ae_mode_t>& modes) {
+ mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_MODES] = convertTo<uint8_t>(modes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAePrecaptureTrigger(
+ const camera_metadata_enum_android_control_ae_precapture_trigger_t trigger) {
+ mEntryMap[ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER] =
+ asVectorOf<uint8_t>(trigger);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlMaxRegions(int32_t maxAeRegions,
+ int32_t maxAwbRegions,
+ int32_t maxAfRegions) {
+ mEntryMap[ANDROID_CONTROL_MAX_REGIONS] =
+ std::vector<int32_t>({maxAeRegions, maxAwbRegions, maxAfRegions});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableAwbModes(
+ const std::vector<camera_metadata_enum_android_control_awb_mode>& awbModes) {
+ mEntryMap[ANDROID_CONTROL_AWB_AVAILABLE_MODES] = convertTo<uint8_t>(awbModes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbMode(
+ const camera_metadata_enum_android_control_awb_mode awbMode) {
+ mEntryMap[ANDROID_CONTROL_AWB_MODE] = asVectorOf<uint8_t>(awbMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbState(
+ const camera_metadata_enum_android_control_awb_state awbState) {
+ mEntryMap[ANDROID_CONTROL_AWB_STATE] = asVectorOf<uint8_t>(awbState);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbLockAvailable(
+ const bool awbLockAvailable) {
+ const uint8_t lockAvailable = awbLockAvailable
+ ? ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE
+ : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
+ mEntryMap[ANDROID_CONTROL_AWB_LOCK_AVAILABLE] =
+ std::vector<uint8_t>({lockAvailable});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbLock(
+ const camera_metadata_enum_android_control_awb_lock awbLock) {
+ mEntryMap[ANDROID_CONTROL_AWB_LOCK] = asVectorOf<uint8_t>(awbLock);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableAntibandingModes(
+ const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
+ antibandingModes) {
+ mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES] =
+ convertTo<uint8_t>(antibandingModes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAntibandingMode(
+ const camera_metadata_enum_android_control_ae_antibanding_mode_t
+ antibandingMode) {
+ mEntryMap[ANDROID_CONTROL_AE_ANTIBANDING_MODE] =
+ asVectorOf<uint8_t>(antibandingMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeLockAvailable(
+ const bool aeLockAvailable) {
+ const uint8_t lockAvailable = aeLockAvailable
+ ? ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE
+ : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
+ mEntryMap[ANDROID_CONTROL_AE_LOCK_AVAILABLE] =
+ asVectorOf<uint8_t>(lockAvailable);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeLock(
+ const camera_metadata_enum_android_control_ae_lock aeLock) {
+ mEntryMap[ANDROID_CONTROL_AE_LOCK] = asVectorOf<uint8_t>(aeLock);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeRegions(
+ const std::vector<ControlRegion>& aeRegions) {
+ std::vector<int32_t> regions;
+ regions.reserve(5 * aeRegions.size());
+ for (const ControlRegion& region : aeRegions) {
+ regions.push_back(region.x0);
+ regions.push_back(region.y0);
+ regions.push_back(region.x1);
+ regions.push_back(region.y1);
+ regions.push_back(region.weight);
+ }
+ mEntryMap[ANDROID_CONTROL_AE_REGIONS] = std::move(regions);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfRegions(
+ const std::vector<ControlRegion>& afRegions) {
+ std::vector<int32_t> regions;
+ regions.reserve(5 * afRegions.size());
+ for (const ControlRegion& region : afRegions) {
+ regions.push_back(region.x0);
+ regions.push_back(region.y0);
+ regions.push_back(region.x1);
+ regions.push_back(region.y1);
+ regions.push_back(region.weight);
+ }
+ mEntryMap[ANDROID_CONTROL_AF_REGIONS] = std::move(regions);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbRegions(
+ const std::vector<ControlRegion>& awbRegions) {
+ std::vector<int32_t> regions;
+ regions.reserve(5 * awbRegions.size());
+ for (const ControlRegion& region : awbRegions) {
+ regions.push_back(region.x0);
+ regions.push_back(region.y0);
+ regions.push_back(region.x1);
+ regions.push_back(region.y1);
+ regions.push_back(region.weight);
+ }
+ mEntryMap[ANDROID_CONTROL_AWB_REGIONS] = std::move(regions);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlCaptureIntent(
+ const camera_metadata_enum_android_control_capture_intent_t intent) {
+ mEntryMap[ANDROID_CONTROL_CAPTURE_INTENT] = asVectorOf<uint8_t>(intent);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setCropRegion(const int32_t x, const int32_t y,
+ const int32_t width,
+ const int32_t height) {
+ mEntryMap[ANDROID_SCALER_CROP_REGION] =
+ std::vector<int32_t>({x, y, width, height});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxJpegSize(const int32_t size) {
+ mEntryMap[ANDROID_JPEG_MAX_SIZE] = asVectorOf<int32_t>(size);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxFrameDuration(
+ const std::chrono::nanoseconds duration) {
+ mEntryMap[ANDROID_SENSOR_INFO_MAX_FRAME_DURATION] =
+ asVectorOf<int64_t>(duration.count());
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegAvailableThumbnailSizes(
+ const std::vector<Resolution>& thumbnailSizes) {
+ std::vector<int32_t> sizes;
+ sizes.reserve(thumbnailSizes.size() * 2);
+ for (const Resolution& resolution : thumbnailSizes) {
+ sizes.push_back(resolution.width);
+ sizes.push_back(resolution.height);
+ }
+ mEntryMap[ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES] = std::move(sizes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegGpsCoordinates(
+ const GpsCoordinates& gpsCoordinates) {
+ mEntryMap[ANDROID_JPEG_GPS_COORDINATES] =
+ std::vector<double>({gpsCoordinates.latitude, gpsCoordinates.longitude,
+ gpsCoordinates.altitude});
+
+ if (!gpsCoordinates.provider.empty()) {
+ mEntryMap[ANDROID_JPEG_GPS_PROCESSING_METHOD] = std::vector<uint8_t>{
+ gpsCoordinates.provider.begin(), gpsCoordinates.provider.end()};
+ }
+
+ if (gpsCoordinates.timestamp.has_value()) {
+ mEntryMap[ANDROID_JPEG_GPS_TIMESTAMP] =
+ asVectorOf<int64_t>(gpsCoordinates.timestamp.value());
+ }
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegOrientation(const int32_t orientation) {
+ mEntryMap[ANDROID_JPEG_ORIENTATION] = asVectorOf<int32_t>(orientation);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegQuality(const uint8_t quality) {
+ mEntryMap[ANDROID_JPEG_QUALITY] = asVectorOf<uint8_t>(quality);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailSize(const int width,
+ const int height) {
+ mEntryMap[ANDROID_JPEG_THUMBNAIL_SIZE] = std::vector<int32_t>({width, height});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailQuality(const uint8_t quality) {
+ mEntryMap[ANDROID_JPEG_THUMBNAIL_QUALITY] = asVectorOf<uint8_t>(quality);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxNumberOutputStreams(
+ const int32_t maxRawStreams, const int32_t maxProcessedStreams,
+ const int32_t maxStallStreams) {
+ mEntryMap[ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS] = std::vector<int32_t>(
+ {maxRawStreams, maxProcessedStreams, maxStallStreams});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSyncMaxLatency(
+ const camera_metadata_enum_android_sync_max_latency latency) {
+ mEntryMap[ANDROID_SYNC_MAX_LATENCY] = asVectorOf<int32_t>(latency);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setPipelineMaxDepth(const uint8_t maxDepth) {
+ mEntryMap[ANDROID_REQUEST_PIPELINE_MAX_DEPTH] = asVectorOf<uint8_t>(maxDepth);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setPipelineDepth(const uint8_t depth) {
+ mEntryMap[ANDROID_REQUEST_PIPELINE_DEPTH] = asVectorOf<uint8_t>(depth);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableRequestCapabilities(
+ const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
+ requestCapabilities) {
+ mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
+ convertTo<uint8_t>(requestCapabilities);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableOutputStreamConfigurations(
+ const std::vector<StreamConfiguration>& streamConfigurations) {
+ std::vector<int32_t> metadataStreamConfigs;
+ std::vector<int64_t> metadataMinFrameDurations;
+ std::vector<int64_t> metadataStallDurations;
+ metadataStreamConfigs.reserve(streamConfigurations.size());
+ metadataMinFrameDurations.reserve(streamConfigurations.size());
+ metadataStallDurations.reserve(streamConfigurations.size());
+
+ for (const auto& config : streamConfigurations) {
+ metadataStreamConfigs.push_back(config.format);
+ metadataStreamConfigs.push_back(config.width);
+ metadataStreamConfigs.push_back(config.height);
+ metadataStreamConfigs.push_back(
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
+
+ metadataMinFrameDurations.push_back(config.format);
+ metadataMinFrameDurations.push_back(config.width);
+ metadataMinFrameDurations.push_back(config.height);
+ metadataMinFrameDurations.push_back(config.minFrameDuration.count());
+
+ metadataStallDurations.push_back(config.format);
+ metadataStallDurations.push_back(config.width);
+ metadataStallDurations.push_back(config.height);
+ metadataStallDurations.push_back(config.minStallDuration.count());
+ }
+
+ mEntryMap[ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS] =
+ std::move(metadataStreamConfigs);
+ mEntryMap[ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS] =
+ std::move(metadataMinFrameDurations);
+ mEntryMap[ANDROID_SCALER_AVAILABLE_STALL_DURATIONS] =
+ std::move(metadataStallDurations);
+
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableAberrationCorrectionModes(
+ const std::vector<camera_metadata_enum_android_color_correction_aberration_mode>&
+        aberrationCorrectionModes) {
+  mEntryMap[ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES] =
+      convertTo<uint8_t>(aberrationCorrectionModes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAberrationCorrectionMode(
+ const camera_metadata_enum_android_color_correction_aberration_mode
+ aberrationCorrectionMode) {
+ mEntryMap[ANDROID_COLOR_CORRECTION_ABERRATION_MODE] =
+ asVectorOf<uint8_t>(aberrationCorrectionMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableNoiseReductionModes(
+ const std::vector<camera_metadata_enum_android_noise_reduction_mode>&
+ noiseReductionModes) {
+ mEntryMap[ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES] =
+ convertTo<uint8_t>(noiseReductionModes);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setNoiseReductionMode(
+ const camera_metadata_enum_android_noise_reduction_mode noiseReductionMode) {
+ mEntryMap[ANDROID_NOISE_REDUCTION_MODE] =
+ asVectorOf<uint8_t>(noiseReductionMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setRequestPartialResultCount(
+ const int partialResultCount) {
+ mEntryMap[ANDROID_REQUEST_PARTIAL_RESULT_COUNT] =
+ asVectorOf<int32_t>(partialResultCount);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setCroppingType(
+ const camera_metadata_enum_android_scaler_cropping_type croppingType) {
+ mEntryMap[ANDROID_SCALER_CROPPING_TYPE] = asVectorOf<uint8_t>(croppingType);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxFaceCount(const int maxFaceCount) {
+ mEntryMap[ANDROID_STATISTICS_INFO_MAX_FACE_COUNT] =
+ asVectorOf<int32_t>(maxFaceCount);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableMaxDigitalZoom(const float maxZoom) {
+ mEntryMap[ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM] =
+ asVectorOf<float>(maxZoom);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlZoomRatioRange(const float min,
+ const float max) {
+ mEntryMap[ANDROID_CONTROL_ZOOM_RATIO_RANGE] = std::vector<float>({min, max});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorActiveArraySize(int x0, int y0,
+ int x1, int y1) {
+ mEntryMap[ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE] =
+ std::vector<int32_t>({x0, y0, x1, y1});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorPixelArraySize(int width,
+ int height) {
+ mEntryMap[ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE] =
+ std::vector<int32_t>({width, height});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorPhysicalSize(float width,
+ float height) {
+ mEntryMap[ANDROID_SENSOR_INFO_PHYSICAL_SIZE] =
+ std::vector<float>({width, height});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeCompensationRange(int32_t min,
+ int32_t max) {
+ mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_RANGE] =
+ std::vector<int32_t>({min, max});
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeCompensationStep(
+ const camera_metadata_rational step) {
+ mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_STEP] =
+ asVectorOf<camera_metadata_rational>(step);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeExposureCompensation(
+ const int32_t exposureCompensation) {
+ mEntryMap[ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION] =
+ asVectorOf<int32_t>(exposureCompensation);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeState(
+ const camera_metadata_enum_android_control_ae_state aeState) {
+ mEntryMap[ANDROID_CONTROL_AE_STATE] = asVectorOf<uint8_t>(aeState);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsSceneFlicker(
+ const camera_metadata_enum_android_statistics_scene_flicker sceneFlicker) {
+ mEntryMap[ANDROID_STATISTICS_SCENE_FLICKER] =
+ asVectorOf<uint8_t>(sceneFlicker);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsHotPixelMapMode(
+ const camera_metadata_enum_android_statistics_hot_pixel_map_mode
+ hotPixelMapMode) {
+ mEntryMap[ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE] =
+ asVectorOf<uint8_t>(hotPixelMapMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsLensShadingMapMode(
+ const camera_metadata_enum_android_statistics_lens_shading_map_mode
+ lensShadingMapMode) {
+ mEntryMap[ANDROID_STATISTICS_LENS_SHADING_MAP_MODE] =
+ asVectorOf<uint8_t>(lensShadingMapMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setLensOpticalStabilizationMode(
+ const camera_metadata_enum_android_lens_optical_stabilization_mode_t
+ opticalStabilizationMode) {
+ mEntryMap[ANDROID_LENS_OPTICAL_STABILIZATION_MODE] =
+ asVectorOf<uint8_t>(opticalStabilizationMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableRequestKeys(
+ const std::vector<int32_t>& keys) {
+ mEntryMap[ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS] = keys;
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableResultKeys(
+ const std::vector<int32_t>& keys) {
+ mEntryMap[ANDROID_REQUEST_AVAILABLE_RESULT_KEYS] = keys;
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCapabilities(
+ const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
+ capabilities) {
+ mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
+ convertTo<uint8_t>(capabilities);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys(
+ const std::vector<camera_metadata_tag_t>& keys) {
+ mEntryMap[ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS] =
+ convertTo<int32_t>(keys);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys() {
+ mExtendWithAvailableCharacteristicsKeys = true;
+ return *this;
+}
+
+std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
+MetadataBuilder::build() {
+ if (mExtendWithAvailableCharacteristicsKeys) {
+ std::vector<camera_metadata_tag_t> availableKeys;
+ availableKeys.reserve(mEntryMap.size());
+ for (const auto& [key, _] : mEntryMap) {
+ if (key != ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS) {
+ availableKeys.push_back(key);
+ }
+ }
+ setAvailableCharacteristicKeys(availableKeys);
+ }
+
+ CameraMetadata metadataHelper;
+ for (const auto& entry : mEntryMap) {
+ status_t ret = std::visit(
+ [&](auto&& arg) {
+ return metadataHelper.update(entry.first, arg.data(), arg.size());
+ },
+ entry.second);
+ if (ret != NO_ERROR) {
+ ALOGE("Failed to update metadata with key %d - %s: %s", entry.first,
+ get_camera_metadata_tag_name(entry.first),
+ ::android::statusToString(ret).c_str());
+ return nullptr;
+ }
+ }
+
+ const camera_metadata_t* metadata = metadataHelper.getAndLock();
+ if (metadata == nullptr) {
+ ALOGE(
+ "Failure when constructing metadata -> CameraMetadata helper returned "
+ "nullptr");
+ return nullptr;
+ }
+
+ auto aidlMetadata =
+ std::make_unique<aidl::android::hardware::camera::device::CameraMetadata>();
+ const uint8_t* data_ptr = reinterpret_cast<const uint8_t*>(metadata);
+ aidlMetadata->metadata.assign(data_ptr,
+ data_ptr + get_camera_metadata_size(metadata));
+ metadataHelper.unlock(metadata);
+
+ return aidlMetadata;
+}
+
+std::optional<int32_t> getJpegQuality(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_QUALITY, &entry) !=
+ OK) {
+ return std::nullopt;
+ }
+
+  return *entry.data.u8;
+}
+
+int32_t getJpegOrientation(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_ORIENTATION,
+ &entry) != OK) {
+ return 0;
+ }
+
+ return *entry.data.i32;
+}
+
+std::optional<Resolution> getJpegThumbnailSize(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_SIZE,
+ &entry) != OK) {
+ return std::nullopt;
+ }
+
+ return Resolution(entry.data.i32[0], entry.data.i32[1]);
+}
+
+std::optional<int32_t> getJpegThumbnailQuality(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_QUALITY,
+ &entry) != OK) {
+ return std::nullopt;
+ }
+
+  return *entry.data.u8;
+}
+
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(
+ metadata, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, &entry) != OK) {
+ return {};
+ }
+
+ std::vector<Resolution> thumbnailSizes;
+ thumbnailSizes.reserve(entry.count / 2);
+  for (size_t i = 0; i < entry.count; i += 2) {
+ thumbnailSizes.emplace_back(entry.data.i32[i], entry.data.i32[i + 1]);
+ }
+ return thumbnailSizes;
+}
+
+std::optional<FpsRange> getFpsRange(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(
+ metadata, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry) != OK ||
+ entry.count != 2) {
+ return {};
+ }
+
+ FpsRange range{.minFps = entry.data.i32[0], .maxFps = entry.data.i32[1]};
+ return range;
+}
+
+std::optional<camera_metadata_enum_android_control_capture_intent>
+getCaptureIntent(const aidl::android::hardware::camera::device::CameraMetadata&
+ cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_CAPTURE_INTENT,
+ &entry) != OK) {
+ return {};
+ }
+
+ return static_cast<camera_metadata_enum_android_control_capture_intent>(
+ entry.data.u8[0]);
+}
+
+std::optional<GpsCoordinates> getGpsCoordinates(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_GPS_COORDINATES,
+ &entry) != OK) {
+ return std::nullopt;
+ }
+
+ GpsCoordinates coordinates{.latitude = entry.data.d[0],
+ .longitude = entry.data.d[1],
+ .altitude = entry.data.d[2]};
+
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_GPS_TIMESTAMP,
+ &entry) == OK) {
+ coordinates.timestamp = entry.data.i64[0];
+ }
+
+  // According to types.hal, the string describing the GPS processing method
+  // is at most 32 characters long.
+  static constexpr size_t kGpsProviderStringLength = 32;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_JPEG_GPS_PROCESSING_METHOD, &entry) == OK) {
+    coordinates.provider.assign(
+        reinterpret_cast<const char*>(entry.data.u8),
+        std::min(entry.count, kGpsProviderStringLength));
+ }
+
+ return coordinates;
+}
+
+} // namespace virtualcamera
+} // namespace companion
+} // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataUtil.h b/services/camera/virtualcamera/util/MetadataUtil.h
new file mode 100644
index 0000000..5d16506
--- /dev/null
+++ b/services/camera/virtualcamera/util/MetadataUtil.h
@@ -0,0 +1,476 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
+
+#include <chrono>
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <optional>
+#include <variant>
+#include <vector>
+
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "system/camera_metadata.h"
+#include "util/Util.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Convenience builder for the
+// aidl::android::hardware::camera::device::CameraMetadata.
+//
+// Calling the same builder setter multiple times overwrites the previously
+// set value.
+// This class is not thread-safe.
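+//
+// Illustrative usage sketch (the tag values below are arbitrary examples, not
+// recommended defaults):
+//
+//   std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
+//       metadata = MetadataBuilder()
+//                      .setSupportedHardwareLevel(
+//                          ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
+//                      .setSensorOrientation(0)
+//                      .setAvailableCharacteristicKeys()
+//                      .build();
+//   if (metadata == nullptr) {
+//     // Construction failed; see the log for the offending tag.
+//   }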
+class MetadataBuilder {
+ public:
+ struct StreamConfiguration {
+ int32_t width = 0;
+ int32_t height = 0;
+ int32_t format = 0;
+    // Minimum frame duration, corresponding to the maximum FPS for the given
+    // format.
+    // See ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS in CameraMetadataTag.aidl.
+    std::chrono::nanoseconds minFrameDuration{0};
+    // Minimum stall duration.
+ // See ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
+ std::chrono::nanoseconds minStallDuration{0};
+ };
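+
+  // Illustrative example of filling in a single configuration (the values are
+  // hypothetical, not defaults):
+  //
+  //   MetadataBuilder::StreamConfiguration config;
+  //   config.width = 640;
+  //   config.height = 480;
+  //   config.format = HAL_PIXEL_FORMAT_YCBCR_420_888;
+  //   config.minFrameDuration = std::chrono::nanoseconds(1'000'000'000 / 30);
+  //   config.minStallDuration = std::chrono::nanoseconds(0);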
+
+ struct ControlRegion {
+ int32_t x0 = 0;
+ int32_t y0 = 0;
+ int32_t x1 = 0;
+ int32_t y1 = 0;
+ int32_t weight = 0;
+ };
+
+ MetadataBuilder() = default;
+ ~MetadataBuilder() = default;
+
+ // See ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL in CameraMetadataTag.aidl.
+ MetadataBuilder& setSupportedHardwareLevel(
+ camera_metadata_enum_android_info_supported_hardware_level_t hwLevel);
+
+ // Whether this camera device has a flash unit
+ // See ANDROID_FLASH_INFO_AVAILABLE in CameraMetadataTag.aidl.
+ MetadataBuilder& setFlashAvailable(bool flashAvailable);
+
+ // See FLASH_STATE in CaptureResult.java.
+ MetadataBuilder& setFlashState(
+ camera_metadata_enum_android_flash_state_t flashState);
+
+ // See FLASH_MODE in CaptureRequest.java.
+ MetadataBuilder& setFlashMode(
+ camera_metadata_enum_android_flash_mode_t flashMode);
+
+ // See ANDROID_LENS_FACING in CameraMetadataTag.aidl.
+ MetadataBuilder& setLensFacing(
+ camera_metadata_enum_android_lens_facing lensFacing);
+
+ // See ANDROID_SENSOR_READOUT_TIMESTAMP in CameraMetadataTag.aidl.
+ MetadataBuilder& setSensorReadoutTimestamp(
+ camera_metadata_enum_android_sensor_readout_timestamp_t
+ sensorReadoutTimestamp);
+
+ // See ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS in CameraMetadataTag.aidl.
+ MetadataBuilder& setAvailableFocalLengths(
+ const std::vector<float>& focalLengths);
+
+ // See ANDROID_LENS_FOCAL_LENGTH in CameraMetadataTag.aidl.
+ MetadataBuilder& setFocalLength(float focalLength);
+
+ // See ANDROID_SENSOR_ORIENTATION in CameraMetadataTag.aidl.
+ MetadataBuilder& setSensorOrientation(int32_t sensorOrientation);
+
+ // Time at start of exposure of first row of the image
+ // sensor active array, in nanoseconds.
+ //
+ // See ANDROID_SENSOR_TIMESTAMP in CameraMetadataTag.aidl.
+ MetadataBuilder& setSensorTimestamp(std::chrono::nanoseconds timestamp);
+
+  // See SENSOR_INFO_TIMESTAMP_SOURCE in CameraCharacteristics.java.
+ MetadataBuilder& setSensorTimestampSource(
+ camera_metadata_enum_android_sensor_info_timestamp_source_t timestampSource);
+
+ // See ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE in CameraMetadataTag.aidl.
+ MetadataBuilder& setSensorActiveArraySize(int x0, int y0, int x1, int y1);
+
+ // See ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE in CameraMetadataTag.aidl.
+ MetadataBuilder& setSensorPixelArraySize(int width, int height);
+
+ // See ANDROID_SENSOR_INFO_PHYSICAL_SIZE in CameraMetadataTag.aidl.
+ MetadataBuilder& setSensorPhysicalSize(float width, float height);
+
+  // See ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES in
+  // CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableFaceDetectModes(
+      const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
+          faceDetectModes);
+
+ // See SENSOR_AVAILABLE_TEST_PATTERN_MODES in CameraCharacteristics.java.
+ MetadataBuilder& setAvailableTestPatternModes(
+ const std::vector<camera_metadata_enum_android_sensor_test_pattern_mode>&
+ testPatternModes);
+
+ // See ANDROID_STATISTICS_FACE_DETECT_MODE in CaptureRequest.java.
+ MetadataBuilder& setFaceDetectMode(
+ camera_metadata_enum_android_statistics_face_detect_mode_t faceDetectMode);
+
+ // Sets available stream configurations along with corresponding minimal frame
+ // durations (corresponding to max fps) and stall durations.
+ //
+ // See ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+ // ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS and
+ // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
+ MetadataBuilder& setAvailableOutputStreamConfigurations(
+ const std::vector<StreamConfiguration>& streamConfigurations);
+
+ // See COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES in CameraCharacteristics.java.
+ MetadataBuilder& setAvailableAberrationCorrectionModes(
+ const std::vector<
+ camera_metadata_enum_android_color_correction_aberration_mode>&
+          aberrationCorrectionModes);
+
+ // See COLOR_CORRECTION_ABERRATION_MODE in CaptureRequest.java.
+ MetadataBuilder& setAberrationCorrectionMode(
+ camera_metadata_enum_android_color_correction_aberration_mode
+ aberrationCorrectionMode);
+
+ // See NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES in CameraCharacteristics.java.
+ MetadataBuilder& setAvailableNoiseReductionModes(
+ const std::vector<camera_metadata_enum_android_noise_reduction_mode>&
+ noiseReductionModes);
+
+ // See NOISE_REDUCTION_MODE in CaptureRequest.java.
+ MetadataBuilder& setNoiseReductionMode(
+ camera_metadata_enum_android_noise_reduction_mode noiseReductionMode);
+
+ // See REQUEST_PARTIAL_RESULT_COUNT in CameraCharacteristics.java.
+ MetadataBuilder& setRequestPartialResultCount(int partialResultCount);
+
+ // See SCALER_CROPPING_TYPE in CameraCharacteristics.java.
+ MetadataBuilder& setCroppingType(
+ camera_metadata_enum_android_scaler_cropping_type croppingType);
+
+  // See STATISTICS_INFO_MAX_FACE_COUNT in CameraCharacteristics.java.
+ MetadataBuilder& setMaxFaceCount(int maxFaceCount);
+
+ // See ANDROID_CONTROL_AVAILABLE_MODES in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAvailableModes(
+ const std::vector<camera_metadata_enum_android_control_mode_t>&
+ availableModes);
+
+ // See ANDROID_CONTROL_MODE in CaptureRequest.java.
+ MetadataBuilder& setControlMode(
+ camera_metadata_enum_android_control_mode_t mode);
+
+ // See ANDROID_CONTROL_AVAILABLE_SCENE_MODES in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAvailableSceneModes(
+ const std::vector<camera_metadata_enum_android_control_scene_mode>&
+ availableSceneModes);
+
+ // See ANDROID_CONTROL_SCENE_MODE in CameraMetadataTag.aidl
+ MetadataBuilder& setControlSceneMode(
+ camera_metadata_enum_android_control_scene_mode sceneMode);
+
+ // See ANDROID_CONTROL_AVAILABLE_EFFECTS in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAvailableEffects(
+ const std::vector<camera_metadata_enum_android_control_effect_mode>&
+ availableEffects);
+
+ // See CONTROL_EFFECT_MODE in CaptureRequest.java.
+ MetadataBuilder& setControlEffectMode(
+ camera_metadata_enum_android_control_effect_mode_t effectMode);
+
+ // See ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAvailableVideoStabilizationModes(
+ const std::vector<
+ camera_metadata_enum_android_control_video_stabilization_mode_t>&
+ videoStabilizationModes);
+
+ // See ANDROID_CONTROL_VIDEO_STABILIZATION_MODE in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlVideoStabilizationMode(
+ camera_metadata_enum_android_control_video_stabilization_mode
+ stabilizationMode);
+
+ // See CONTROL_AE_AVAILABLE_ANTIBANDING_MODES in CameraCharacteristics.java.
+ MetadataBuilder& setControlAeAvailableAntibandingModes(
+ const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
+ antibandingModes);
+
+ // See CONTROL_AE_ANTIBANDING_MODE in CaptureRequest.java.
+ MetadataBuilder& setControlAeAntibandingMode(
+ camera_metadata_enum_android_control_ae_antibanding_mode_t antibandingMode);
+
+ // See ANDROID_CONTROL_AE_COMPENSATION_RANGE in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAeCompensationRange(int32_t min, int32_t max);
+
+ // See ANDROID_CONTROL_AE_COMPENSATION_STEP in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAeCompensationStep(camera_metadata_rational step);
+
+ // See ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAeExposureCompensation(int32_t exposureCompensation);
+
+ // See ANDROID_CONTROL_AE_AVAILABLE_MODES in CameraCharacteristics.java.
+ MetadataBuilder& setControlAeAvailableModes(
+ const std::vector<camera_metadata_enum_android_control_ae_mode_t>& modes);
+
+ // See ANDROID_CONTROL_AE_MODE in CaptureRequest.java.
+ MetadataBuilder& setControlAeMode(
+      camera_metadata_enum_android_control_ae_mode_t mode);
+
+ // See ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER in CaptureRequest.java.
+ MetadataBuilder& setControlAePrecaptureTrigger(
+ camera_metadata_enum_android_control_ae_precapture_trigger_t trigger);
+
+ // See ANDROID_CONTROL_AF_AVAILABLE_MODES in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAfAvailableModes(
+ const std::vector<camera_metadata_enum_android_control_af_mode_t>&
+ availableModes);
+
+ // See ANDROID_CONTROL_AF_MODE in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAfMode(
+ const camera_metadata_enum_android_control_af_mode_t mode);
+
+  // See ANDROID_CONTROL_AF_TRIGGER in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAfTrigger(
+ const camera_metadata_enum_android_control_af_trigger_t trigger);
+
+ // See ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAeAvailableFpsRanges(
+ const std::vector<FpsRange>& fpsRanges);
+
+ // See ANDROID_CONTROL_AE_TARGET_FPS_RANGE in CaptureRequest.java.
+ MetadataBuilder& setControlAeTargetFpsRange(FpsRange fpsRange);
+
+ // See ANDROID_CONTROL_CAPTURE_INTENT in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlCaptureIntent(
+ camera_metadata_enum_android_control_capture_intent_t intent);
+
+ // See ANDROID_CONTROL_MAX_REGIONS in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlMaxRegions(int32_t maxAeRegions,
+ int32_t maxAwbRegions,
+ int32_t maxAfRegions);
+
+ // See ANDROID_CONTROL_AWB_AVAILABLE_MODES in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAvailableAwbModes(
+ const std::vector<camera_metadata_enum_android_control_awb_mode>& awbModes);
+
+  // See CONTROL_AWB_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlAwbMode(
+      camera_metadata_enum_android_control_awb_mode awbMode);
+
+ // See CONTROL_AWB_LOCK_AVAILABLE in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAwbLockAvailable(bool awbLockAvailable);
+
+ // See CONTROL_AWB_LOCK in CameraMetadataTag.aidl
+ MetadataBuilder& setControlAwbLock(
+ camera_metadata_enum_android_control_awb_lock awbLock);
+
+ // See CONTROL_AE_LOCK_AVAILABLE in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAeLockAvailable(bool aeLockAvailable);
+
+ // See CONTROL_AE_LOCK in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAeLock(
+ camera_metadata_enum_android_control_ae_lock aeLock);
+
+ // See CONTROL_AE_STATE in CameraMetadataTag.aidl
+ MetadataBuilder& setControlAeState(
+ camera_metadata_enum_android_control_ae_state aeState);
+
+ // See ANDROID_CONTROL_AE_REGIONS in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAeRegions(
+ const std::vector<ControlRegion>& aeRegions);
+
+ // See ANDROID_CONTROL_AWB_REGIONS in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAwbRegions(
+ const std::vector<ControlRegion>& awbRegions);
+
+ // See ANDROID_CONTROL_AWB_STATE in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAwbState(
+ camera_metadata_enum_android_control_awb_state awbState);
+
+ // See ANDROID_SCALER_CROP_REGION in CaptureRequest.java.
+ MetadataBuilder& setCropRegion(int32_t x, int32_t y, int32_t width,
+ int32_t height);
+
+ // See ANDROID_CONTROL_AF_REGIONS in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAfRegions(
+ const std::vector<ControlRegion>& afRegions);
+
+ // See ANDROID_CONTROL_AF_STATE in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAfState(
+      camera_metadata_enum_android_control_af_state afState);
+
+  // The maximum size of the compressed JPEG image, in bytes.
+  //
+  // See ANDROID_JPEG_MAX_SIZE in CameraMetadataTag.aidl.
+ MetadataBuilder& setMaxJpegSize(int32_t size);
+
+  // See SENSOR_INFO_MAX_FRAME_DURATION in CameraCharacteristics.java.
+ MetadataBuilder& setMaxFrameDuration(std::chrono::nanoseconds duration);
+
+  // See JPEG_AVAILABLE_THUMBNAIL_SIZES in CameraCharacteristics.java.
+ MetadataBuilder& setJpegAvailableThumbnailSizes(
+ const std::vector<Resolution>& thumbnailSizes);
+
+ // See ANDROID_JPEG_GPS_COORDINATES.
+ MetadataBuilder& setJpegGpsCoordinates(const GpsCoordinates& gpsCoordinates);
+
+ // See JPEG_ORIENTATION in CaptureRequest.java.
+ MetadataBuilder& setJpegOrientation(int32_t orientation);
+
+ // See JPEG_QUALITY in CaptureRequest.java.
+ MetadataBuilder& setJpegQuality(uint8_t quality);
+
+ // See JPEG_THUMBNAIL_SIZE in CaptureRequest.java.
+ MetadataBuilder& setJpegThumbnailSize(int width, int height);
+
+ // See JPEG_THUMBNAIL_QUALITY in CaptureRequest.java.
+ MetadataBuilder& setJpegThumbnailQuality(uint8_t quality);
+
+ // The maximum numbers of different types of output streams
+ // that can be configured and used simultaneously by a camera device.
+ //
+ // See ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS in CameraMetadataTag.aidl.
+ MetadataBuilder& setMaxNumberOutputStreams(int32_t maxRawStreams,
+ int32_t maxProcessedStreams,
+ int32_t maxStallStreams);
+
+ // See ANDROID_SYNC_MAX_LATENCY in CameraMetadataTag.aidl.
+ MetadataBuilder& setSyncMaxLatency(
+      camera_metadata_enum_android_sync_max_latency latency);
+
+  // See REQUEST_PIPELINE_MAX_DEPTH in CameraCharacteristics.java.
+ MetadataBuilder& setPipelineMaxDepth(uint8_t maxDepth);
+
+ // See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+ MetadataBuilder& setPipelineDepth(uint8_t depth);
+
+ // See ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM in CameraMetadataTag.aidl.
+ MetadataBuilder& setAvailableMaxDigitalZoom(const float maxZoom);
+
+ // See ANDROID_CONTROL_ZOOM_RATIO_RANGE in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlZoomRatioRange(float min, float max);
+
+ // See ANDROID_STATISTICS_SCENE_FLICKER in CameraMetadataTag.aidl.
+ MetadataBuilder& setStatisticsSceneFlicker(
+ camera_metadata_enum_android_statistics_scene_flicker sceneFlicker);
+
+ // See ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE in CameraMetadataTag.aidl.
+ MetadataBuilder& setStatisticsHotPixelMapMode(
+ camera_metadata_enum_android_statistics_hot_pixel_map_mode mode);
+
+ // See ANDROID_STATISTICS_LENS_SHADING_MAP_MODE in CameraMetadataTag.aidl.
+ MetadataBuilder& setStatisticsLensShadingMapMode(
+ camera_metadata_enum_android_statistics_lens_shading_map_mode
+ lensShadingMapMode);
+
+ // See ANDROID_LENS_OPTICAL_STABILIZATION_MODE in CameraMetadataTag.aidl.
+ MetadataBuilder& setLensOpticalStabilizationMode(
+ camera_metadata_enum_android_lens_optical_stabilization_mode_t
+ opticalStabilizationMode);
+
+ // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
+ MetadataBuilder& setAvailableRequestCapabilities(
+ const std::vector<
+ camera_metadata_enum_android_request_available_capabilities_t>&
+ requestCapabilities);
+
+ // A list of all keys that the camera device has available to use with
+ // CaptureRequest.
+ //
+ // See ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
+ MetadataBuilder& setAvailableRequestKeys(const std::vector<int32_t>& keys);
+
+ // A list of all keys that the camera device has available to use with
+ // CaptureResult.
+ //
+  // See ANDROID_REQUEST_AVAILABLE_RESULT_KEYS in CameraMetadataTag.aidl.
+ MetadataBuilder& setAvailableResultKeys(const std::vector<int32_t>& keys);
+
+ // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
+ MetadataBuilder& setAvailableCapabilities(
+ const std::vector<
+ camera_metadata_enum_android_request_available_capabilities_t>&
+ capabilities);
+
+ // A list of all keys that the camera device has available to use.
+ //
+ // See ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS in CameraMetadataTag.aidl.
+ MetadataBuilder& setAvailableCharacteristicKeys(
+ const std::vector<camera_metadata_tag_t>& keys);
+
+ // Extends metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
+ // containing all set tags.
+ MetadataBuilder& setAvailableCharacteristicKeys();
+
+ // Build CameraMetadata instance.
+ //
+ // Returns nullptr in case something went wrong.
+ std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
+ build();
+
+ private:
+ // Maps metadata tags to vectors of values for the given tag.
+ std::map<
+ camera_metadata_tag_t,
+ std::variant<std::vector<int64_t>, std::vector<int32_t>,
+ std::vector<uint8_t>, std::vector<float>,
+ std::vector<camera_metadata_rational_t>, std::vector<double>>>
+ mEntryMap;
+ // Extend metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS.
+ bool mExtendWithAvailableCharacteristicsKeys = false;
+};
+
+// Returns JPEG_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegQuality(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_ORIENTATION from metadata, or 0 if the key is not present.
+int32_t getJpegOrientation(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_SIZE from metadata, or nullopt if the key is not present.
+std::optional<Resolution> getJpegThumbnailSize(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegThumbnailQuality(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_AVAILABLE_THUMBNAIL_SIZES from metadata, or an empty vector if
+// the key is not present.
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
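+// Returns ANDROID_CONTROL_AE_TARGET_FPS_RANGE from metadata, or nullopt if
+// the key is not present or does not hold a [min, max] pair.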
+std::optional<FpsRange> getFpsRange(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
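+// Returns ANDROID_CONTROL_CAPTURE_INTENT from metadata, or nullopt if the key
+// is not present.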
+std::optional<camera_metadata_enum_android_control_capture_intent> getCaptureIntent(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns ANDROID_JPEG_GPS_COORDINATES in a GpsCoordinates object, or nullopt
+// if the key is not present.
+std::optional<GpsCoordinates> getGpsCoordinates(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+} // namespace virtualcamera
+} // namespace companion
+} // namespace android
+
+#endif // ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
diff --git a/services/camera/virtualcamera/util/Util.cc b/services/camera/virtualcamera/util/Util.cc
index 2d0545d..0c607d7 100644
--- a/services/camera/virtualcamera/util/Util.cc
+++ b/services/camera/virtualcamera/util/Util.cc
@@ -20,8 +20,13 @@
#include <algorithm>
#include <array>
+#include <cstdint>
+#include <memory>
+#include "android/hardware_buffer.h"
#include "jpeglib.h"
+#include "ui/GraphicBuffer.h"
+#include "utils/Errors.h"
namespace android {
namespace companion {
@@ -40,6 +45,82 @@
constexpr std::array<Format, 2> kSupportedFormats{Format::YUV_420_888,
Format::RGBA_8888};
+YCbCrLockGuard::YCbCrLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+ const uint32_t usageFlags)
+ : mHwBuffer(hwBuffer) {
+ GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(mHwBuffer.get());
+ if (gBuffer == nullptr) {
+ ALOGE("%s: Attempting to lock nullptr buffer.", __func__);
+ return;
+ }
+ mLockStatus = gBuffer->lockYCbCr(usageFlags, &mYCbCr);
+ if (mLockStatus != OK) {
+ ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
+ statusToString(mLockStatus).c_str());
+ }
+}
+
+YCbCrLockGuard::~YCbCrLockGuard() {
+ if (getStatus() != OK) {
+ return;
+ }
+
+ GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(mHwBuffer.get());
+ if (gBuffer == nullptr) {
+ return;
+ }
+  status_t status = gBuffer->unlock();
+ if (status != NO_ERROR) {
+ ALOGE("Failed to unlock graphic buffer: %s", statusToString(status).c_str());
+ }
+}
+
+status_t YCbCrLockGuard::getStatus() const {
+ return mLockStatus;
+}
+
+const android_ycbcr& YCbCrLockGuard::operator*() const {
+ LOG_ALWAYS_FATAL_IF(getStatus() != OK,
+ "Dereferencing unlocked YCbCrLockGuard, status is %s",
+ statusToString(mLockStatus).c_str());
+ return mYCbCr;
+}
+
+PlanesLockGuard::PlanesLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                                 const uint64_t usageFlags, sp<Fence> fence)
+    : mHwBuffer(std::move(hwBuffer)) {
+  if (mHwBuffer == nullptr) {
+    ALOGE("%s: Attempting to lock nullptr buffer.", __func__);
+    return;
+  }
+
+  const int32_t rawFence = fence != nullptr ? fence->get() : -1;
+  mLockStatus = static_cast<status_t>(AHardwareBuffer_lockPlanes(
+      mHwBuffer.get(), usageFlags, rawFence, nullptr, &mPlanes));
+ if (mLockStatus != OK) {
+ ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
+ statusToString(mLockStatus).c_str());
+ }
+}
+
+PlanesLockGuard::~PlanesLockGuard() {
+ if (getStatus() != OK || mHwBuffer == nullptr) {
+ return;
+ }
+ AHardwareBuffer_unlock(mHwBuffer.get(), /*fence=*/nullptr);
+}
+
+status_t PlanesLockGuard::getStatus() const {
+ return mLockStatus;
+}
+
+const AHardwareBuffer_Planes& PlanesLockGuard::operator*() const {
+ LOG_ALWAYS_FATAL_IF(getStatus() != OK,
+ "Dereferencing unlocked PlanesLockGuard, status is %s",
+ statusToString(mLockStatus).c_str());
+ return mPlanes;
+}
+
sp<Fence> importFence(const NativeHandle& aidlHandle) {
if (aidlHandle.fds.size() != 1) {
return sp<Fence>::make();
@@ -65,13 +146,6 @@
return false;
}
- if (width % kLibJpegDctSize != 0 || height % kLibJpegDctSize != 0) {
- // Input dimension needs to be multiple of libjpeg DCT size.
- // TODO(b/301023410) This restriction can be removed once we add support for
- // unaligned jpeg compression.
- return false;
- }
-
if (maxFps <= 0 || maxFps > kMaxFpsUpperLimit) {
return false;
}
@@ -79,6 +153,10 @@
return true;
}
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution) {
+ return os << resolution.width << "x" << resolution.height;
+}
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
diff --git a/services/camera/virtualcamera/util/Util.h b/services/camera/virtualcamera/util/Util.h
index e0a31c0..291e105 100644
--- a/services/camera/virtualcamera/util/Util.h
+++ b/services/camera/virtualcamera/util/Util.h
@@ -17,18 +17,84 @@
#ifndef ANDROID_COMPANION_VIRTUALCAMERA_UTIL_H
#define ANDROID_COMPANION_VIRTUALCAMERA_UTIL_H
+#include <cmath>
#include <cstdint>
+#include <memory>
+#include <optional>
+#include <ostream>
+#include <string>
#include "aidl/android/companion/virtualcamera/Format.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "android/binder_auto_utils.h"
+#include "android/hardware_buffer.h"
+#include "system/graphics.h"
#include "ui/Fence.h"
namespace android {
namespace companion {
namespace virtualcamera {
+// RAII utility class to safely lock AHardwareBuffer and obtain android_ycbcr
+// structure describing YUV plane layout.
+//
+// Access to the buffer is locked immediately after construction.
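+//
+// Usage sketch (assumes `hwBuffer` holds a valid YUV buffer; the usage flag
+// below is only an example):
+//
+//   YCbCrLockGuard lock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
+//   if (lock.getStatus() == OK) {
+//     const android_ycbcr& ycbcr = *lock;
+//     // Read the planes via ycbcr.y / ycbcr.cb / ycbcr.cr here.
+//   }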
+class YCbCrLockGuard {
+ public:
+ YCbCrLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer, uint32_t usageFlags);
+ YCbCrLockGuard(YCbCrLockGuard&& other) = default;
+ ~YCbCrLockGuard();
+
+ // Returns OK if the buffer is successfully locked.
+ status_t getStatus() const;
+
+ // Dereferencing instance of this guard returns android_ycbcr structure
+ // describing the layout.
+ // Caller needs to check whether the buffer was successfully locked
+ // before dereferencing.
+ const android_ycbcr& operator*() const;
+
+ // Disable copy.
+ YCbCrLockGuard(const YCbCrLockGuard&) = delete;
+ YCbCrLockGuard& operator=(const YCbCrLockGuard&) = delete;
+
+ private:
+ std::shared_ptr<AHardwareBuffer> mHwBuffer;
+ android_ycbcr mYCbCr = {};
+ status_t mLockStatus = DEAD_OBJECT;
+};
+
+// RAII utility class to safely lock AHardwareBuffer and obtain
+// AHardwareBuffer_Planes (suitable for interacting with RGBA / BLOB buffers).
+//
+// Access to the buffer is locked immediately after construction.
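+//
+// Usage sketch (assumes `hwBuffer` holds a valid RGBA or BLOB buffer; the
+// usage flag below is only an example):
+//
+//   PlanesLockGuard lock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN);
+//   if (lock.getStatus() == OK) {
+//     uint8_t* data = static_cast<uint8_t*>((*lock).planes[0].data);
+//   }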
+class PlanesLockGuard {
+ public:
+ PlanesLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+ uint64_t usageFlags, sp<Fence> fence = nullptr);
+ PlanesLockGuard(PlanesLockGuard&& other) = default;
+ ~PlanesLockGuard();
+
+ // Returns OK if the buffer is successfully locked.
+ status_t getStatus() const;
+
+ // Dereferencing instance of this guard returns AHardwareBuffer_Planes
+ // structure describing the layout.
+ //
+ // Caller needs to check whether the buffer was successfully locked
+ // before dereferencing.
+ const AHardwareBuffer_Planes& operator*() const;
+
+ // Disable copy.
+ PlanesLockGuard(const PlanesLockGuard&) = delete;
+  PlanesLockGuard& operator=(const PlanesLockGuard&) = delete;
+
+ private:
+ std::shared_ptr<AHardwareBuffer> mHwBuffer;
+ AHardwareBuffer_Planes mPlanes;
+ status_t mLockStatus = DEAD_OBJECT;
+};
+
// Converts camera AIDL status to ndk::ScopedAStatus
inline ndk::ScopedAStatus cameraStatus(
const ::aidl::android::hardware::camera::common::Status status) {
@@ -52,6 +118,65 @@
int width, int height,
::aidl::android::companion::virtualcamera::Format format, int maxFps);
+// Representation of resolution / size.
+struct Resolution {
+ Resolution() = default;
+ Resolution(const int w, const int h) : width(w), height(h) {
+ }
+
+ // Order by increasing pixel count, and by width for same pixel count.
+ bool operator<(const Resolution& other) const {
+ const int pixCount = width * height;
+ const int otherPixCount = other.width * other.height;
+ return pixCount == otherPixCount ? width < other.width
+ : pixCount < otherPixCount;
+ }
+
+ bool operator<=(const Resolution& other) const {
+ return *this == other || *this < other;
+ }
+
+ bool operator==(const Resolution& other) const {
+ return width == other.width && height == other.height;
+ }
+
+ int width = 0;
+ int height = 0;
+};
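+
+// For example, sorting with the ordering above arranges resolutions from the
+// smallest to the largest pixel count (illustrative sketch):
+//
+//   std::vector<Resolution> sizes = {{1920, 1080}, {640, 480}};
+//   std::sort(sizes.begin(), sizes.end());  // -> 640x480, 1920x1080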
+
+struct FpsRange {
+ int32_t minFps;
+ int32_t maxFps;
+
+ bool operator<(const FpsRange& other) const {
+ return maxFps == other.maxFps ? minFps < other.minFps
+ : maxFps < other.maxFps;
+ }
+};
+
+struct GpsCoordinates {
+  // Represented in metadata as a double[3] with index 0 for latitude,
+  // index 1 for longitude and index 2 for altitude.
+ double_t latitude;
+ double_t longitude;
+ double_t altitude;
+ std::optional<int64_t> timestamp;
+ std::string provider;
+};
+
+inline bool isApproximatellySameAspectRatio(const Resolution r1,
+ const Resolution r2) {
+ static constexpr float kAspectRatioEpsilon = 0.05;
+ float aspectRatio1 =
+ static_cast<float>(r1.width) / static_cast<float>(r1.height);
+ float aspectRatio2 =
+ static_cast<float>(r2.width) / static_cast<float>(r2.height);
+
+ return std::abs(aspectRatio1 - aspectRatio2) < kAspectRatioEpsilon;
+}
+
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution);
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index c96c37b..bf90f43 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -1,4 +1,5 @@
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/mediaresourcemanager/DefaultResourceModel.cpp b/services/mediaresourcemanager/DefaultResourceModel.cpp
index 7bad715..990df82 100644
--- a/services/mediaresourcemanager/DefaultResourceModel.cpp
+++ b/services/mediaresourcemanager/DefaultResourceModel.cpp
@@ -44,7 +44,9 @@
clients.clear();
MediaResourceParcel mediaResource{.type = reclimRequestInfo.mResources[0].type,
.subType = reclimRequestInfo.mResources[0].subType};
- ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid, &mediaResource};
+ ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid,
+ reclimRequestInfo.mClientId,
+ &mediaResource};
// Resolve the secure-unsecure codec conflicts if there is any.
switch (reclimRequestInfo.mResources[0].type) {
@@ -116,7 +118,9 @@
const ReclaimRequestInfo& reclimRequestInfo,
std::vector<ClientInfo>& clients) {
MediaResourceParcel mediaResource;
- ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid, &mediaResource};
+ ResourceRequestInfo resourceRequestInfo{reclimRequestInfo.mCallingPid,
+ reclimRequestInfo.mClientId,
+ &mediaResource};
// 1. Look to find the client(s) with the other resources, for the given
// primary type.
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index af85772..cd00937 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -43,6 +43,28 @@
MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
using stats::media_metrics::\
MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_AUDIO;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_VIDEO;
+using stats::media_metrics::MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_IMAGE;
+
+// Map MediaResourceSubType to stats::media_metrics::CodecType
+inline int32_t getMetricsCodecType(MediaResourceSubType codecType) {
+ switch (codecType) {
+ case MediaResourceSubType::kHwAudioCodec:
+ case MediaResourceSubType::kSwAudioCodec:
+ return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_AUDIO;
+ case MediaResourceSubType::kHwVideoCodec:
+ case MediaResourceSubType::kSwVideoCodec:
+ return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_VIDEO;
+ case MediaResourceSubType::kHwImageCodec:
+ case MediaResourceSubType::kSwImageCodec:
+ return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_IMAGE;
+ case MediaResourceSubType::kUnspecifiedSubType:
+ return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
+ }
+ return MEDIA_CODEC_STARTED__CODEC_TYPE__CODEC_TYPE_UNSPECIFIED;
+}
inline const char* getCodecType(MediaResourceSubType codecType) {
switch (codecType) {
@@ -229,7 +251,7 @@
clientConfig.clientInfo.uid,
clientConfig.id,
clientConfig.clientInfo.name.c_str(),
- static_cast<int32_t>(clientConfig.codecType),
+ getMetricsCodecType(clientConfig.codecType),
clientConfig.isEncoder,
isHardwareCodec(clientConfig.codecType),
clientConfig.width, clientConfig.height,
@@ -311,7 +333,7 @@
clientConfig.clientInfo.uid,
clientConfig.id,
clientConfig.clientInfo.name.c_str(),
- static_cast<int32_t>(clientConfig.codecType),
+ getMetricsCodecType(clientConfig.codecType),
clientConfig.isEncoder,
isHardwareCodec(clientConfig.codecType),
clientConfig.width, clientConfig.height,
@@ -425,6 +447,42 @@
#endif
}
+inline void pushReclaimStats(int32_t callingPid,
+ int32_t requesterUid,
+ int requesterPriority,
+ const std::string& clientName,
+ int32_t noOfConcurrentCodecs,
+ int32_t reclaimStatus,
+ int32_t noOfCodecsReclaimed = 0,
+ int32_t targetIndex = -1,
+ int32_t targetClientPid = -1,
+ int32_t targetClientUid = -1,
+ int32_t targetPriority = -1) {
+ // Post the pushed atom
+ int result = stats_write(
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
+ requesterUid,
+ requesterPriority,
+ clientName.c_str(),
+ noOfConcurrentCodecs,
+ reclaimStatus,
+ noOfCodecsReclaimed,
+ targetIndex,
+ targetClientUid,
+ targetPriority);
+ ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
+ "Requester[pid(%d): uid(%d): priority(%d)] "
+ "Codec: [%s] "
+ "No of concurrent codecs: %d "
+ "Reclaim Status: %d "
+ "No of codecs reclaimed: %d "
+ "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
+ __func__, callingPid, requesterUid, requesterPriority,
+ clientName.c_str(), noOfConcurrentCodecs,
+ reclaimStatus, noOfCodecsReclaimed,
+ targetIndex, targetClientPid, targetClientUid, targetPriority, result);
+}
+
void ResourceManagerMetrics::pushReclaimAtom(const ClientInfoParcel& clientInfo,
const std::vector<int>& priorities,
const std::vector<ClientInfo>& targetClients,
@@ -463,33 +521,34 @@
MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
}
}
+
+ if (targetClients.empty()) {
+ // Push the reclaim atom to stats.
+ pushReclaimStats(callingPid,
+ requesterUid,
+ requesterPriority,
+ clientName,
+ noOfConcurrentCodecs,
+ reclaimStatus);
+ return;
+ }
+
int32_t noOfCodecsReclaimed = targetClients.size();
int32_t targetIndex = 1;
for (const ClientInfo& targetClient : targetClients) {
int targetPriority = priorities[targetIndex];
- // Post the pushed atom
- int result = stats_write(
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
- requesterUid,
- requesterPriority,
- clientName.c_str(),
- noOfConcurrentCodecs,
- reclaimStatus,
- noOfCodecsReclaimed,
- targetIndex,
- targetClient.mUid,
- targetPriority);
- ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
- "Requester[pid(%d): uid(%d): priority(%d)] "
- "Codec: [%s] "
- "No of concurrent codecs: %d "
- "Reclaim Status: %d "
- "No of codecs reclaimed: %d "
- "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
- __func__, callingPid, requesterUid, requesterPriority,
- clientName.c_str(), noOfConcurrentCodecs,
- reclaimStatus, noOfCodecsReclaimed,
- targetIndex, targetClient.mPid, targetClient.mUid, targetPriority, result);
+ // Push the reclaim atom to stats.
+ pushReclaimStats(callingPid,
+ requesterUid,
+ requesterPriority,
+ clientName,
+ noOfConcurrentCodecs,
+ reclaimStatus,
+ noOfCodecsReclaimed,
+ targetIndex,
+ targetClient.mPid,
+ targetClient.mUid,
+ targetPriority);
targetIndex++;
}
}
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index a9bc34b..7a5a89f 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -96,7 +96,32 @@
};
//
-// ResourceManagerMetrics class that maintaines concurrent codec count based:
+// Resource Manager Metrics is designed to answer questions such as:
+// - What apps are causing reclaims, and what apps are targeted (reclaimed from) in the process?
+// - Which apps use the most codecs and the most codec memory?
+// - What % of reclaims are successful overall?
+//
+// Though not in the scope of this class, the metrics should also answer:
+// - What % of codec errors are due to the codec being reclaimed?
+// - What % of successful codec creations (starts) require a codec reclaim?
+// - How often does codec start fail even after a successful reclaim?
+//
+// The metrics are collected to analyze and understand codec resource usage,
+// and that information is used to help:
+// - minimize the number of reclaims
+// - reduce codec start delays by minimizing the number of reclaim attempts
+// - minimize the reclaim errors in codec records
+//
+// Success metrics for the Resource Manager Service could be defined as:
+// - an increase in successful codec creation for foreground apps
+// - a reduction in the number of codec reclaims
+// - a reduction in the time taken to create a codec
+//
+// We would like to use this data to come up with better resource management that would:
+// - increase successful codec creation (for all kinds of apps)
+// - decrease codec errors due to resource constraints
+//
+// This class maintains concurrent codec counts based on:
//
// 1. # of concurrent active codecs (initialized, but aren't released yet) of given
// implementation (by codec name) across the system.
@@ -111,7 +136,7 @@
// This should help with understanding the (video) memory usage per
// application.
//
-//
+
class ResourceManagerMetrics {
public:
ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo);
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 3a02443..d37d893 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -474,6 +474,7 @@
const std::vector<MediaResourceParcel>& resources,
std::vector<ClientInfo>& targetClients) {
int32_t callingPid = clientInfo.pid;
+ int64_t clientId = clientInfo.id;
std::scoped_lock lock{mLock};
if (!mProcessInfo->isPidTrusted(callingPid)) {
pid_t actualCallingPid = IPCThreadState::self()->getCallingPid();
@@ -508,7 +509,7 @@
if (secureCodec != NULL) {
MediaResourceParcel mediaResource{.type = MediaResource::Type::kSecureCodec,
.subType = secureCodec->subType};
- ResourceRequestInfo resourceRequestInfo{callingPid, &mediaResource};
+ ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &mediaResource};
if (!mSupportsMultipleSecureCodecs) {
if (!getAllClients_l(resourceRequestInfo, targetClients)) {
return false;
@@ -525,7 +526,7 @@
if (!mSupportsSecureWithNonSecureCodec) {
MediaResourceParcel mediaResource{.type = MediaResource::Type::kSecureCodec,
.subType = nonSecureCodec->subType};
- ResourceRequestInfo resourceRequestInfo{callingPid, &mediaResource};
+ ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &mediaResource};
if (!getAllClients_l(resourceRequestInfo, targetClients)) {
return false;
}
@@ -533,7 +534,7 @@
}
if (drmSession != NULL) {
- ResourceRequestInfo resourceRequestInfo{callingPid, drmSession};
+ ResourceRequestInfo resourceRequestInfo{callingPid, clientId, drmSession};
getClientForResource_l(resourceRequestInfo, targetClients);
if (targetClients.size() == 0) {
return false;
@@ -542,18 +543,18 @@
if (targetClients.size() == 0 && graphicMemory != nullptr) {
// if no secure/non-secure codec conflict, run second pass to handle other resources.
- ResourceRequestInfo resourceRequestInfo{callingPid, graphicMemory};
+ ResourceRequestInfo resourceRequestInfo{callingPid, clientId, graphicMemory};
getClientForResource_l(resourceRequestInfo, targetClients);
}
if (targetClients.size() == 0) {
// if we are here, run the third pass to free one codec with the same type.
if (secureCodec != nullptr) {
- ResourceRequestInfo resourceRequestInfo{callingPid, secureCodec};
+ ResourceRequestInfo resourceRequestInfo{callingPid, clientId, secureCodec};
getClientForResource_l(resourceRequestInfo, targetClients);
}
if (nonSecureCodec != nullptr) {
- ResourceRequestInfo resourceRequestInfo{callingPid, nonSecureCodec};
+ ResourceRequestInfo resourceRequestInfo{callingPid, clientId, nonSecureCodec};
getClientForResource_l(resourceRequestInfo, targetClients);
}
}
@@ -562,12 +563,12 @@
// if we are here, run the fourth pass to free one codec with the different type.
if (secureCodec != nullptr) {
MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
- ResourceRequestInfo resourceRequestInfo{callingPid, &temp};
+ ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &temp};
getClientForResource_l(resourceRequestInfo, targetClients);
}
if (nonSecureCodec != nullptr) {
MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
- ResourceRequestInfo resourceRequestInfo{callingPid, &temp};
+ ResourceRequestInfo resourceRequestInfo{callingPid, clientId, &temp};
getClientForResource_l(resourceRequestInfo, targetClients);
}
}
@@ -585,18 +586,21 @@
// Check if there are any resources to be reclaimed before processing.
if (resources.empty()) {
+ // Invalid reclaim request, so there is no need to log it.
return Status::ok();
}
std::vector<ClientInfo> targetClients;
- if (!getTargetClients(clientInfo, resources, targetClients)) {
- // Nothing to reclaim from.
+ if (getTargetClients(clientInfo, resources, targetClients)) {
+ // Reclaim all the target clients.
+ *_aidl_return = reclaimUnconditionallyFrom(targetClients);
+ } else {
+ // No clients to reclaim from.
ALOGI("%s: There aren't any clients to reclaim from", __func__);
- return Status::ok();
+ // We need to log this failed reclaim as "no clients to reclaim from".
+ targetClients.clear();
}
- *_aidl_return = reclaimUnconditionallyFrom(targetClients);
-
// Log Reclaim Pushed Atom to statsd
pushReclaimAtom(clientInfo, targetClients, *_aidl_return);
@@ -914,6 +918,11 @@
for (auto& [pid, infos] : mMap) {
for (const auto& [id, info] : infos) {
+ if (pid == resourceRequestInfo.mCallingPid && id == resourceRequestInfo.mClientId) {
+ ALOGI("%s: Skip the client[%jd] for which the resource request is made",
+ __func__, id);
+ continue;
+ }
if (hasResourceType(type, subType, info.resources)) {
if (!isCallingPriorityHigher_l(resourceRequestInfo.mCallingPid, pid)) {
// some higher/equal priority process owns the resource,
diff --git a/services/mediaresourcemanager/ResourceManagerServiceNew.cpp b/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
index af093ca..0a0a8f4 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
+++ b/services/mediaresourcemanager/ResourceManagerServiceNew.cpp
@@ -248,7 +248,7 @@
// Use the Resource Model to get a list of all the clients that hold the
// needed/requested resources.
uint32_t callingImportance = std::max(0, clientInfo.importance);
- ReclaimRequestInfo reclaimRequestInfo{callingPid, callingImportance, resources};
+ ReclaimRequestInfo reclaimRequestInfo{callingPid, clientInfo.id, callingImportance, resources};
std::vector<ClientInfo> clients;
if (!mDefaultResourceModel->getAllClients(reclaimRequestInfo, clients)) {
if (clients.empty()) {
@@ -300,7 +300,10 @@
// Use the DefaultResourceModel to get all the clients with the resources requested.
std::vector<MediaResourceParcel> resources{*resourceRequestInfo.mResource};
- ReclaimRequestInfo reclaimRequestInfo{resourceRequestInfo.mCallingPid, 0, resources};
+ ReclaimRequestInfo reclaimRequestInfo{resourceRequestInfo.mCallingPid,
+ resourceRequestInfo.mClientId,
+ 0, // default importance
+ resources};
std::vector<ClientInfo> clients;
mDefaultResourceModel->getAllClients(reclaimRequestInfo, clients);
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.h b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
index 32cb219..e8f1515 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.h
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.h
@@ -166,11 +166,13 @@
/*
* Resource Reclaim request info that encapsulates
* - the calling/requesting process pid.
+ * - id of the client that made reclaim request.
* - the calling/requesting client's importance.
* - the list of resources requesting (to be reclaimed from others)
*/
struct ReclaimRequestInfo {
int mCallingPid = -1;
+ int64_t mClientId = 0;
uint32_t mCallingClientImportance = 0;
const std::vector<::aidl::android::media::MediaResourceParcel>& mResources;
};
@@ -178,11 +180,14 @@
/*
* Resource request info that encapsulates
* - the calling/requesting process pid.
+ * - the calling/requesting client's id.
* - the resource requesting (to be reclaimed from others)
*/
struct ResourceRequestInfo {
// pid of the calling/requesting process.
int mCallingPid = -1;
+ // id of the calling/requesting client.
+ int64_t mClientId = 0;
// resources requested.
const ::aidl::android::media::MediaResourceParcel* mResource;
};
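
The new mClientId field above exists so that reclaim-candidate selection can skip the very client that issued the reclaim request (see the ResourceTracker and ResourceManagerService hunks). The following is a minimal standalone sketch of that idea; RequestInfo, ClientMap, and reclaimCandidates are simplified, hypothetical stand-ins for illustration only, not the actual service types.

    #include <cstdint>
    #include <map>
    #include <utility>
    #include <vector>

    // Simplified stand-in for the real request info: the requester's pid and client id.
    struct RequestInfo {
        int callingPid = -1;
        int64_t clientId = 0;
    };

    // pid -> (clientId -> owned resource count), a toy model of the tracker map.
    using ClientMap = std::map<int, std::map<int64_t, int>>;

    // Collect (pid, clientId) pairs that may be reclaimed from, skipping the
    // requesting client so a request never targets the requester itself.
    std::vector<std::pair<int, int64_t>> reclaimCandidates(const RequestInfo& req,
                                                           const ClientMap& clients) {
        std::vector<std::pair<int, int64_t>> out;
        for (const auto& [pid, infos] : clients) {
            for (const auto& [id, count] : infos) {
                if (pid == req.callingPid && id == req.clientId) {
                    continue;  // same skip as the check added in ResourceTracker
                }
                if (count > 0) {
                    out.emplace_back(pid, id);
                }
            }
        }
        return out;
    }
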
diff --git a/services/mediaresourcemanager/ResourceTracker.cpp b/services/mediaresourcemanager/ResourceTracker.cpp
index 22381c3..3ee20cd 100644
--- a/services/mediaresourcemanager/ResourceTracker.cpp
+++ b/services/mediaresourcemanager/ResourceTracker.cpp
@@ -715,6 +715,11 @@
MediaResource::SubType subType = resourceRequestInfo.mResource->subType;
for (auto& [pid, /* ResourceInfos */ infos] : mMap) {
for (const auto& [id, /* ResourceInfo */ info] : infos) {
+ if (pid == resourceRequestInfo.mCallingPid && id == resourceRequestInfo.mClientId) {
+ ALOGI("%s: Skip the client[%jd] for which the resource request is made",
+ __func__, id);
+ continue;
+ }
if (hasResourceType(type, subType, info.resources)) {
if (!isCallingPriorityHigher(resourceRequestInfo.mCallingPid, pid)) {
// some higher/equal priority process owns the resource,
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 7e8a4a0..2c8659d 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -165,6 +165,7 @@
DISALLOW_EVIL_CONSTRUCTORS(TestClient);
};
+// [pid, uid] pairs used by the test.
static const int kTestPid1 = 30;
static const int kTestUid1 = 1010;
@@ -175,6 +176,12 @@
static const int kMidPriorityPid = 25;
static const int kHighPriorityPid = 10;
+// Client Ids used by the test.
+static const int kLowPriorityClientId = 1111;
+static const int kMidPriorityClientId = 2222;
+static const int kHighPriorityClientId = 3333;
+
+// Client importance used by the test.
static const int32_t kHighestCodecImportance = 0;
static const int32_t kLowestCodecImportance = 100;
static const int32_t kMidCodecImportance = 50;
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index b3a0932..027987e 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -337,7 +337,7 @@
// priority too low to reclaim resource
ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(kLowPriorityPid),
.uid = static_cast<int32_t>(kTestUid1),
- .id = 0,
+ .id = kLowPriorityClientId,
.name = "none"};
CHECK_STATUS_FALSE(mService->reclaimResource(clientInfo, resources, &result));
@@ -475,9 +475,9 @@
MediaResource resource(MediaResource::Type::kSecureCodec,
MediaResource::SubType::kUnspecifiedSubType,
1);
- ResourceRequestInfo requestInfoHigh { kHighPriorityPid, &resource};
- ResourceRequestInfo requestInfoMid { kMidPriorityPid, &resource};
- ResourceRequestInfo requestInfoLow { kLowPriorityPid, &resource};
+ ResourceRequestInfo requestInfoHigh { kHighPriorityPid, kHighPriorityClientId, &resource};
+ ResourceRequestInfo requestInfoMid { kMidPriorityPid, kMidPriorityClientId, &resource};
+ ResourceRequestInfo requestInfoLow { kLowPriorityPid, kLowPriorityClientId, &resource};
EXPECT_FALSE(mService->getAllClients_l(requestInfoLow, targetClients));
// some higher priority process (e.g. kTestPid2) owns the resource, so getAllClients_l
@@ -491,6 +491,81 @@
EXPECT_EQ(getId(mTestClient1), targetClients[1].mClientId);
}
+ // test set up
+ // ---------------------------------------------------------------------------
+ // pid/priority client/clientId type number
+ // ---------------------------------------------------------------------------
+ // kTestPid1(30) mTestClient1 secure codec 1
+ // graphic memory 200
+ // graphic memory 200
+ // ---------------------------------------------------------------------------
+ // kTestPid2(20) mTestClient2 non-secure codec 1
+ // graphic memory 300
+ // ---------------------------------------------------
+ // mTestClient3 secure codec 1
+ // graphic memory 100
+ // ---------------------------------------------------------------------------
+ // kHighPriorityPid(10) kHighPriorityClient secure codec 1
+ // ---------------------------------------------------------------------------
+ // The kHighPriorityClient makes a reclaim request (after adding itself).
+ // This should pass (and shouldn't be treated as a new client trying to
+ // reclaim from an existing client of a same/higher priority process).
+ void testSelfReclaimResourceSecure() {
+ std::vector<MediaResourceParcel> resources;
+ resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
+ resources.push_back(MediaResource(MediaResource::Type::kGraphicMemory, 150));
+
+ ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
+ .uid = static_cast<int32_t>(kTestUid2),
+ .id = kLowPriorityClientId,
+ .name = "none"};
+ ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
+ .uid = static_cast<int32_t>(kTestUid2),
+ .id = kMidPriorityClientId,
+ .name = "none"};
+ // HighPriority process with client id kHighPriorityClientId.
+ ClientInfoParcel highPriorityClient1{.pid = static_cast<int32_t>(kHighPriorityPid),
+ .uid = static_cast<int32_t>(kTestUid2),
+ .id = kHighPriorityClientId,
+ .name = "none"};
+ // HighPriority process with client id 0xABCD.
+ ClientInfoParcel highPriorityClient2{.pid = static_cast<int32_t>(kHighPriorityPid),
+ .uid = static_cast<int32_t>(kTestUid2),
+ .id = 0xABCD,
+ .name = "none"};
+
+ addResource();
+
+ // Add a secure codec resource for the highPriorityClient1.
+ std::shared_ptr<IResourceManagerClient> testClient4 =
+ createTestClient(kHighPriorityPid, kTestUid2);
+ std::vector<MediaResourceParcel> resources1;
+ resources1.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
+ mService->addResource(highPriorityClient1, testClient4, resources1);
+
+ // secure codecs can't coexist and secure codec can't coexist with non-secure codec.
+ updateConfig(false, false);
+
+ // priority too low
+ CHECK_STATUS_FALSE(mService->reclaimResource(lowPriorityClient, resources, &result));
+ CHECK_STATUS_FALSE(mService->reclaimResource(midPriorityClient, resources, &result));
+
+ // highPriorityClient2 tries to reclaim SecureCodec with Graphic memory.
+ // This should fail as this process already has an instance of secure
+ // codec through testClient4.
+ CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient2, resources, &result));
+
+ // highPriorityClient1 tries to reclaim SecureCodec with Graphic memory.
+ // This should reclaim all secure and non-secure codecs.
+ CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient1, resources, &result));
+ EXPECT_TRUE(toTestClient(mTestClient1)->checkIfReclaimedAndReset());
+ EXPECT_TRUE(toTestClient(mTestClient2)->checkIfReclaimedAndReset());
+ EXPECT_TRUE(toTestClient(mTestClient3)->checkIfReclaimedAndReset());
+
+ // Make sure there is nothing left.
+ CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient1, resources, &result));
+ }
+
void testReclaimResourceSecure() {
std::vector<MediaResourceParcel> resources;
resources.push_back(MediaResource(MediaResource::Type::kSecureCodec, 1));
@@ -498,15 +573,15 @@
ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
.uid = static_cast<int32_t>(kTestUid2),
- .id = 0,
+ .id = kLowPriorityClientId,
.name = "none"};
ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
.uid = static_cast<int32_t>(kTestUid2),
- .id = 0,
+ .id = kMidPriorityClientId,
.name = "none"};
ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
.uid = static_cast<int32_t>(kTestUid2),
- .id = 0,
+ .id = kHighPriorityClientId,
.name = "none"};
// ### secure codec can't coexist and secure codec can coexist with non-secure codec ###
@@ -553,7 +628,6 @@
CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, resources, &result));
}
-
// ### secure codecs can coexist but secure codec can't coexist with non-secure codec ###
{
addResource();
@@ -650,15 +724,15 @@
ClientInfoParcel lowPriorityClient{.pid = static_cast<int32_t>(kLowPriorityPid),
.uid = static_cast<int32_t>(kTestUid2),
- .id = 0,
+ .id = kLowPriorityClientId,
.name = "none"};
ClientInfoParcel midPriorityClient{.pid = static_cast<int32_t>(kMidPriorityPid),
.uid = static_cast<int32_t>(kTestUid2),
- .id = 0,
+ .id = kMidPriorityClientId,
.name = "none"};
ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
.uid = static_cast<int32_t>(kTestUid2),
- .id = 0,
+ .id = kHighPriorityClientId,
.name = "none"};
// ### secure codec can't coexist with non-secure codec ###
@@ -751,8 +825,8 @@
MediaResource resource(MediaResource::Type::kGraphicMemory,
MediaResource::SubType::kUnspecifiedSubType,
1);
- ResourceRequestInfo requestInfoHigh { kHighPriorityPid, &resource};
- ResourceRequestInfo requestInfoLow { kLowPriorityPid, &resource};
+ ResourceRequestInfo requestInfoHigh { kHighPriorityPid, kHighPriorityClientId, &resource};
+ ResourceRequestInfo requestInfoLow { kLowPriorityPid, kLowPriorityClientId, &resource};
EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(requestInfoHigh, clientInfo));
addResource();
@@ -910,7 +984,7 @@
reclaimResources.push_back(createNonSecureVideoCodecResource());
ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
.uid = static_cast<int32_t>(kTestUid2),
- .id = 0,
+ .id = kHighPriorityClientId,
.name = "none"};
CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
@@ -951,7 +1025,7 @@
reclaimResources.push_back(createNonSecureAudioCodecResource());
ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
.uid = static_cast<int32_t>(kTestUid2),
- .id = 0,
+ .id = kHighPriorityClientId,
.name = "none"};
CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
@@ -992,7 +1066,7 @@
reclaimResources.push_back(createNonSecureImageCodecResource());
ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
.uid = static_cast<int32_t>(kTestUid2),
- .id = 0,
+ .id = kHighPriorityClientId,
.name = "none"};
CHECK_STATUS_FALSE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
@@ -1034,7 +1108,7 @@
reclaimResources.push_back(createGraphicMemoryResource(100));
ClientInfoParcel highPriorityClient{.pid = static_cast<int32_t>(kHighPriorityPid),
.uid = static_cast<int32_t>(kTestUid2),
- .id = 0,
+ .id = kHighPriorityClientId,
.name = "none"};
CHECK_STATUS_TRUE(mService->reclaimResource(highPriorityClient, reclaimResources, &result));
@@ -1786,6 +1860,10 @@
testRemoveClient();
}
+TEST_F(ResourceManagerServiceTest, selfReclaimResource) {
+ testSelfReclaimResourceSecure();
+}
+
TEST_F(ResourceManagerServiceTest, reclaimResource) {
testReclaimResourceSecure();
testReclaimResourceNonSecure();
@@ -1873,6 +1951,10 @@
testRemoveClient();
}
+TEST_F(ResourceManagerServiceNewTest, selfReclaimResource) {
+ testSelfReclaimResourceSecure();
+}
+
TEST_F(ResourceManagerServiceNewTest, reclaimResource) {
testReclaimResourceSecure();
testReclaimResourceNonSecure();
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 6b48075..5b4fca9 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -37,6 +37,8 @@
#include "AAudioServiceEndpointPlay.h"
#include "AAudioServiceEndpointMMAP.h"
+#include <com_android_media_aaudio.h>
+
#define AAUDIO_BUFFER_CAPACITY_MIN (4 * 512)
#define AAUDIO_SAMPLE_RATE_DEFAULT 48000
@@ -148,9 +150,15 @@
// Try other formats if the config from APM is the same as our current config.
// Some HALs may report its format support incorrectly.
- if ((previousConfig.format == config.format) &&
- (previousConfig.sample_rate == config.sample_rate)) {
- config.format = getNextFormatToTry(config.format);
+ if (previousConfig.format == config.format) {
+ if (previousConfig.sample_rate == config.sample_rate) {
+ config.format = getNextFormatToTry(config.format);
+ } else if (!com::android::media::aaudio::sample_rate_conversion()) {
+ ALOGI("%s() - AAudio SRC feature not enabled, different rates! %d != %d",
+ __func__, previousConfig.sample_rate, config.sample_rate);
+ result = AAUDIO_ERROR_INVALID_RATE;
+ break;
+ }
}
ALOGD("%s() %#x %d failed, perhaps due to format or sample rate. Try again with %#x %d",
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 5fb152e..dc70c79 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -75,11 +75,7 @@
this, getState());
// Stop the command thread before destroying.
- if (mThreadEnabled) {
- mThreadEnabled = false;
- mCommandQueue.stopWaiting();
- mCommandThread.stop();
- }
+ stopCommandThread();
}
std::string AAudioServiceStreamBase::dumpHeader() {
@@ -194,26 +190,27 @@
error:
closeAndClear();
- mThreadEnabled = false;
- mCommandQueue.stopWaiting();
- mCommandThread.stop();
+ stopCommandThread();
return result;
}
aaudio_result_t AAudioServiceStreamBase::close() {
aaudio_result_t result = sendCommand(CLOSE, nullptr, true /*waitForReply*/, TIMEOUT_NANOS);
+ if (result == AAUDIO_ERROR_ALREADY_CLOSED) {
+ // AAUDIO_ERROR_ALREADY_CLOSED is not really an error; it just indicates that the stream
+ // has already been closed. In that case, there is no need to close the stream again.
+ ALOGD("The stream(%d) is already closed", mHandle);
+ return AAUDIO_OK;
+ }
- // Stop the command thread as the stream is closed.
- mThreadEnabled = false;
- mCommandQueue.stopWaiting();
- mCommandThread.stop();
+ stopCommandThread();
return result;
}
aaudio_result_t AAudioServiceStreamBase::close_l() {
if (getState() == AAUDIO_STREAM_STATE_CLOSED) {
- return AAUDIO_OK;
+ return AAUDIO_ERROR_ALREADY_CLOSED;
}
// This will stop the stream, just in case it was not already stopped.
@@ -643,7 +640,7 @@
int32_t count = mUpMessageQueue->getFifoBuffer()->write(command, 1);
if (count != 1) {
ALOGW("%s(): Queue full. Did client stop? Suspending stream. what = %u, %s",
- __func__, command->what, getTypeText());
+ __func__, static_cast<unsigned>(command->what), getTypeText());
setSuspended(true);
return AAUDIO_ERROR_WOULD_BLOCK;
} else {
@@ -766,3 +763,11 @@
.record();
return result;
}
+
+void AAudioServiceStreamBase::stopCommandThread() {
+ bool threadEnabled = true;
+ if (mThreadEnabled.compare_exchange_strong(threadEnabled, false)) {
+ mCommandQueue.stopWaiting();
+ mCommandThread.stop();
+ }
+}
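
The compare_exchange_strong in stopCommandThread() makes the shutdown idempotent: only the caller that flips mThreadEnabled from true to false actually stops the command queue and thread; later callers see false and do nothing. A small self-contained sketch of the same pattern follows; the Worker class is hypothetical and stands in for the AAudio stream, not its real API.

    #include <atomic>
    #include <cstdio>

    class Worker {
    public:
        void stop() {
            bool expected = true;
            // Only the caller that transitions the flag from true to false runs the shutdown.
            if (mRunning.compare_exchange_strong(expected, false)) {
                std::puts("stopping worker");
                // ... stop the queue / join the thread here ...
            }
        }
    private:
        std::atomic_bool mRunning{true};
    };

    int main() {
        Worker w;
        w.stop();  // performs the shutdown once
        w.stop();  // no-op: the flag is already false
    }
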
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index d5061b3..96a6d44 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -360,7 +360,7 @@
EXIT_STANDBY,
};
AAudioThread mCommandThread;
- std::atomic<bool> mThreadEnabled{false};
+ std::atomic_bool mThreadEnabled{false};
AAudioCommandQueue mCommandQueue;
int32_t mFramesPerBurst = 0;
@@ -400,6 +400,8 @@
bool waitForReply = false,
int64_t timeoutNanos = 0);
+ void stopCommandThread();
+
aaudio_result_t closeAndClear();
/**
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 9fe06b7..12ce17f 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -97,6 +97,7 @@
"framework-permission-aidl-cpp",
"libaudioclient_aidl_conversion",
"packagemanager_aidl-cpp",
+ "com.android.media.aaudio-aconfig-cc",
],
static_libs: [
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 0230935..31ed8ac 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -19,6 +19,7 @@
*/
package {
+ default_team: "trendy_team_media_framework_audio",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "frameworks_av_license"
@@ -51,6 +52,7 @@
"aaudio-aidl-cpp",
"framework-permission-aidl-cpp",
"libaudioclient_aidl_conversion",
+ "com.android.media.aaudio-aconfig-cc",
],
static_libs: [
"libaaudioservice",