Merge "Transcoder: Create a transcoder command line tool." into sc-dev
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 28a57bd..8e1fcc0 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -119,10 +119,11 @@
* @param width Width of the input buffers
* @param height Height of the input buffers
* @param format Format of the input buffers. One of HAL_PIXEL_FORMAT_*.
+ * @param isMultiResolution Whether the input stream supports variable-resolution images.
*
* @return new stream ID
*/
- int createInputStream(int width, int height, int format);
+ int createInputStream(int width, int height, int format, boolean isMultiResolution);
/**
* Get the surface of the input stream.
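
For illustration, a caller-side sketch of the extended method, assuming the usual AIDL-generated C++ proxy shape (a binder::Status return with the declared int result as a trailing out-parameter); the helper name and error handling are illustrative only, not part of the change:

// Hypothetical caller-side sketch; assumes the standard AIDL C++ proxy signature.
#include <android/hardware/camera2/ICameraDeviceUser.h>
#include <utils/StrongPointer.h>

using android::sp;
using android::binder::Status;
using android::hardware::camera2::ICameraDeviceUser;

int32_t createMultiResInputStream(const sp<ICameraDeviceUser>& device,
                                  int32_t width, int32_t height, int32_t format) {
    int32_t streamId = -1;
    // The new trailing flag requests an input stream that accepts variable-resolution images.
    Status status = device->createInputStream(width, height, format,
                                              /*isMultiResolution*/ true, &streamId);
    return status.isOk() ? streamId : -1;
}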
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 4e9b27d..2f6bc30 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -68,6 +68,10 @@
return mPhysicalCameraId;
}
+bool OutputConfiguration::isMultiResolution() const {
+ return mIsMultiResolution;
+}
+
OutputConfiguration::OutputConfiguration() :
mRotation(INVALID_ROTATION),
mSurfaceSetID(INVALID_SET_ID),
@@ -75,7 +79,8 @@
mWidth(0),
mHeight(0),
mIsDeferred(false),
- mIsShared(false) {
+ mIsShared(false),
+ mIsMultiResolution(false) {
}
OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -145,6 +150,12 @@
parcel->readString16(&mPhysicalCameraId);
+ int isMultiResolution = 0;
+ if ((err = parcel->readInt32(&isMultiResolution)) != OK) {
+ ALOGE("%s: Failed to read surface isMultiResolution flag from parcel", __FUNCTION__);
+ return err;
+ }
+
mRotation = rotation;
mSurfaceSetID = setID;
mSurfaceType = surfaceType;
@@ -152,6 +163,7 @@
mHeight = height;
mIsDeferred = isDeferred != 0;
mIsShared = isShared != 0;
+ mIsMultiResolution = isMultiResolution != 0;
for (auto& surface : surfaceShims) {
ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
surface.graphicBufferProducer.get(),
@@ -160,8 +172,8 @@
}
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
- " physicalCameraId = %s", __FUNCTION__, mRotation, mSurfaceSetID,
- mSurfaceType, String8(mPhysicalCameraId).string());
+ " physicalCameraId = %s, isMultiResolution = %d", __FUNCTION__, mRotation,
+ mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(), mIsMultiResolution);
return err;
}
@@ -175,6 +187,7 @@
mIsDeferred = false;
mIsShared = isShared;
mPhysicalCameraId = physicalId;
+ mIsMultiResolution = false;
}
OutputConfiguration::OutputConfiguration(
@@ -183,7 +196,7 @@
int width, int height, bool isShared)
: mGbps(gbps), mRotation(rotation), mSurfaceSetID(surfaceSetID), mSurfaceType(surfaceType),
mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
- mPhysicalCameraId(physicalCameraId) { }
+ mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
@@ -224,6 +237,9 @@
err = parcel->writeString16(mPhysicalCameraId);
if (err != OK) return err;
+ err = parcel->writeInt32(mIsMultiResolution ? 1 : 0);
+ if (err != OK) return err;
+
return OK;
}
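
For reference, a minimal sketch of the bool-to-int32 parceling convention the new mIsMultiResolution field follows (written as 0/1, read back as an int32 and compared against zero); the helper names are illustrative only:

// Illustrative helpers mirroring the convention used above; not part of the change.
#include <binder/Parcel.h>

static android::status_t writeMultiResFlag(android::Parcel* parcel, bool isMultiResolution) {
    return parcel->writeInt32(isMultiResolution ? 1 : 0);
}

static android::status_t readMultiResFlag(const android::Parcel* parcel, bool* isMultiResolution) {
    int32_t value = 0;
    android::status_t err = parcel->readInt32(&value);
    if (err != android::OK) return err;
    *isMultiResolution = (value != 0);
    return android::OK;
}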
diff --git a/camera/camera2/SessionConfiguration.cpp b/camera/camera2/SessionConfiguration.cpp
index a431a33..7cf6087 100644
--- a/camera/camera2/SessionConfiguration.cpp
+++ b/camera/camera2/SessionConfiguration.cpp
@@ -55,6 +55,12 @@
return err;
}
+ bool inputIsMultiResolution = false;
+ if ((err = parcel->readBool(&inputIsMultiResolution)) != OK) {
+ ALOGE("%s: Failed to read input multi-resolution flag from parcel", __FUNCTION__);
+ return err;
+ }
+
std::vector<OutputConfiguration> outputStreams;
if ((err = parcel->readParcelableVector(&outputStreams)) != OK) {
ALOGE("%s: Failed to read output configurations from parcel", __FUNCTION__);
@@ -65,6 +71,7 @@
mInputWidth = inputWidth;
mInputHeight = inputHeight;
mInputFormat = inputFormat;
+ mInputIsMultiResolution = inputIsMultiResolution;
for (auto& stream : outputStreams) {
mOutputStreams.push_back(stream);
}
@@ -90,6 +97,9 @@
err = parcel->writeInt32(mInputFormat);
if (err != OK) return err;
+ err = parcel->writeBool(mInputIsMultiResolution);
+ if (err != OK) return err;
+
err = parcel->writeParcelableVector(mOutputStreams);
if (err != OK) return err;
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 9398ec3..8ca8920 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -42,6 +42,7 @@
"android.hardware.camera.provider@2.4",
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
+ "android.hardware.camera.provider@2.7",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 95c4f39..6009370 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -47,6 +47,8 @@
bool isDeferred() const;
bool isShared() const;
String16 getPhysicalCameraId() const;
+ bool isMultiResolution() const;
+
/**
* Keep impl up-to-date with OutputConfiguration.java in frameworks/base
*/
@@ -83,7 +85,8 @@
mIsDeferred == other.mIsDeferred &&
mIsShared == other.mIsShared &&
gbpsEqual(other) &&
- mPhysicalCameraId == other.mPhysicalCameraId );
+ mPhysicalCameraId == other.mPhysicalCameraId &&
+ mIsMultiResolution == other.mIsMultiResolution);
}
bool operator != (const OutputConfiguration& other) const {
return !(*this == other);
@@ -114,6 +117,9 @@
if (mPhysicalCameraId != other.mPhysicalCameraId) {
return mPhysicalCameraId < other.mPhysicalCameraId;
}
+ if (mIsMultiResolution != other.mIsMultiResolution) {
+ return mIsMultiResolution < other.mIsMultiResolution;
+ }
return gbpsLessThan(other);
}
bool operator > (const OutputConfiguration& other) const {
@@ -133,6 +139,7 @@
bool mIsDeferred;
bool mIsShared;
String16 mPhysicalCameraId;
+ bool mIsMultiResolution;
};
} // namespace params
} // namespace camera2
diff --git a/camera/include/camera/camera2/SessionConfiguration.h b/camera/include/camera/camera2/SessionConfiguration.h
index 64288ed..29913f6 100644
--- a/camera/include/camera/camera2/SessionConfiguration.h
+++ b/camera/include/camera/camera2/SessionConfiguration.h
@@ -38,6 +38,7 @@
int getInputHeight() const { return mInputHeight; }
int getInputFormat() const { return mInputFormat; }
int getOperatingMode() const { return mOperatingMode; }
+ bool inputIsMultiResolution() const { return mInputIsMultiResolution; }
virtual status_t writeToParcel(android::Parcel* parcel) const override;
virtual status_t readFromParcel(const android::Parcel* parcel) override;
@@ -61,7 +62,8 @@
mInputWidth == other.mInputWidth &&
mInputHeight == other.mInputHeight &&
mInputFormat == other.mInputFormat &&
- mOperatingMode == other.mOperatingMode);
+ mOperatingMode == other.mOperatingMode &&
+ mInputIsMultiResolution == other.mInputIsMultiResolution);
}
bool operator != (const SessionConfiguration& other) const {
@@ -83,6 +85,10 @@
return mInputFormat < other.mInputFormat;
}
+ if (mInputIsMultiResolution != other.mInputIsMultiResolution) {
+ return mInputIsMultiResolution < other.mInputIsMultiResolution;
+ }
+
if (mOperatingMode != other.mOperatingMode) {
return mOperatingMode < other.mOperatingMode;
}
@@ -104,6 +110,7 @@
std::vector<OutputConfiguration> mOutputStreams;
int mInputWidth, mInputHeight, mInputFormat, mOperatingMode;
+ bool mInputIsMultiResolution = false;
};
} // namespace params
} // namespace camera2
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index c1b2712..4e07c5c 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -3868,6 +3868,35 @@
*/
ACAMERA_SCALER_DEFAULT_SECURE_IMAGE_SIZE = // int32[2]
ACAMERA_SCALER_START + 18,
+ /**
+ * <p>The available multi-resolution stream configurations that this
+ * physical camera device supports
+ * (i.e. format, width, height, output/input stream).</p>
+ *
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This list contains a subset of the parent logical camera's multi-resolution stream
+ * configurations which belong to this physical camera, and it will advertise, and will only
+ * advertise, the maximum supported resolutions for a particular format.</p>
+ * <p>If this camera device isn't a physical camera device constituting a logical camera,
+ * but a standalone ULTRA_HIGH_RESOLUTION_SENSOR camera, this field represents the
+ * multi-resolution input/output stream configurations of default mode and max resolution
+ * modes. The sizes will be the maximum resolution of a particular format for default mode
+ * and max resolution mode.</p>
+ * <p>This field will only be advertised if the device is a physical camera of a
+ * logical multi-camera device or an ultra high resolution sensor camera. For a logical
+ * multi-camera, the camera API will derive the logical camera’s multi-resolution stream
+ * configurations from all physical cameras. For an ultra high resolution sensor camera, this
+ * is used directly as the camera’s multi-resolution stream configurations.</p>
+ */
+ ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS =
+ // int32[n*4] (acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t)
+ ACAMERA_SCALER_START + 19,
ACAMERA_SCALER_END,
/**
@@ -8475,6 +8504,16 @@
} acamera_metadata_enum_android_scaler_rotate_and_crop_t;
+// ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS
+typedef enum acamera_metadata_enum_acamera_scaler_physical_camera_multi_resolution_stream_configurations {
+ ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS_OUTPUT
+ = 0,
+
+ ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS_INPUT
+ = 1,
+
+} acamera_metadata_enum_android_scaler_physical_camera_multi_resolution_stream_configurations_t;
+
// ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
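
To illustrate the int32[n*4] layout documented above, a hedged sketch of walking the new tag from NDK camera characteristics; each entry is a (format, width, height, input/output) quadruple, and the function name is illustrative:

// Illustrative only; ACameraMetadata_getConstEntry and the entry layout follow the NDK docs above.
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>
#include <cstdint>
#include <cstdio>

void dumpMultiResolutionConfigs(const ACameraMetadata* chars) {
    ACameraMetadata_const_entry entry = {};
    camera_status_t status = ACameraMetadata_getConstEntry(
            chars, ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS, &entry);
    if (status != ACAMERA_OK) {
        return;  // Tag absent: not a physical camera of a logical device nor an ultra-high-res sensor.
    }
    for (uint32_t i = 0; i + 3 < entry.count; i += 4) {
        const bool isInput = entry.data.i32[i + 3] ==
                ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS_INPUT;
        std::printf("%s: format=%d size=%dx%d\n", isInput ? "input" : "output",
                    entry.data.i32[i], entry.data.i32[i + 1], entry.data.i32[i + 2]);
    }
}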
diff --git a/drm/libmediadrm/CryptoHal.cpp b/drm/libmediadrm/CryptoHal.cpp
index af17679..3257f71 100644
--- a/drm/libmediadrm/CryptoHal.cpp
+++ b/drm/libmediadrm/CryptoHal.cpp
@@ -343,6 +343,7 @@
Return<void> hResult;
+ mLock.unlock();
if (mPluginV1_2 != NULL) {
hResult = mPluginV1_2->decrypt_1_2(secure, toHidlArray16(keyId), toHidlArray16(iv),
hMode, hPattern, hSubSamples, hSource, offset, hDestination,
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index a84fd92..ba801e7 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -16,13 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "DrmHal"
-#include <iomanip>
-
-#include <utils/Log.h>
-
-#include <android/binder_manager.h>
#include <aidl/android/media/BnResourceManagerClient.h>
+#include <android/binder_manager.h>
#include <android/hardware/drm/1.2/types.h>
#include <android/hidl/manager/1.2/IServiceManager.h>
#include <hidl/ServiceManagement.h>
@@ -40,7 +36,9 @@
#include <mediadrm/DrmSessionManager.h>
#include <mediadrm/IDrmMetricsConsumer.h>
#include <mediadrm/DrmUtils.h>
+#include <utils/Log.h>
+#include <iomanip>
#include <vector>
using drm::V1_0::KeyedVector;
@@ -340,7 +338,7 @@
}
std::vector<sp<IDrmFactory>> DrmHal::makeDrmFactories() {
- std::vector<sp<IDrmFactory>> factories(DrmUtils::MakeDrmFactories());
+ static std::vector<sp<IDrmFactory>> factories(DrmUtils::MakeDrmFactories());
if (factories.size() == 0) {
// must be in passthrough mode, load the default passthrough service
auto passthrough = IDrmFactory::getService();
@@ -364,7 +362,7 @@
Return<void> hResult = factory->createPlugin(uuid, appPackageName.string(),
[&](Status status, const sp<IDrmPlugin>& hPlugin) {
if (status != Status::OK) {
- DrmUtils::LOG2BE("Failed to make drm plugin: %d", status);
+ DrmUtils::LOG2BE(uuid, "Failed to make drm plugin: %d", status);
return;
}
plugin = hPlugin;
@@ -372,7 +370,8 @@
);
if (!hResult.isOk()) {
- DrmUtils::LOG2BE("createPlugin remote call failed");
+ DrmUtils::LOG2BE(uuid, "createPlugin remote call failed: %s",
+ hResult.description().c_str());
}
return plugin;
@@ -580,6 +579,7 @@
}
if (mPlugin == NULL) {
+ DrmUtils::LOG2BE(uuid, "No supported hal instance found");
mInitCheck = ERROR_UNSUPPORTED;
} else {
mInitCheck = OK;
diff --git a/drm/libmediadrm/DrmUtils.cpp b/drm/libmediadrm/DrmUtils.cpp
index f7e6717..ed3848d 100644
--- a/drm/libmediadrm/DrmUtils.cpp
+++ b/drm/libmediadrm/DrmUtils.cpp
@@ -43,6 +43,9 @@
#include <mediadrm/ICrypto.h>
#include <mediadrm/IDrm.h>
+#include <map>
+#include <string>
+
using HServiceManager = ::android::hidl::manager::V1_2::IServiceManager;
using ::android::hardware::hidl_array;
using ::android::hardware::hidl_string;
@@ -66,8 +69,8 @@
return obj;
}
-template <typename Hal, typename V>
-void MakeHidlFactories(const uint8_t uuid[16], V &factories) {
+template <typename Hal, typename V, typename M>
+void MakeHidlFactories(const uint8_t uuid[16], V &factories, M& instances) {
sp<HServiceManager> serviceManager = HServiceManager::getService();
if (serviceManager == nullptr) {
LOG2BE("Failed to get service manager");
@@ -78,7 +81,7 @@
for (const auto &instance : registered) {
auto factory = Hal::getService(instance);
if (factory != nullptr) {
- LOG2BI("found %s %s", Hal::descriptor, instance.c_str());
+ instances[instance.c_str()] = Hal::descriptor;
if (!uuid || factory->isCryptoSchemeSupported(uuid)) {
factories.push_back(factory);
}
@@ -87,6 +90,12 @@
});
}
+template <typename Hal, typename V>
+void MakeHidlFactories(const uint8_t uuid[16], V &factories) {
+ std::map<std::string, std::string> instances;
+ MakeHidlFactories<Hal>(uuid, factories, instances);
+}
+
hidl_vec<uint8_t> toHidlVec(const void *ptr, size_t size) {
hidl_vec<uint8_t> vec(size);
if (ptr != nullptr) {
@@ -108,7 +117,7 @@
factory->createPlugin(toHidlArray16(uuid), hidl_string(appPackageName),
[&](::V1_0::Status status, const sp<::V1_0::IDrmPlugin> &hPlugin) {
if (status != ::V1_0::Status::OK) {
- LOG2BE("MakeDrmPlugin failed: %d", status);
+ LOG2BE(uuid, "MakeDrmPlugin failed: %d", status);
return;
}
plugin = hPlugin;
@@ -123,7 +132,7 @@
factory->createPlugin(toHidlArray16(uuid), toHidlVec(initData, initDataSize),
[&](::V1_0::Status status, const sp<::V1_0::ICryptoPlugin> &hPlugin) {
if (status != ::V1_0::Status::OK) {
- LOG2BE("MakeCryptoPlugin failed: %d", status);
+ LOG2BE(uuid, "MakeCryptoPlugin failed: %d", status);
return;
}
plugin = hPlugin;
@@ -147,11 +156,15 @@
std::vector<sp<::V1_0::IDrmFactory>> MakeDrmFactories(const uint8_t uuid[16]) {
std::vector<sp<::V1_0::IDrmFactory>> drmFactories;
- MakeHidlFactories<::V1_0::IDrmFactory>(uuid, drmFactories);
- MakeHidlFactories<::V1_1::IDrmFactory>(uuid, drmFactories);
- MakeHidlFactories<::V1_2::IDrmFactory>(uuid, drmFactories);
- MakeHidlFactories<::V1_3::IDrmFactory>(uuid, drmFactories);
- MakeHidlFactories<::V1_4::IDrmFactory>(uuid, drmFactories);
+ std::map<std::string, std::string> instances;
+ MakeHidlFactories<::V1_0::IDrmFactory>(uuid, drmFactories, instances);
+ MakeHidlFactories<::V1_1::IDrmFactory>(uuid, drmFactories, instances);
+ MakeHidlFactories<::V1_2::IDrmFactory>(uuid, drmFactories, instances);
+ MakeHidlFactories<::V1_3::IDrmFactory>(uuid, drmFactories, instances);
+ MakeHidlFactories<::V1_4::IDrmFactory>(uuid, drmFactories, instances);
+ for (auto const& entry : instances) {
+ LOG2BI("found instance=%s version=%s", entry.first.c_str(), entry.second.c_str());
+ }
return drmFactories;
}
@@ -255,6 +268,8 @@
return ERROR_DRM_PROVISIONING_CONFIG;
case ::V1_4::Status::PROVISIONING_PARSE_ERROR:
return ERROR_DRM_PROVISIONING_PARSE;
+ case ::V1_4::Status::PROVISIONING_REQUEST_REJECTED:
+ return ERROR_DRM_PROVISIONING_REQUEST_REJECTED;
case ::V1_4::Status::RETRYABLE_PROVISIONING_ERROR:
return ERROR_DRM_PROVISIONING_RETRY;
case ::V1_4::Status::SECURE_STOP_RELEASE_ERROR:
diff --git a/drm/libmediadrm/interface/mediadrm/DrmUtils.h b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
index 7fe3501..988cda9 100644
--- a/drm/libmediadrm/interface/mediadrm/DrmUtils.h
+++ b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
@@ -33,8 +33,10 @@
#include <cstdint>
#include <ctime>
#include <deque>
+#include <endian.h>
#include <iterator>
#include <mutex>
+#include <string>
#include <vector>
@@ -88,6 +90,14 @@
}
}
+template <typename... Args>
+void LogToBuffer(android_LogPriority level, const uint8_t uuid[16], const char *fmt, Args... args) {
+ const uint64_t* uuid2 = reinterpret_cast<const uint64_t*>(uuid);
+ std::string uuidFmt("uuid=[%lx %lx] ");
+ uuidFmt += fmt;
+ LogToBuffer(level, uuidFmt.c_str(), htobe64(uuid2[0]), htobe64(uuid2[1]), args...);
+}
+
#ifndef LOG2BE
#define LOG2BE(...) LogToBuffer(ANDROID_LOG_ERROR, __VA_ARGS__)
#define LOG2BW(...) LogToBuffer(ANDROID_LOG_WARN, __VA_ARGS__)
@@ -196,10 +206,13 @@
hResult = plugin->getLogMessages(cb);
}
if (!hResult.isOk()) {
- LOG2BW("%s::getLogMessages remote call failed", T::descriptor);
+ LOG2BW("%s::getLogMessages remote call failed %s",
+ T::descriptor, hResult.description().c_str());
}
auto allLogs(gLogBuf.getLogs());
+ LOG2BI("framework logs size %zu; plugin logs size %zu",
+ allLogs.size(), pluginLogs.size());
std::copy(pluginLogs.begin(), pluginLogs.end(), std::back_inserter(allLogs));
std::sort(allLogs.begin(), allLogs.end(),
[](const ::V1_4::LogMessage &a, const ::V1_4::LogMessage &b) {
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index cfaeb66..43b2c14 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -186,6 +186,23 @@
.build());
addParameter(
+ DefineParam(mQuantization, C2_PARAMKEY_QUANTIZATION)
+ .withDefault(new C2StreamQuantizationInfo::output(0u,
+ DEFAULT_QP_MAX, DEFAULT_QP_MIN,
+ DEFAULT_QP_MAX, DEFAULT_QP_MIN,
+ DEFAULT_QP_MAX, DEFAULT_QP_MIN))
+ .withFields({
+ C2F(mQuantization, iMax).inRange(1, 51),
+ C2F(mQuantization, iMin).inRange(1, 51),
+ C2F(mQuantization, pMax).inRange(1, 51),
+ C2F(mQuantization, pMin).inRange(1, 51),
+ C2F(mQuantization, bMax).inRange(1, 51),
+ C2F(mQuantization, bMin).inRange(1, 51),
+ })
+ .withSetter(QuantizationSetter)
+ .build());
+
+ addParameter(
DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
.withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
.withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
@@ -220,6 +237,71 @@
return res;
}
+ static C2R QuantizationSetter(bool mayBlock, C2P<C2StreamQuantizationInfo::output> &me) {
+ (void)mayBlock;
+ (void)me;
+ C2R res = C2R::Ok();
+
+ ALOGV("QuantizationSetter enters max/min i %d/%d p %d/%d b %d/%d",
+ me.v.iMax, me.v.iMin, me.v.pMax, me.v.pMin, me.v.bMax, me.v.bMin);
+
+ // bounds checking
+ constexpr int qp_lowest = 1;
+ constexpr int qp_highest = 51;
+
+ if (me.v.iMax < qp_lowest) {
+ me.set().iMax = qp_lowest;
+ } else if (me.v.iMax > qp_highest) {
+ me.set().iMax = qp_highest;
+ }
+
+ if (me.v.iMin < qp_lowest) {
+ me.set().iMin = qp_lowest;
+ } else if (me.v.iMin > qp_highest) {
+ me.set().iMin = qp_highest;
+ }
+
+ if (me.v.pMax < qp_lowest) {
+ me.set().pMax = qp_lowest;
+ } else if (me.v.pMax > qp_highest) {
+ me.set().pMax = qp_highest;
+ }
+
+ if (me.v.pMin < qp_lowest) {
+ me.set().pMin = qp_lowest;
+ } else if (me.v.pMin > qp_highest) {
+ me.set().pMin = qp_highest;
+ }
+
+ if (me.v.bMax < qp_lowest) {
+ me.set().bMax = qp_lowest;
+ } else if (me.v.bMax > qp_highest) {
+ me.set().bMax = qp_highest;
+ }
+
+ if (me.v.bMin < qp_lowest) {
+ me.set().bMin = qp_lowest;
+ } else if (me.v.bMin > qp_highest) {
+ me.set().bMin = qp_highest;
+ }
+
+ // consistency checking, e.g. min<max
+ //
+ if (me.v.iMax < me.v.iMin) {
+ me.set().iMax = me.v.iMin;
+ }
+ if (me.v.pMax < me.v.pMin) {
+ me.set().pMax = me.v.pMin;
+ }
+ if (me.v.bMax < me.v.bMin) {
+ me.set().bMax = me.v.bMin;
+ }
+
+ // TODO: enforce any sort of i_max < p_max < b_max?
+
+ return res;
+ }
+
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
C2P<C2StreamPictureSizeInfo::input> &me) {
(void)mayBlock;
@@ -393,6 +475,7 @@
std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
+ std::shared_ptr<C2StreamQuantizationInfo::output> getQuantization_l() const { return mQuantization; }
private:
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -404,6 +487,7 @@
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
std::shared_ptr<C2StreamGopTuning::output> mGop;
+ std::shared_ptr<C2StreamQuantizationInfo::output> mQuantization;
};
#define ive_api_function ih264e_api_function
@@ -664,6 +748,7 @@
ive_ctl_set_qp_op_t s_qp_op;
IV_STATUS_T status;
+ // set the defaults
s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
@@ -679,6 +764,21 @@
s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX;
s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN;
+ // parameter parsing ensured proper range 1..51, so only worry about ordering
+ bool valid = true;
+ if (mQuantization->iMax < mQuantization->iMin) valid = false;
+ if (mQuantization->pMax < mQuantization->pMin) valid = false;
+ if (mQuantization->bMax < mQuantization->bMin) valid = false;
+
+ if (valid) {
+ s_qp_ip.u4_i_qp_max = mQuantization->iMax;
+ s_qp_ip.u4_i_qp_min = mQuantization->iMin;
+ s_qp_ip.u4_p_qp_max = mQuantization->pMax;
+ s_qp_ip.u4_p_qp_min = mQuantization->pMin;
+ s_qp_ip.u4_b_qp_max = mQuantization->bMax;
+ s_qp_ip.u4_b_qp_min = mQuantization->bMin;
+ }
+
s_qp_ip.u4_timestamp_high = -1;
s_qp_ip.u4_timestamp_low = -1;
@@ -926,6 +1026,7 @@
mIInterval = mIntf->getSyncFramePeriod_l();
mIDRInterval = mIntf->getSyncFramePeriod_l();
gop = mIntf->getGop_l();
+ mQuantization = mIntf->getQuantization_l();
}
if (gop && gop->flexCount() > 0) {
uint32_t syncInterval = 1;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index 555055b..e4bf0b0 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -192,6 +192,7 @@
std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+ std::shared_ptr<C2StreamQuantizationInfo::output> mQuantization;
uint32_t mOutBufferSize;
UWORD32 mHeaderGenerated;
diff --git a/media/codec2/components/mpeg4_h263/TEST_MAPPING b/media/codec2/components/mpeg4_h263/TEST_MAPPING
new file mode 100644
index 0000000..93fba22
--- /dev/null
+++ b/media/codec2/components/mpeg4_h263/TEST_MAPPING
@@ -0,0 +1,6 @@
+// mappings for frameworks/av/media/codec2/components/mpeg4_h263
+{
+ "presubmit": [
+ { "name": "C2SoftMpeg4DecTest" }
+ ]
+}
diff --git a/media/codec2/components/tests/Android.bp b/media/codec2/components/tests/Android.bp
new file mode 100644
index 0000000..3c68eee
--- /dev/null
+++ b/media/codec2/components/tests/Android.bp
@@ -0,0 +1,68 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+ name: "C2SoftCodecTest-defaults",
+ gtest: true,
+ host_supported: false,
+ srcs: [
+ "C2SoftCodecTest.cpp",
+ ],
+
+ static_libs: [
+ "liblog",
+ "libion",
+ "libfmq",
+ "libbase",
+ "libutils",
+ "libcutils",
+ "libcodec2",
+ "libhidlbase",
+ "libdmabufheap",
+ "libcodec2_vndk",
+ "libnativewindow",
+ "libcodec2_soft_common",
+ "libsfplugin_ccodec_utils",
+ "libstagefright_foundation",
+ "libstagefright_bufferpool@2.0.1",
+ "android.hardware.graphics.mapper@2.0",
+ "android.hardware.graphics.mapper@3.0",
+ "android.hardware.media.bufferpool@2.0",
+ "android.hardware.graphics.allocator@2.0",
+ "android.hardware.graphics.allocator@3.0",
+ "android.hardware.graphics.bufferqueue@2.0",
+ ],
+
+ shared_libs: [
+ "libui",
+ "libdl",
+ "libhardware",
+ "libvndksupport",
+ "libprocessgroup",
+ ],
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
+}
+
+cc_test {
+ name: "C2SoftMpeg4DecTest",
+ defaults: ["C2SoftCodecTest-defaults"],
+
+ static_libs: [
+ "libstagefright_m4vh263dec",
+ "libcodec2_soft_mpeg4dec",
+ ],
+
+ test_suites: [
+ "general-tests",
+ ],
+}
diff --git a/media/codec2/components/tests/C2SoftCodecTest.cpp b/media/codec2/components/tests/C2SoftCodecTest.cpp
new file mode 100644
index 0000000..84c2562
--- /dev/null
+++ b/media/codec2/components/tests/C2SoftCodecTest.cpp
@@ -0,0 +1,105 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <C2Config.h>
+#include <C2ComponentFactory.h>
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+using namespace android;
+extern "C" ::C2ComponentFactory* CreateCodec2Factory();
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory);
+
+class C2SoftCodecTest : public ::testing::Test {
+public:
+ void SetUp() override {
+ mFactory = CreateCodec2Factory();
+ }
+
+ void TearDown() override {
+ if (mFactory) {
+ DestroyCodec2Factory(mFactory);
+ }
+ }
+
+ c2_status_t createComponent(
+ std::shared_ptr<C2Component>* const comp) {
+ if (!mFactory) {
+ return C2_NO_INIT;
+ }
+ return mFactory->createComponent(
+ kPlaceholderId, comp, std::default_delete<C2Component>());
+ }
+
+ c2_status_t createInterface(
+ std::shared_ptr<C2ComponentInterface>* const intf) {
+ if (!mFactory) {
+ return C2_NO_INIT;
+ }
+ return mFactory->createInterface(
+ kPlaceholderId, intf, std::default_delete<C2ComponentInterface>());
+ }
+
+ ::C2ComponentFactory *getFactory() { return mFactory; }
+
+private:
+ static constexpr ::c2_node_id_t kPlaceholderId = 0;
+
+ ::C2ComponentFactory *mFactory;
+};
+
+TEST_F(C2SoftCodecTest, PictureSizeInfoTest) {
+ std::shared_ptr<C2ComponentInterface> interface;
+ c2_status_t status = createInterface(&interface);
+ ASSERT_EQ(status, C2_OK) << "Error in createInterface";
+ ASSERT_NE(interface, nullptr) << "interface is null";
+
+ std::unique_ptr<C2StreamPictureSizeInfo::output> param =
+ std::make_unique<C2StreamPictureSizeInfo::output>();
+ std::vector<C2FieldSupportedValuesQuery> validValueInfos = {
+ C2FieldSupportedValuesQuery::Current(
+ C2ParamField(param.get(), &C2StreamPictureSizeInfo::width)),
+ C2FieldSupportedValuesQuery::Current(
+ C2ParamField(param.get(), &C2StreamPictureSizeInfo::height))};
+ status = interface->querySupportedValues_vb(validValueInfos, C2_MAY_BLOCK);
+ ASSERT_EQ(status, C2_OK) << "Error in querySupportedValues_vb";
+ ASSERT_EQ(validValueInfos.size(), 2) << "querySupportedValues_vb didn't return 2 values";
+
+ ASSERT_EQ(validValueInfos[0].values.range.max.ref<uint32_t>(), 1920)
+ << "Incorrect maximum value for width";
+ ASSERT_EQ(validValueInfos[1].values.range.max.ref<uint32_t>(), 1920)
+ << "Incorrect maximum value for height";
+ ASSERT_EQ(validValueInfos[0].values.range.min.ref<uint32_t>(), 2)
+ << "Incorrect minimum value for width";
+ ASSERT_EQ(validValueInfos[1].values.range.min.ref<uint32_t>(), 2)
+ << "Incorrect minimum value for height";
+ ASSERT_EQ(validValueInfos[0].values.range.step.ref<uint32_t>(), 2)
+ << "Incorrect alignment value for width";
+ ASSERT_EQ(validValueInfos[1].values.range.step.ref<uint32_t>(), 2)
+ << "Incorrect alignment value for height";
+
+ return;
+}
+
+int main(int argc, char** argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ int status = RUN_ALL_TESTS();
+ ALOGV("Test result = %d\n", status);
+ return status;
+}
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 752140a..8e8a08b 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -187,6 +187,8 @@
kParamIndexPictureType,
kParamIndexHdr10PlusMetadata,
+ kParamIndexQuantization,
+
/* ------------------------------------ video components ------------------------------------ */
kParamIndexFrameRate = C2_PARAM_INDEX_VIDEO_PARAM_START,
@@ -708,6 +710,38 @@
C2StreamProfileLevelInfo;
constexpr char C2_PARAMKEY_PROFILE_LEVEL[] = "coded.pl";
+struct C2QuantizationStruct {
+ int32_t iMax; ///< max/min for I frames
+ int32_t iMin;
+ int32_t pMax; ///< max/min for P frames
+ int32_t pMin;
+ int32_t bMax; ///< max/min for B frames
+ int32_t bMin;
+
+ C2QuantizationStruct(
+ int32_t iMax_ = INT32_MAX,
+ int32_t iMin_ = INT32_MIN,
+ int32_t pMax_ = INT32_MAX,
+ int32_t pMin_ = INT32_MIN,
+ int32_t bMax_ = INT32_MAX,
+ int32_t bMin_ = INT32_MIN)
+ : iMax(iMax_), iMin(iMin_),
+ pMax(pMax_), pMin(pMin_),
+ bMax(bMax_), bMin(bMin_) { }
+
+ DEFINE_AND_DESCRIBE_C2STRUCT(Quantization) // reference?
+ C2FIELD(iMax, "i-max")
+ C2FIELD(iMin, "i-min")
+ C2FIELD(pMax, "p-max")
+ C2FIELD(pMin, "p-min")
+ C2FIELD(bMax, "b-max")
+ C2FIELD(bMin, "b-min")
+};
+
+typedef C2StreamParam<C2Info, C2QuantizationStruct, kParamIndexQuantization>
+ C2StreamQuantizationInfo;
+constexpr char C2_PARAMKEY_QUANTIZATION[] = "coded.qp";
+
/**
* Codec-specific initialization data.
*
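
A small usage sketch for the new parameter: pushing per-frame-type QP bounds through a component interface via C2_PARAMKEY_QUANTIZATION; the values and helper name are arbitrary and illustrative only:

// Illustrative only; assumes an existing C2ComponentInterface for an encoder.
#include <C2Component.h>
#include <C2Config.h>
#include <memory>
#include <vector>

c2_status_t setQpBounds(const std::shared_ptr<C2ComponentInterface>& intf) {
    C2StreamQuantizationInfo::output qp(0u /* output stream */,
            /* iMax */ 40, /* iMin */ 10,
            /* pMax */ 42, /* pMin */ 12,
            /* bMax */ 44, /* bMin */ 14);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    return intf->config_vb({&qp}, C2_MAY_BLOCK, &failures);
}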
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 66a2b6a..c8a1994 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -552,13 +552,15 @@
}
// Report to MediaCodec
- // Note: for now we do not propagate the error code to MediaCodec as we would need
- // to translate to a MediaCodec error.
+ // Note: for now we do not propagate the error code to MediaCodec
+ // except for C2_NO_MEMORY, as we would need to translate to a MediaCodec error.
sp<CCodec> codec(mCodec.promote());
if (!codec || !codec->mCallback) {
return;
}
- codec->mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+ codec->mCallback->onError(
+ errorCode == C2_NO_MEMORY ? NO_MEMORY : UNKNOWN_ERROR,
+ ACTION_CODE_FATAL);
}
virtual void onDeath(
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index d3814fb..2190f46 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -727,6 +727,19 @@
return C2Value();
}));
+ add(ConfigMapper(KEY_VIDEO_QP_I_MAX, C2_PARAMKEY_QUANTIZATION, "i-max")
+ .limitTo(D::VIDEO & D::ENCODER));
+ add(ConfigMapper(KEY_VIDEO_QP_I_MIN, C2_PARAMKEY_QUANTIZATION, "i-min")
+ .limitTo(D::VIDEO & D::ENCODER));
+ add(ConfigMapper(KEY_VIDEO_QP_P_MAX, C2_PARAMKEY_QUANTIZATION, "p-max")
+ .limitTo(D::VIDEO & D::ENCODER));
+ add(ConfigMapper(KEY_VIDEO_QP_P_MIN, C2_PARAMKEY_QUANTIZATION, "p-min")
+ .limitTo(D::VIDEO & D::ENCODER));
+ add(ConfigMapper(KEY_VIDEO_QP_B_MAX, C2_PARAMKEY_QUANTIZATION, "b-max")
+ .limitTo(D::VIDEO & D::ENCODER));
+ add(ConfigMapper(KEY_VIDEO_QP_B_MIN, C2_PARAMKEY_QUANTIZATION, "b-min")
+ .limitTo(D::VIDEO & D::ENCODER));
+
// convert to dBFS and add default
add(ConfigMapper(KEY_AAC_DRC_TARGET_REFERENCE_LEVEL, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL, "value")
.limitTo(D::AUDIO & D::DECODER & (D::CONFIG | D::PARAM | D::READ))
diff --git a/media/janitors/OWNERS-codecs b/media/janitors/codec_OWNERS
similarity index 100%
rename from media/janitors/OWNERS-codecs
rename to media/janitors/codec_OWNERS
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 00ed3b8..a53e664 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -35,15 +35,6 @@
#include <media/MediaMetricsItem.h>
#include <media/TypeConverter.h>
-#define VALUE_OR_FATAL(result) \
- ({ \
- auto _tmp = (result); \
- LOG_ALWAYS_FATAL_IF(!_tmp.ok(), \
- "Failed result (%d)", \
- _tmp.error()); \
- std::move(_tmp.value()); \
- })
-
#define WAIT_PERIOD_MS 10
namespace android {
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index d2f714a..7d6d453 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -38,15 +38,6 @@
#include <media/MediaMetricsItem.h>
#include <media/TypeConverter.h>
-#define VALUE_OR_FATAL(result) \
- ({ \
- auto _tmp = (result); \
- LOG_ALWAYS_FATAL_IF(!_tmp.ok(), \
- "Failed result (%d)", \
- _tmp.error()); \
- std::move(_tmp.value()); \
- })
-
#define WAIT_PERIOD_MS 10
#define WAIT_STREAM_END_TIMEOUT_SEC 120
static const int kMaxLoopCountNotifications = 32;
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index db2b0b8..84309ee 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -35,6 +35,10 @@
#define MAX_STRING_LENGTH 256
#define MAX_ARRAY_LENGTH 256
+constexpr int32_t kMinSampleRateHz = 4000;
+constexpr int32_t kMaxSampleRateHz = 192000;
+constexpr int32_t kSampleRateUnspecified = 0;
+
using namespace std;
using namespace android;
@@ -130,29 +134,20 @@
xsdc_enum_range<xsd::AudioInOutFlag>{}, audio_output_flag_from_string, "_OUTPUT_");
template <typename T, size_t size>
-T getValueFromArray(FuzzedDataProvider *fdp, const T (&arr)[size]) {
- return arr[fdp->ConsumeIntegralInRange<int32_t>(0, size - 1)];
-}
-
-template <typename T, size_t size>
T getValue(FuzzedDataProvider *fdp, const T (&arr)[size]) {
- if (fdp->ConsumeBool()) {
- return static_cast<T>(fdp->ConsumeIntegral<int32_t>());
- }
- return getValueFromArray(fdp, arr);
-}
-
-template <typename T>
-T getValueFromVector(FuzzedDataProvider *fdp, std::vector<T> vec) {
- return vec[fdp->ConsumeIntegralInRange<int32_t>(0, vec.size() - 1)];
+ return arr[fdp->ConsumeIntegralInRange<int32_t>(0, size - 1)];
}
template <typename T>
T getValue(FuzzedDataProvider *fdp, std::vector<T> vec) {
+ return vec[fdp->ConsumeIntegralInRange<int32_t>(0, vec.size() - 1)];
+}
+
+int32_t getSampleRate(FuzzedDataProvider *fdp) {
if (fdp->ConsumeBool()) {
- return static_cast<T>(fdp->ConsumeIntegral<int32_t>());
+ return fdp->ConsumeIntegralInRange<int32_t>(kMinSampleRateHz, kMaxSampleRateHz);
}
- return getValueFromVector(fdp, vec);
+ return kSampleRateUnspecified;
}
class DeathNotifier : public IBinder::DeathRecipient {
@@ -189,7 +184,7 @@
}
void AudioFlingerFuzzer::invokeAudioTrack() {
- uint32_t sampleRate = mFdp.ConsumeIntegral<uint32_t>();
+ uint32_t sampleRate = getSampleRate(&mFdp);
audio_format_t format = getValue(&mFdp, kFormats);
audio_channel_mask_t channelMask = getValue(&mFdp, kChannelMasks);
size_t frameCount = static_cast<size_t>(mFdp.ConsumeIntegral<uint32_t>());
@@ -259,7 +254,7 @@
float auxEffectSendLevel;
track->getAuxEffectSendLevel(&auxEffectSendLevel);
- track->setSampleRate(mFdp.ConsumeIntegral<uint32_t>());
+ track->setSampleRate(getSampleRate(&mFdp));
track->getSampleRate();
track->getOriginalSampleRate();
@@ -292,7 +287,7 @@
void AudioFlingerFuzzer::invokeAudioRecord() {
int32_t notificationFrames = mFdp.ConsumeIntegral<int32_t>();
- uint32_t sampleRate = mFdp.ConsumeIntegral<uint32_t>();
+ uint32_t sampleRate = getSampleRate(&mFdp);
size_t frameCount = static_cast<size_t>(mFdp.ConsumeIntegral<uint32_t>());
audio_format_t format = getValue(&mFdp, kFormats);
audio_channel_mask_t channelMask = getValue(&mFdp, kChannelMasks);
@@ -518,7 +513,7 @@
AudioSystem::getFrameCountHAL(mFdp.ConsumeIntegral<int32_t>(), &frameCount);
size_t buffSize;
- uint32_t sampleRate = mFdp.ConsumeIntegral<uint32_t>();
+ uint32_t sampleRate = getSampleRate(&mFdp);
audio_format_t format = getValue(&mFdp, kFormats);
audio_channel_mask_t channelMask = getValue(&mFdp, kChannelMasks);
AudioSystem::getInputBufferSize(sampleRate, format, channelMask, &buffSize);
@@ -572,12 +567,12 @@
config.offload_info.format = getValue(&mFdp, kFormats);
config.offload_info.has_video = mFdp.ConsumeBool();
config.offload_info.is_streaming = mFdp.ConsumeBool();
- config.offload_info.sample_rate = (mFdp.ConsumeIntegral<uint32_t>());
+ config.offload_info.sample_rate = getSampleRate(&mFdp);
config.offload_info.sync_id = mFdp.ConsumeIntegral<uint32_t>();
config.offload_info.stream_type = getValue(&mFdp, kStreamtypes);
config.offload_info.usage = getValue(&mFdp, kUsages);
- config.sample_rate = mFdp.ConsumeIntegral<uint32_t>();
+ config.sample_rate = getSampleRate(&mFdp);
audio_devices_t device = getValue(&mFdp, kDevices);
audio_source_t source = getValue(&mFdp, kInputSources);
@@ -628,13 +623,13 @@
config.offload_info.format = getValue(&mFdp, kFormats);
config.offload_info.has_video = mFdp.ConsumeBool();
config.offload_info.is_streaming = mFdp.ConsumeBool();
- config.offload_info.sample_rate = mFdp.ConsumeIntegral<uint32_t>();
+ config.offload_info.sample_rate = getSampleRate(&mFdp);
config.offload_info.stream_type = getValue(&mFdp, kStreamtypes);
config.offload_info.sync_id = mFdp.ConsumeIntegral<uint32_t>();
config.offload_info.usage = getValue(&mFdp, kUsages);
config.format = getValue(&mFdp, kFormats);
- config.sample_rate = mFdp.ConsumeIntegral<uint32_t>();
+ config.sample_rate = getSampleRate(&mFdp);
sp<DeviceDescriptorBase> device = new DeviceDescriptorBase(getValue(&mFdp, kDevices));
audio_output_flags_t flags = getValue(&mFdp, kOutputFlags);
@@ -683,7 +678,7 @@
patch.sources[i].gain.ramp_duration_ms = mFdp.ConsumeIntegral<uint32_t>();
patch.sources[i].id = static_cast<audio_format_t>(mFdp.ConsumeIntegral<int32_t>());
patch.sources[i].role = getValue(&mFdp, kPortRoles);
- patch.sources[i].sample_rate = mFdp.ConsumeIntegral<uint32_t>();
+ patch.sources[i].sample_rate = getSampleRate(&mFdp);
patch.sources[i].type = getValue(&mFdp, kPortTypes);
patch.sinks[i].config_mask = mFdp.ConsumeIntegral<uint32_t>();
@@ -695,7 +690,7 @@
patch.sinks[i].gain.ramp_duration_ms = mFdp.ConsumeIntegral<uint32_t>();
patch.sinks[i].id = static_cast<audio_format_t>(mFdp.ConsumeIntegral<int32_t>());
patch.sinks[i].role = getValue(&mFdp, kPortRoles);
- patch.sinks[i].sample_rate = mFdp.ConsumeIntegral<uint32_t>();
+ patch.sinks[i].sample_rate = getSampleRate(&mFdp);
patch.sinks[i].type = getValue(&mFdp, kPortTypes);
}
diff --git a/media/libaudioclient/include/media/AidlConversionUtil.h b/media/libaudioclient/include/media/AidlConversionUtil.h
index bf2d800..c1a2be3 100644
--- a/media/libaudioclient/include/media/AidlConversionUtil.h
+++ b/media/libaudioclient/include/media/AidlConversionUtil.h
@@ -48,6 +48,15 @@
std::move(_tmp.value()); \
})
+#define VALUE_OR_FATAL(result) \
+ ({ \
+ auto _tmp = (result); \
+ LOG_ALWAYS_FATAL_IF(!_tmp.ok(), \
+ "Function: %s Line: %d Failed result (%d)",\
+ __FUNCTION__, __LINE__, _tmp.error()); \
+ std::move(_tmp.value()); \
+ })
+
/**
* A generic template to safely cast between integral types, respecting limits of the destination
* type.
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
index 761c6ce..bbe7de0 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
@@ -57,7 +57,7 @@
* Create the instance handle if not already initialised
*/
if (*phInstance == LVM_NULL) {
- *phInstance = new LVDBE_Instance_t;
+ *phInstance = new LVDBE_Instance_t{};
}
pInstance = (LVDBE_Instance_t*)*phInstance;
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
index b092970..6ea08bc 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
@@ -93,7 +93,7 @@
/*
* Create the instance handle
*/
- *phInstance = new LVM_Instance_t;
+ *phInstance = new LVM_Instance_t{};
pInstance = (LVM_Instance_t*)*phInstance;
pInstance->InstParams = *pInstParams;
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
index 37e6d4d..8cb7013 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
@@ -52,7 +52,7 @@
LVEQNB_Capabilities_t* pCapabilities, void* pScratch) {
LVEQNB_Instance_t* pInstance;
- *phInstance = new LVEQNB_Instance_t;
+ *phInstance = new LVEQNB_Instance_t{};
pInstance = (LVEQNB_Instance_t*)*phInstance;
pInstance->Capabilities = *pCapabilities;
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
index bf71634..298655b 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
@@ -114,7 +114,7 @@
* Set the instance handle if not already initialised
*/
if (*phInstance == LVM_NULL) {
- *phInstance = new LVREV_Instance_st;
+ *phInstance = new LVREV_Instance_st{};
}
pLVREV_Private = (LVREV_Instance_st*)*phInstance;
pLVREV_Private->MemoryTable = *pMemoryTable;
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
index 5ca8543..5550b9c 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
@@ -49,7 +49,7 @@
LVM_UINT32 BufferLength = 0;
/* Set the instance handle if not already initialised */
- *phInstance = new LVPSA_InstancePr_t;
+ *phInstance = new LVPSA_InstancePr_t{};
pLVPSA_Inst = (LVPSA_InstancePr_t*)*phInstance;
pLVPSA_Inst->pScratch = pScratch;
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
index d60b360..dd1baf3 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
@@ -55,7 +55,7 @@
* Create the instance handle if not already initialised
*/
if (*phInstance == LVM_NULL) {
- *phInstance = new LVCS_Instance_t;
+ *phInstance = new LVCS_Instance_t{};
}
pInstance = (LVCS_Instance_t*)*phInstance;
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 639af4d..9939ed1 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -10,12 +10,42 @@
}
cc_test {
+ name: "EffectReverbTest",
+ vendor: true,
+ gtest: true,
+ host_supported: true,
+ srcs: [
+ "EffectReverbTest.cpp",
+ "EffectTestHelper.cpp",
+ ],
+ include_dirs: [
+ "frameworks/av/media/libeffects/lvm/lib/Common/lib",
+ "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
+ ],
+ static_libs: [
+ "libaudioutils",
+ "libreverb",
+ "libreverbwrapper",
+ ],
+ shared_libs: [
+ "liblog",
+ ],
+ header_libs: [
+ "libaudioeffects",
+ "libhardware_headers",
+ ],
+}
+
+cc_test {
name: "EffectBundleTest",
vendor: true,
gtest: true,
host_supported: true,
test_suites: ["device-tests"],
- srcs: ["EffectBundleTest.cpp"],
+ srcs: [
+ "EffectBundleTest.cpp",
+ "EffectTestHelper.cpp",
+ ],
static_libs: [
"libaudioutils",
"libbundlewrapper",
diff --git a/media/libeffects/lvm/tests/EffectBundleTest.cpp b/media/libeffects/lvm/tests/EffectBundleTest.cpp
index aae09de..881ffb1 100644
--- a/media/libeffects/lvm/tests/EffectBundleTest.cpp
+++ b/media/libeffects/lvm/tests/EffectBundleTest.cpp
@@ -14,22 +14,8 @@
* limitations under the License.
*/
-#include <array>
-#include <audio_utils/channels.h>
-#include <audio_utils/primitives.h>
-#include <climits>
-#include <cstdlib>
-#include <gtest/gtest.h>
-#include <hardware/audio_effect.h>
-#include <log/log.h>
-#include <random>
-#include <system/audio.h>
-#include <vector>
-
-extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
-
-// Corresponds to SNR for 1 bit difference between two int16_t signals
-constexpr float kSNRThreshold = 90.308998;
+#include "EffectTestHelper.h"
+using namespace android;
// Update isBassBoost, if the order of effects is updated
constexpr effect_uuid_t kEffectUuids[] = {
@@ -50,120 +36,15 @@
constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
-constexpr audio_channel_mask_t kChMasks[] = {
- AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO,
- AUDIO_CHANNEL_OUT_2POINT1, AUDIO_CHANNEL_OUT_2POINT0POINT2,
- AUDIO_CHANNEL_OUT_QUAD, AUDIO_CHANNEL_OUT_QUAD_BACK,
- AUDIO_CHANNEL_OUT_QUAD_SIDE, AUDIO_CHANNEL_OUT_SURROUND,
- AUDIO_CHANNEL_INDEX_MASK_4, AUDIO_CHANNEL_OUT_2POINT1POINT2,
- AUDIO_CHANNEL_OUT_3POINT0POINT2, AUDIO_CHANNEL_OUT_PENTA,
- AUDIO_CHANNEL_INDEX_MASK_5, AUDIO_CHANNEL_OUT_3POINT1POINT2,
- AUDIO_CHANNEL_OUT_5POINT1, AUDIO_CHANNEL_OUT_5POINT1_BACK,
- AUDIO_CHANNEL_OUT_5POINT1_SIDE, AUDIO_CHANNEL_INDEX_MASK_6,
- AUDIO_CHANNEL_OUT_6POINT1, AUDIO_CHANNEL_INDEX_MASK_7,
- AUDIO_CHANNEL_OUT_5POINT1POINT2, AUDIO_CHANNEL_OUT_7POINT1,
- AUDIO_CHANNEL_INDEX_MASK_8, AUDIO_CHANNEL_INDEX_MASK_9,
- AUDIO_CHANNEL_INDEX_MASK_10, AUDIO_CHANNEL_INDEX_MASK_11,
- AUDIO_CHANNEL_INDEX_MASK_12, AUDIO_CHANNEL_INDEX_MASK_13,
- AUDIO_CHANNEL_INDEX_MASK_14, AUDIO_CHANNEL_INDEX_MASK_15,
- AUDIO_CHANNEL_INDEX_MASK_16, AUDIO_CHANNEL_INDEX_MASK_17,
- AUDIO_CHANNEL_INDEX_MASK_18, AUDIO_CHANNEL_INDEX_MASK_19,
- AUDIO_CHANNEL_INDEX_MASK_20, AUDIO_CHANNEL_INDEX_MASK_21,
- AUDIO_CHANNEL_INDEX_MASK_22, AUDIO_CHANNEL_INDEX_MASK_23,
- AUDIO_CHANNEL_INDEX_MASK_24,
-};
-
-constexpr size_t kNumChMasks = std::size(kChMasks);
-
-constexpr size_t kSampleRates[] = {8000, 11025, 12000, 16000, 22050, 24000, 32000,
- 44100, 48000, 88200, 96000, 176400, 192000};
-
-constexpr size_t kNumSampleRates = std::size(kSampleRates);
-
-constexpr size_t kFrameCounts[] = {4, 2048};
-
-constexpr size_t kNumFrameCounts = std::size(kFrameCounts);
-
-constexpr size_t kLoopCounts[] = {1, 4};
-
-constexpr size_t kNumLoopCounts = std::size(kLoopCounts);
-
-class EffectBundleHelper {
- public:
- EffectBundleHelper(const effect_uuid_t* uuid, size_t chMask, size_t sampleRate,
- size_t frameCount, size_t loopCount)
- : mUuid(uuid),
- mChMask(chMask),
- mChannelCount(audio_channel_count_from_out_mask(mChMask)),
- mSampleRate(sampleRate),
- mFrameCount(frameCount),
- mLoopCount(loopCount) {}
- void createEffect();
- void releaseEffect();
- void configEffect();
- void process(float* input, float* output);
-
- private:
- const effect_uuid_t* mUuid;
- const size_t mChMask;
- const size_t mChannelCount;
- const size_t mSampleRate;
- const size_t mFrameCount;
- const size_t mLoopCount;
- effect_handle_t mEffectHandle{};
-};
-
-void EffectBundleHelper::createEffect() {
- int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(mUuid, 1, 1, &mEffectHandle);
- ASSERT_EQ(status, 0) << "create_effect returned an error " << status << "\n";
-}
-
-void EffectBundleHelper::releaseEffect() {
- int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(mEffectHandle);
- ASSERT_EQ(status, 0) << "release_effect returned an error " << status << "\n";
-}
-
-void EffectBundleHelper::configEffect() {
- effect_config_t config{};
- config.inputCfg.samplingRate = config.outputCfg.samplingRate = mSampleRate;
- config.inputCfg.channels = config.outputCfg.channels = mChMask;
- config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
-
- int reply = 0;
- uint32_t replySize = sizeof(reply);
- int status = (*mEffectHandle)
- ->command(mEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
- &config, &replySize, &reply);
- ASSERT_EQ(status, 0) << "command returned an error " << status << "\n";
- ASSERT_EQ(reply, 0) << "command reply non zero " << reply << "\n";
-
- status = (*mEffectHandle)
- ->command(mEffectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
- ASSERT_EQ(status, 0) << "command enable returned an error " << status << "\n";
- ASSERT_EQ(reply, 0) << "command reply non zero " << reply << "\n";
-}
-
-void EffectBundleHelper::process(float* input, float* output) {
- audio_buffer_t inBuffer = {.frameCount = mFrameCount, .f32 = input};
- audio_buffer_t outBuffer = {.frameCount = mFrameCount, .f32 = output};
- for (size_t i = 0; i < mLoopCount; i++) {
- int status = (*mEffectHandle)->process(mEffectHandle, &inBuffer, &outBuffer);
- ASSERT_EQ(status, 0) << "process returned an error " << status << "\n";
-
- inBuffer.f32 += mFrameCount * mChannelCount;
- outBuffer.f32 += mFrameCount * mChannelCount;
- }
-}
-
typedef std::tuple<int, int, int, int, int> SingleEffectTestParam;
class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
public:
SingleEffectTest()
- : mChMask(kChMasks[std::get<0>(GetParam())]),
+ : mChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
mChannelCount(audio_channel_count_from_out_mask(mChMask)),
- mSampleRate(kSampleRates[std::get<1>(GetParam())]),
- mFrameCount(kFrameCounts[std::get<2>(GetParam())]),
- mLoopCount(kLoopCounts[std::get<3>(GetParam())]),
+ mSampleRate(EffectTestHelper::kSampleRates[std::get<1>(GetParam())]),
+ mFrameCount(EffectTestHelper::kFrameCounts[std::get<2>(GetParam())]),
+ mLoopCount(EffectTestHelper::kLoopCounts[std::get<3>(GetParam())]),
mTotalFrameCount(mFrameCount * mLoopCount),
mUuid(&kEffectUuids[std::get<4>(GetParam())]) {}
@@ -182,10 +63,10 @@
<< "chMask: " << mChMask << " sampleRate: " << mSampleRate
<< " frameCount: " << mFrameCount << " loopCount: " << mLoopCount);
- EffectBundleHelper effect(mUuid, mChMask, mSampleRate, mFrameCount, mLoopCount);
+ EffectTestHelper effect(mUuid, mChMask, mChMask, mSampleRate, mFrameCount, mLoopCount);
ASSERT_NO_FATAL_FAILURE(effect.createEffect());
- ASSERT_NO_FATAL_FAILURE(effect.configEffect());
+ ASSERT_NO_FATAL_FAILURE(effect.setConfig());
// Initialize input buffer with deterministic pseudo-random values
std::vector<float> input(mTotalFrameCount * mChannelCount);
@@ -199,21 +80,22 @@
ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
}
-INSTANTIATE_TEST_SUITE_P(EffectBundleTestAll, SingleEffectTest,
- ::testing::Combine(::testing::Range(0, (int)kNumChMasks),
- ::testing::Range(0, (int)kNumSampleRates),
- ::testing::Range(0, (int)kNumFrameCounts),
- ::testing::Range(0, (int)kNumLoopCounts),
- ::testing::Range(0, (int)kNumEffectUuids)));
+INSTANTIATE_TEST_SUITE_P(
+ EffectBundleTestAll, SingleEffectTest,
+ ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
+ ::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+ ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+ ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+ ::testing::Range(0, (int)kNumEffectUuids)));
typedef std::tuple<int, int, int, int> SingleEffectComparisonTestParam;
class SingleEffectComparisonTest
: public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
public:
SingleEffectComparisonTest()
- : mSampleRate(kSampleRates[std::get<0>(GetParam())]),
- mFrameCount(kFrameCounts[std::get<1>(GetParam())]),
- mLoopCount(kLoopCounts[std::get<2>(GetParam())]),
+ : mSampleRate(EffectTestHelper::kSampleRates[std::get<0>(GetParam())]),
+ mFrameCount(EffectTestHelper::kFrameCounts[std::get<1>(GetParam())]),
+ mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
mTotalFrameCount(mFrameCount * mLoopCount),
mUuid(&kEffectUuids[std::get<3>(GetParam())]) {}
@@ -224,26 +106,6 @@
const effect_uuid_t* mUuid;
};
-template <typename T>
-float computeSnr(const T* ref, const T* tst, size_t count) {
- double signal{};
- double noise{};
-
- for (size_t i = 0; i < count; ++i) {
- const double value(ref[i]);
- const double diff(tst[i] - value);
- signal += value * value;
- noise += diff * diff;
- }
- // Initialized to a value greater than kSNRThreshold to handle
- // cases where ref and tst match exactly
- float snr = kSNRThreshold + 1.0f;
- if (signal > 0.0f && noise > 0.0f) {
- snr = 10.f * log(signal / noise);
- }
- return snr;
-}
-
// Compares first two channels in multi-channel output to stereo output when same effect is applied
TEST_P(SingleEffectComparisonTest, SimpleProcess) {
SCOPED_TRACE(testing::Message() << " sampleRate: " << mSampleRate << " frameCount: "
@@ -264,11 +126,11 @@
mTotalFrameCount * sizeof(float) * FCC_1);
// Apply effect on stereo channels
- EffectBundleHelper stereoEffect(mUuid, AUDIO_CHANNEL_OUT_STEREO, mSampleRate, mFrameCount,
- mLoopCount);
+ EffectTestHelper stereoEffect(mUuid, AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_STEREO,
+ mSampleRate, mFrameCount, mLoopCount);
ASSERT_NO_FATAL_FAILURE(stereoEffect.createEffect());
- ASSERT_NO_FATAL_FAILURE(stereoEffect.configEffect());
+ ASSERT_NO_FATAL_FAILURE(stereoEffect.setConfig());
std::vector<float> stereoOutput(mTotalFrameCount * FCC_2);
ASSERT_NO_FATAL_FAILURE(stereoEffect.process(stereoInput.data(), stereoOutput.data()));
@@ -278,12 +140,12 @@
std::vector<int16_t> stereoRefI16(mTotalFrameCount * FCC_2);
memcpy_to_i16_from_float(stereoRefI16.data(), stereoOutput.data(), mTotalFrameCount * FCC_2);
- for (size_t chMask : kChMasks) {
+ for (size_t chMask : EffectTestHelper::kChMasks) {
size_t channelCount = audio_channel_count_from_out_mask(chMask);
- EffectBundleHelper testEffect(mUuid, chMask, mSampleRate, mFrameCount, mLoopCount);
+ EffectTestHelper testEffect(mUuid, chMask, chMask, mSampleRate, mFrameCount, mLoopCount);
ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
- ASSERT_NO_FATAL_FAILURE(testEffect.configEffect());
+ ASSERT_NO_FATAL_FAILURE(testEffect.setConfig());
std::vector<float> testInput(mTotalFrameCount * channelCount);
@@ -312,7 +174,8 @@
// SNR must be above the threshold
float snr = computeSnr<int16_t>(stereoRefI16.data(), stereoTestI16.data(),
mTotalFrameCount * FCC_2);
- ASSERT_GT(snr, kSNRThreshold) << "SNR " << snr << "is lower than " << kSNRThreshold;
+ ASSERT_GT(snr, EffectTestHelper::kSNRThreshold)
+ << "SNR " << snr << "is lower than " << EffectTestHelper::kSNRThreshold;
} else {
ASSERT_EQ(0,
memcmp(stereoRefI16.data(), stereoTestI16.data(), mTotalFrameCount * FCC_2))
@@ -321,11 +184,12 @@
}
}
-INSTANTIATE_TEST_SUITE_P(EffectBundleTestAll, SingleEffectComparisonTest,
- ::testing::Combine(::testing::Range(0, (int)kNumSampleRates),
- ::testing::Range(0, (int)kNumFrameCounts),
- ::testing::Range(0, (int)kNumLoopCounts),
- ::testing::Range(0, (int)kNumEffectUuids)));
+INSTANTIATE_TEST_SUITE_P(
+ EffectBundleTestAll, SingleEffectComparisonTest,
+ ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+ ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+ ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+ ::testing::Range(0, (int)kNumEffectUuids)));
int main(int argc, char** argv) {
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/libeffects/lvm/tests/EffectReverbTest.cpp b/media/libeffects/lvm/tests/EffectReverbTest.cpp
new file mode 100644
index 0000000..59453eb
--- /dev/null
+++ b/media/libeffects/lvm/tests/EffectReverbTest.cpp
@@ -0,0 +1,239 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <audio_effects/effect_presetreverb.h>
+#include <VectorArithmetic.h>
+
+#include "EffectTestHelper.h"
+using namespace android;
+
+constexpr effect_uuid_t kEffectUuids[] = {
+ // NXP SW insert environmental reverb
+ {0xc7a511a0, 0xa3bb, 0x11df, 0x860e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ // NXP SW insert preset reverb
+ {0x172cdf00, 0xa3bc, 0x11df, 0xa72f, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ // NXP SW auxiliary environmental reverb
+ {0x4a387fc0, 0x8ab3, 0x11df, 0x8bad, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ // NXP SW auxiliary preset reverb
+ {0xf29a1400, 0xa3bb, 0x11df, 0x8ddc, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+};
+
+constexpr size_t kNumEffectUuids = std::size(kEffectUuids);
+
+static bool isAuxMode(const effect_uuid_t* uuid) {
+ // Update this if the order of effects in kEffectUuids is updated
+ return (uuid == &kEffectUuids[2] || uuid == &kEffectUuids[3]);
+}
+
+constexpr int kPresets[] = {
+ REVERB_PRESET_NONE, REVERB_PRESET_SMALLROOM, REVERB_PRESET_MEDIUMROOM,
+ REVERB_PRESET_LARGEROOM, REVERB_PRESET_MEDIUMHALL, REVERB_PRESET_LARGEHALL,
+ REVERB_PRESET_PLATE,
+};
+
+constexpr size_t kNumPresets = std::size(kPresets);
+
+typedef std::tuple<int, int, int, int, int, int> SingleEffectTestParam;
+class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
+ public:
+ SingleEffectTest()
+ : mSampleRate(EffectTestHelper::kSampleRates[std::get<1>(GetParam())]),
+ mFrameCount(EffectTestHelper::kFrameCounts[std::get<2>(GetParam())]),
+ mLoopCount(EffectTestHelper::kLoopCounts[std::get<3>(GetParam())]),
+ mTotalFrameCount(mFrameCount * mLoopCount),
+ mUuid(&kEffectUuids[std::get<4>(GetParam())]),
+ mInChMask(isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO
+ : EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+ mInChannelCount(audio_channel_count_from_out_mask(mInChMask)),
+ mOutChMask(EffectTestHelper::kChMasks[std::get<0>(GetParam())]),
+ mOutChannelCount(audio_channel_count_from_out_mask(mOutChMask)),
+ mPreset(kPresets[std::get<5>(GetParam())]) {}
+
+ const size_t mSampleRate;
+ const size_t mFrameCount;
+ const size_t mLoopCount;
+ const size_t mTotalFrameCount;
+ const effect_uuid_t* mUuid;
+ const size_t mInChMask;
+ const size_t mInChannelCount;
+ const size_t mOutChMask;
+ const size_t mOutChannelCount;
+ const size_t mPreset;
+};
+
+// Tests applying a single effect
+TEST_P(SingleEffectTest, SimpleProcess) {
+ SCOPED_TRACE(testing::Message() << "outChMask: " << mOutChMask << " sampleRate: " << mSampleRate
+ << " frameCount: " << mFrameCount
+ << " loopCount: " << mLoopCount << " preset: " << mPreset);
+
+ EffectTestHelper effect(mUuid, mInChMask, mOutChMask, mSampleRate, mFrameCount, mLoopCount);
+
+ ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+ ASSERT_NO_FATAL_FAILURE(effect.setConfig());
+ ASSERT_NO_FATAL_FAILURE(effect.setParam(REVERB_PARAM_PRESET, mPreset));
+
+ // Initialize input buffer with deterministic pseudo-random values
+ std::vector<float> input(mTotalFrameCount * mInChannelCount);
+ std::vector<float> output(mTotalFrameCount * mOutChannelCount);
+ std::minstd_rand gen(mOutChMask);
+ std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+ for (auto& in : input) {
+ in = dis(gen);
+ }
+ ASSERT_NO_FATAL_FAILURE(effect.process(input.data(), output.data()));
+ ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ EffectReverbTestAll, SingleEffectTest,
+ ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumChMasks),
+ ::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+ ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+ ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+ ::testing::Range(0, (int)kNumEffectUuids),
+ ::testing::Range(0, (int)kNumPresets)));
+
+typedef std::tuple<int, int, int, int, int> SingleEffectComparisonTestParam;
+class SingleEffectComparisonTest
+ : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
+ public:
+ SingleEffectComparisonTest()
+ : mSampleRate(EffectTestHelper::kSampleRates[std::get<0>(GetParam())]),
+ mFrameCount(EffectTestHelper::kFrameCounts[std::get<1>(GetParam())]),
+ mLoopCount(EffectTestHelper::kLoopCounts[std::get<2>(GetParam())]),
+ mTotalFrameCount(mFrameCount * mLoopCount),
+ mUuid(&kEffectUuids[std::get<3>(GetParam())]),
+ mPreset(kPresets[std::get<4>(GetParam())]) {}
+
+ const size_t mSampleRate;
+ const size_t mFrameCount;
+ const size_t mLoopCount;
+ const size_t mTotalFrameCount;
+ const effect_uuid_t* mUuid;
+ const size_t mPreset;
+};
+
+// Compares the first two channels in multi-channel output to stereo output when the same effect is applied
+TEST_P(SingleEffectComparisonTest, SimpleProcess) {
+ SCOPED_TRACE(testing::Message()
+ << " sampleRate: " << mSampleRate << " frameCount: " << mFrameCount
+ << " loopCount: " << mLoopCount << " preset: " << mPreset);
+
+ // Initialize mono input buffer with deterministic pseudo-random values
+ std::vector<float> monoInput(mTotalFrameCount);
+
+ std::minstd_rand gen(mSampleRate);
+ std::uniform_real_distribution<> dis(-1.0f, 1.0f);
+ for (auto& in : monoInput) {
+ in = dis(gen);
+ }
+
+ // Generate stereo by repeating mono channel data
+ std::vector<float> stereoInput(mTotalFrameCount * FCC_2);
+ adjust_channels(monoInput.data(), FCC_1, stereoInput.data(), FCC_2, sizeof(float),
+ mTotalFrameCount * sizeof(float) * FCC_1);
+
+ // Apply effect on stereo channels
+ EffectTestHelper stereoEffect(
+ mUuid, isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO : AUDIO_CHANNEL_OUT_STEREO,
+ AUDIO_CHANNEL_OUT_STEREO, mSampleRate, mFrameCount, mLoopCount);
+
+ ASSERT_NO_FATAL_FAILURE(stereoEffect.createEffect());
+ ASSERT_NO_FATAL_FAILURE(stereoEffect.setConfig());
+ ASSERT_NO_FATAL_FAILURE(stereoEffect.setParam(REVERB_PARAM_PRESET, mPreset));
+
+ std::vector<float> stereoOutput(mTotalFrameCount * FCC_2);
+ ASSERT_NO_FATAL_FAILURE(stereoEffect.process(
+ (isAuxMode(mUuid) ? monoInput.data() : stereoInput.data()), stereoOutput.data()));
+ ASSERT_NO_FATAL_FAILURE(stereoEffect.releaseEffect());
+
+ // The average of both channels' data is stored for the mono comparison
+ std::vector<float> monoOutput(mTotalFrameCount);
+ From2iToMono_Float((const float*)stereoOutput.data(), monoOutput.data(), mTotalFrameCount);
+
+ // Convert stereo float data to stereo int16_t to be used as reference
+ std::vector<int16_t> stereoRefI16(mTotalFrameCount * FCC_2);
+ memcpy_to_i16_from_float(stereoRefI16.data(), stereoOutput.data(), mTotalFrameCount * FCC_2);
+
+ // Mono int16_t to be used as reference for the mono comparison
+ std::vector<int16_t> monoRefI16(mTotalFrameCount);
+ memcpy_to_i16_from_float(monoRefI16.data(), monoOutput.data(), mTotalFrameCount);
+
+ for (size_t outChMask : EffectTestHelper::kChMasks) {
+ size_t outChannelCount = audio_channel_count_from_out_mask(outChMask);
+ size_t inChMask = isAuxMode(mUuid) ? AUDIO_CHANNEL_OUT_MONO : outChMask;
+
+ EffectTestHelper testEffect(mUuid, inChMask, outChMask, mSampleRate, mFrameCount,
+ mLoopCount);
+
+ ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
+ ASSERT_NO_FATAL_FAILURE(testEffect.setConfig());
+ ASSERT_NO_FATAL_FAILURE(testEffect.setParam(REVERB_PARAM_PRESET, mPreset));
+
+ std::vector<float> testInput(mTotalFrameCount * outChannelCount);
+
+ // Repeat mono channel data to all the channels
+ // adjust_channels() zero fills channels > 2, hence can't be used here
+ for (size_t i = 0; i < mTotalFrameCount; ++i) {
+ auto* fp = &testInput[i * outChannelCount];
+ std::fill(fp, fp + outChannelCount, monoInput[i]);
+ }
+
+ std::vector<float> testOutput(mTotalFrameCount * outChannelCount);
+ ASSERT_NO_FATAL_FAILURE(testEffect.process(
+ (isAuxMode(mUuid) ? monoInput.data() : testInput.data()), testOutput.data()));
+ ASSERT_NO_FATAL_FAILURE(testEffect.releaseEffect());
+
+ if (outChannelCount == FCC_1) {
+ // Convert the test data to int16_t
+ std::vector<int16_t> monoTestI16(mTotalFrameCount);
+ memcpy_to_i16_from_float(monoTestI16.data(), testOutput.data(), mTotalFrameCount);
+
+ ASSERT_EQ(0, memcmp(monoRefI16.data(), monoTestI16.data(), mTotalFrameCount * FCC_2))
+ << "Mono channel do not match with reference output \n";
+ } else {
+ // Extract first two channels
+ std::vector<float> stereoTestOutput(mTotalFrameCount * FCC_2);
+ adjust_channels(testOutput.data(), outChannelCount, stereoTestOutput.data(), FCC_2,
+ sizeof(float), mTotalFrameCount * sizeof(float) * outChannelCount);
+
+ // Convert the test data to int16_t
+ std::vector<int16_t> stereoTestI16(mTotalFrameCount * FCC_2);
+ memcpy_to_i16_from_float(stereoTestI16.data(), stereoTestOutput.data(),
+ mTotalFrameCount * FCC_2);
+
+ ASSERT_EQ(0,
+ memcmp(stereoRefI16.data(), stereoTestI16.data(), mTotalFrameCount * FCC_2))
+ << "First two channels do not match with stereo output \n";
+ }
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ EffectReverbTestAll, SingleEffectComparisonTest,
+ ::testing::Combine(::testing::Range(0, (int)EffectTestHelper::kNumSampleRates),
+ ::testing::Range(0, (int)EffectTestHelper::kNumFrameCounts),
+ ::testing::Range(0, (int)EffectTestHelper::kNumLoopCounts),
+ ::testing::Range(0, (int)kNumEffectUuids),
+ ::testing::Range(0, (int)kNumPresets)));
+
+int main(int argc, char** argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ int status = RUN_ALL_TESTS();
+ ALOGV("Test result = %d\n", status);
+ return status;
+}
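isAuxMode() above depends on the ordering of kEffectUuids, which the comment warns must stay in sync. As a point of comparison only, here is a sketch that derives the same answer from the effect descriptor's type flags; this is not what the test does.

#include <hardware/audio_effect.h>

extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;

// Sketch only: ask the library for the descriptor and inspect its type flags
// instead of relying on the position of the UUID in kEffectUuids.
static bool isAuxModeFromDescriptor(const effect_uuid_t* uuid) {
    effect_descriptor_t descriptor;
    if (AUDIO_EFFECT_LIBRARY_INFO_SYM.get_descriptor(uuid, &descriptor) != 0) {
        return false;  // Treat an unreadable descriptor as a plain insert effect.
    }
    return (descriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY;
}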
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.cpp b/media/libeffects/lvm/tests/EffectTestHelper.cpp
new file mode 100644
index 0000000..625c15a
--- /dev/null
+++ b/media/libeffects/lvm/tests/EffectTestHelper.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "EffectTestHelper.h"
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
+
+namespace android {
+
+void EffectTestHelper::createEffect() {
+ int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(mUuid, 1, 1, &mEffectHandle);
+ ASSERT_EQ(status, 0) << "create_effect returned an error " << status;
+}
+
+void EffectTestHelper::releaseEffect() {
+ int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(mEffectHandle);
+ ASSERT_EQ(status, 0) << "release_effect returned an error " << status;
+}
+
+void EffectTestHelper::setConfig() {
+ effect_config_t config{};
+ config.inputCfg.samplingRate = config.outputCfg.samplingRate = mSampleRate;
+ config.inputCfg.channels = mInChMask;
+ config.outputCfg.channels = mOutChMask;
+ config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ int status = (*mEffectHandle)
+ ->command(mEffectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+ &config, &replySize, &reply);
+ ASSERT_EQ(status, 0) << "set_config returned an error " << status;
+ ASSERT_EQ(reply, 0) << "set_config reply non zero " << reply;
+
+ status = (*mEffectHandle)
+ ->command(mEffectHandle, EFFECT_CMD_ENABLE, 0, nullptr, &replySize, &reply);
+ ASSERT_EQ(status, 0) << "cmd_enable returned an error " << status;
+ ASSERT_EQ(reply, 0) << "cmd_enable reply non zero " << reply;
+}
+
+void EffectTestHelper::setParam(uint32_t type, uint32_t value) {
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ uint32_t paramData[2] = {type, value};
+ auto effectParam = new effect_param_t[sizeof(effect_param_t) + sizeof(paramData)];
+ memcpy(&effectParam->data[0], &paramData[0], sizeof(paramData));
+ effectParam->psize = sizeof(paramData[0]);
+ effectParam->vsize = sizeof(paramData[1]);
+ int status = (*mEffectHandle)
+ ->command(mEffectHandle, EFFECT_CMD_SET_PARAM,
+ sizeof(effect_param_t) + sizeof(paramData), effectParam,
+ &replySize, &reply);
+ delete[] effectParam;
+ ASSERT_EQ(status, 0) << "set_param returned an error " << status;
+ ASSERT_EQ(reply, 0) << "set_param reply non zero " << reply;
+}
+
+void EffectTestHelper::process(float* input, float* output) {
+ audio_buffer_t inBuffer = {.frameCount = mFrameCount, .f32 = input};
+ audio_buffer_t outBuffer = {.frameCount = mFrameCount, .f32 = output};
+ for (size_t i = 0; i < mLoopCount; i++) {
+ int status = (*mEffectHandle)->process(mEffectHandle, &inBuffer, &outBuffer);
+ ASSERT_EQ(status, 0) << "process returned an error " << status;
+
+ inBuffer.f32 += mFrameCount * mInChannelCount;
+ outBuffer.f32 += mFrameCount * mOutChannelCount;
+ }
+}
+} // namespace android
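The helper fixes the call sequence each test follows. Below is a minimal, hypothetical usage sketch; the zeroed UUID is a placeholder (a real test passes one of the library's effect UUIDs), and the test is not part of this change.

#include <vector>

#include "EffectTestHelper.h"

using namespace android;

TEST(EffectTestHelperSketch, CallSequence) {
    const effect_uuid_t kPlaceholderUuid = {};  // placeholder; use a real effect UUID
    constexpr size_t kSampleRate = 44100;
    constexpr size_t kFrameCount = 256;
    constexpr size_t kLoopCount = 2;
    constexpr size_t kChannels = 2;  // stereo in and out

    EffectTestHelper effect(&kPlaceholderUuid, AUDIO_CHANNEL_OUT_STEREO,
                            AUDIO_CHANNEL_OUT_STEREO, kSampleRate, kFrameCount, kLoopCount);
    ASSERT_NO_FATAL_FAILURE(effect.createEffect());
    ASSERT_NO_FATAL_FAILURE(effect.setConfig());

    // process() walks loopCount blocks of frameCount frames, so buffers hold
    // frameCount * loopCount frames per channel.
    std::vector<float> input(kFrameCount * kLoopCount * kChannels, 0.0f);
    std::vector<float> output(kFrameCount * kLoopCount * kChannels);
    ASSERT_NO_FATAL_FAILURE(effect.process(input.data(), output.data()));
    ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
}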
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.h b/media/libeffects/lvm/tests/EffectTestHelper.h
new file mode 100644
index 0000000..3854d46
--- /dev/null
+++ b/media/libeffects/lvm/tests/EffectTestHelper.h
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <array>
+#include <audio_utils/channels.h>
+#include <audio_utils/primitives.h>
+#include <climits>
+#include <cstdlib>
+#include <gtest/gtest.h>
+#include <hardware/audio_effect.h>
+#include <log/log.h>
+#include <random>
+#include <stdint.h>
+#include <system/audio.h>
+#include <vector>
+
+namespace android {
+template <typename T>
+static float computeSnr(const T* ref, const T* tst, size_t count) {
+ double signal{};
+ double noise{};
+
+ for (size_t i = 0; i < count; ++i) {
+ const double value(ref[i]);
+ const double diff(tst[i] - value);
+ signal += value * value;
+ noise += diff * diff;
+ }
+ // Initialized to a large value to handle
+ // cases where ref and tst match exactly
+ float snr = FLT_MAX;
+ if (signal > 0.0f && noise > 0.0f) {
+ snr = 10.f * log10(signal / noise);
+ }
+ return snr;
+}
+
+class EffectTestHelper {
+ public:
+ EffectTestHelper(const effect_uuid_t* uuid, size_t inChMask, size_t outChMask,
+ size_t sampleRate, size_t frameCount, size_t loopCount)
+ : mUuid(uuid),
+ mInChMask(inChMask),
+ mInChannelCount(audio_channel_count_from_out_mask(mInChMask)),
+ mOutChMask(outChMask),
+ mOutChannelCount(audio_channel_count_from_out_mask(mOutChMask)),
+ mSampleRate(sampleRate),
+ mFrameCount(frameCount),
+ mLoopCount(loopCount) {}
+ void createEffect();
+ void releaseEffect();
+ void setConfig();
+ void setParam(uint32_t type, uint32_t val);
+ void process(float* input, float* output);
+
+ // Corresponds to SNR for 1 bit difference between two int16_t signals
+ static constexpr float kSNRThreshold = 90.308998;
+
+ static constexpr audio_channel_mask_t kChMasks[] = {
+ AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO,
+ AUDIO_CHANNEL_OUT_2POINT1, AUDIO_CHANNEL_OUT_2POINT0POINT2,
+ AUDIO_CHANNEL_OUT_QUAD, AUDIO_CHANNEL_OUT_QUAD_BACK,
+ AUDIO_CHANNEL_OUT_QUAD_SIDE, AUDIO_CHANNEL_OUT_SURROUND,
+ AUDIO_CHANNEL_INDEX_MASK_4, AUDIO_CHANNEL_OUT_2POINT1POINT2,
+ AUDIO_CHANNEL_OUT_3POINT0POINT2, AUDIO_CHANNEL_OUT_PENTA,
+ AUDIO_CHANNEL_INDEX_MASK_5, AUDIO_CHANNEL_OUT_3POINT1POINT2,
+ AUDIO_CHANNEL_OUT_5POINT1, AUDIO_CHANNEL_OUT_5POINT1_BACK,
+ AUDIO_CHANNEL_OUT_5POINT1_SIDE, AUDIO_CHANNEL_INDEX_MASK_6,
+ AUDIO_CHANNEL_OUT_6POINT1, AUDIO_CHANNEL_INDEX_MASK_7,
+ AUDIO_CHANNEL_OUT_5POINT1POINT2, AUDIO_CHANNEL_OUT_7POINT1,
+ AUDIO_CHANNEL_INDEX_MASK_8, AUDIO_CHANNEL_INDEX_MASK_9,
+ AUDIO_CHANNEL_INDEX_MASK_10, AUDIO_CHANNEL_INDEX_MASK_11,
+ AUDIO_CHANNEL_INDEX_MASK_12, AUDIO_CHANNEL_INDEX_MASK_13,
+ AUDIO_CHANNEL_INDEX_MASK_14, AUDIO_CHANNEL_INDEX_MASK_15,
+ AUDIO_CHANNEL_INDEX_MASK_16, AUDIO_CHANNEL_INDEX_MASK_17,
+ AUDIO_CHANNEL_INDEX_MASK_18, AUDIO_CHANNEL_INDEX_MASK_19,
+ AUDIO_CHANNEL_INDEX_MASK_20, AUDIO_CHANNEL_INDEX_MASK_21,
+ AUDIO_CHANNEL_INDEX_MASK_22, AUDIO_CHANNEL_INDEX_MASK_23,
+ AUDIO_CHANNEL_INDEX_MASK_24,
+ };
+
+ static constexpr size_t kNumChMasks = std::size(kChMasks);
+
+ static constexpr size_t kSampleRates[] = {8000, 11025, 12000, 16000, 22050, 24000, 32000,
+ 44100, 48000, 88200, 96000, 176400, 192000};
+
+ static constexpr size_t kNumSampleRates = std::size(kSampleRates);
+
+ static constexpr size_t kFrameCounts[] = {4, 2048};
+
+ static constexpr size_t kNumFrameCounts = std::size(kFrameCounts);
+
+ static constexpr size_t kLoopCounts[] = {1, 4};
+
+ static constexpr size_t kNumLoopCounts = std::size(kLoopCounts);
+
+ private:
+ const effect_uuid_t* mUuid;
+ const size_t mInChMask;
+ const size_t mInChannelCount;
+ const size_t mOutChMask;
+ const size_t mOutChannelCount;
+ const size_t mSampleRate;
+ const size_t mFrameCount;
+ const size_t mLoopCount;
+ effect_handle_t mEffectHandle{};
+};
+} // namespace android
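A quick cross-check of kSNRThreshold: computeSnr() returns 10*log10(signal/noise), and a full-scale int16_t reference (amplitude 2^15) with a constant 1-LSB error gives 20*log10(32768), roughly 90.309 dB. The standalone snippet below just reproduces that arithmetic and is illustrative only.

#include <cmath>
#include <cstdio>

int main() {
    const double fullScale = 32768.0;  // int16_t full scale, 2^15
    const double oneLsb = 1.0;         // constant 1-bit error between ref and test
    // Per-sample signal/noise is (2^15)^2 / 1^2, so 10*log10 collapses to 20*log10(2^15).
    const double snrDb = 10.0 * std::log10((fullScale * fullScale) / (oneLsb * oneLsb));
    std::printf("1-LSB SNR: %.6f dB\n", snrDb);  // ~90.309 dB, matching kSNRThreshold
    return 0;
}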
diff --git a/media/libeffects/lvm/tests/reverb_test.cpp b/media/libeffects/lvm/tests/reverb_test.cpp
index cecc975..0ea401c 100644
--- a/media/libeffects/lvm/tests/reverb_test.cpp
+++ b/media/libeffects/lvm/tests/reverb_test.cpp
@@ -312,9 +312,6 @@
config.inputCfg.samplingRate = config.outputCfg.samplingRate = revConfigParams.sampleRate;
config.inputCfg.channels = config.outputCfg.channels = revConfigParams.chMask;
config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
- if (AUDIO_CHANNEL_OUT_MONO == revConfigParams.chMask) {
- config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
- }
if (int status = reverbCreateEffect(&effectHandle, &config, sessionId, ioId,
revConfigParams.auxiliary);
status != 0) {
@@ -346,19 +343,11 @@
const int ioChannelCount = revConfigParams.fChannels;
const int ioFrameSize = ioChannelCount * sizeof(short);
const int maxChannelCount = std::max(channelCount, ioChannelCount);
- /*
- * Mono input will be converted to 2 channels internally in the process call
- * by copying the same data into the second channel.
- * Hence when channelCount is 1, output buffer should be allocated for
- * 2 channels. The outChannelCount takes care of allocation of sufficient
- * memory for the output buffer.
- */
- const int outChannelCount = (channelCount == 1 ? 2 : channelCount);
std::vector<short> in(frameLength * maxChannelCount);
- std::vector<short> out(frameLength * outChannelCount);
+ std::vector<short> out(frameLength * maxChannelCount);
std::vector<float> floatIn(frameLength * channelCount);
- std::vector<float> floatOut(frameLength * outChannelCount);
+ std::vector<float> floatOut(frameLength * channelCount);
int frameCounter = 0;
@@ -392,11 +381,11 @@
#else
memcpy(floatOut.data(), floatIn.data(), frameLength * frameSize);
#endif
- memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * outChannelCount);
+ memcpy_to_i16_from_float(out.data(), floatOut.data(), frameLength * channelCount);
- if (ioChannelCount != outChannelCount) {
- adjust_channels(out.data(), outChannelCount, out.data(), ioChannelCount, sizeof(short),
- frameLength * outChannelCount * sizeof(short));
+ if (ioChannelCount != channelCount) {
+ adjust_channels(out.data(), channelCount, out.data(), ioChannelCount, sizeof(short),
+ frameLength * channelCount * sizeof(short));
}
(void)fwrite(out.data(), ioFrameSize, frameLength, outputFp.get());
frameCounter += frameLength;
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index 4489e81..3738d62 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -33,6 +33,7 @@
#include "EffectReverb.h"
// from Reverb/lib
#include "LVREV.h"
+#include "VectorArithmetic.h"
// effect_handle_t interface implementation for reverb
extern "C" const struct effect_interface_s gReverbInterface;
@@ -332,6 +333,7 @@
//----------------------------------------------------------------------------
int process(effect_buffer_t* pIn, effect_buffer_t* pOut, int frameCount, ReverbContext* pContext) {
int channels = audio_channel_count_from_out_mask(pContext->config.inputCfg.channels);
+ int outChannels = audio_channel_count_from_out_mask(pContext->config.outputCfg.channels);
LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */
// Reverb only effects the stereo channels in multichannel source.
@@ -454,33 +456,49 @@
}
}
- if (channels > 2) {
+ if (outChannels > 2) {
// Accumulate if required
if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
for (int i = 0; i < frameCount; i++) {
- pOut[channels * i] += pContext->OutFrames[FCC_2 * i];
- pOut[channels * i + 1] += pContext->OutFrames[FCC_2 * i + 1];
+ pOut[outChannels * i] += pContext->OutFrames[FCC_2 * i];
+ pOut[outChannels * i + 1] += pContext->OutFrames[FCC_2 * i + 1];
}
} else {
for (int i = 0; i < frameCount; i++) {
- pOut[channels * i] = pContext->OutFrames[FCC_2 * i];
- pOut[channels * i + 1] = pContext->OutFrames[FCC_2 * i + 1];
+ pOut[outChannels * i] = pContext->OutFrames[FCC_2 * i];
+ pOut[outChannels * i + 1] = pContext->OutFrames[FCC_2 * i + 1];
}
}
- for (int i = 0; i < frameCount; i++) {
- for (int j = FCC_2; j < channels; j++) {
- pOut[channels * i + j] = pIn[channels * i + j];
+ if (!pContext->auxiliary) {
+ for (int i = 0; i < frameCount; i++) {
+ // channels and outChannels are expected to be the same.
+ for (int j = FCC_2; j < outChannels; j++) {
+ pOut[outChannels * i + j] = pIn[outChannels * i + j];
+ }
}
}
} else {
if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
- for (int i = 0; i < frameCount * FCC_2; i++) {
- pOut[i] += pContext->OutFrames[i];
+ if (outChannels == FCC_1) {
+ for (int i = 0; i < frameCount; i++) {
+ pOut[i] +=
+ ((pContext->OutFrames[i * FCC_2] + pContext->OutFrames[i * FCC_2 + 1]) *
+ 0.5f);
+ }
+ } else {
+ for (int i = 0; i < frameCount * FCC_2; i++) {
+ pOut[i] += pContext->OutFrames[i];
+ }
}
} else {
- memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
+ if (outChannels == FCC_1) {
+ From2iToMono_Float((const process_buffer_t*)pContext->OutFrames, pOut, frameCount);
+ } else {
+ memcpy(pOut, pContext->OutFrames, frameCount * sizeof(*pOut) * FCC_2);
+ }
}
}
+
return 0;
} /* end process */
@@ -549,7 +567,7 @@
CHECK_ARG((pContext->auxiliary && pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_MONO) ||
((!pContext->auxiliary) && (inputChannels <= LVM_MAX_CHANNELS)));
int outputChannels = audio_channel_count_from_out_mask(pConfig->outputCfg.channels);
- CHECK_ARG(outputChannels >= FCC_2 && outputChannels <= LVM_MAX_CHANNELS);
+ CHECK_ARG(outputChannels <= LVM_MAX_CHANNELS);
CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE ||
pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
CHECK_ARG(pConfig->inputCfg.format == EFFECT_BUFFER_FORMAT);
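The new mono-output path folds the stereo reverb frames down to one channel, averaging left and right directly in the accumulate branch and calling From2iToMono_Float otherwise. For reference, a standalone equivalent of that fold-down:

// Equivalent of the averaging performed above, shown outside the wrapper.
// 'stereo' holds interleaved L/R samples; 'mono' receives (L + R) / 2 per frame.
static void foldStereoToMono(const float* stereo, float* mono, int frameCount) {
    for (int i = 0; i < frameCount; ++i) {
        mono[i] = 0.5f * (stereo[2 * i] + stereo[2 * i + 1]);
    }
}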
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index 534fa91..042850c 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -82,6 +82,7 @@
srcs: [
"TranscoderWrapper.cpp",
"TranscodingClientManager.cpp",
+ "TranscodingLogger.cpp",
"TranscodingResourcePolicy.cpp",
"TranscodingSessionController.cpp",
"TranscodingThermalPolicy.cpp",
@@ -96,6 +97,7 @@
"libutils",
"libmediatranscoder",
"libmediandk",
+ "libstatssocket#30",
],
export_shared_lib_headers: [
"libmediandk",
@@ -106,6 +108,7 @@
static_libs: [
"mediatranscoding_aidl_interface-ndk_platform",
"resourceobserver_aidl_interface-V1-ndk_platform",
+ "libstatslog_media",
],
cflags: [
@@ -126,3 +129,43 @@
cfi: true,
},
}
+
+cc_library_static {
+ name: "libstatslog_media",
+ generated_sources: ["statslog_media.cpp"],
+ generated_headers: ["statslog_media.h"],
+ min_sdk_version: "29",
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
+ export_generated_headers: ["statslog_media.h"],
+ apex_available: [
+ "com.android.media",
+ "test_com.android.media",
+ ],
+ shared_libs: [
+ "libcutils",
+ "liblog",
+ "libstatssocket#30",
+ "libutils",
+ ],
+}
+
+genrule {
+ name: "statslog_media.h",
+ tools: ["stats-log-api-gen"],
+ cmd: "$(location stats-log-api-gen) --header $(genDir)/statslog_media.h --module media --namespace android,media,stats",
+ out: [
+ "statslog_media.h",
+ ],
+}
+
+genrule {
+ name: "statslog_media.cpp",
+ tools: ["stats-log-api-gen"],
+ cmd: "$(location stats-log-api-gen) --cpp $(genDir)/statslog_media.cpp --module media --namespace android,media,stats --importHeader statslog_media.h",
+ out: [
+ "statslog_media.cpp",
+ ],
+}
\ No newline at end of file
diff --git a/media/libmediatranscoding/TranscoderWrapper.cpp b/media/libmediatranscoding/TranscoderWrapper.cpp
index 4bd4105..d9c98c6 100644
--- a/media/libmediatranscoding/TranscoderWrapper.cpp
+++ b/media/libmediatranscoding/TranscoderWrapper.cpp
@@ -56,34 +56,34 @@
}
}
-static AMediaFormat* getVideoFormat(
+static std::shared_ptr<AMediaFormat> getVideoFormat(
const char* originalMime,
const std::optional<TranscodingVideoTrackFormat>& requestedFormat) {
if (requestedFormat == std::nullopt) {
return nullptr;
}
- AMediaFormat* format = AMediaFormat_new();
+ std::shared_ptr<AMediaFormat> format =
+ std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
bool changed = false;
if (requestedFormat->codecType == TranscodingVideoCodecType::kHevc &&
strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_HEVC)) {
- AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_HEVC);
+ AMediaFormat_setString(format.get(), AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_HEVC);
changed = true;
} else if (requestedFormat->codecType == TranscodingVideoCodecType::kAvc &&
strcmp(originalMime, AMEDIA_MIMETYPE_VIDEO_AVC)) {
- AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
+ AMediaFormat_setString(format.get(), AMEDIAFORMAT_KEY_MIME, AMEDIA_MIMETYPE_VIDEO_AVC);
changed = true;
}
if (requestedFormat->bitrateBps > 0) {
- AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, requestedFormat->bitrateBps);
+ AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_BIT_RATE, requestedFormat->bitrateBps);
changed = true;
}
// TODO: translate other fields from requestedFormat to the format for MediaTranscoder.
// Also need to determine more settings to expose in TranscodingVideoTrackFormat.
if (!changed) {
- AMediaFormat_delete(format);
// Use null format for passthru.
- format = nullptr;
+ format.reset();
}
return format;
}
@@ -180,8 +180,10 @@
};
TranscoderWrapper::TranscoderWrapper(const std::shared_ptr<TranscoderCallbackInterface>& cb,
+ const std::shared_ptr<TranscodingLogger>& logger,
int64_t heartBeatIntervalUs)
: mCallback(cb),
+ mLogger(logger),
mHeartBeatIntervalUs(heartBeatIntervalUs),
mCurrentClientId(0),
mCurrentSessionId(-1),
@@ -219,10 +221,10 @@
}
void TranscoderWrapper::start(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& request,
+ const TranscodingRequestParcel& request, uid_t callingUid,
const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
queueEvent(Event::Start, clientId, sessionId, [=, &request] {
- media_status_t err = handleStart(clientId, sessionId, request, clientCb);
+ media_status_t err = handleStart(clientId, sessionId, request, callingUid, clientCb);
if (err != AMEDIA_OK) {
cleanup();
reportError(clientId, sessionId, err);
@@ -253,10 +255,10 @@
}
void TranscoderWrapper::resume(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& request,
+ const TranscodingRequestParcel& request, uid_t callingUid,
const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
queueEvent(Event::Resume, clientId, sessionId, [=, &request] {
- media_status_t err = handleResume(clientId, sessionId, request, clientCb);
+ media_status_t err = handleResume(clientId, sessionId, request, callingUid, clientCb);
if (err != AMEDIA_OK) {
cleanup();
reportError(clientId, sessionId, err);
@@ -280,6 +282,7 @@
} else {
ALOGI("transcoder stopped");
}
+ logSessionEnded(TranscodingLogger::SessionEndedReason::CANCELLED, err);
cleanup();
} else {
// For sessions that's not currently running, release any pausedState for the session.
@@ -297,6 +300,7 @@
queueEvent(Event::Finish, clientId, sessionId, [=] {
if (mTranscoder != nullptr && clientId == mCurrentClientId &&
sessionId == mCurrentSessionId) {
+ logSessionEnded(TranscodingLogger::SessionEndedReason::FINISHED, AMEDIA_OK);
cleanup();
}
@@ -314,6 +318,7 @@
[=] {
if (mTranscoder != nullptr && clientId == mCurrentClientId &&
sessionId == mCurrentSessionId) {
+ logSessionEnded(TranscodingLogger::SessionEndedReason::ERROR, error);
cleanup();
}
reportError(clientId, sessionId, error);
@@ -345,7 +350,8 @@
media_status_t TranscoderWrapper::setupTranscoder(
ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
- const std::shared_ptr<ITranscodingClientCallback>& clientCb,
+ uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& clientCb,
+ TranscodingLogger::SessionEndedReason* failureReason,
const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
if (clientCb == nullptr) {
ALOGE("client callback is null");
@@ -364,6 +370,7 @@
status = clientCb->openFileDescriptor(request.sourceFilePath, "r", &srcFd);
if (!status.isOk() || srcFd.get() < 0) {
ALOGE("failed to open source");
+ *failureReason = TranscodingLogger::SessionEndedReason::OPEN_SRC_FD_FAILED;
return AMEDIA_ERROR_IO;
}
srcFdInt = srcFd.get();
@@ -377,6 +384,7 @@
status = clientCb->openFileDescriptor(request.destinationFilePath, "rw", &dstFd);
if (!status.isOk() || dstFd.get() < 0) {
ALOGE("failed to open destination");
+ *failureReason = TranscodingLogger::SessionEndedReason::OPEN_DST_FD_FAILED;
return AMEDIA_ERROR_IO;
}
dstFdInt = dstFd.get();
@@ -384,41 +392,46 @@
mCurrentClientId = clientId;
mCurrentSessionId = sessionId;
+ mCurrentCallingUid = callingUid;
mTranscoderCb = std::make_shared<CallbackImpl>(shared_from_this(), clientId, sessionId);
mTranscoder = MediaTranscoder::create(mTranscoderCb, mHeartBeatIntervalUs, request.clientPid,
request.clientUid, pausedState);
if (mTranscoder == nullptr) {
ALOGE("failed to create transcoder");
+ *failureReason = TranscodingLogger::SessionEndedReason::CREATE_FAILED;
return AMEDIA_ERROR_UNKNOWN;
}
media_status_t err = mTranscoder->configureSource(srcFdInt);
if (err != AMEDIA_OK) {
ALOGE("failed to configure source: %d", err);
+ *failureReason = TranscodingLogger::SessionEndedReason::CONFIG_SRC_FAILED;
return err;
}
std::vector<std::shared_ptr<AMediaFormat>> trackFormats = mTranscoder->getTrackFormats();
if (trackFormats.size() == 0) {
ALOGE("failed to get track formats!");
+ *failureReason = TranscodingLogger::SessionEndedReason::NO_TRACKS;
return AMEDIA_ERROR_MALFORMED;
}
for (int i = 0; i < trackFormats.size(); ++i) {
- AMediaFormat* format = nullptr;
+ std::shared_ptr<AMediaFormat> format;
const char* mime = nullptr;
AMediaFormat_getString(trackFormats[i].get(), AMEDIAFORMAT_KEY_MIME, &mime);
if (!strncmp(mime, "video/", 6)) {
format = getVideoFormat(mime, request.requestedVideoTrackFormat);
+
+ mSrcFormat = trackFormats[i];
+ mDstFormat = format;
}
- err = mTranscoder->configureTrackFormat(i, format);
- if (format != nullptr) {
- AMediaFormat_delete(format);
- }
+ err = mTranscoder->configureTrackFormat(i, format.get());
if (err != AMEDIA_OK) {
ALOGE("failed to configure track format for track %d: %d", i, err);
+ *failureReason = TranscodingLogger::SessionEndedReason::CONFIG_TRACK_FAILED;
return err;
}
}
@@ -426,6 +439,7 @@
err = mTranscoder->configureDestination(dstFdInt);
if (err != AMEDIA_OK) {
ALOGE("failed to configure dest: %d", err);
+ *failureReason = TranscodingLogger::SessionEndedReason::CONFIG_DST_FAILED;
return err;
}
@@ -434,17 +448,23 @@
media_status_t TranscoderWrapper::handleStart(
ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
- const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+ uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
ALOGI("%s: setting up transcoder for start", __FUNCTION__);
- media_status_t err = setupTranscoder(clientId, sessionId, request, clientCb);
+ TranscodingLogger::SessionEndedReason reason = TranscodingLogger::SessionEndedReason::UNKNOWN;
+ media_status_t err =
+ setupTranscoder(clientId, sessionId, request, callingUid, clientCb, &reason);
if (err != AMEDIA_OK) {
ALOGI("%s: failed to setup transcoder", __FUNCTION__);
+ logSessionEnded(reason, err);
return err;
}
+ mTranscodeStartTime = std::chrono::steady_clock::now();
+
err = mTranscoder->start();
if (err != AMEDIA_OK) {
ALOGE("%s: failed to start transcoder: %d", __FUNCTION__, err);
+ logSessionEnded(TranscodingLogger::SessionEndedReason::START_FAILED, err);
return err;
}
@@ -467,6 +487,7 @@
std::shared_ptr<ndk::ScopedAParcel> pauseStates;
media_status_t err = mTranscoder->pause(&pauseStates);
+ logSessionEnded(TranscodingLogger::SessionEndedReason::PAUSED, err);
if (err != AMEDIA_OK) {
ALOGE("%s: failed to pause transcoder: %d", __FUNCTION__, err);
return err;
@@ -479,7 +500,7 @@
media_status_t TranscoderWrapper::handleResume(
ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
- const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
+ uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
std::shared_ptr<ndk::ScopedAParcel> pausedState;
auto it = mPausedStateMap.find(SessionKeyType(clientId, sessionId));
if (it != mPausedStateMap.end()) {
@@ -491,15 +512,23 @@
}
ALOGI("%s: setting up transcoder for resume", __FUNCTION__);
- media_status_t err = setupTranscoder(clientId, sessionId, request, clientCb, pausedState);
+ TranscodingLogger::SessionEndedReason reason = TranscodingLogger::SessionEndedReason::UNKNOWN;
+ media_status_t err = setupTranscoder(clientId, sessionId, request, callingUid, clientCb,
+ &reason, pausedState);
if (err != AMEDIA_OK) {
ALOGE("%s: failed to setup transcoder: %d", __FUNCTION__, err);
+ logSessionEnded(reason, err);
return err;
}
+ // Note: For now resume() will just restart transcoding from the beginning, so there is no need
+ // to distinguish between resume and start from a performance perspective.
+ mTranscodeStartTime = std::chrono::steady_clock::now();
+
err = mTranscoder->resume();
if (err != AMEDIA_OK) {
ALOGE("%s: failed to resume transcoder: %d", __FUNCTION__, err);
+ logSessionEnded(TranscodingLogger::SessionEndedReason::RESUME_FAILED, err);
return err;
}
@@ -510,8 +539,23 @@
void TranscoderWrapper::cleanup() {
mCurrentClientId = 0;
mCurrentSessionId = -1;
+ mCurrentCallingUid = -1;
mTranscoderCb = nullptr;
mTranscoder = nullptr;
+ mSrcFormat = nullptr;
+ mDstFormat = nullptr;
+}
+
+void TranscoderWrapper::logSessionEnded(const TranscodingLogger::SessionEndedReason& reason,
+ int error) {
+ std::chrono::microseconds transcodeDuration(-1);
+ if (reason == TranscodingLogger::SessionEndedReason::FINISHED && error == AMEDIA_OK) {
+ transcodeDuration = std::chrono::duration_cast<std::chrono::microseconds>(
+ std::chrono::steady_clock::now() - mTranscodeStartTime);
+ }
+
+ mLogger->logSessionEnded(reason, mCurrentCallingUid, error, transcodeDuration, mSrcFormat.get(),
+ mDstFormat.get());
}
void TranscoderWrapper::queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
@@ -555,5 +599,4 @@
lock.lock();
}
}
-
} // namespace android
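Switching getVideoFormat() to std::shared_ptr<AMediaFormat> removes the manual AMediaFormat_delete calls and lets the wrapper hold mSrcFormat/mDstFormat for the session-ended logging. A minimal sketch of that ownership pattern, shown in isolation:

#include <memory>

#include <media/NdkMediaFormat.h>

// The deleter runs automatically once the last shared_ptr reference is dropped,
// so the same format can be passed to configureTrackFormat() and kept for logging.
static std::shared_ptr<AMediaFormat> makeSharedFormat() {
    return std::shared_ptr<AMediaFormat>(AMediaFormat_new(), &AMediaFormat_delete);
}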
diff --git a/media/libmediatranscoding/TranscodingClientManager.cpp b/media/libmediatranscoding/TranscodingClientManager.cpp
index 76bb33e..06c5421 100644
--- a/media/libmediatranscoding/TranscodingClientManager.cpp
+++ b/media/libmediatranscoding/TranscodingClientManager.cpp
@@ -162,8 +162,8 @@
int32_t sessionId = mNextSessionId.fetch_add(1);
- *_aidl_return = owner->mSessionController->submit(mClientId, sessionId, in_clientUid,
- in_request, mClientCallback);
+ *_aidl_return = owner->mSessionController->submit(mClientId, sessionId, callingUid,
+ in_clientUid, in_request, mClientCallback);
if (*_aidl_return) {
out_session->sessionId = sessionId;
diff --git a/media/libmediatranscoding/TranscodingLogger.cpp b/media/libmediatranscoding/TranscodingLogger.cpp
new file mode 100644
index 0000000..29a52b0
--- /dev/null
+++ b/media/libmediatranscoding/TranscodingLogger.cpp
@@ -0,0 +1,194 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingLogger"
+
+#include <media/NdkCommon.h>
+#include <media/TranscodingLogger.h>
+#include <statslog_media.h>
+#include <utils/Log.h>
+
+#include <cmath>
+#include <string>
+
+namespace android {
+
+static_assert(TranscodingLogger::UNKNOWN ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__UNKNOWN,
+ "Session event mismatch");
+static_assert(TranscodingLogger::FINISHED ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__FINISHED,
+ "Session event mismatch");
+static_assert(TranscodingLogger::ERROR ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__ERROR,
+ "Session event mismatch");
+static_assert(TranscodingLogger::PAUSED ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__PAUSED,
+ "Session event mismatch");
+static_assert(TranscodingLogger::CANCELLED ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CANCELLED,
+ "Session event mismatch");
+static_assert(TranscodingLogger::START_FAILED ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__START_FAILED,
+ "Session event mismatch");
+static_assert(TranscodingLogger::RESUME_FAILED ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__RESUME_FAILED,
+ "Session event mismatch");
+static_assert(TranscodingLogger::CREATE_FAILED ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CREATE_FAILED,
+ "Session event mismatch");
+static_assert(
+ TranscodingLogger::CONFIG_SRC_FAILED ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CONFIG_SRC_FAILED,
+ "Session event mismatch");
+static_assert(
+ TranscodingLogger::CONFIG_DST_FAILED ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CONFIG_DST_FAILED,
+ "Session event mismatch");
+static_assert(
+ TranscodingLogger::CONFIG_TRACK_FAILED ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__CONFIG_TRACK_FAILED,
+ "Session event mismatch");
+static_assert(
+ TranscodingLogger::OPEN_SRC_FD_FAILED ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__OPEN_SRC_FD_FAILED,
+ "Session event mismatch");
+static_assert(
+ TranscodingLogger::OPEN_DST_FD_FAILED ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__OPEN_DST_FD_FAILED,
+ "Session event mismatch");
+static_assert(TranscodingLogger::NO_TRACKS ==
+ android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED__REASON__NO_TRACKS,
+ "Session event mismatch");
+
+static inline int32_t getInt32(AMediaFormat* fmt, const char* key, int32_t defaultValue = -1) {
+ int32_t value;
+ if (fmt == nullptr || !AMediaFormat_getInt32(fmt, key, &value)) {
+ ALOGW("Unable to get %s", key);
+ value = defaultValue;
+ }
+ return value;
+}
+
+// Note: returned string is owned by format and only valid until the next getString.
+static inline const char* getString(AMediaFormat* fmt, const char* key,
+ const char* defaultValue = "(null)") {
+ const char* value;
+ if (fmt == nullptr || !AMediaFormat_getString(fmt, key, &value)) {
+ ALOGW("Unable to get %s", key);
+ value = defaultValue;
+ }
+ return value;
+}
+
+TranscodingLogger::TranscodingLogger()
+ : mSessionEndedAtomWriter(&android::media::stats::stats_write) {}
+
+void TranscodingLogger::logSessionEnded(enum SessionEndedReason reason, uid_t callingUid,
+ int status, std::chrono::microseconds duration,
+ AMediaFormat* srcFormat, AMediaFormat* dstFormat) {
+ logSessionEnded(std::chrono::steady_clock::now(), reason, callingUid, status, duration,
+ srcFormat, dstFormat);
+}
+
+void TranscodingLogger::logSessionEnded(const std::chrono::steady_clock::time_point& now,
+ enum SessionEndedReason reason, uid_t callingUid,
+ int status, std::chrono::microseconds duration,
+ AMediaFormat* srcFormat, AMediaFormat* dstFormat) {
+ if (srcFormat == nullptr) {
+ ALOGE("Source format is null. Dropping event.");
+ return;
+ }
+
+ if (!shouldLogAtom(now, status)) {
+ ALOGD("Maximum logged event count reached. Dropping event.");
+ return;
+ }
+
+ // Extract the pieces of information to log.
+ const int32_t srcWidth = getInt32(srcFormat, AMEDIAFORMAT_KEY_WIDTH);
+ const int32_t srcHeight = getInt32(srcFormat, AMEDIAFORMAT_KEY_HEIGHT);
+ const char* srcMime = getString(srcFormat, AMEDIAFORMAT_KEY_MIME);
+ const int32_t srcProfile = getInt32(srcFormat, AMEDIAFORMAT_KEY_PROFILE);
+ const int32_t srcLevel = getInt32(srcFormat, AMEDIAFORMAT_KEY_LEVEL);
+ const int32_t srcFrameRate = getInt32(srcFormat, AMEDIAFORMAT_KEY_FRAME_RATE);
+ const int32_t srcFrameCount = getInt32(srcFormat, AMEDIAFORMAT_KEY_FRAME_COUNT);
+ const bool srcIsHdr = AMediaFormatUtils::VideoIsHdr(srcFormat);
+
+ int32_t dstWidth = getInt32(dstFormat, AMEDIAFORMAT_KEY_WIDTH, srcWidth);
+ int32_t dstHeight = getInt32(dstFormat, AMEDIAFORMAT_KEY_HEIGHT, srcHeight);
+ const char* dstMime = dstFormat == nullptr
+ ? "passthrough"
+ : getString(dstFormat, AMEDIAFORMAT_KEY_MIME, srcMime);
+ const bool dstIsHdr = false; // Transcoder always requests SDR output.
+
+ int64_t tmpDurationUs;
+ const int32_t srcDurationMs =
+ AMediaFormat_getInt64(srcFormat, AMEDIAFORMAT_KEY_DURATION, &tmpDurationUs)
+ ? static_cast<int32_t>(tmpDurationUs / 1000)
+ : -1;
+
+ int32_t transcodeFrameRate = -1;
+ if (status == 0 && srcFrameCount > 0 && duration.count() > 0) {
+ std::chrono::duration<double> seconds{duration};
+ transcodeFrameRate = static_cast<int32_t>(
+ std::round(static_cast<double>(srcFrameCount) / seconds.count()));
+ }
+
+ // Write the atom.
+ mSessionEndedAtomWriter(android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED,
+ static_cast<int>(reason), callingUid, status, transcodeFrameRate,
+ srcWidth, srcHeight, srcMime, srcProfile, srcLevel, srcFrameRate,
+ srcDurationMs, srcIsHdr, dstWidth, dstHeight, dstMime, dstIsHdr);
+}
+
+bool TranscodingLogger::shouldLogAtom(const std::chrono::steady_clock::time_point& now,
+ int status) {
+ std::scoped_lock lock{mLock};
+ static const std::chrono::hours oneDay(24);
+
+ // Remove events older than one day.
+ while (mLastLoggedAtoms.size() > 0 && (now - mLastLoggedAtoms.front().first) >= oneDay) {
+ if (mLastLoggedAtoms.front().second == AMEDIA_OK) {
+ --mSuccessfulCount;
+ }
+ mLastLoggedAtoms.pop();
+ }
+
+ // Don't log if maximum number of events is reached.
+ if (mLastLoggedAtoms.size() >= kMaxAtomsPerDay) {
+ return false;
+ }
+
+ // Don't log if the event is successful and the maximum number of successful events is reached.
+ if (status == AMEDIA_OK && mSuccessfulCount >= kMaxSuccessfulAtomsPerDay) {
+ return false;
+ }
+
+ // Record the event.
+ if (status == AMEDIA_OK) {
+ ++mSuccessfulCount;
+ }
+ mLastLoggedAtoms.emplace(now, status);
+ return true;
+}
+
+void TranscodingLogger::setSessionEndedAtomWriter(const SessionEndedAtomWriter& writer) {
+ mSessionEndedAtomWriter = writer;
+}
+
+} // namespace android
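shouldLogAtom() keeps a rolling 24-hour window with a cap on total atoms and a lower cap on successful ones. The standalone sketch below restates that policy with placeholder quotas; the real limits, kMaxAtomsPerDay and kMaxSuccessfulAtomsPerDay, are defined in TranscodingLogger.h and are not shown in this diff.

#include <chrono>
#include <queue>
#include <utility>

// Placeholder quotas, for illustration only.
constexpr size_t kMaxPerDay = 100;
constexpr size_t kMaxSuccessfulPerDay = 80;

class DailyAtomQuota {
  public:
    bool admit(std::chrono::steady_clock::time_point now, bool successful) {
        // Expire entries older than one day and keep the success count in sync.
        while (!mEvents.empty() && now - mEvents.front().first >= std::chrono::hours(24)) {
            if (mEvents.front().second) --mSuccessfulCount;
            mEvents.pop();
        }
        if (mEvents.size() >= kMaxPerDay) return false;
        if (successful && mSuccessfulCount >= kMaxSuccessfulPerDay) return false;
        if (successful) ++mSuccessfulCount;
        mEvents.emplace(now, successful);
        return true;
    }

  private:
    std::queue<std::pair<std::chrono::steady_clock::time_point, bool>> mEvents;
    size_t mSuccessfulCount = 0;
};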
diff --git a/media/libmediatranscoding/TranscodingSessionController.cpp b/media/libmediatranscoding/TranscodingSessionController.cpp
index d12af21..aeabe0f 100644
--- a/media/libmediatranscoding/TranscodingSessionController.cpp
+++ b/media/libmediatranscoding/TranscodingSessionController.cpp
@@ -87,15 +87,12 @@
// Whether watchdog is aborted and the monitoring thread should exit.
bool mAbort GUARDED_BY(mLock);
// When watchdog is active, the next timeout time point.
- std::chrono::system_clock::time_point mNextTimeoutTime GUARDED_BY(mLock);
+ std::chrono::steady_clock::time_point mNextTimeoutTime GUARDED_BY(mLock);
// When watchdog is active, the session being watched.
SessionKeyType mSessionToWatch GUARDED_BY(mLock);
std::thread mThread;
};
-static constexpr int64_t kWatchdogTimeoutUs = 3000000LL;
-static constexpr int64_t kTranscoderHeartBeatIntervalUs = 1000000LL;
-
TranscodingSessionController::Watchdog::Watchdog(TranscodingSessionController* owner,
int64_t timeoutUs)
: mOwner(owner),
@@ -159,7 +156,7 @@
// updateTimer_l() is only called with lock held.
void TranscodingSessionController::Watchdog::updateTimer_l() NO_THREAD_SAFETY_ANALYSIS {
std::chrono::microseconds timeout(mTimeoutUs);
- mNextTimeoutTime = std::chrono::system_clock::now() + timeout;
+ mNextTimeoutTime = std::chrono::steady_clock::now() + timeout;
}
// Unfortunately std::unique_lock is incompatible with -Wthread-safety.
@@ -188,12 +185,84 @@
}
}
///////////////////////////////////////////////////////////////////////////////
+struct TranscodingSessionController::Pacer {
+ Pacer(const ControllerConfig& config)
+ : mBurstThresholdMs(config.pacerBurstThresholdMs),
+ mBurstCountQuota(config.pacerBurstCountQuota),
+ mBurstTimeQuotaSec(config.pacerBurstTimeQuotaSeconds) {}
+
+ ~Pacer() = default;
+
+ void onSessionCompleted(uid_t uid, std::chrono::microseconds runningTime);
+ bool onSessionStarted(uid_t uid);
+
+private:
+ // Threshold of time between finish/start below which a back-to-back start is counted.
+ int32_t mBurstThresholdMs;
+ // Maximum allowed back-to-back start count.
+ int32_t mBurstCountQuota;
+ // Maximum allowed back-to-back running time.
+ int32_t mBurstTimeQuotaSec;
+
+ struct UidHistoryEntry {
+ std::chrono::steady_clock::time_point lastCompletedTime;
+ int32_t burstCount = 0;
+ std::chrono::steady_clock::duration burstDuration{0};
+ };
+ std::map<uid_t, UidHistoryEntry> mUidHistoryMap;
+};
+
+void TranscodingSessionController::Pacer::onSessionCompleted(
+ uid_t uid, std::chrono::microseconds runningTime) {
+ if (mUidHistoryMap.find(uid) == mUidHistoryMap.end()) {
+ mUidHistoryMap.emplace(uid, UidHistoryEntry{});
+ }
+ mUidHistoryMap[uid].lastCompletedTime = std::chrono::steady_clock::now();
+ mUidHistoryMap[uid].burstCount++;
+ mUidHistoryMap[uid].burstDuration += runningTime;
+}
+
+bool TranscodingSessionController::Pacer::onSessionStarted(uid_t uid) {
+ // If uid doesn't exist, this uid has no completed sessions. Skip.
+ if (mUidHistoryMap.find(uid) == mUidHistoryMap.end()) {
+ return true;
+ }
+
+ // TODO: if thermal throttling or resource loss happened to occur between this start
+ // and the previous completion, we should deduct the paused time from the elapsed time.
+ // (An individual session's pause time, on the other hand, doesn't need to be deducted
+ // because it doesn't affect the gap between the last completion and the start.)
+ auto timeSinceLastComplete =
+ std::chrono::steady_clock::now() - mUidHistoryMap[uid].lastCompletedTime;
+ if (mUidHistoryMap[uid].burstCount >= mBurstCountQuota &&
+ mUidHistoryMap[uid].burstDuration >= std::chrono::seconds(mBurstTimeQuotaSec)) {
+ ALOGW("Pacer: uid %d: over quota, burst count %d, time %lldms", uid,
+ mUidHistoryMap[uid].burstCount, (long long)mUidHistoryMap[uid].burstDuration.count());
+ return false;
+ }
+
+ // If not over quota, allow the session, and reset as long as this is not too close
+ // to previous completion.
+ if (timeSinceLastComplete > std::chrono::milliseconds(mBurstThresholdMs)) {
+ ALOGV("Pacer: uid %d: reset quota", uid);
+ mUidHistoryMap[uid].burstCount = 0;
+ mUidHistoryMap[uid].burstDuration = std::chrono::milliseconds(0);
+ } else {
+ ALOGV("Pacer: uid %d: burst count %d, time %lldms", uid, mUidHistoryMap[uid].burstCount,
+ (long long)mUidHistoryMap[uid].burstDuration.count());
+ }
+
+ return true;
+}
+
+///////////////////////////////////////////////////////////////////////////////
TranscodingSessionController::TranscodingSessionController(
const TranscoderFactoryType& transcoderFactory,
const std::shared_ptr<UidPolicyInterface>& uidPolicy,
const std::shared_ptr<ResourcePolicyInterface>& resourcePolicy,
- const std::shared_ptr<ThermalPolicyInterface>& thermalPolicy)
+ const std::shared_ptr<ThermalPolicyInterface>& thermalPolicy,
+ const ControllerConfig* config)
: mTranscoderFactory(transcoderFactory),
mUidPolicy(uidPolicy),
mResourcePolicy(resourcePolicy),
@@ -206,6 +275,13 @@
mSessionQueues.emplace(OFFLINE_UID, SessionQueueType());
mUidPackageNames[OFFLINE_UID] = "(offline)";
mThermalThrottling = thermalPolicy->getThrottlingStatus();
+ if (config != nullptr) {
+ mConfig = *config;
+ }
+ mPacer.reset(new Pacer(mConfig));
+ ALOGD("@@@ watchdog %lld, burst count %d, burst time %d, burst threshold %d",
+ (long long)mConfig.watchdogTimeoutUs, mConfig.pacerBurstCountQuota,
+ mConfig.pacerBurstTimeQuotaSeconds, mConfig.pacerBurstThresholdMs);
}
TranscodingSessionController::~TranscodingSessionController() {}
@@ -280,10 +356,21 @@
write(fd, result.string(), result.size());
}
+/*
+ * Returns nullptr if there is no session, or if we're paused globally (due to resource loss,
+ * thermal throttling, etc.). Otherwise, return the session that should be run next.
+ */
TranscodingSessionController::Session* TranscodingSessionController::getTopSession_l() {
if (mSessionMap.empty()) {
return nullptr;
}
+
+ // Return nullptr if we're paused globally due to resource loss or thermal throttling.
+ if (((mResourcePolicy != nullptr && mResourceLost) ||
+ (mThermalPolicy != nullptr && mThermalThrottling))) {
+ return nullptr;
+ }
+
uid_t topUid = *mUidSortedList.begin();
SessionKeyType topSessionKey = *mSessionQueues[topUid].begin();
return &mSessionMap[topSessionKey];
@@ -313,9 +400,10 @@
if (state == newState) {
return;
}
- auto nowTime = std::chrono::system_clock::now();
+ auto nowTime = std::chrono::steady_clock::now();
if (state != INVALID) {
- std::chrono::microseconds elapsedTime = (nowTime - stateEnterTime);
+ std::chrono::microseconds elapsedTime =
+ std::chrono::duration_cast<std::chrono::microseconds>(nowTime - stateEnterTime);
switch (state) {
case PAUSED:
pausedTime = pausedTime + elapsedTime;
@@ -338,47 +426,60 @@
}
void TranscodingSessionController::updateCurrentSession_l() {
- Session* topSession = getTopSession_l();
Session* curSession = mCurrentSession;
- ALOGV("updateCurrentSession: topSession is %s, curSession is %s",
- topSession == nullptr ? "null" : sessionToString(topSession->key).c_str(),
- curSession == nullptr ? "null" : sessionToString(curSession->key).c_str());
+ Session* topSession = getTopSession_l();
- if (topSession == nullptr) {
- mCurrentSession = nullptr;
- return;
+ // Delayed init of transcoder and watchdog.
+ if (mTranscoder == nullptr) {
+ mTranscoder = mTranscoderFactory(shared_from_this());
+ mWatchdog = std::make_shared<Watchdog>(this, mConfig.watchdogTimeoutUs);
}
- bool shouldBeRunning = !((mResourcePolicy != nullptr && mResourceLost) ||
- (mThermalPolicy != nullptr && mThermalThrottling));
- // If we found a topSession that should be run, and it's not already running,
- // take some actions to ensure it's running.
- if (topSession != curSession ||
- (shouldBeRunning ^ (topSession->getState() == Session::RUNNING))) {
- if (mTranscoder == nullptr) {
- mTranscoder = mTranscoderFactory(shared_from_this(), kTranscoderHeartBeatIntervalUs);
- mWatchdog = std::make_shared<Watchdog>(this, kWatchdogTimeoutUs);
- }
+ // If we found a different top session, or the top session's running state is not
+ // correct, take some actions to ensure it's correct.
+ while ((topSession = getTopSession_l()) != curSession ||
+ (topSession != nullptr && !topSession->isRunning())) {
+ ALOGV("updateCurrentSession_l: topSession is %s, curSession is %s",
+ topSession == nullptr ? "null" : sessionToString(topSession->key).c_str(),
+ curSession == nullptr ? "null" : sessionToString(curSession->key).c_str());
- // If current session is running, pause it first. Note this is true for either
- // cases: 1) If top session is changing, or 2) if top session is not changing but
- // the topSession's state is changing.
+ // If current session is running, pause it first. Note this is needed in either
+ // case: 1) Top session is changing to another session, or 2) Top session is
+ // changing to null (which means we should be globally paused).
if (curSession != nullptr && curSession->getState() == Session::RUNNING) {
mTranscoder->pause(curSession->key.first, curSession->key.second);
setSessionState_l(curSession, Session::PAUSED);
}
- // If we are not experiencing resource loss nor thermal throttling, we can start
- // or resume the topSession now.
- if (shouldBeRunning) {
- if (topSession->getState() == Session::NOT_STARTED) {
- mTranscoder->start(topSession->key.first, topSession->key.second,
- topSession->request, topSession->callback.lock());
- } else if (topSession->getState() == Session::PAUSED) {
- mTranscoder->resume(topSession->key.first, topSession->key.second,
- topSession->request, topSession->callback.lock());
+
+ if (topSession == nullptr) {
+ // Nothing more to run (either no session or globally paused).
+ break;
+ }
+
+ // Otherwise, ensure topSession is running.
+ if (topSession->getState() == Session::NOT_STARTED) {
+ if (!mPacer->onSessionStarted(topSession->clientUid)) {
+ // Unfortunately this uid is out of quota for new sessions.
+ // Drop this session and try another one.
+ {
+ auto clientCallback = mSessionMap[topSession->key].callback.lock();
+ if (clientCallback != nullptr) {
+ clientCallback->onTranscodingFailed(
+ topSession->key.second, TranscodingErrorCode::kDroppedByService);
+ }
+ }
+ removeSession_l(topSession->key, Session::DROPPED_BY_PACER);
+ continue;
}
+ mTranscoder->start(topSession->key.first, topSession->key.second, topSession->request,
+ topSession->callingUid, topSession->callback.lock());
+ setSessionState_l(topSession, Session::RUNNING);
+ } else if (topSession->getState() == Session::PAUSED) {
+ mTranscoder->resume(topSession->key.first, topSession->key.second, topSession->request,
+ topSession->callingUid, topSession->callback.lock());
setSessionState_l(topSession, Session::RUNNING);
}
+ break;
}
mCurrentSession = topSession;
}
@@ -393,7 +494,7 @@
}
// Remove session from uid's queue.
- const uid_t uid = mSessionMap[sessionKey].uid;
+ const uid_t uid = mSessionMap[sessionKey].clientUid;
SessionQueueType& sessionQueue = mSessionQueues[uid];
auto it = std::find(sessionQueue.begin(), sessionQueue.end(), sessionKey);
if (it == sessionQueue.end()) {
@@ -419,6 +520,12 @@
}
setSessionState_l(&mSessionMap[sessionKey], finalState);
+
+ if (finalState == Session::FINISHED || finalState == Session::ERROR) {
+ mPacer->onSessionCompleted(mSessionMap[sessionKey].clientUid,
+ mSessionMap[sessionKey].runningTime);
+ }
+
mSessionHistory.push_back(mSessionMap[sessionKey]);
if (mSessionHistory.size() > kSessionHistoryMax) {
mSessionHistory.erase(mSessionHistory.begin());
@@ -482,13 +589,13 @@
}
bool TranscodingSessionController::submit(
- ClientIdType clientId, SessionIdType sessionId, uid_t uid,
+ ClientIdType clientId, SessionIdType sessionId, uid_t callingUid, uid_t clientUid,
const TranscodingRequestParcel& request,
const std::weak_ptr<ITranscodingClientCallback>& callback) {
SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
ALOGV("%s: session %s, uid %d, prioirty %d", __FUNCTION__, sessionToString(sessionKey).c_str(),
- uid, (int32_t)request.priority);
+ clientUid, (int32_t)request.priority);
std::scoped_lock lock{mLock};
@@ -498,21 +605,20 @@
}
// Add the uid package name to the store of package names we already know.
- if (mUidPackageNames.count(uid) == 0) {
- mUidPackageNames.emplace(uid, request.clientPackageName);
+ if (mUidPackageNames.count(clientUid) == 0) {
+ mUidPackageNames.emplace(clientUid, request.clientPackageName);
}
// TODO(chz): only support offline vs real-time for now. All kUnspecified sessions
// go to offline queue.
if (request.priority == TranscodingSessionPriority::kUnspecified) {
- uid = OFFLINE_UID;
+ clientUid = OFFLINE_UID;
}
// Add session to session map.
mSessionMap[sessionKey].key = sessionKey;
- mSessionMap[sessionKey].uid = uid;
- mSessionMap[sessionKey].lastProgress = 0;
- mSessionMap[sessionKey].pauseCount = 0;
+ mSessionMap[sessionKey].clientUid = clientUid;
+ mSessionMap[sessionKey].callingUid = callingUid;
mSessionMap[sessionKey].request = request;
mSessionMap[sessionKey].callback = callback;
setSessionState_l(&mSessionMap[sessionKey], Session::NOT_STARTED);
@@ -520,25 +626,25 @@
// If it's an offline session, the queue was already added in constructor.
// If it's a real-time sessions, check if a queue is already present for the uid,
// and add a new queue if needed.
- if (uid != OFFLINE_UID) {
- if (mSessionQueues.count(uid) == 0) {
- mUidPolicy->registerMonitorUid(uid);
- if (mUidPolicy->isUidOnTop(uid)) {
- mUidSortedList.push_front(uid);
+ if (clientUid != OFFLINE_UID) {
+ if (mSessionQueues.count(clientUid) == 0) {
+ mUidPolicy->registerMonitorUid(clientUid);
+ if (mUidPolicy->isUidOnTop(clientUid)) {
+ mUidSortedList.push_front(clientUid);
} else {
// Shouldn't be submitting real-time requests from non-top app,
// put it in front of the offline queue.
- mUidSortedList.insert(mOfflineUidIterator, uid);
+ mUidSortedList.insert(mOfflineUidIterator, clientUid);
}
- } else if (uid != *mUidSortedList.begin()) {
- if (mUidPolicy->isUidOnTop(uid)) {
- mUidSortedList.remove(uid);
- mUidSortedList.push_front(uid);
+ } else if (clientUid != *mUidSortedList.begin()) {
+ if (mUidPolicy->isUidOnTop(clientUid)) {
+ mUidSortedList.remove(clientUid);
+ mUidSortedList.push_front(clientUid);
}
}
}
// Append this session to the uid's queue.
- mSessionQueues[uid].push_back(sessionKey);
+ mSessionQueues[clientUid].push_back(sessionKey);
updateCurrentSession_l();
@@ -557,7 +663,7 @@
if (sessionId < 0) {
for (auto it = mSessionMap.begin(); it != mSessionMap.end(); ++it) {
- if (it->first.first == clientId && it->second.uid != OFFLINE_UID) {
+ if (it->first.first == clientId && it->second.clientUid != OFFLINE_UID) {
sessionsToRemove.push_back(it->first);
}
}
@@ -687,7 +793,7 @@
mTranscoder->stop(clientId, sessionId, true /*abandon*/);
// Clear the last ref count before we create new transcoder.
mTranscoder = nullptr;
- mTranscoder = mTranscoderFactory(shared_from_this(), kTranscoderHeartBeatIntervalUs);
+ mTranscoder = mTranscoderFactory(shared_from_this());
}
{
diff --git a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
index 23072ff..5349fe1 100644
--- a/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
+++ b/media/libmediatranscoding/aidl/android/media/TranscodingErrorCode.aidl
@@ -23,13 +23,19 @@
*/
@Backing(type = "int")
enum TranscodingErrorCode {
+ // Errors exposed to client side.
kNoError = 0,
- kUnknown = 1,
- kMalformed = 2,
- kUnsupported = 3,
- kInvalidParameter = 4,
- kInvalidOperation = 5,
- kErrorIO = 6,
- kInsufficientResources = 7,
- kWatchdogTimeout = 8,
+ kDroppedByService = 1,
+ kServiceUnavailable = 2,
+
+ // Other private errors.
+ kPrivateErrorFirst = 1000,
+ kUnknown = kPrivateErrorFirst + 0,
+ kMalformed = kPrivateErrorFirst + 1,
+ kUnsupported = kPrivateErrorFirst + 2,
+ kInvalidParameter = kPrivateErrorFirst + 3,
+ kInvalidOperation = kPrivateErrorFirst + 4,
+ kErrorIO = kPrivateErrorFirst + 5,
+ kInsufficientResources = kPrivateErrorFirst + 6,
+ kWatchdogTimeout = kPrivateErrorFirst + 7,
}
\ No newline at end of file
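The split above means only values below kPrivateErrorFirst are intended to reach clients; everything at or above it stays service-internal. A minimal sketch of a check a caller might use, assuming the NDK-backend AIDL header path (this helper is not part of the change):

    #include <aidl/android/media/TranscodingErrorCode.h>

    using ::aidl::android::media::TranscodingErrorCode;

    // Hypothetical helper: true for the client-facing values
    // (kNoError, kDroppedByService, kServiceUnavailable), false for private ones.
    static bool isClientVisibleError(TranscodingErrorCode err) {
        return static_cast<int32_t>(err) <
               static_cast<int32_t>(TranscodingErrorCode::kPrivateErrorFirst);
    }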
diff --git a/media/libmediatranscoding/include/media/ControllerClientInterface.h b/media/libmediatranscoding/include/media/ControllerClientInterface.h
index 3fd4f0c..0d13607 100644
--- a/media/libmediatranscoding/include/media/ControllerClientInterface.h
+++ b/media/libmediatranscoding/include/media/ControllerClientInterface.h
@@ -36,8 +36,8 @@
* Returns true on success and false on failure. This call will fail is a session identified
* by <clientId, sessionId> already exists.
*/
- virtual bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t uid,
- const TranscodingRequestParcel& request,
+ virtual bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t callingUid,
+ uid_t clientUid, const TranscodingRequestParcel& request,
const std::weak_ptr<ITranscodingClientCallback>& clientCallback) = 0;
/**
diff --git a/media/libmediatranscoding/include/media/TranscoderInterface.h b/media/libmediatranscoding/include/media/TranscoderInterface.h
index 5f27d82..3b0bd3b 100644
--- a/media/libmediatranscoding/include/media/TranscoderInterface.h
+++ b/media/libmediatranscoding/include/media/TranscoderInterface.h
@@ -33,11 +33,11 @@
class TranscoderInterface {
public:
virtual void start(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& request,
+ const TranscodingRequestParcel& request, uid_t callingUid,
const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
virtual void pause(ClientIdType clientId, SessionIdType sessionId) = 0;
virtual void resume(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& request,
+ const TranscodingRequestParcel& request, uid_t callingUid,
const std::shared_ptr<ITranscodingClientCallback>& clientCallback) = 0;
// Stop the specified session. If abandon is true, the transcoder wrapper will be discarded
// after the session stops.
diff --git a/media/libmediatranscoding/include/media/TranscoderWrapper.h b/media/libmediatranscoding/include/media/TranscoderWrapper.h
index 7935bbe..d3d4c86 100644
--- a/media/libmediatranscoding/include/media/TranscoderWrapper.h
+++ b/media/libmediatranscoding/include/media/TranscoderWrapper.h
@@ -18,8 +18,11 @@
#define ANDROID_TRANSCODER_WRAPPER_H
#include <media/NdkMediaError.h>
+#include <media/NdkMediaFormat.h>
#include <media/TranscoderInterface.h>
+#include <media/TranscodingLogger.h>
+#include <chrono>
#include <list>
#include <map>
#include <mutex>
@@ -37,16 +40,17 @@
public std::enable_shared_from_this<TranscoderWrapper> {
public:
TranscoderWrapper(const std::shared_ptr<TranscoderCallbackInterface>& cb,
+ const std::shared_ptr<TranscodingLogger>& logger,
int64_t heartBeatIntervalUs);
~TranscoderWrapper();
// TranscoderInterface
void start(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& request,
+ const TranscodingRequestParcel& request, uid_t callingUid,
const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
void pause(ClientIdType clientId, SessionIdType sessionId) override;
void resume(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& request,
+ const TranscodingRequestParcel& request, uid_t callingUid,
const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
void stop(ClientIdType clientId, SessionIdType sessionId, bool abandon = false) override;
// ~TranscoderInterface
@@ -76,6 +80,9 @@
std::shared_ptr<CallbackImpl> mTranscoderCb;
std::shared_ptr<MediaTranscoder> mTranscoder;
std::weak_ptr<TranscoderCallbackInterface> mCallback;
+ std::shared_ptr<TranscodingLogger> mLogger;
+ std::shared_ptr<AMediaFormat> mSrcFormat;
+ std::shared_ptr<AMediaFormat> mDstFormat;
int64_t mHeartBeatIntervalUs;
std::mutex mLock;
std::condition_variable mCondition;
@@ -83,6 +90,9 @@
std::map<SessionKeyType, std::shared_ptr<ndk::ScopedAParcel>> mPausedStateMap;
ClientIdType mCurrentClientId;
SessionIdType mCurrentSessionId;
+ uid_t mCurrentCallingUid;
+ std::chrono::steady_clock::time_point mTranscodeStartTime;
+
// Whether the looper has been created.
bool mLooperReady;
@@ -93,18 +103,20 @@
void onHeartBeat(ClientIdType clientId, SessionIdType sessionId);
media_status_t handleStart(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& request,
+ const TranscodingRequestParcel& request, uid_t callingUid,
const std::shared_ptr<ITranscodingClientCallback>& callback);
media_status_t handlePause(ClientIdType clientId, SessionIdType sessionId);
media_status_t handleResume(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& request,
+ const TranscodingRequestParcel& request, uid_t callingUid,
const std::shared_ptr<ITranscodingClientCallback>& callback);
media_status_t setupTranscoder(
ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
- const std::shared_ptr<ITranscodingClientCallback>& callback,
+ uid_t callingUid, const std::shared_ptr<ITranscodingClientCallback>& callback,
+ TranscodingLogger::SessionEndedReason* failureReason /* nonnull */,
const std::shared_ptr<ndk::ScopedAParcel>& pausedState = nullptr);
void cleanup();
+ void logSessionEnded(const TranscodingLogger::SessionEndedReason& reason, int error);
void reportError(ClientIdType clientId, SessionIdType sessionId, media_status_t err);
void queueEvent(Event::Type type, ClientIdType clientId, SessionIdType sessionId,
const std::function<void()> runnable, int32_t arg = 0);
diff --git a/media/libmediatranscoding/include/media/TranscodingLogger.h b/media/libmediatranscoding/include/media/TranscodingLogger.h
new file mode 100644
index 0000000..dc24551
--- /dev/null
+++ b/media/libmediatranscoding/include/media/TranscodingLogger.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_MEDIA_TRANSCODING_LOGGER_H
+#define ANDROID_MEDIA_TRANSCODING_LOGGER_H
+
+#include <media/NdkMediaFormat.h>
+#include <utils/Condition.h>
+
+#include <chrono>
+#include <memory>
+#include <mutex>
+#include <queue>
+
+namespace android {
+
+/** Class for logging transcoding events. */
+class TranscodingLogger {
+public:
+ /** The maximum number of atoms pushed to statsd per day. */
+ static constexpr int kMaxAtomsPerDay = 50;
+
+ /** The maximum number of successful transcoding atoms pushed to statsd per day. */
+ static constexpr int kMaxSuccessfulAtomsPerDay = 35;
+
+ /** Reason transcoding session ended. Maps to MediaTranscodingSessionEnded atom's Reason. */
+ enum SessionEndedReason {
+ UNKNOWN = 0,
+ FINISHED,
+ ERROR,
+ PAUSED,
+ CANCELLED,
+ START_FAILED,
+ RESUME_FAILED,
+ CREATE_FAILED,
+ CONFIG_SRC_FAILED,
+ CONFIG_DST_FAILED,
+ CONFIG_TRACK_FAILED,
+ OPEN_SRC_FD_FAILED,
+ OPEN_DST_FD_FAILED,
+ NO_TRACKS,
+ };
+
+ TranscodingLogger();
+ ~TranscodingLogger() = default;
+
+ /**
+ * Logs a transcoding session ended event (MediaTranscodingSessionEnded atom).
+ * @param reason Reason for the transcoding session to end.
+ * @param callingUid UID of the caller connecting to the transcoding service.
+ * @param status Status (error code) of the transcoding session.
+ * @param duration Duration of the transcoding session.
+ * @param srcFormat The source video track format.
+ * @param dstFormat The destination video track format.
+ */
+ void logSessionEnded(enum SessionEndedReason reason, uid_t callingUid, int status,
+ std::chrono::microseconds duration, AMediaFormat* srcFormat,
+ AMediaFormat* dstFormat);
+
+private:
+ friend class TranscodingLoggerTest;
+
+ // Function prototype for writing out the session ended atom.
+ using SessionEndedAtomWriter = std::function<int(
+ int32_t, int32_t, int32_t, int32_t, int32_t, int32_t, int32_t, char const*, int32_t,
+ int32_t, int32_t, int32_t, bool arg12, int32_t, int32_t, char const*, bool)>;
+
+ std::mutex mLock;
+ std::queue<std::pair<std::chrono::steady_clock::time_point, int>> mLastLoggedAtoms
+ GUARDED_BY(mLock);
+ uint32_t mSuccessfulCount = 0;
+ SessionEndedAtomWriter mSessionEndedAtomWriter;
+
+ void logSessionEnded(const std::chrono::steady_clock::time_point& now,
+ enum SessionEndedReason reason, uid_t callingUid, int status,
+ std::chrono::microseconds duration, AMediaFormat* srcFormat,
+ AMediaFormat* dstFormat);
+ bool shouldLogAtom(const std::chrono::steady_clock::time_point& now, int status);
+ // Used for testing to validate what gets sent to statsd.
+ void setSessionEndedAtomWriter(const SessionEndedAtomWriter& writer);
+};
+
+} // namespace android
+#endif // ANDROID_MEDIA_TRANSCODING_LOGGER_H
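The quota constants and the timestamp queue above drive the rate limiting exercised by TranscodingLogger_tests further down. The implementation (TranscodingLogger.cpp) is not part of this excerpt; below is a minimal sketch of shouldLogAtom consistent with those tests, assuming the caller already holds mLock, uses AMEDIA_OK to mark success, and records the (time, status) pair after a successful write:

    bool TranscodingLogger::shouldLogAtom(const std::chrono::steady_clock::time_point& now,
                                          int status) {
        // Evict entries older than 24 hours so the quotas act as a sliding window.
        constexpr std::chrono::hours kWindow{24};
        while (!mLastLoggedAtoms.empty() && (now - mLastLoggedAtoms.front().first) >= kWindow) {
            if (mLastLoggedAtoms.front().second == AMEDIA_OK) {
                --mSuccessfulCount;
            }
            mLastLoggedAtoms.pop();
        }
        if (mLastLoggedAtoms.size() >= static_cast<size_t>(kMaxAtomsPerDay)) {
            return false;  // Overall daily quota exhausted.
        }
        if (status == AMEDIA_OK &&
            mSuccessfulCount >= static_cast<uint32_t>(kMaxSuccessfulAtomsPerDay)) {
            return false;  // Successful sessions have a tighter quota.
        }
        return true;
    }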
diff --git a/media/libmediatranscoding/include/media/TranscodingSessionController.h b/media/libmediatranscoding/include/media/TranscodingSessionController.h
index 34e9506..b2d6f0a 100644
--- a/media/libmediatranscoding/include/media/TranscodingSessionController.h
+++ b/media/libmediatranscoding/include/media/TranscodingSessionController.h
@@ -48,7 +48,7 @@
virtual ~TranscodingSessionController();
// ControllerClientInterface
- bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t uid,
+ bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t callingUid, uid_t clientUid,
const TranscodingRequestParcel& request,
const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override;
bool cancel(ClientIdType clientId, SessionIdType sessionId) override;
@@ -93,7 +93,18 @@
using SessionKeyType = std::pair<ClientIdType, SessionIdType>;
using SessionQueueType = std::list<SessionKeyType>;
using TranscoderFactoryType = std::function<std::shared_ptr<TranscoderInterface>(
- const std::shared_ptr<TranscoderCallbackInterface>&, int64_t)>;
+ const std::shared_ptr<TranscoderCallbackInterface>&)>;
+
+ struct ControllerConfig {
+ // Watchdog timeout.
+ int64_t watchdogTimeoutUs = 3000000LL;
+ // Threshold of time between finish/start below which a back-to-back start is counted.
+ int32_t pacerBurstThresholdMs = 1000;
+ // Maximum allowed back-to-back start count.
+ int32_t pacerBurstCountQuota = 10;
+ // Maximum allowed back-to-back running time.
+ int32_t pacerBurstTimeQuotaSeconds = 180; // 3-min
+ };
struct Session {
enum State {
@@ -106,15 +117,17 @@
FINISHED,
CANCELED,
ERROR,
+ DROPPED_BY_PACER,
};
SessionKeyType key;
- uid_t uid;
- int32_t lastProgress;
- int32_t pauseCount;
- std::chrono::time_point<std::chrono::system_clock> stateEnterTime;
- std::chrono::microseconds waitingTime;
- std::chrono::microseconds runningTime;
- std::chrono::microseconds pausedTime;
+ uid_t clientUid;
+ uid_t callingUid;
+ int32_t lastProgress = 0;
+ int32_t pauseCount = 0;
+ std::chrono::time_point<std::chrono::steady_clock> stateEnterTime;
+ std::chrono::microseconds waitingTime{0};
+ std::chrono::microseconds runningTime{0};
+ std::chrono::microseconds pausedTime{0};
TranscodingRequest request;
std::weak_ptr<ITranscodingClientCallback> callback;
@@ -122,12 +135,16 @@
// Must use setState to change state.
void setState(Session::State state);
State getState() const { return state; }
+ bool isRunning() { return state == RUNNING; }
private:
State state = INVALID;
};
struct Watchdog;
+ struct Pacer;
+
+ ControllerConfig mConfig;
// TODO(chz): call transcoder without global lock.
// Use mLock for all entrypoints for now.
@@ -155,12 +172,14 @@
bool mThermalThrottling;
std::list<Session> mSessionHistory;
std::shared_ptr<Watchdog> mWatchdog;
+ std::shared_ptr<Pacer> mPacer;
// Only allow MediaTranscodingService and unit tests to instantiate.
TranscodingSessionController(const TranscoderFactoryType& transcoderFactory,
const std::shared_ptr<UidPolicyInterface>& uidPolicy,
const std::shared_ptr<ResourcePolicyInterface>& resourcePolicy,
- const std::shared_ptr<ThermalPolicyInterface>& thermalPolicy);
+ const std::shared_ptr<ThermalPolicyInterface>& thermalPolicy,
+ const ControllerConfig* config = nullptr);
void dumpSession_l(const Session& session, String8& result, bool closedSession = false);
Session* getTopSession_l();
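struct Pacer is only forward-declared here; its definition lives in TranscodingSessionController.cpp, which this excerpt shows only at the call sites (onSessionStarted/onSessionCompleted). Below is a minimal sketch of the per-uid burst accounting implied by ControllerConfig and those call sites; the member names are assumptions, not the actual implementation:

    struct TranscodingSessionController::Pacer {
        explicit Pacer(const ControllerConfig& config) : mConfig(config) {}

        // Returns false when the uid has exhausted its back-to-back quota, in which
        // case the controller drops the session with kDroppedByService.
        bool onSessionStarted(uid_t uid) {
            const auto now = std::chrono::steady_clock::now();
            UidHistory& history = mUidHistoryMap[uid];
            // A start only extends the burst if it follows the previous completion
            // within pacerBurstThresholdMs; otherwise the burst counters reset.
            if (history.burstCount > 0 &&
                (now - history.lastCompletedTime) >
                        std::chrono::milliseconds(mConfig.pacerBurstThresholdMs)) {
                history.burstCount = 0;
                history.burstRunningTime = std::chrono::microseconds(0);
            }
            return history.burstCount < mConfig.pacerBurstCountQuota &&
                   history.burstRunningTime <
                           std::chrono::seconds(mConfig.pacerBurstTimeQuotaSeconds);
        }

        void onSessionCompleted(uid_t uid, std::chrono::microseconds runningTime) {
            UidHistory& history = mUidHistoryMap[uid];
            history.lastCompletedTime = std::chrono::steady_clock::now();
            history.burstCount++;
            history.burstRunningTime += runningTime;
        }

    private:
        struct UidHistory {
            std::chrono::steady_clock::time_point lastCompletedTime;
            int32_t burstCount = 0;
            std::chrono::microseconds burstRunningTime{0};
        };
        ControllerConfig mConfig;
        std::map<uid_t, UidHistory> mUidHistoryMap;
    };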
diff --git a/media/libmediatranscoding/tests/Android.bp b/media/libmediatranscoding/tests/Android.bp
index 06b9b17..603611a 100644
--- a/media/libmediatranscoding/tests/Android.bp
+++ b/media/libmediatranscoding/tests/Android.bp
@@ -72,3 +72,15 @@
srcs: ["AdjustableMaxPriorityQueue_tests.cpp"],
}
+
+//
+// TranscodingLogger unit test
+//
+cc_test {
+ name: "TranscodingLogger_tests",
+ defaults: ["libmediatranscoding_test_defaults"],
+ shared_libs: ["libmediandk", "libstatssocket#30"],
+ static_libs: ["libmediatranscoder", "libstatslog_media"],
+
+ srcs: ["TranscodingLogger_tests.cpp"],
+}
diff --git a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
index 1a50923..57a2e27 100644
--- a/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingClientManager_tests.cpp
@@ -135,7 +135,7 @@
virtual ~TestController() { ALOGI("TestController Destroyed"); }
- bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t /*uid*/,
+ bool submit(ClientIdType clientId, SessionIdType sessionId, uid_t /*callingUid*/, uid_t /*uid*/,
const TranscodingRequestParcel& request,
const std::weak_ptr<ITranscodingClientCallback>& clientCallback) override {
SessionKeyType sessionKey = std::make_pair(clientId, sessionId);
diff --git a/media/libmediatranscoding/tests/TranscodingLogger_tests.cpp b/media/libmediatranscoding/tests/TranscodingLogger_tests.cpp
new file mode 100644
index 0000000..39e5cd4
--- /dev/null
+++ b/media/libmediatranscoding/tests/TranscodingLogger_tests.cpp
@@ -0,0 +1,286 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit Test for TranscodingLogger
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "TranscodingLoggerTest"
+
+#include <android-base/logging.h>
+#include <gtest/gtest.h>
+#include <media/NdkCommon.h>
+#include <media/TranscodingLogger.h>
+#include <statslog_media.h>
+#include <utils/Log.h>
+
+#include <chrono>
+
+namespace android {
+
+using Reason = TranscodingLogger::SessionEndedReason;
+
+// Data structure corresponding to MediaTranscodingEnded atom.
+struct SessionEndedAtom {
+ SessionEndedAtom(int32_t atomCode, int32_t reason, int32_t callingUid, int32_t status,
+ int32_t transcoderFps, int32_t srcWidth, int32_t srcHeight,
+ char const* srcMime, int32_t srcProfile, int32_t srcLevel, int32_t srcFps,
+ int32_t srcDurationMs, bool srcIsHdr, int32_t dstWidth, int32_t dstHeight,
+ char const* dstMime, bool dstIsHdr)
+ : atomCode(atomCode),
+ reason(reason),
+ callingUid(callingUid),
+ status(status),
+ transcoderFps(transcoderFps),
+ srcWidth(srcWidth),
+ srcHeight(srcHeight),
+ srcMime(srcMime),
+ srcProfile(srcProfile),
+ srcLevel(srcLevel),
+ srcFps(srcFps),
+ srcDurationMs(srcDurationMs),
+ srcIsHdr(srcIsHdr),
+ dstWidth(dstWidth),
+ dstHeight(dstHeight),
+ dstMime(dstMime),
+ dstIsHdr(dstIsHdr) {}
+
+ int32_t atomCode;
+ int32_t reason;
+ int32_t callingUid;
+ int32_t status;
+ int32_t transcoderFps;
+ int32_t srcWidth;
+ int32_t srcHeight;
+ std::string srcMime;
+ int32_t srcProfile;
+ int32_t srcLevel;
+ int32_t srcFps;
+ int32_t srcDurationMs;
+ bool srcIsHdr;
+ int32_t dstWidth;
+ int32_t dstHeight;
+ std::string dstMime;
+ bool dstIsHdr;
+};
+
+// Default configuration values.
+static constexpr int32_t kDefaultCallingUid = 1;
+static constexpr std::chrono::microseconds kDefaultTranscodeDuration = std::chrono::seconds{2};
+
+static constexpr int32_t kDefaultSrcWidth = 1920;
+static constexpr int32_t kDefaultSrcHeight = 1080;
+static const std::string kDefaultSrcMime{AMEDIA_MIMETYPE_VIDEO_HEVC};
+static constexpr int32_t kDefaultSrcProfile = 1; // HEVC Main
+static constexpr int32_t kDefaultSrcLevel = 65536; // HEVCMainTierLevel51
+static constexpr int32_t kDefaultSrcFps = 30;
+static constexpr int32_t kDefaultSrcFrameCount = 120;
+static constexpr int64_t kDefaultSrcDurationUs = 1000000 * kDefaultSrcFrameCount / kDefaultSrcFps;
+
+static constexpr int32_t kDefaultDstWidth = 1280;
+static constexpr int32_t kDefaultDstHeight = 720;
+static const std::string kDefaultDstMime{AMEDIA_MIMETYPE_VIDEO_AVC};
+
+// Util for creating a default source video format.
+static AMediaFormat* CreateSrcFormat() {
+ AMediaFormat* fmt = AMediaFormat_new();
+ AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, kDefaultSrcWidth);
+ AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, kDefaultSrcHeight);
+ AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, kDefaultSrcMime.c_str());
+ AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_PROFILE, kDefaultSrcProfile);
+ AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_LEVEL, kDefaultSrcLevel);
+ AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_FRAME_RATE, kDefaultSrcFps);
+ AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_FRAME_COUNT, kDefaultSrcFrameCount);
+ AMediaFormat_setInt64(fmt, AMEDIAFORMAT_KEY_DURATION, kDefaultSrcDurationUs);
+ return fmt;
+}
+
+// Util for creating a default destination video format.
+static AMediaFormat* CreateDstFormat() {
+ AMediaFormat* fmt = AMediaFormat_new();
+ AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, kDefaultDstWidth);
+ AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, kDefaultDstHeight);
+ AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, kDefaultDstMime.c_str());
+ return fmt;
+}
+
+class TranscodingLoggerTest : public ::testing::Test {
+public:
+ TranscodingLoggerTest() { ALOGI("TranscodingLoggerTest created"); }
+
+ void SetUp() override {
+ ALOGI("TranscodingLoggerTest set up");
+ mLogger.reset(new TranscodingLogger());
+ mLoggedAtoms.clear();
+ mSrcFormat.reset();
+ mDstFormat.reset();
+
+ // Set a custom atom writer that saves all data, so the test can validate it afterwards.
+ mLogger->setSessionEndedAtomWriter(
+ [=](int32_t atomCode, int32_t reason, int32_t callingUid, int32_t status,
+ int32_t transcoderFps, int32_t srcWidth, int32_t srcHeight, char const* srcMime,
+ int32_t srcProfile, int32_t srcLevel, int32_t srcFps, int32_t srcDurationMs,
+ bool srcIsHdr, int32_t dstWidth, int32_t dstHeight, char const* dstMime,
+ bool dstIsHdr) -> int {
+ mLoggedAtoms.emplace_back(atomCode, reason, callingUid, status, transcoderFps,
+ srcWidth, srcHeight, srcMime, srcProfile, srcLevel,
+ srcFps, srcDurationMs, srcIsHdr, dstWidth, dstHeight,
+ dstMime, dstIsHdr);
+ return 0;
+ });
+ }
+
+ void logSession(const std::chrono::steady_clock::time_point& time, Reason reason, int status,
+ AMediaFormat* srcFormat, AMediaFormat* dstFormat) {
+ mLogger->logSessionEnded(time, reason, kDefaultCallingUid, status,
+ kDefaultTranscodeDuration, srcFormat, dstFormat);
+ }
+
+ void logSession(const std::chrono::steady_clock::time_point& time, Reason reason, int status) {
+ if (!mSrcFormat) {
+ mSrcFormat = std::shared_ptr<AMediaFormat>(CreateSrcFormat(), &AMediaFormat_delete);
+ }
+ if (!mDstFormat) {
+ mDstFormat = std::shared_ptr<AMediaFormat>(CreateDstFormat(), &AMediaFormat_delete);
+ }
+ logSession(time, reason, status, mSrcFormat.get(), mDstFormat.get());
+ }
+
+ void logSessionFinished(const std::chrono::steady_clock::time_point& time) {
+ logSession(time, Reason::FINISHED, 0);
+ }
+
+ void logSessionFailed(const std::chrono::steady_clock::time_point& time) {
+ logSession(time, Reason::ERROR, AMEDIA_ERROR_UNKNOWN);
+ }
+
+ int logCount() const { return mLoggedAtoms.size(); }
+
+ void validateLatestAtom(Reason reason, int status, bool passthrough = false) {
+ const SessionEndedAtom& atom = mLoggedAtoms.back();
+
+ EXPECT_EQ(atom.atomCode, android::media::stats::MEDIA_TRANSCODING_SESSION_ENDED);
+ EXPECT_EQ(atom.reason, static_cast<int>(reason));
+ EXPECT_EQ(atom.callingUid, kDefaultCallingUid);
+ EXPECT_EQ(atom.status, status);
+ EXPECT_EQ(atom.srcWidth, kDefaultSrcWidth);
+ EXPECT_EQ(atom.srcHeight, kDefaultSrcHeight);
+ EXPECT_EQ(atom.srcMime, kDefaultSrcMime);
+ EXPECT_EQ(atom.srcProfile, kDefaultSrcProfile);
+ EXPECT_EQ(atom.srcLevel, kDefaultSrcLevel);
+ EXPECT_EQ(atom.srcFps, kDefaultSrcFps);
+ EXPECT_EQ(atom.srcDurationMs, kDefaultSrcDurationUs / 1000);
+ EXPECT_FALSE(atom.srcIsHdr);
+ EXPECT_EQ(atom.dstWidth, passthrough ? kDefaultSrcWidth : kDefaultDstWidth);
+ EXPECT_EQ(atom.dstHeight, passthrough ? kDefaultSrcHeight : kDefaultDstHeight);
+ EXPECT_EQ(atom.dstMime, passthrough ? "passthrough" : kDefaultDstMime);
+ EXPECT_FALSE(atom.dstIsHdr);
+
+ // Transcoder frame rate is only present on successful sessions.
+ if (status == AMEDIA_OK) {
+ std::chrono::duration<double> seconds{kDefaultTranscodeDuration};
+ const int32_t transcoderFps =
+ static_cast<int32_t>(kDefaultSrcFrameCount / seconds.count());
+ EXPECT_EQ(atom.transcoderFps, transcoderFps);
+ } else {
+ EXPECT_EQ(atom.transcoderFps, -1);
+ }
+ }
+
+ void TearDown() override { ALOGI("TranscodingLoggerTest tear down"); }
+ ~TranscodingLoggerTest() { ALOGD("TranscodingLoggerTest destroyed"); }
+
+ std::shared_ptr<TranscodingLogger> mLogger;
+ std::vector<SessionEndedAtom> mLoggedAtoms;
+
+ std::shared_ptr<AMediaFormat> mSrcFormat;
+ std::shared_ptr<AMediaFormat> mDstFormat;
+};
+
+TEST_F(TranscodingLoggerTest, TestDailyLogQuota) {
+ ALOGD("TestDailyLogQuota");
+ auto start = std::chrono::steady_clock::now();
+
+ EXPECT_LT(TranscodingLogger::kMaxSuccessfulAtomsPerDay, TranscodingLogger::kMaxAtomsPerDay);
+
+ // 1. Check that the first kMaxSuccessfulAtomsPerDay successful atoms are logged.
+ for (int i = 0; i < TranscodingLogger::kMaxSuccessfulAtomsPerDay; ++i) {
+ logSessionFinished(start + std::chrono::seconds{i});
+ EXPECT_EQ(logCount(), i + 1);
+ }
+
+ // 2. Check that subsequent successful atoms within the same 24h interval are not logged.
+ for (int i = 1; i < 24; ++i) {
+ logSessionFinished(start + std::chrono::hours{i});
+ EXPECT_EQ(logCount(), TranscodingLogger::kMaxSuccessfulAtomsPerDay);
+ }
+
+ // 3. Check that failed atoms are logged up to kMaxAtomsPerDay.
+ for (int i = TranscodingLogger::kMaxSuccessfulAtomsPerDay;
+ i < TranscodingLogger::kMaxAtomsPerDay; ++i) {
+ logSessionFailed(start + std::chrono::seconds{i});
+ EXPECT_EQ(logCount(), i + 1);
+ }
+
+ // 4. Check that subsequent failed atoms within the same 24h interval are not logged.
+ for (int i = 1; i < 24; ++i) {
+ logSessionFailed(start + std::chrono::hours{i});
+ EXPECT_EQ(logCount(), TranscodingLogger::kMaxAtomsPerDay);
+ }
+
+ // 5. Check that failed and successful atoms are logged again after 24h.
+ logSessionFinished(start + std::chrono::hours{24});
+ EXPECT_EQ(logCount(), TranscodingLogger::kMaxAtomsPerDay + 1);
+
+ logSessionFailed(start + std::chrono::hours{24} + std::chrono::seconds{1});
+ EXPECT_EQ(logCount(), TranscodingLogger::kMaxAtomsPerDay + 2);
+}
+
+TEST_F(TranscodingLoggerTest, TestNullFormats) {
+ ALOGD("TestNullFormats");
+ auto srcFormat = std::shared_ptr<AMediaFormat>(CreateSrcFormat(), &AMediaFormat_delete);
+ auto dstFormat = std::shared_ptr<AMediaFormat>(CreateDstFormat(), &AMediaFormat_delete);
+ auto now = std::chrono::steady_clock::now();
+
+ // Source format null, should not log.
+ logSession(now, Reason::FINISHED, AMEDIA_OK, nullptr /*srcFormat*/, dstFormat.get());
+ EXPECT_EQ(logCount(), 0);
+
+ // Both formats null, should not log.
+ logSession(now, Reason::FINISHED, AMEDIA_OK, nullptr /*srcFormat*/, nullptr /*dstFormat*/);
+ EXPECT_EQ(logCount(), 0);
+
+ // Destination format null (passthrough mode), should log.
+ logSession(now, Reason::FINISHED, AMEDIA_OK, srcFormat.get(), nullptr /*dstFormat*/);
+ EXPECT_EQ(logCount(), 1);
+ validateLatestAtom(Reason::FINISHED, AMEDIA_OK, true /*passthrough*/);
+}
+
+TEST_F(TranscodingLoggerTest, TestAtomContentCorrectness) {
+ ALOGD("TestAtomContentCorrectness");
+ auto now = std::chrono::steady_clock::now();
+
+ // Log and validate a failure.
+ logSession(now, Reason::ERROR, AMEDIA_ERROR_MALFORMED);
+ EXPECT_EQ(logCount(), 1);
+ validateLatestAtom(Reason::ERROR, AMEDIA_ERROR_MALFORMED);
+
+ // Log and validate a success.
+ logSession(now, Reason::FINISHED, AMEDIA_OK);
+ EXPECT_EQ(logCount(), 2);
+ validateLatestAtom(Reason::FINISHED, AMEDIA_OK);
+}
+
+} // namespace android
diff --git a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
index 2e9daee..560d1fe 100644
--- a/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
+++ b/media/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
@@ -118,12 +118,12 @@
class TestTranscoder : public TranscoderInterface {
public:
- TestTranscoder() : mLastError(TranscodingErrorCode::kUnknown), mGeneration(0) {}
+ TestTranscoder() : mGeneration(0) {}
virtual ~TestTranscoder() {}
// TranscoderInterface
void start(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& /*request*/,
+ const TranscodingRequestParcel& /*request*/, uid_t /*callingUid*/,
const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
append(Start(clientId, sessionId));
}
@@ -131,7 +131,7 @@
append(Pause(clientId, sessionId));
}
void resume(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& /*request*/,
+ const TranscodingRequestParcel& /*request*/, uid_t /*callingUid*/,
const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) override {
append(Resume(clientId, sessionId));
}
@@ -152,19 +152,6 @@
mGeneration++;
}
- TranscodingErrorCode getLastError() {
- std::scoped_lock lock{mLock};
- // Clear last error.
- TranscodingErrorCode result = mLastError;
- mLastError = TranscodingErrorCode::kNoError;
- return result;
- }
-
- int32_t getGeneration() {
- std::scoped_lock lock{mLock};
- return mGeneration;
- }
-
struct Event {
enum { NoEvent, Start, Pause, Resume, Stop, Finished, Failed, Abandon } type;
ClientIdType clientId;
@@ -195,7 +182,7 @@
// Error is sticky, non-error event will not erase it, only getLastError()
// clears last error.
if (err != TranscodingErrorCode::kNoError) {
- mLastError = err;
+ mLastErrorQueue.push_back(err);
}
mCondition.notify_one();
}
@@ -218,12 +205,27 @@
return mPoppedEvent;
}
+ TranscodingErrorCode getLastError() {
+ std::scoped_lock lock{mLock};
+ if (mLastErrorQueue.empty()) {
+ return TranscodingErrorCode::kNoError;
+ }
+ TranscodingErrorCode err = mLastErrorQueue.front();
+ mLastErrorQueue.pop_front();
+ return err;
+ }
+
+ int32_t getGeneration() {
+ std::scoped_lock lock{mLock};
+ return mGeneration;
+ }
+
private:
std::mutex mLock;
std::condition_variable mCondition;
Event mPoppedEvent;
std::list<Event> mEventQueue;
- TranscodingErrorCode mLastError;
+ std::list<TranscodingErrorCode> mLastErrorQueue;
int32_t mGeneration;
};
@@ -291,16 +293,21 @@
mUidPolicy.reset(new TestUidPolicy());
mResourcePolicy.reset(new TestResourcePolicy());
mThermalPolicy.reset(new TestThermalPolicy());
+ // Override default burst params with shorter values for testing.
+ TranscodingSessionController::ControllerConfig config = {
+ .pacerBurstThresholdMs = 500,
+ .pacerBurstCountQuota = 10,
+ .pacerBurstTimeQuotaSeconds = 3,
+ };
mController.reset(new TranscodingSessionController(
- [this](const std::shared_ptr<TranscoderCallbackInterface>& /*cb*/,
- int64_t /*heartBeatIntervalUs*/) {
+ [this](const std::shared_ptr<TranscoderCallbackInterface>& /*cb*/) {
// Here we require that the SessionController clears out all its refcounts of
// the transcoder object when it calls create.
EXPECT_EQ(mTranscoder.use_count(), 1);
mTranscoder->onCreated();
return mTranscoder;
},
- mUidPolicy, mResourcePolicy, mThermalPolicy));
+ mUidPolicy, mResourcePolicy, mThermalPolicy, &config));
mUidPolicy->setCallback(mController);
// Set priority only, ignore other fields for now.
@@ -328,6 +335,40 @@
EXPECT_EQ(mTranscoder.use_count(), 2);
}
+ void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess,
+ bool pauseLastSuccessSession = false) {
+ for (int i = 0; i < numSubmits; i++) {
+ mController->submit(CLIENT(0), SESSION(i), UID(0), UID(0),
+ mRealtimeRequest, mClientCallback0);
+ }
+ for (int i = 0; i < expectedSuccess; i++) {
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(i)));
+ if ((i == expectedSuccess - 1) && pauseLastSuccessSession) {
+ // Insert a 3-second pause into the last successfully running session.
+ mController->onThrottlingStarted();
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(i)));
+ sleep(3);
+ mController->onThrottlingStopped();
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(i)));
+ }
+ usleep(sessionDurationMs * 1000);
+ // Test half of Finish and half of Error, both should be counted as burst runs.
+ if (i & 1) {
+ mController->onFinish(CLIENT(0), SESSION(i));
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Finished(CLIENT(0), SESSION(i)));
+ } else {
+ mController->onError(CLIENT(0), SESSION(i), TranscodingErrorCode::kUnknown);
+ EXPECT_EQ(mTranscoder->popEvent(100000),
+ TestTranscoder::Failed(CLIENT(0), SESSION(i)));
+ EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
+ }
+ }
+ for (int i = expectedSuccess; i < numSubmits; i++) {
+ EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(i)));
+ EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kDroppedByService);
+ }
+ }
+
std::shared_ptr<TestTranscoder> mTranscoder;
std::shared_ptr<TestUidPolicy> mUidPolicy;
std::shared_ptr<TestResourcePolicy> mResourcePolicy;
@@ -349,32 +390,32 @@
// Submit offline session to CLIENT(0) in UID(0).
// Should start immediately (because this is the only session).
- mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mOfflineRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), 0));
// Submit real-time session to CLIENT(0).
// Should pause offline session and start new session, even if UID(0) is not on top.
- mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
// Submit real-time session to CLIENT(0), should be queued after the previous session.
- mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Submit real-time session to CLIENT(1) in same uid, should be queued after the previous
// session.
- mController->submit(CLIENT(1), SESSION(0), UID(0), mRealtimeRequest, mClientCallback1);
+ mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mRealtimeRequest, mClientCallback1);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Submit real-time session to CLIENT(2) in UID(1).
// Should pause previous session and start new session, because UID(1) is (has been) top.
- mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+ mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
// Submit offline session, shouldn't generate any event.
- mController->submit(CLIENT(2), SESSION(1), UID(1), mOfflineRequest, mClientCallback2);
+ mController->submit(CLIENT(2), SESSION(1), UID(2), UID(1), mOfflineRequest, mClientCallback2);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Bring UID(0) to top.
@@ -388,15 +429,15 @@
ALOGD("TestCancelSession");
// Submit real-time session SESSION(0), should start immediately.
- mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
// Submit real-time session SESSION(1), should not start.
- mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Submit offline session SESSION(2), should not start.
- mController->submit(CLIENT(0), SESSION(2), UID(0), mOfflineRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mOfflineRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Cancel queued real-time session.
@@ -408,7 +449,7 @@
EXPECT_TRUE(mController->cancel(CLIENT(0), SESSION(2)));
// Submit offline session SESSION(3), shouldn't cause any event.
- mController->submit(CLIENT(0), SESSION(3), UID(0), mOfflineRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(3), UID(0), UID(0), mOfflineRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Cancel running real-time session SESSION(0).
@@ -420,7 +461,7 @@
// Submit real-time session SESSION(4), offline SESSION(3) should pause and SESSION(4)
// should start.
- mController->submit(CLIENT(0), SESSION(4), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(4), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(3)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(4)));
@@ -438,16 +479,16 @@
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Submit offline session SESSION(0), should start immediately.
- mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mOfflineRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
// Submit real-time session SESSION(1), should pause offline session and start immediately.
- mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
// Submit real-time session SESSION(2), should not start.
- mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Finish when the session never started, should be ignored.
@@ -458,7 +499,7 @@
mUidPolicy->setTop(UID(1));
// Submit real-time session to CLIENT(1) in UID(1), should pause previous session and start
// new session.
- mController->submit(CLIENT(1), SESSION(0), UID(1), mRealtimeRequest, mClientCallback1);
+ mController->submit(CLIENT(1), SESSION(0), UID(1), UID(1), mRealtimeRequest, mClientCallback1);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
@@ -495,16 +536,16 @@
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Submit offline session SESSION(0), should start immediately.
- mController->submit(CLIENT(0), SESSION(0), UID(0), mOfflineRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mOfflineRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
// Submit real-time session SESSION(1), should pause offline session and start immediately.
- mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
// Submit real-time session SESSION(2), should not start.
- mController->submit(CLIENT(0), SESSION(2), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Fail when the session never started, should be ignored.
@@ -515,7 +556,7 @@
mUidPolicy->setTop(UID(1));
// Submit real-time session to CLIENT(1) in UID(1), should pause previous session and start
// new session.
- mController->submit(CLIENT(1), SESSION(0), UID(1), mRealtimeRequest, mClientCallback1);
+ mController->submit(CLIENT(1), SESSION(0), UID(1), UID(1), mRealtimeRequest, mClientCallback1);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(1)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(1), SESSION(0)));
@@ -523,15 +564,18 @@
// Should still be propagated to client, but shouldn't trigger any new start.
mController->onError(CLIENT(0), SESSION(1), TranscodingErrorCode::kUnknown);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(1)));
+ EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
// Fail running real-time session, should start next real-time session in queue.
mController->onError(CLIENT(1), SESSION(0), TranscodingErrorCode::kUnknown);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(1), SESSION(0)));
+ EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
// Fail running real-time session, should resume next session (offline session) in queue.
mController->onError(CLIENT(0), SESSION(2), TranscodingErrorCode::kUnknown);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Failed(CLIENT(0), SESSION(2)));
+ EXPECT_EQ(mTranscoder->getLastError(), TranscodingErrorCode::kUnknown);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(0), SESSION(0)));
// Fail running offline session, and test error code propagation.
@@ -549,11 +593,11 @@
// Start with unspecified top UID.
// Submit real-time session to CLIENT(0), session should start immediately.
- mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
// Submit offline session to CLIENT(0), should not start.
- mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+ mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Move UID(1) to top.
@@ -562,7 +606,7 @@
// Submit real-time session to CLIENT(2) in different uid UID(1).
// Should pause previous session and start new session.
- mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+ mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
@@ -591,11 +635,11 @@
// Start with unspecified top UID.
// Submit real-time session to CLIENT(0), session should start immediately.
- mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
// Submit offline session to CLIENT(0), should not start.
- mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+ mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Set UID(0), UID(1) to top set.
@@ -605,7 +649,7 @@
// Submit real-time session to CLIENT(2) in different uid UID(1).
// UID(0) should pause and UID(1) should start.
- mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+ mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
@@ -647,12 +691,12 @@
// Start with unspecified top UID.
// Submit real-time session to CLIENT(0), session should start immediately.
mRealtimeRequest.clientPid = PID(0);
- mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
// Submit offline session to CLIENT(0), should not start.
mOfflineRequest.clientPid = PID(0);
- mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+ mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Move UID(1) to top.
@@ -662,7 +706,7 @@
// Submit real-time session to CLIENT(2) in different uid UID(1).
// Should pause previous session and start new session.
mRealtimeRequest.clientPid = PID(1);
- mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+ mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
@@ -719,7 +763,7 @@
// Submit real-time session to CLIENT(3) in UID(2), session shouldn't start due to no resource.
mRealtimeRequest.clientPid = PID(2);
- mController->submit(CLIENT(3), SESSION(0), UID(2), mRealtimeRequest, mClientCallback3);
+ mController->submit(CLIENT(3), SESSION(0), UID(3), UID(2), mRealtimeRequest, mClientCallback3);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Signal resource available, CLIENT(3)'s session should start.
@@ -734,12 +778,12 @@
// Start with unspecified top UID.
// Submit real-time session to CLIENT(0), session should start immediately.
mRealtimeRequest.clientPid = PID(0);
- mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
// Submit offline session to CLIENT(0), should not start.
mOfflineRequest.clientPid = PID(0);
- mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+ mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Move UID(1) to top.
@@ -749,7 +793,7 @@
// Submit real-time session to CLIENT(2) in different uid UID(1).
// Should pause previous session and start new session.
mRealtimeRequest.clientPid = PID(1);
- mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+ mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
@@ -788,7 +832,7 @@
mUidPolicy->setTop(UID(2));
// Submit real-time session to CLIENT(3) in UID(2), session shouldn't start during throttling.
mRealtimeRequest.clientPid = PID(2);
- mController->submit(CLIENT(3), SESSION(0), UID(2), mRealtimeRequest, mClientCallback3);
+ mController->submit(CLIENT(3), SESSION(0), UID(3), UID(2), mRealtimeRequest, mClientCallback3);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Throttling stops, CLIENT(3)'s session should start.
mController->onThrottlingStopped();
@@ -802,12 +846,12 @@
// Start with unspecified top UID.
// Submit real-time session to CLIENT(0), session should start immediately.
mRealtimeRequest.clientPid = PID(0);
- mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
// Submit offline session to CLIENT(0), should not start.
mOfflineRequest.clientPid = PID(0);
- mController->submit(CLIENT(1), SESSION(0), UID(0), mOfflineRequest, mClientCallback1);
+ mController->submit(CLIENT(1), SESSION(0), UID(1), UID(0), mOfflineRequest, mClientCallback1);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::NoEvent);
// Move UID(1) to top.
@@ -817,7 +861,7 @@
// Submit real-time session to CLIENT(2) in different uid UID(1).
// Should pause previous session and start new session.
mRealtimeRequest.clientPid = PID(1);
- mController->submit(CLIENT(2), SESSION(0), UID(1), mRealtimeRequest, mClientCallback2);
+ mController->submit(CLIENT(2), SESSION(0), UID(2), UID(1), mRealtimeRequest, mClientCallback2);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(0)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(2), SESSION(0)));
@@ -854,29 +898,35 @@
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Resume(CLIENT(2), SESSION(0)));
}
-TEST_F(TranscodingSessionControllerTest, TestTranscoderWatchdogTimeout) {
+TEST_F(TranscodingSessionControllerTest, TestTranscoderWatchdogNoHeartbeat) {
ALOGD("TestTranscoderWatchdogTimeout");
// Submit session to CLIENT(0) in UID(0).
// Should start immediately (because this is the only session).
- mController->submit(CLIENT(0), SESSION(0), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(0), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(0)));
- int32_t expectedGen = 2;
// Test 1: If not sending keep-alive at all, timeout after 3 seconds.
- expectTimeout(CLIENT(0), SESSION(0), expectedGen++);
+ expectTimeout(CLIENT(0), SESSION(0), 2);
+}
+TEST_F(TranscodingSessionControllerTest, TestTranscoderWatchdogHeartbeat) {
// Test 2: No timeout as long as keep-alive coming; timeout after keep-alive stops.
- mController->submit(CLIENT(0), SESSION(1), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(1), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(1)));
+
for (int i = 0; i < 5; i++) {
EXPECT_EQ(mTranscoder->popEvent(1000000), TestTranscoder::NoEvent);
mController->onHeartBeat(CLIENT(0), SESSION(1));
}
- expectTimeout(CLIENT(0), SESSION(1), expectedGen++);
+ expectTimeout(CLIENT(0), SESSION(1), 2);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderWatchdogDuringPause) {
+ int expectedGen = 2;
// Test 3a: No timeout for paused session even if no keep-alive is sent.
- mController->submit(CLIENT(0), SESSION(2), UID(0), mOfflineRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(2), UID(0), UID(0), mOfflineRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(2)));
// Trigger a pause by sending a resource lost.
mController->onResourceLost(CLIENT(0), SESSION(2));
@@ -886,12 +936,12 @@
expectTimeout(CLIENT(0), SESSION(2), expectedGen++);
// Test 3b: No timeout for paused session even if no keep-alive is sent.
- mController->submit(CLIENT(0), SESSION(3), UID(0), mOfflineRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(3), UID(0), UID(0), mOfflineRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(3)));
// Let the session run almost to timeout, to test timeout reset after pause.
EXPECT_EQ(mTranscoder->popEvent(2900000), TestTranscoder::NoEvent);
// Trigger a pause by submitting a higher-priority request.
- mController->submit(CLIENT(0), SESSION(4), UID(0), mRealtimeRequest, mClientCallback0);
+ mController->submit(CLIENT(0), SESSION(4), UID(0), UID(0), mRealtimeRequest, mClientCallback0);
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Pause(CLIENT(0), SESSION(3)));
EXPECT_EQ(mTranscoder->popEvent(), TestTranscoder::Start(CLIENT(0), SESSION(4)));
// Finish the higher-priority session, lower-priority session should resume,
@@ -902,4 +952,25 @@
expectTimeout(CLIENT(0), SESSION(3), expectedGen++);
}
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerOverCountOnly) {
+ ALOGD("TestTranscoderPacerOverCountOnly");
+ testPacerHelper(12 /*numSubmits*/, 100 /*sessionDurationMs*/, 12 /*expectedSuccess*/);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerOverTimeOnly) {
+ ALOGD("TestTranscoderPacerOverTimeOnly");
+ testPacerHelper(5 /*numSubmits*/, 1000 /*sessionDurationMs*/, 5 /*expectedSuccess*/);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerOverQuota) {
+ ALOGD("TestTranscoderPacerOverQuota");
+ testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/);
+}
+
+TEST_F(TranscodingSessionControllerTest, TestTranscoderPacerWithPause) {
+ ALOGD("TestTranscoderPacerDuringPause");
+ testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/,
+ true /*pauseLastSuccessSession*/);
+}
+
} // namespace android
diff --git a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
index 5db9258..3cbf1dd 100644
--- a/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
+++ b/media/libmediatranscoding/tests/build_and_run_all_unit_tests.sh
@@ -32,3 +32,7 @@
echo "testing TranscodingSessionController"
#adb shell /data/nativetest64/TranscodingSessionController_tests/TranscodingSessionController_tests
adb shell /data/nativetest/TranscodingSessionController_tests/TranscodingSessionController_tests
+
+echo "testing TranscodingLogger"
+#adb shell /data/nativetest64/TranscodingLogger_tests/TranscodingLogger_tests
+adb shell /data/nativetest/TranscodingLogger_tests/TranscodingLogger_tests
diff --git a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
index 0efe85d..88c1c42 100644
--- a/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
+++ b/media/libmediatranscoding/transcoder/MediaSampleWriter.cpp
@@ -237,8 +237,8 @@
}
std::chrono::microseconds updateInterval(mHeartBeatIntervalUs);
- std::chrono::system_clock::time_point nextUpdateTime =
- std::chrono::system_clock::now() + updateInterval;
+ std::chrono::steady_clock::time_point nextUpdateTime =
+ std::chrono::steady_clock::now() + updateInterval;
while (true) {
if (trackEosCount >= mTracks.size()) {
diff --git a/media/libmediatranscoding/transcoder/NdkCommon.cpp b/media/libmediatranscoding/transcoder/NdkCommon.cpp
index f5c9594..fb909b2 100644
--- a/media/libmediatranscoding/transcoder/NdkCommon.cpp
+++ b/media/libmediatranscoding/transcoder/NdkCommon.cpp
@@ -90,4 +90,29 @@
DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(float, Float);
DEFINE_SET_DEFAULT_FORMAT_VALUE_FUNC(int32_t, Int32);
-} // namespace AMediaFormatUtils
\ No newline at end of file
+// Determines whether a track format describes HDR video content or not. The
+// logic is based on isHdr() in libstagefright/Utils.cpp.
+bool VideoIsHdr(AMediaFormat* format) {
+ // If VUI signals HDR content, this internal flag is set by the extractor.
+ int32_t isHdr;
+ if (AMediaFormat_getInt32(format, "android._is-hdr", &isHdr)) {
+ return isHdr;
+ }
+
+ // If container supplied HDR static info without transfer set, assume HDR.
+ const char* hdrInfo;
+ int32_t transfer;
+ if ((AMediaFormat_getString(format, AMEDIAFORMAT_KEY_HDR_STATIC_INFO, &hdrInfo) ||
+ AMediaFormat_getString(format, "hdr10-plus-info", &hdrInfo)) &&
+ !AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_TRANSFER, &transfer)) {
+ return true;
+ }
+
+ // Otherwise, check if an HDR transfer function is set.
+ if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_TRANSFER, &transfer)) {
+ return transfer == COLOR_TRANSFER_ST2084 || transfer == COLOR_TRANSFER_HLG;
+ }
+
+ return false;
+}
+} // namespace AMediaFormatUtils
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
index 04a513e..acf5f6c 100644
--- a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -51,32 +51,6 @@
// Default frame rate.
static constexpr int32_t kDefaultFrameRate = 30;
-// Determines whether a track format describes HDR video content or not. The
-// logic is based on isHdr() in libstagefright/Utils.cpp.
-static bool isHdr(AMediaFormat* format) {
- // If VUI signals HDR content, this internal flag is set by the extractor.
- int32_t isHdr;
- if (AMediaFormat_getInt32(format, "android._is-hdr", &isHdr)) {
- return isHdr;
- }
-
- // If container supplied HDR static info without transfer set, assume HDR.
- const char* hdrInfo;
- int32_t transfer;
- if ((AMediaFormat_getString(format, AMEDIAFORMAT_KEY_HDR_STATIC_INFO, &hdrInfo) ||
- AMediaFormat_getString(format, "hdr10-plus-info", &hdrInfo)) &&
- !AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_TRANSFER, &transfer)) {
- return true;
- }
-
- // Otherwise, check if an HDR transfer function is set.
- if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_TRANSFER, &transfer)) {
- return transfer == COLOR_TRANSFER_ST2084 || transfer == COLOR_TRANSFER_HLG;
- }
-
- return false;
-}
-
template <typename T>
void VideoTrackTranscoder::BlockingQueue<T>::push(T const& value, bool front) {
{
@@ -347,7 +321,7 @@
}
// Request decoder to convert HDR content to SDR.
- const bool sourceIsHdr = isHdr(mSourceFormat.get());
+ const bool sourceIsHdr = VideoIsHdr(mSourceFormat.get());
if (sourceIsHdr) {
AMediaFormat_setInt32(decoderFormat.get(),
TBD_AMEDIACODEC_PARAMETER_KEY_COLOR_TRANSFER_REQUEST,
diff --git a/media/libmediatranscoding/transcoder/include/media/NdkCommon.h b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
index cc3399a..a7ed6a7 100644
--- a/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
+++ b/media/libmediatranscoding/transcoder/include/media/NdkCommon.h
@@ -88,5 +88,7 @@
bool SetDefaultFormatValueFloat(const char* key, AMediaFormat* format, float value);
bool SetDefaultFormatValueInt32(const char* key, AMediaFormat* format, int32_t value);
+bool VideoIsHdr(AMediaFormat* format);
+
} // namespace AMediaFormatUtils
#endif // ANDROID_MEDIA_TRANSCODING_NDK_COMMON_H
diff --git a/media/libnblog/ReportPerformance.cpp b/media/libnblog/ReportPerformance.cpp
index aa678ba..4c78b01 100644
--- a/media/libnblog/ReportPerformance.cpp
+++ b/media/libnblog/ReportPerformance.cpp
@@ -92,8 +92,8 @@
(*dataJson)["threadNum"] = item.first;
root.append(*dataJson);
}
- Json::StyledWriter writer;
- std::string rootStr = writer.write(root);
+ Json::StreamWriterBuilder factory;
+ std::string rootStr = Json::writeString(factory, root);
write(fd, rootStr.c_str(), rootStr.size());
}
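For context, the StyledWriter class removed here is deprecated in jsoncpp; a minimal sketch of the replacement API used above (the helper name and the indentation setting are illustrative):

    #include <json/json.h>
    #include <string>

    // Serialize a Json::Value with the non-deprecated StreamWriterBuilder API.
    static std::string toJsonString(const Json::Value& root) {
        Json::StreamWriterBuilder factory;
        factory["indentation"] = "   ";  // optional; the builder defaults to tab indentation
        return Json::writeString(factory, root);
    }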
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 3e5f9ea..7fa870a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -118,6 +118,11 @@
static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on"; /* 0..n */
static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off"; /* 0..n */
static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame"; /* 0..n */
+static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
+static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
+static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
+static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
+static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
// the kCodecRecent* fields appear only in getMetrics() results
static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max"; /* in us */
@@ -695,6 +700,10 @@
mHavePendingInputBuffers(false),
mCpuBoostRequested(false),
mLatencyUnknown(0),
+ mBytesEncoded(0),
+ mEarliestEncodedPtsUs(INT64_MAX),
+ mLatestEncodedPtsUs(INT64_MIN),
+ mFramesEncoded(0),
mNumLowLatencyEnables(0),
mNumLowLatencyDisables(0),
mIsLowLatencyModeOn(false),
@@ -802,6 +811,18 @@
mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
}
+ if (mBytesEncoded) {
+ Mutex::Autolock al(mOutputStatsLock);
+
+ mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
+ int64_t duration = 0;
+ if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
+ duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
+ }
+ mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
+ mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
+ }
+
{
Mutex::Autolock al(mLatencyLock);
mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
@@ -1005,10 +1026,34 @@
}
// when we get a buffer back from the codec
-void MediaCodec::statsBufferReceived(int64_t presentationUs) {
+void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
CHECK_NE(mState, UNINITIALIZED);
+ if (mIsVideo && (mFlags & kFlagIsEncoder)) {
+ int32_t flags = 0;
+ (void) buffer->meta()->findInt32("flags", &flags);
+
+ // Some of these frames we don't want to count: codec-config buffers and a
+ // standalone EOS buffer, which carries an invalid timestamp.
+ if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
+ mBytesEncoded += buffer->size();
+ mFramesEncoded++;
+
+ Mutex::Autolock al(mOutputStatsLock);
+ int64_t timeUs = 0;
+ if (buffer->meta()->findInt64("timeUs", &timeUs)) {
+ if (timeUs > mLatestEncodedPtsUs) {
+ mLatestEncodedPtsUs = timeUs;
+ }
+ // can't chain as an else-if or this never triggers
+ if (timeUs < mEarliestEncodedPtsUs) {
+ mEarliestEncodedPtsUs = timeUs;
+ }
+ }
+ }
+ }
+
// mutex access to mBuffersInFlight and other stats
Mutex::Autolock al(mLatencyLock);
@@ -1064,7 +1109,7 @@
return;
}
- // nowNs start our calculations
+ // now start our calculations
const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;
@@ -1337,6 +1382,17 @@
ALOGE("Invalid size(s), width=%d, height=%d", mVideoWidth, mVideoHeight);
return BAD_VALUE;
}
+ } else {
+ if (mMetricsHandle != 0) {
+ int32_t channelCount;
+ if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecChannelCount, channelCount);
+ }
+ int32_t sampleRate;
+ if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecSampleRate, sampleRate);
+ }
+ }
}
updateLowLatency(format);
@@ -2183,14 +2239,15 @@
int64_t timeUs;
CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
- statsBufferReceived(timeUs);
-
response->setInt64("timeUs", timeUs);
int32_t flags;
CHECK(buffer->meta()->findInt32("flags", &flags));
response->setInt32("flags", flags);
+
+ statsBufferReceived(timeUs, buffer);
+
response->postReply(replyID);
}
@@ -4337,13 +4394,13 @@
msg->setInt64("timeUs", timeUs);
- statsBufferReceived(timeUs);
-
int32_t flags;
CHECK(buffer->meta()->findInt32("flags", &flags));
msg->setInt32("flags", flags);
+ statsBufferReceived(timeUs, buffer);
+
msg->post();
}
}
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index a28d479..5f64686 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -528,6 +528,14 @@
std::deque<BufferFlightTiming_t> mBuffersInFlight;
Mutex mLatencyLock;
int64_t mLatencyUnknown; // buffers for which we couldn't calculate latency
+
+ Mutex mOutputStatsLock;
+ int64_t mBytesEncoded = 0;
+ int64_t mEarliestEncodedPtsUs = INT64_MAX;
+ int64_t mLatestEncodedPtsUs = INT64_MIN;
+ int32_t mFramesEncoded = 0;
+
+
int64_t mNumLowLatencyEnables; // how many times low latency mode is enabled
int64_t mNumLowLatencyDisables; // how many times low latency mode is disabled
bool mIsLowLatencyModeOn; // is low latency mode on currently
@@ -544,7 +552,7 @@
sp<BatteryChecker> mBatteryChecker;
void statsBufferSent(int64_t presentationUs);
- void statsBufferReceived(int64_t presentationUs);
+ void statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer);
enum {
// the default shape of our latency histogram buckets
diff --git a/media/libstagefright/include/media/stagefright/MediaErrors.h b/media/libstagefright/include/media/stagefright/MediaErrors.h
index cfd5608..d1df2ca 100644
--- a/media/libstagefright/include/media/stagefright/MediaErrors.h
+++ b/media/libstagefright/include/media/stagefright/MediaErrors.h
@@ -94,12 +94,13 @@
ERROR_DRM_PROVISIONING_CERTIFICATE = DRM_ERROR_BASE - 31,
ERROR_DRM_PROVISIONING_CONFIG = DRM_ERROR_BASE - 32,
ERROR_DRM_PROVISIONING_PARSE = DRM_ERROR_BASE - 33,
- ERROR_DRM_PROVISIONING_RETRY = DRM_ERROR_BASE - 34,
- ERROR_DRM_SECURE_STOP_RELEASE = DRM_ERROR_BASE - 35,
- ERROR_DRM_STORAGE_READ = DRM_ERROR_BASE - 36,
- ERROR_DRM_STORAGE_WRITE = DRM_ERROR_BASE - 37,
- ERROR_DRM_ZERO_SUBSAMPLES = DRM_ERROR_BASE - 38,
- ERROR_DRM_LAST_USED_ERRORCODE = DRM_ERROR_BASE - 38,
+ ERROR_DRM_PROVISIONING_REQUEST_REJECTED = DRM_ERROR_BASE - 34,
+ ERROR_DRM_PROVISIONING_RETRY = DRM_ERROR_BASE - 35,
+ ERROR_DRM_SECURE_STOP_RELEASE = DRM_ERROR_BASE - 36,
+ ERROR_DRM_STORAGE_READ = DRM_ERROR_BASE - 37,
+ ERROR_DRM_STORAGE_WRITE = DRM_ERROR_BASE - 38,
+ ERROR_DRM_ZERO_SUBSAMPLES = DRM_ERROR_BASE - 39,
+ ERROR_DRM_LAST_USED_ERRORCODE = ERROR_DRM_ZERO_SUBSAMPLES,
ERROR_DRM_VENDOR_MAX = DRM_ERROR_BASE - 500,
ERROR_DRM_VENDOR_MIN = DRM_ERROR_BASE - 999,
@@ -202,6 +203,7 @@
STATUS_CASE(ERROR_DRM_PROVISIONING_CERTIFICATE);
STATUS_CASE(ERROR_DRM_PROVISIONING_CONFIG);
STATUS_CASE(ERROR_DRM_PROVISIONING_PARSE);
+ STATUS_CASE(ERROR_DRM_PROVISIONING_REQUEST_REJECTED);
STATUS_CASE(ERROR_DRM_PROVISIONING_RETRY);
STATUS_CASE(ERROR_DRM_SECURE_STOP_RELEASE);
STATUS_CASE(ERROR_DRM_STORAGE_READ);
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 59d74de..819e146 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -39,10 +39,9 @@
static std::atomic<int> curAudioHalPids = 0;
if (update) {
- audioHalPids[(curAudioHalPids + 1) % kNumAudioHalPidsVectors] = *pids;
- curAudioHalPids++;
+ audioHalPids[(curAudioHalPids++ + 1) % kNumAudioHalPidsVectors] = *pids;
} else {
- *pids = audioHalPids[curAudioHalPids];
+ *pids = audioHalPids[curAudioHalPids % kNumAudioHalPidsVectors];
}
}
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index ae9fc64..dacb758 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -85,15 +85,6 @@
#include "TypedLogger.h"
-#define VALUE_OR_FATAL(result) \
- ({ \
- auto _tmp = (result); \
- LOG_ALWAYS_FATAL_IF(!_tmp.ok(), \
- "Failed result (%d)", \
- _tmp.error()); \
- std::move(_tmp.value()); \
- })
-
// ----------------------------------------------------------------------------
// Note: the following macro is used for extremely verbose logging message. In
diff --git a/services/audiopolicy/common/managerdefinitions/include/Serializer.h b/services/audiopolicy/common/managerdefinitions/include/Serializer.h
index 48c4147..b70c595 100644
--- a/services/audiopolicy/common/managerdefinitions/include/Serializer.h
+++ b/services/audiopolicy/common/managerdefinitions/include/Serializer.h
@@ -21,5 +21,9 @@
namespace android {
status_t deserializeAudioPolicyFile(const char *fileName, AudioPolicyConfig *config);
+// In VTS mode all vendor extensions are ignored. This is done because
+// VTS tests are built using AOSP code and thus cannot use vendor overlays
+// of system libraries.
+status_t deserializeAudioPolicyFileForVts(const char *fileName, AudioPolicyConfig *config);
} // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 129f6f6..562c213 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -20,8 +20,8 @@
#include <memory>
#include <string>
#include <utility>
+#include <variant>
-#include <hidl/Status.h>
#include <libxml/parser.h>
#include <libxml/xinclude.h>
#include <media/convert.h>
@@ -36,12 +36,14 @@
namespace {
-// TODO(mnaganov): Consider finding an alternative for using HIDL code.
-using hardware::Return;
-using hardware::Status;
-using hardware::Void;
using utilities::convertTo;
+static inline bool maybeVendorExtension(const std::string& s) {
+ // Only checks whether the string starts with the "vendor prefix".
+ static const std::string vendorPrefix = "VX_";
+ return s.size() > vendorPrefix.size() && s.substr(0, vendorPrefix.size()) == vendorPrefix;
+}
+
template<typename E, typename C>
struct AndroidCollectionTraits {
typedef sp<E> Element;
@@ -187,7 +189,7 @@
struct GlobalConfigTraits
{
- typedef void Element;
+ typedef std::monostate Element;
static constexpr const char *tag = "globalConfiguration";
@@ -203,7 +205,7 @@
struct SurroundSoundTraits
{
- typedef void Element;
+ typedef std::monostate Element;
static constexpr const char *tag = "surroundSound";
@@ -226,14 +228,15 @@
class PolicySerializer
{
public:
- status_t deserialize(const char *configFile, AudioPolicyConfig *config);
+ status_t deserialize(const char *configFile, AudioPolicyConfig *config,
+ bool ignoreVendorExtensions = false);
template <class Trait>
status_t deserializeCollection(const xmlNode *cur,
typename Trait::Collection *collection,
typename Trait::PtrSerializingCtx serializingContext);
template <class Trait>
- Return<typename Trait::Element> deserialize(const xmlNode *cur,
+ std::variant<status_t, typename Trait::Element> deserialize(const xmlNode *cur,
typename Trait::PtrSerializingCtx serializingContext);
private:
@@ -242,6 +245,7 @@
typedef AudioPolicyConfig Element;
+ bool mIgnoreVendorExtensions = false;
std::string mChannelMasksSeparator = ",";
std::string mSamplingRatesSeparator = ",";
std::string mFlagsSeparator = "|";
@@ -307,14 +311,17 @@
}
for (; child != NULL; child = child->next) {
if (!xmlStrcmp(child->name, reinterpret_cast<const xmlChar*>(Trait::tag))) {
- auto element = deserialize<Trait>(child, serializingContext);
- if (element.isOk()) {
- status_t status = Trait::addElementToCollection(element, collection);
+ auto maybeElement = deserialize<Trait>(child, serializingContext);
+ if (maybeElement.index() == 1) {
+ status_t status = Trait::addElementToCollection(
+ std::get<1>(maybeElement), collection);
if (status != NO_ERROR) {
ALOGE("%s: could not add element to %s collection", __func__,
Trait::collectionTag);
return status;
}
+ } else if (mIgnoreVendorExtensions && std::get<status_t>(maybeElement) == NO_INIT) {
+ // Skip a vendor extension element.
} else {
return BAD_VALUE;
}
@@ -328,8 +335,8 @@
}
template<>
-Return<AudioGainTraits::Element> PolicySerializer::deserialize<AudioGainTraits>(const xmlNode *cur,
- AudioGainTraits::PtrSerializingCtx /*serializingContext*/)
+std::variant<status_t, AudioGainTraits::Element> PolicySerializer::deserialize<AudioGainTraits>(
+ const xmlNode *cur, AudioGainTraits::PtrSerializingCtx /*serializingContext*/)
{
using Attributes = AudioGainTraits::Attributes;
@@ -393,12 +400,13 @@
if (gain->getMode() != 0) {
return gain;
} else {
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
}
template<>
-Return<AudioProfileTraits::Element> PolicySerializer::deserialize<AudioProfileTraits>(
+std::variant<status_t, AudioProfileTraits::Element>
+PolicySerializer::deserialize<AudioProfileTraits>(
const xmlNode *cur, AudioProfileTraits::PtrSerializingCtx /*serializingContext*/)
{
using Attributes = AudioProfileTraits::Attributes;
@@ -407,6 +415,10 @@
std::string format = getXmlAttribute(cur, Attributes::format);
std::string channels = getXmlAttribute(cur, Attributes::channelMasks);
+ if (mIgnoreVendorExtensions && maybeVendorExtension(format)) {
+ ALOGI("%s: vendor extension format \"%s\" skipped", __func__, format.c_str());
+ return NO_INIT;
+ }
AudioProfileTraits::Element profile = new AudioProfile(formatFromString(format, gDynamicFormat),
channelMasksFromString(channels, mChannelMasksSeparator.c_str()),
samplingRatesFromString(samplingRates, mSamplingRatesSeparator.c_str()));
@@ -419,21 +431,21 @@
}
template<>
-Return<MixPortTraits::Element> PolicySerializer::deserialize<MixPortTraits>(const xmlNode *child,
- MixPortTraits::PtrSerializingCtx /*serializingContext*/)
+std::variant<status_t, MixPortTraits::Element> PolicySerializer::deserialize<MixPortTraits>(
+ const xmlNode *child, MixPortTraits::PtrSerializingCtx /*serializingContext*/)
{
using Attributes = MixPortTraits::Attributes;
std::string name = getXmlAttribute(child, Attributes::name);
if (name.empty()) {
ALOGE("%s: No %s found", __func__, Attributes::name);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
ALOGV("%s: %s %s=%s", __func__, MixPortTraits::tag, Attributes::name, name.c_str());
std::string role = getXmlAttribute(child, Attributes::role);
if (role.empty()) {
ALOGE("%s: No %s found", __func__, Attributes::role);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
ALOGV("%s: Role=%s", __func__, role.c_str());
audio_port_role_t portRole = (role == Attributes::roleSource) ?
@@ -444,7 +456,7 @@
AudioProfileTraits::Collection profiles;
status_t status = deserializeCollection<AudioProfileTraits>(child, &profiles, NULL);
if (status != NO_ERROR) {
- return Status::fromStatusT(status);
+ return status;
}
if (profiles.empty()) {
profiles.add(AudioProfile::createFullDynamic(gDynamicFormat));
@@ -476,7 +488,7 @@
AudioGainTraits::Collection gains;
status = deserializeCollection<AudioGainTraits>(child, &gains, NULL);
if (status != NO_ERROR) {
- return Status::fromStatusT(status);
+ return status;
}
mixPort->setGains(gains);
@@ -484,7 +496,7 @@
}
template<>
-Return<DevicePortTraits::Element> PolicySerializer::deserialize<DevicePortTraits>(
+std::variant<status_t, DevicePortTraits::Element> PolicySerializer::deserialize<DevicePortTraits>(
const xmlNode *cur, DevicePortTraits::PtrSerializingCtx /*serializingContext*/)
{
using Attributes = DevicePortTraits::Attributes;
@@ -493,30 +505,34 @@
std::string name = getXmlAttribute(cur, Attributes::tagName);
if (name.empty()) {
ALOGE("%s: No %s found", __func__, Attributes::tagName);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
ALOGV("%s: %s %s=%s", __func__, tag, Attributes::tagName, name.c_str());
std::string typeName = getXmlAttribute(cur, Attributes::type);
if (typeName.empty()) {
ALOGE("%s: no type for %s", __func__, name.c_str());
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
ALOGV("%s: %s %s=%s", __func__, tag, Attributes::type, typeName.c_str());
std::string role = getXmlAttribute(cur, Attributes::role);
if (role.empty()) {
ALOGE("%s: No %s found", __func__, Attributes::role);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
ALOGV("%s: %s %s=%s", __func__, tag, Attributes::role, role.c_str());
audio_port_role_t portRole = (role == Attributes::roleSource) ?
AUDIO_PORT_ROLE_SOURCE : AUDIO_PORT_ROLE_SINK;
+ if (mIgnoreVendorExtensions && maybeVendorExtension(typeName)) {
+ ALOGI("%s: vendor extension device type \"%s\" skipped", __func__, typeName.c_str());
+ return NO_INIT;
+ }
audio_devices_t type = AUDIO_DEVICE_NONE;
if (!DeviceConverter::fromString(typeName, type) ||
(!audio_is_input_device(type) && portRole == AUDIO_PORT_ROLE_SOURCE) ||
(!audio_is_output_devices(type) && portRole == AUDIO_PORT_ROLE_SINK)) {
ALOGW("%s: bad type %08x", __func__, type);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
std::string encodedFormatsLiteral = getXmlAttribute(cur, Attributes::encodedFormats);
ALOGV("%s: %s %s=%s", __func__, tag, Attributes::encodedFormats, encodedFormatsLiteral.c_str());
@@ -531,7 +547,7 @@
AudioProfileTraits::Collection profiles;
status_t status = deserializeCollection<AudioProfileTraits>(cur, &profiles, NULL);
if (status != NO_ERROR) {
- return Status::fromStatusT(status);
+ return status;
}
if (profiles.empty()) {
profiles.add(AudioProfile::createFullDynamic(gDynamicFormat));
@@ -544,7 +560,7 @@
// Deserialize AudioGain children
status = deserializeCollection<AudioGainTraits>(cur, &deviceDesc->mGains, NULL);
if (status != NO_ERROR) {
- return Status::fromStatusT(status);
+ return status;
}
ALOGV("%s: adding device tag %s type %08x address %s", __func__,
deviceDesc->getName().c_str(), type, deviceDesc->address().c_str());
@@ -552,7 +568,7 @@
}
template<>
-Return<RouteTraits::Element> PolicySerializer::deserialize<RouteTraits>(
+std::variant<status_t, RouteTraits::Element> PolicySerializer::deserialize<RouteTraits>(
const xmlNode *cur, RouteTraits::PtrSerializingCtx ctx)
{
using Attributes = RouteTraits::Attributes;
@@ -560,7 +576,7 @@
std::string type = getXmlAttribute(cur, Attributes::type);
if (type.empty()) {
ALOGE("%s: No %s found", __func__, Attributes::type);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
audio_route_type_t routeType = (type == Attributes::typeMix) ?
AUDIO_ROUTE_MIX : AUDIO_ROUTE_MUX;
@@ -571,20 +587,24 @@
std::string sinkAttr = getXmlAttribute(cur, Attributes::sink);
if (sinkAttr.empty()) {
ALOGE("%s: No %s found", __func__, Attributes::sink);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
// Convert Sink name to port pointer
sp<PolicyAudioPort> sink = ctx->findPortByTagName(sinkAttr);
- if (sink == NULL) {
+ if (sink == NULL && !mIgnoreVendorExtensions) {
ALOGE("%s: no sink found with name=%s", __func__, sinkAttr.c_str());
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
+ } else if (sink == NULL) {
+ ALOGW("Skipping route to sink \"%s\" as it likely has vendor extension type",
+ sinkAttr.c_str());
+ return NO_INIT;
}
route->setSink(sink);
std::string sourcesAttr = getXmlAttribute(cur, Attributes::sources);
if (sourcesAttr.empty()) {
ALOGE("%s: No %s found", __func__, Attributes::sources);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
// Tokenize and Convert Sources name to port pointer
PolicyAudioPortVector sources;
@@ -594,11 +614,15 @@
while (devTag != NULL) {
if (strlen(devTag) != 0) {
sp<PolicyAudioPort> source = ctx->findPortByTagName(devTag);
- if (source == NULL) {
+ if (source == NULL && !mIgnoreVendorExtensions) {
ALOGE("%s: no source found with name=%s", __func__, devTag);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
+ } else if (source == NULL) {
+ ALOGW("Skipping route source \"%s\" as it likely has vendor extension type",
+ devTag);
+ } else {
+ sources.add(source);
}
- sources.add(source);
}
devTag = strtok(NULL, ",");
}
@@ -613,7 +637,7 @@
}
template<>
-Return<ModuleTraits::Element> PolicySerializer::deserialize<ModuleTraits>(
+std::variant<status_t, ModuleTraits::Element> PolicySerializer::deserialize<ModuleTraits>(
const xmlNode *cur, ModuleTraits::PtrSerializingCtx ctx)
{
using Attributes = ModuleTraits::Attributes;
@@ -625,7 +649,7 @@
std::string name = getXmlAttribute(cur, Attributes::name);
if (name.empty()) {
ALOGE("%s: No %s found", __func__, Attributes::name);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
uint32_t versionMajor = 0, versionMinor = 0;
std::string versionLiteral = getXmlAttribute(cur, Attributes::version);
@@ -643,21 +667,21 @@
MixPortTraits::Collection mixPorts;
status_t status = deserializeCollection<MixPortTraits>(cur, &mixPorts, NULL);
if (status != NO_ERROR) {
- return Status::fromStatusT(status);
+ return status;
}
module->setProfiles(mixPorts);
DevicePortTraits::Collection devicePorts;
status = deserializeCollection<DevicePortTraits>(cur, &devicePorts, NULL);
if (status != NO_ERROR) {
- return Status::fromStatusT(status);
+ return status;
}
module->setDeclaredDevices(devicePorts);
RouteTraits::Collection routes;
status = deserializeCollection<RouteTraits>(cur, &routes, module.get());
if (status != NO_ERROR) {
- return Status::fromStatusT(status);
+ return status;
}
module->setRoutes(routes);
@@ -677,6 +701,12 @@
sp<DeviceDescriptor> device = module->getDeclaredDevices().
getDeviceFromTagName(std::string(reinterpret_cast<const char*>(
attachedDevice.get())));
+ if (device == nullptr && mIgnoreVendorExtensions) {
+ ALOGW("Skipped attached device \"%s\" because it likely uses a vendor"
+ "extension type",
+ reinterpret_cast<const char*>(attachedDevice.get()));
+ continue;
+ }
ctx->addDevice(device);
}
}
@@ -703,7 +733,8 @@
}
template<>
-Return<GlobalConfigTraits::Element> PolicySerializer::deserialize<GlobalConfigTraits>(
+std::variant<status_t, GlobalConfigTraits::Element>
+PolicySerializer::deserialize<GlobalConfigTraits>(
const xmlNode *root, GlobalConfigTraits::PtrSerializingCtx config)
{
using Attributes = GlobalConfigTraits::Attributes;
@@ -725,14 +756,15 @@
if (!engineLibrarySuffix.empty()) {
config->setEngineLibraryNameSuffix(engineLibrarySuffix);
}
- return Void();
+ return NO_ERROR;
}
}
- return Void();
+ return NO_ERROR;
}
template<>
-Return<SurroundSoundTraits::Element> PolicySerializer::deserialize<SurroundSoundTraits>(
+std::variant<status_t, SurroundSoundTraits::Element>
+PolicySerializer::deserialize<SurroundSoundTraits>(
const xmlNode *root, SurroundSoundTraits::PtrSerializingCtx config)
{
config->setDefaultSurroundFormats();
@@ -745,14 +777,15 @@
if (status == NO_ERROR) {
config->setSurroundFormats(formats);
}
- return Void();
+ return NO_ERROR;
}
}
- return Void();
+ return NO_ERROR;
}
template<>
-Return<SurroundSoundFormatTraits::Element> PolicySerializer::deserialize<SurroundSoundFormatTraits>(
+std::variant<status_t, SurroundSoundFormatTraits::Element>
+PolicySerializer::deserialize<SurroundSoundFormatTraits>(
const xmlNode *cur, SurroundSoundFormatTraits::PtrSerializingCtx /*serializingContext*/)
{
using Attributes = SurroundSoundFormatTraits::Attributes;
@@ -760,12 +793,16 @@
std::string formatLiteral = getXmlAttribute(cur, Attributes::name);
if (formatLiteral.empty()) {
ALOGE("%s: No %s found for a surround format", __func__, Attributes::name);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
+ }
+ if (mIgnoreVendorExtensions && maybeVendorExtension(formatLiteral)) {
+ ALOGI("%s: vendor extension format \"%s\" skipped", __func__, formatLiteral.c_str());
+ return NO_INIT;
}
audio_format_t format = formatFromString(formatLiteral);
if (format == AUDIO_FORMAT_DEFAULT) {
ALOGE("%s: Unrecognized format %s", __func__, formatLiteral.c_str());
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
SurroundSoundFormatTraits::Element pair = std::make_pair(
format, SurroundSoundFormatTraits::Collection::mapped_type{});
@@ -777,14 +814,16 @@
auto result = pair.second.insert(subformat);
if (!result.second) {
ALOGE("%s: could not add subformat %x to collection", __func__, subformat);
- return Status::fromStatusT(BAD_VALUE);
+ return BAD_VALUE;
}
}
return pair;
}
-status_t PolicySerializer::deserialize(const char *configFile, AudioPolicyConfig *config)
+status_t PolicySerializer::deserialize(const char *configFile, AudioPolicyConfig *config,
+ bool ignoreVendorExtensions)
{
+ mIgnoreVendorExtensions = ignoreVendorExtensions;
auto doc = make_xmlUnique(xmlParseFile(configFile));
if (doc == nullptr) {
ALOGE("%s: Could not parse %s document.", __func__, configFile);
@@ -845,4 +884,12 @@
return status;
}
+status_t deserializeAudioPolicyFileForVts(const char *fileName, AudioPolicyConfig *config)
+{
+ PolicySerializer serializer;
+ status_t status = serializer.deserialize(fileName, config, true /*ignoreVendorExtensions*/);
+ if (status != OK) config->clear();
+ return status;
+}
+
} // namespace android
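A minimal sketch of how the new VTS entry point might be invoked; the config object, helper function, and file path are assumptions and not taken from this patch:

    #include "Serializer.h"

    // In a VTS-style parse, vendor extension elements are skipped rather than failing.
    void checkVendorConfigParsesForVts(android::AudioPolicyConfig* config) {
        android::status_t status = android::deserializeAudioPolicyFileForVts(
                "/vendor/etc/audio_policy_configuration.xml", config);
        if (status != android::OK) {
            // A non-OK status means the file itself could not be parsed;
            // the helper has already cleared *config in that case.
        }
    }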
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index d038ce5..380bf6b 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -142,47 +142,19 @@
return EngineBase::setForceUse(usage, config);
}
-DeviceVector Engine::getDevicesForStrategyInt(legacy_strategy strategy,
- DeviceVector availableOutputDevices,
- DeviceVector availableInputDevices,
- const SwAudioOutputCollection &outputs) const
+void Engine::filterOutputDevicesForStrategy(legacy_strategy strategy,
+ DeviceVector& availableOutputDevices,
+ const DeviceVector availableInputDevices,
+ const SwAudioOutputCollection &outputs) const
{
- DeviceVector devices;
-
switch (strategy) {
-
- case STRATEGY_TRANSMITTED_THROUGH_SPEAKER:
- devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
- break;
-
- case STRATEGY_SONIFICATION_RESPECTFUL:
- if (isInCall() || outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) {
- devices = getDevicesForStrategyInt(
- STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs);
- } else {
- bool media_active_locally =
- outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_MUSIC),
- SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)
- || outputs.isActiveLocally(
- toVolumeSource(AUDIO_STREAM_ACCESSIBILITY),
- SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY);
+ case STRATEGY_SONIFICATION_RESPECTFUL: {
+ if (!(isInCall() || outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL)))) {
// routing is same as media without the "remote" device
availableOutputDevices.remove(availableOutputDevices.getDevicesFromType(
AUDIO_DEVICE_OUT_REMOTE_SUBMIX));
- devices = getDevicesForStrategyInt(STRATEGY_MEDIA,
- availableOutputDevices,
- availableInputDevices, outputs);
- // if no media is playing on the device, check for mandatory use of "safe" speaker
- // when media would have played on speaker, and the safe speaker path is available
- if (!media_active_locally) {
- devices.replaceDevicesByType(
- AUDIO_DEVICE_OUT_SPEAKER,
- availableOutputDevices.getDevicesFromType(
- AUDIO_DEVICE_OUT_SPEAKER_SAFE));
- }
}
- break;
-
+ } break;
case STRATEGY_DTMF:
case STRATEGY_PHONE: {
// Force use of only devices on primary output if:
@@ -214,6 +186,64 @@
availableOutputDevices = availPrimaryOutputDevices;
}
}
+ } break;
+ case STRATEGY_ACCESSIBILITY: {
+ // do not route accessibility prompts to a digital output currently configured with a
+ // compressed format as they would likely not be mixed and dropped.
+ for (size_t i = 0; i < outputs.size(); i++) {
+ sp<AudioOutputDescriptor> desc = outputs.valueAt(i);
+ if (desc->isActive() && !audio_is_linear_pcm(desc->getFormat())) {
+ availableOutputDevices.remove(desc->devices().getDevicesFromTypes({
+ AUDIO_DEVICE_OUT_HDMI, AUDIO_DEVICE_OUT_SPDIF,
+ AUDIO_DEVICE_OUT_HDMI_ARC}));
+ }
+ }
+ } break;
+ default:
+ break;
+ }
+}
+
+DeviceVector Engine::getDevicesForStrategyInt(legacy_strategy strategy,
+ DeviceVector availableOutputDevices,
+ DeviceVector availableInputDevices,
+ const SwAudioOutputCollection &outputs) const
+{
+ DeviceVector devices;
+
+ switch (strategy) {
+
+ case STRATEGY_TRANSMITTED_THROUGH_SPEAKER:
+ devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_SPEAKER);
+ break;
+
+ case STRATEGY_SONIFICATION_RESPECTFUL:
+ if (isInCall() || outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) {
+ devices = getDevicesForStrategyInt(
+ STRATEGY_SONIFICATION, availableOutputDevices, availableInputDevices, outputs);
+ } else {
+ bool media_active_locally =
+ outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_MUSIC),
+ SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)
+ || outputs.isActiveLocally(
+ toVolumeSource(AUDIO_STREAM_ACCESSIBILITY),
+ SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY);
+ devices = getDevicesForStrategyInt(STRATEGY_MEDIA,
+ availableOutputDevices,
+ availableInputDevices, outputs);
+ // if no media is playing on the device, check for mandatory use of "safe" speaker
+ // when media would have played on speaker, and the safe speaker path is available
+ if (!media_active_locally) {
+ devices.replaceDevicesByType(
+ AUDIO_DEVICE_OUT_SPEAKER,
+ availableOutputDevices.getDevicesFromType(
+ AUDIO_DEVICE_OUT_SPEAKER_SAFE));
+ }
+ }
+ break;
+
+ case STRATEGY_DTMF:
+ case STRATEGY_PHONE: {
devices = availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_HEARING_AID);
if (!devices.isEmpty()) break;
devices = availableOutputDevices.getFirstDevicesFromTypes({
@@ -286,16 +316,6 @@
case STRATEGY_ACCESSIBILITY:
if (strategy == STRATEGY_ACCESSIBILITY) {
- // do not route accessibility prompts to a digital output currently configured with a
- // compressed format as they would likely not be mixed and dropped.
- for (size_t i = 0; i < outputs.size(); i++) {
- sp<AudioOutputDescriptor> desc = outputs.valueAt(i);
- if (desc->isActive() && !audio_is_linear_pcm(desc->getFormat())) {
- availableOutputDevices.remove(desc->devices().getDevicesFromTypes({
- AUDIO_DEVICE_OUT_HDMI, AUDIO_DEVICE_OUT_SPDIF,
- AUDIO_DEVICE_OUT_HDMI_ARC}));
- }
- }
if (outputs.isActive(toVolumeSource(AUDIO_STREAM_RING)) ||
outputs.isActive(toVolumeSource(AUDIO_STREAM_ALARM))) {
return getDevicesForStrategyInt(
@@ -634,11 +654,18 @@
auto legacyStrategy = mLegacyStrategyMap.find(strategy) != end(mLegacyStrategyMap) ?
mLegacyStrategyMap.at(strategy) : STRATEGY_NONE;
- // When not in call, STRATEGY_PHONE and STRATEGY_DTMF follow STRATEGY_MEDIA
- if (!isInCall() && (legacyStrategy == STRATEGY_PHONE || legacyStrategy == STRATEGY_DTMF)) {
+ // When not in call, STRATEGY_DTMF follows STRATEGY_MEDIA
+ if (!isInCall() && legacyStrategy == STRATEGY_DTMF) {
legacyStrategy = STRATEGY_MEDIA;
strategy = getProductStrategyFromLegacy(STRATEGY_MEDIA);
}
+
+ DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
+ const SwAudioOutputCollection& outputs = getApmObserver()->getOutputs();
+
+ filterOutputDevicesForStrategy(legacyStrategy, availableOutputDevices,
+ availableInputDevices, outputs);
+
// check if this strategy has a preferred device that is available,
// if yes, give priority to it.
DeviceVector preferredAvailableDevVec =
@@ -647,9 +674,6 @@
return preferredAvailableDevVec;
}
- DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
- const SwAudioOutputCollection& outputs = getApmObserver()->getOutputs();
-
return getDevicesForStrategyInt(legacyStrategy,
availableOutputDevices,
availableInputDevices, outputs);
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 6214fe7..6dc6cd0 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -74,6 +74,11 @@
status_t setDefaultDevice(audio_devices_t device);
+ void filterOutputDevicesForStrategy(legacy_strategy strategy,
+ DeviceVector& availableOutputDevices,
+ const DeviceVector availableInputDevices,
+ const SwAudioOutputCollection &outputs) const;
+
DeviceVector getDevicesForStrategyInt(legacy_strategy strategy,
DeviceVector availableOutputDevices,
DeviceVector availableInputDevices,
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 75f0c1b..6b664dd 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -261,11 +261,7 @@
} else {
checkCloseOutputs();
}
-
- if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
- DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
- updateCallRouting(newDevices);
- }
+ (void)updateCallRouting(false /*fromCache*/);
std::vector<audio_io_handle_t> outputsToReopen;
const DeviceVector msdOutDevices = getMsdAudioOutDevices();
const DeviceVector activeMediaDevices =
@@ -382,10 +378,7 @@
// getDeviceForStrategy() cache
updateDevicesAndOutputs();
- if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
- DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
- updateCallRouting(newDevices);
- }
+ (void)updateCallRouting(false /*fromCache*/);
// Reconnect Audio Source
for (const auto &strategy : mEngine->getOrderedProductStrategies()) {
auto attributes = mEngine->getAllAttributesForProductStrategy(strategy).front();
@@ -550,23 +543,58 @@
return status;
}
-uint32_t AudioPolicyManager::updateCallRouting(const DeviceVector &rxDevices, uint32_t delayMs)
+DeviceVector AudioPolicyManager::selectBestRxSinkDevicesForCall(bool fromCache)
+{
+ DeviceVector rxSinkdevices{};
+ rxSinkdevices = mEngine->getOutputDevicesForAttributes(
+ attributes_initializer(AUDIO_USAGE_VOICE_COMMUNICATION), nullptr, fromCache);
+ if (!rxSinkdevices.isEmpty() && mAvailableOutputDevices.contains(rxSinkdevices.itemAt(0))) {
+ auto rxSinkDevice = rxSinkdevices.itemAt(0);
+ auto telephonyRxModule = mHwModules.getModuleForDeviceType(
+ AUDIO_DEVICE_IN_TELEPHONY_RX, AUDIO_FORMAT_DEFAULT);
+ // retrieve Rx Source device descriptor
+ sp<DeviceDescriptor> rxSourceDevice = mAvailableInputDevices.getDevice(
+ AUDIO_DEVICE_IN_TELEPHONY_RX, String8(), AUDIO_FORMAT_DEFAULT);
+
+ // RX Telephony and Rx sink devices are declared by Primary Audio HAL
+ if (isPrimaryModule(telephonyRxModule) && (telephonyRxModule->getHalVersionMajor() >= 3) &&
+ telephonyRxModule->supportsPatch(rxSourceDevice, rxSinkDevice)) {
+ ALOGW("%s() device %s using HW Bridge", __func__, rxSinkDevice->toString().c_str());
+ return DeviceVector(rxSinkDevice);
+ }
+ }
+ // Note that despite the fact that getNewOutputDevices() is called on the primary output,
+ // the device returned is not necessarily reachable via this output
+ // (it is filtered later by setOutputDevices()).
+ return getNewOutputDevices(mPrimaryOutput, fromCache);
+}
+
+status_t AudioPolicyManager::updateCallRouting(bool fromCache, uint32_t delayMs, uint32_t *waitMs)
+{
+ if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
+ DeviceVector rxDevices = selectBestRxSinkDevicesForCall(fromCache);
+ return updateCallRoutingInternal(rxDevices, delayMs, waitMs);
+ }
+ return INVALID_OPERATION;
+}
+
+status_t AudioPolicyManager::updateCallRoutingInternal(
+ const DeviceVector &rxDevices, uint32_t delayMs, uint32_t *waitMs)
{
bool createTxPatch = false;
bool createRxPatch = false;
uint32_t muteWaitMs = 0;
-
if(!hasPrimaryOutput() ||
mPrimaryOutput->devices().onlyContainsDevicesWithType(AUDIO_DEVICE_OUT_STUB)) {
- return muteWaitMs;
+ return INVALID_OPERATION;
}
- ALOG_ASSERT(!rxDevices.isEmpty(), "updateCallRouting() no selected output device");
+ ALOG_ASSERT(!rxDevices.isEmpty(), "%s() no selected output device", __func__);
audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
- ALOG_ASSERT(txSourceDevice != 0, "updateCallRouting() input selected device not available");
+ ALOG_ASSERT(txSourceDevice != 0, "%s() input selected device not available", __func__);
- ALOGV("updateCallRouting device rxDevice %s txDevice %s",
+ ALOGV("%s device rxDevice %s txDevice %s", __func__,
rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
disconnectTelephonyRxAudioSource();
@@ -595,8 +623,8 @@
(telephonyRxModule->getHalVersionMajor() >= 3)) {
if (rxSourceDevice == 0 || txSinkDevice == 0) {
// RX / TX Telephony device(s) is(are) not currently available
- ALOGE("updateCallRouting() no telephony Tx and/or RX device");
- return muteWaitMs;
+ ALOGE("%s() no telephony Tx and/or RX device", __func__);
+ return INVALID_OPERATION;
}
// createAudioPatchInternal now supports both HW / SW bridging
createRxPatch = true;
@@ -634,8 +662,10 @@
}
mCallTxPatch = createTelephonyPatch(false /*isRx*/, txSourceDevice, delayMs);
}
-
- return muteWaitMs;
+ if (waitMs != nullptr) {
+ *waitMs = muteWaitMs;
+ }
+ return NO_ERROR;
}
sp<AudioPatch> AudioPolicyManager::createTelephonyPatch(
@@ -753,25 +783,22 @@
}
if (hasPrimaryOutput()) {
- // Note that despite the fact that getNewOutputDevices() is called on the primary output,
- // the device returned is not necessarily reachable via this output
- DeviceVector rxDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
- // force routing command to audio hardware when ending call
- // even if no device change is needed
- if (isStateInCall(oldState) && rxDevices.isEmpty()) {
- rxDevices = mPrimaryOutput->devices();
- }
-
if (state == AUDIO_MODE_IN_CALL) {
- updateCallRouting(rxDevices, delayMs);
- } else if (oldState == AUDIO_MODE_IN_CALL) {
- disconnectTelephonyRxAudioSource();
- if (mCallTxPatch != 0) {
- releaseAudioPatchInternal(mCallTxPatch->getHandle());
- mCallTxPatch.clear();
- }
- setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
+ (void)updateCallRouting(false /*fromCache*/, delayMs);
} else {
+ DeviceVector rxDevices = getNewOutputDevices(mPrimaryOutput, false /*fromCache*/);
+ // force routing command to audio hardware when ending call
+ // even if no device change is needed
+ if (isStateInCall(oldState) && rxDevices.isEmpty()) {
+ rxDevices = mPrimaryOutput->devices();
+ }
+ if (oldState == AUDIO_MODE_IN_CALL) {
+ disconnectTelephonyRxAudioSource();
+ if (mCallTxPatch != 0) {
+ releaseAudioPatchInternal(mCallTxPatch->getHandle());
+ mCallTxPatch.clear();
+ }
+ }
setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
}
}
@@ -3300,9 +3327,7 @@
void AudioPolicyManager::updateCallAndOutputRouting(bool forceVolumeReeval, uint32_t delayMs)
{
uint32_t waitMs = 0;
- if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL && hasPrimaryOutput()) {
- DeviceVector newDevices = getNewOutputDevices(mPrimaryOutput, true /*fromCache*/);
- waitMs = updateCallRouting(newDevices, delayMs);
+ if (updateCallRouting(true /*fromCache*/, delayMs, &waitMs) == NO_ERROR) {
// Only apply special touch sound delay once
delayMs = 0;
}
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index bf73f75..bdf82ef 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -735,9 +735,22 @@
String8(devices.itemAt(0)->address().c_str()) : String8("");
}
- uint32_t updateCallRouting(const DeviceVector &rxDevices, uint32_t delayMs = 0);
+ status_t updateCallRouting(
+ bool fromCache, uint32_t delayMs = 0, uint32_t *waitMs = nullptr);
+ status_t updateCallRoutingInternal(
+ const DeviceVector &rxDevices, uint32_t delayMs, uint32_t *waitMs);
sp<AudioPatch> createTelephonyPatch(bool isRx, const sp<DeviceDescriptor> &device,
uint32_t delayMs);
+ /**
+ * @brief selectBestRxSinkDevicesForCall: if the primary module hosts both Telephony Rx/Tx
+ * devices and also declares support for a HW bridge between the Telephony Rx and the
+ * given sink device for Voice Call audio attributes, select this device in priority.
+ * Otherwise, getNewOutputDevices() is called on the primary output to select the sink device.
+ * @param fromCache true to retrieve devices from the engine cache instead of re-evaluating
+ * all product strategies.
+ * @return vector of devices, empty if none is found.
+ */
+ DeviceVector selectBestRxSinkDevicesForCall(bool fromCache);
bool isDeviceOfModule(const sp<DeviceDescriptor>& devDesc, const char *moduleId) const;
status_t startSource(const sp<SwAudioOutputDescriptor>& outputDesc,
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index b738633..5dac55b 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -970,7 +970,7 @@
for (const auto& deviceEffectsIter : mDeviceEffects) {
const auto& deviceEffects = deviceEffectsIter.second;
for (const auto& effectDesc : deviceEffects->mEffectDescriptors->mEffects) {
- auto fx = std::make_unique<AudioEffect>(String16("android"));
+ sp<AudioEffect> fx = new AudioEffect(String16("android"));
fx->set(EFFECT_UUID_NULL, &effectDesc->mUuid, 0, nullptr,
nullptr, AUDIO_SESSION_DEVICE, AUDIO_IO_HANDLE_NONE,
AudioDeviceTypeAddr{deviceEffects->getDeviceType(),
@@ -987,7 +987,7 @@
ALOGV("%s(): create Fx %s added on port type=%d address=%s", __func__,
effectDesc->mName, deviceEffects->getDeviceType(),
deviceEffects->getDeviceAddress().c_str());
- deviceEffects->mEffects.push_back(std::move(fx));
+ deviceEffects->mEffects.push_back(fx);
}
}
}
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 81c728d..13d5d0c 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -207,7 +207,7 @@
mDeviceType(device), mDeviceAddress(address) {}
/*virtual*/ ~DeviceEffects() = default;
- std::vector<std::unique_ptr<AudioEffect>> mEffects;
+ std::vector< sp<AudioEffect> > mEffects;
audio_devices_t getDeviceType() const { return mDeviceType; }
std::string getDeviceAddress() const { return mDeviceAddress; }
const std::unique_ptr<EffectDescVector> mEffectDescriptors;
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index e9f95cb..dd37135 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -143,11 +143,13 @@
"android.hardware.camera.provider@2.4",
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
+ "android.hardware.camera.provider@2.7",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
"android.hardware.camera.device@3.4",
"android.hardware.camera.device@3.5",
- "android.hardware.camera.device@3.6"
+ "android.hardware.camera.device@3.6",
+ "android.hardware.camera.device@3.7"
],
static_libs: [
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 1234dfd..706197e 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -805,6 +805,7 @@
case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_5:
case CAMERA_DEVICE_API_VERSION_3_6:
+ case CAMERA_DEVICE_API_VERSION_3_7:
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, packageName, featureId,
@@ -2271,6 +2272,7 @@
case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_5:
case CAMERA_DEVICE_API_VERSION_3_6:
+ case CAMERA_DEVICE_API_VERSION_3_7:
ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
__FUNCTION__, id.string());
*isSupported = true;
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index e062c14..8164df0 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -29,7 +29,6 @@
#include "Parameters.h"
#include "system/camera.h"
-#include "hardware/camera_common.h"
#include <android/hardware/ICamera.h>
#include <media/MediaProfiles.h>
#include <media/mediarecorder.h>
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 0701b6f..9fdc727 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -237,7 +237,7 @@
if (mInputStreamId == NO_STREAM) {
res = device->createInputStream(params.fastInfo.maxZslSize.width,
params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
- &mInputStreamId);
+ /*isMultiResolution*/false, &mInputStreamId);
if (res != OK) {
ALOGE("%s: Camera %d: Can't create input stream: "
"%s (%d)", __FUNCTION__, client->getCameraId(),
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index d47014e..8cccbb1 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -584,7 +584,7 @@
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
- hardware::camera::device::V3_4::StreamConfiguration streamConfiguration;
+ hardware::camera::device::V3_7::StreamConfiguration streamConfiguration;
bool earlyExit = false;
metadataGetter getMetadata = [this](const String8 &id) {return mDevice->infoPhysical(id);};
std::vector<std::string> physicalCameraIds;
@@ -738,6 +738,7 @@
bool isShared = outputConfiguration.isShared();
String8 physicalCameraId = String8(outputConfiguration.getPhysicalCameraId());
bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
+ bool isMultiResolution = outputConfiguration.isMultiResolution();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
outputConfiguration.getSurfaceType());
@@ -809,7 +810,7 @@
streamInfo.height, streamInfo.format,
static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
- isShared);
+ isShared, isMultiResolution);
if (err == OK) {
mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
compositeStream);
@@ -819,7 +820,7 @@
streamInfo.height, streamInfo.format, streamInfo.dataSpace,
static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
- isShared);
+ isShared, isMultiResolution);
}
if (err != OK) {
@@ -888,7 +889,7 @@
static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared,
- consumerUsage);
+ outputConfiguration.isMultiResolution(), consumerUsage);
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -943,12 +944,13 @@
}
binder::Status CameraDeviceClient::createInputStream(
- int width, int height, int format,
+ int width, int height, int format, bool isMultiResolution,
/*out*/
int32_t* newStreamId) {
ATRACE_CALL();
- ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format);
+ ALOGV("%s (w = %d, h = %d, f = 0x%x, isMultiResolution %d)", __FUNCTION__,
+ width, height, format, isMultiResolution);
binder::Status res;
if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -967,7 +969,7 @@
}
int streamId = -1;
- status_t err = mDevice->createInputStream(width, height, format, &streamId);
+ status_t err = mDevice->createInputStream(width, height, format, isMultiResolution, &streamId);
if (err == OK) {
mInputStream.configured = true;
mInputStream.width = width;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 5588285..9f7a4af 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -115,6 +115,7 @@
// Create an input stream of width, height, and format.
virtual binder::Status createInputStream(int width, int height, int format,
+ bool isMultiResolution,
/*out*/
int32_t* newStreamId = NULL) override;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 2f8ca6b..515b7f2 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -47,7 +47,7 @@
status_t CompositeStream::createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int * id, const String8& physicalCameraId,
- std::vector<int> * surfaceIds, int streamSetId, bool isShared) {
+ std::vector<int> * surfaceIds, int streamSetId, bool isShared, bool isMultiResolution) {
if (hasDeferredConsumer) {
ALOGE("%s: Deferred consumers not supported in case of composite streams!",
__FUNCTION__);
@@ -66,6 +66,12 @@
return BAD_VALUE;
}
+ if (isMultiResolution) {
+ ALOGE("%s: Multi-resolution output not supported in case of composite streams!",
+ __FUNCTION__);
+ return BAD_VALUE;
+ }
+
return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation, id,
physicalCameraId, surfaceIds, streamSetId, isShared);
}
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 2a934df..1bf137a 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -44,7 +44,7 @@
status_t createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
- std::vector<int> *surfaceIds, int streamSetId, bool isShared);
+ std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution);
status_t deleteStream();
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 1be46d6..5acbb99 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -166,7 +166,8 @@
const String8& physicalCameraId,
std::vector<int> *surfaceIds = nullptr,
int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
- bool isShared = false, uint64_t consumerUsage = 0) = 0;
+ bool isShared = false, bool isMultiResolution = false,
+ uint64_t consumerUsage = 0) = 0;
/**
* Create an output stream of the requested size, format, rotation and
@@ -181,7 +182,8 @@
const String8& physicalCameraId,
std::vector<int> *surfaceIds = nullptr,
int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
- bool isShared = false, uint64_t consumerUsage = 0) = 0;
+ bool isShared = false, bool isMultiResolution = false,
+ uint64_t consumerUsage = 0) = 0;
/**
* Create an input stream of width, height, and format.
@@ -189,7 +191,7 @@
* Return value is the stream ID if non-negative and an error if negative.
*/
virtual status_t createInputStream(uint32_t width, uint32_t height,
- int32_t format, /*out*/ int32_t *id) = 0;
+ int32_t format, bool multiResolution, /*out*/ int32_t *id) = 0;
struct StreamInfo {
uint32_t width;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index e9dcb01..dfe2409 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -20,7 +20,7 @@
#include "CameraProviderManager.h"
-#include <android/hardware/camera/device/3.5/ICameraDevice.h>
+#include <android/hardware/camera/device/3.7/ICameraDevice.h>
#include <algorithm>
#include <chrono>
@@ -28,7 +28,6 @@
#include <dlfcn.h>
#include <future>
#include <inttypes.h>
-#include <hardware/camera_common.h>
#include <android/hidl/manager/1.2/IServiceManager.h>
#include <hidl/ServiceManagement.h>
#include <functional>
@@ -49,7 +48,7 @@
using namespace ::android::hardware::camera::common::V1_0;
using std::literals::chrono_literals::operator""s;
using hardware::camera2::utils::CameraIdAndSessionConfiguration;
-using hardware::camera::provider::V2_6::CameraIdAndStreamCombination;
+using hardware::camera::provider::V2_7::CameraIdAndStreamCombination;
namespace {
const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
@@ -267,7 +266,7 @@
}
status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
- const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+ const hardware::camera::device::V3_7::StreamConfiguration &configuration,
bool *status /*out*/) const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
auto deviceInfo = findDeviceInfoLocked(id);
@@ -1302,6 +1301,14 @@
mMinorVersion = 5;
}
}
+ } else {
+ auto cast2_7 = provider::V2_7::ICameraProvider::castFrom(interface);
+ if (cast2_7.isOk()) {
+ sp<provider::V2_7::ICameraProvider> interface2_7 = cast2_7;
+ if (interface2_7 != nullptr) {
+ mMinorVersion = 7;
+ }
+ }
}
// cameraDeviceStatusChange callbacks may be called (and causing new devices added)
@@ -1973,38 +1980,67 @@
// TODO: This might be some other problem
return INVALID_OPERATION;
}
- auto castResult = provider::V2_6::ICameraProvider::castFrom(interface);
- if (castResult.isOk()) {
- sp<provider::V2_6::ICameraProvider> interface_2_6 = castResult;
- if (interface_2_6 != nullptr) {
- Status callStatus;
- auto cb =
- [&isSupported, &callStatus](Status s, bool supported) {
- callStatus = s;
- *isSupported = supported; };
+ auto castResult2_6 = provider::V2_6::ICameraProvider::castFrom(interface);
+ auto castResult2_7 = provider::V2_7::ICameraProvider::castFrom(interface);
+ Status callStatus;
+ auto cb =
+ [&isSupported, &callStatus](Status s, bool supported) {
+ callStatus = s;
+ *isSupported = supported; };
- auto ret = interface_2_6->isConcurrentStreamCombinationSupported(
- halCameraIdsAndStreamCombinations, cb);
- if (ret.isOk()) {
- switch (callStatus) {
- case Status::OK:
- // Expected case, do nothing.
- res = OK;
- break;
- case Status::METHOD_NOT_SUPPORTED:
- res = INVALID_OPERATION;
- break;
- default:
- ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
- callStatus);
- res = UNKNOWN_ERROR;
- }
- } else {
- ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.description().c_str());
- res = UNKNOWN_ERROR;
- }
- return res;
+ ::android::hardware::Return<void> ret;
+ sp<provider::V2_7::ICameraProvider> interface_2_7;
+ sp<provider::V2_6::ICameraProvider> interface_2_6;
+ if (mMinorVersion >= 7 && castResult2_7.isOk()) {
+ interface_2_7 = castResult2_7;
+ if (interface_2_7 != nullptr) {
+ ret = interface_2_7->isConcurrentStreamCombinationSupported_2_7(
+ halCameraIdsAndStreamCombinations, cb);
}
+ } else if (mMinorVersion == 6 && castResult2_6.isOk()) {
+ interface_2_6 = castResult2_6;
+ if (interface_2_6 != nullptr) {
+ hardware::hidl_vec<provider::V2_6::CameraIdAndStreamCombination>
+ halCameraIdsAndStreamCombinations_2_6;
+ size_t numStreams = halCameraIdsAndStreamCombinations.size();
+ halCameraIdsAndStreamCombinations_2_6.resize(numStreams);
+ for (size_t i = 0; i < numStreams; i++) {
+ auto const& combination = halCameraIdsAndStreamCombinations[i];
+ halCameraIdsAndStreamCombinations_2_6[i].cameraId = combination.cameraId;
+ bool success =
+ SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+ halCameraIdsAndStreamCombinations_2_6[i].streamConfiguration,
+ combination.streamConfiguration);
+ if (!success) {
+ *isSupported = false;
+ return OK;
+ }
+ }
+ ret = interface_2_6->isConcurrentStreamCombinationSupported(
+ halCameraIdsAndStreamCombinations_2_6, cb);
+ }
+ }
+
+ if (interface_2_7 != nullptr || interface_2_6 != nullptr) {
+ if (ret.isOk()) {
+ switch (callStatus) {
+ case Status::OK:
+ // Expected case, do nothing.
+ res = OK;
+ break;
+ case Status::METHOD_NOT_SUPPORTED:
+ res = INVALID_OPERATION;
+ break;
+ default:
+ ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
+ callStatus);
+ res = UNKNOWN_ERROR;
+ }
+ } else {
+ ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.description().c_str());
+ res = UNKNOWN_ERROR;
+ }
+ return res;
}
}
// unsupported operation
@@ -2374,7 +2410,7 @@
}
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::isSessionConfigurationSupported(
- const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+ const hardware::camera::device::V3_7::StreamConfiguration &configuration,
bool *status /*out*/) {
const sp<CameraProviderManager::ProviderInfo::DeviceInfo3::InterfaceT> interface =
@@ -2382,19 +2418,37 @@
if (interface == nullptr) {
return DEAD_OBJECT;
}
- auto castResult = device::V3_5::ICameraDevice::castFrom(interface);
- sp<hardware::camera::device::V3_5::ICameraDevice> interface_3_5 = castResult;
- if (interface_3_5 == nullptr) {
- return INVALID_OPERATION;
- }
+ auto castResult_3_5 = device::V3_5::ICameraDevice::castFrom(interface);
+ sp<hardware::camera::device::V3_5::ICameraDevice> interface_3_5 = castResult_3_5;
+ auto castResult_3_7 = device::V3_7::ICameraDevice::castFrom(interface);
+ sp<hardware::camera::device::V3_7::ICameraDevice> interface_3_7 = castResult_3_7;
status_t res;
Status callStatus;
- auto ret = interface_3_5->isStreamCombinationSupported(configuration,
+ ::android::hardware::Return<void> ret;
+ if (interface_3_7 != nullptr) {
+ ret = interface_3_7->isStreamCombinationSupported_3_7(configuration,
[&callStatus, &status] (Status s, bool combStatus) {
callStatus = s;
*status = combStatus;
});
+ } else if (interface_3_5 != nullptr) {
+ hardware::camera::device::V3_4::StreamConfiguration configuration_3_4;
+ bool success = SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+ configuration_3_4, configuration);
+ if (!success) {
+ *status = false;
+ return OK;
+ }
+
+ ret = interface_3_5->isStreamCombinationSupported(configuration_3_4,
+ [&callStatus, &status] (Status s, bool combStatus) {
+ callStatus = s;
+ *status = combStatus;
+ });
+ } else {
+ return INVALID_OPERATION;
+ }
if (ret.isOk()) {
switch (callStatus) {
case Status::OK:
@@ -2769,7 +2823,7 @@
bool shouldExit = false;
status_t res = OK;
for (auto &cameraIdAndSessionConfig : cameraIdsAndSessionConfigs) {
- hardware::camera::device::V3_4::StreamConfiguration streamConfiguration;
+ hardware::camera::device::V3_7::StreamConfiguration streamConfiguration;
CameraMetadata deviceInfo;
res = getCameraCharacteristicsLocked(cameraIdAndSessionConfig.mCameraId, &deviceInfo);
if (res != OK) {
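The hunks above switch the concurrent-stream query to the provider@2.7 interface first and only fall back to the 2.6 call after downconverting the stream combinations; if a combination uses features a 2.6 HAL cannot express, the query simply reports it as unsupported. A compressed sketch of that control flow with invented struct names (convertV37ToV34 below only illustrates the role of the real convertHALStreamCombinationFromV37ToV34 helper, and the final capability check is a placeholder):

    #include <cstdio>
    #include <optional>

    // Invented stand-ins for the v3.7 and v3.4 stream configuration structs.
    struct ConfigV37 { bool multiResInput; int streamCount; };
    struct ConfigV34 { int streamCount; };

    // Downconvert only when no v3.7-only feature is used.
    std::optional<ConfigV34> convertV37ToV34(const ConfigV37& in) {
        if (in.multiResInput) return std::nullopt;  // no v3.4 equivalent
        return ConfigV34{in.streamCount};
    }

    bool isSupported(const ConfigV37& config, bool halIsV37) {
        if (halIsV37) {
            return true;                  // would call the v3.7 query directly
        }
        auto legacy = convertV37ToV34(config);
        if (!legacy) {
            return false;                 // cannot express the request to an older HAL
        }
        return legacy->streamCount <= 4;  // placeholder for the v3.4 query
    }

    int main() {
        // Multi-resolution input on a pre-3.7 HAL: reported as unsupported.
        printf("%d\n", isSupported({/*multiResInput=*/true, 2}, /*halIsV37=*/false));
        return 0;
    }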
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 8727e7f..fa9cc1c 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -33,7 +33,8 @@
#include <android/hardware/camera/provider/2.5/ICameraProvider.h>
#include <android/hardware/camera/provider/2.6/ICameraProviderCallback.h>
#include <android/hardware/camera/provider/2.6/ICameraProvider.h>
-#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+#include <android/hardware/camera/provider/2.7/ICameraProvider.h>
+#include <android/hardware/camera/device/3.7/types.h>
#include <android/hidl/manager/1.0/IServiceNotification.h>
#include <camera/VendorTagDescriptor.h>
@@ -78,6 +79,16 @@
HIDDEN_SECURE_CAMERA
};
+#define CAMERA_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION(1, 0)
+#define CAMERA_DEVICE_API_VERSION_3_0 HARDWARE_DEVICE_API_VERSION(3, 0)
+#define CAMERA_DEVICE_API_VERSION_3_1 HARDWARE_DEVICE_API_VERSION(3, 1)
+#define CAMERA_DEVICE_API_VERSION_3_2 HARDWARE_DEVICE_API_VERSION(3, 2)
+#define CAMERA_DEVICE_API_VERSION_3_3 HARDWARE_DEVICE_API_VERSION(3, 3)
+#define CAMERA_DEVICE_API_VERSION_3_4 HARDWARE_DEVICE_API_VERSION(3, 4)
+#define CAMERA_DEVICE_API_VERSION_3_5 HARDWARE_DEVICE_API_VERSION(3, 5)
+#define CAMERA_DEVICE_API_VERSION_3_6 HARDWARE_DEVICE_API_VERSION(3, 6)
+#define CAMERA_DEVICE_API_VERSION_3_7 HARDWARE_DEVICE_API_VERSION(3, 7)
+
/**
* A manager for all camera providers available on an Android device.
*
@@ -227,7 +238,7 @@
* Check for device support of specific stream combination.
*/
status_t isSessionConfigurationSupported(const std::string& id,
- const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+ const hardware::camera::device::V3_7::StreamConfiguration &configuration,
bool *status /*out*/) const;
/**
@@ -430,7 +441,7 @@
*/
status_t isConcurrentSessionConfigurationSupported(
const hardware::hidl_vec<
- hardware::camera::provider::V2_6::CameraIdAndStreamCombination>
+ hardware::camera::provider::V2_7::CameraIdAndStreamCombination>
&halCameraIdsAndStreamCombinations,
bool *isSupported);
@@ -470,7 +481,7 @@
}
virtual status_t isSessionConfigurationSupported(
- const hardware::camera::device::V3_4::StreamConfiguration &/*configuration*/,
+ const hardware::camera::device::V3_7::StreamConfiguration &/*configuration*/,
bool * /*status*/) {
return INVALID_OPERATION;
}
@@ -529,7 +540,7 @@
virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
CameraMetadata *characteristics) const override;
virtual status_t isSessionConfigurationSupported(
- const hardware::camera::device::V3_4::StreamConfiguration &configuration,
+ const hardware::camera::device::V3_7::StreamConfiguration &configuration,
bool *status /*out*/)
override;
@@ -684,7 +695,7 @@
status_t convertToHALStreamCombinationAndCameraIdsLocked(
const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
&cameraIdsAndSessionConfigs,
- hardware::hidl_vec<hardware::camera::provider::V2_6::CameraIdAndStreamCombination>
+ hardware::hidl_vec<hardware::camera::provider::V2_7::CameraIdAndStreamCombination>
*halCameraIdsAndStreamCombinations,
bool *earlyExit);
};
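The CAMERA_DEVICE_API_VERSION_* macros added to this header expand HARDWARE_DEVICE_API_VERSION(major, minor), which packs both numbers into a single integer so device versions can be compared numerically. A minimal standalone sketch of that idea (the bit layout below is an assumption for illustration, not a copy of hardware.h):

    #include <cstdint>
    #include <cstdio>

    // Assumed packing: major version in the upper byte, minor in the lower byte.
    constexpr uint32_t deviceApiVersion(uint32_t major, uint32_t minor) {
        return ((major & 0xff) << 8) | (minor & 0xff);
    }

    int main() {
        uint32_t v3_4 = deviceApiVersion(3, 4);
        uint32_t v3_7 = deviceApiVersion(3, 7);
        // Packed values order naturally, so "is this device at least 3.7?" is one compare.
        printf("3.4=0x%04x 3.7=0x%04x at_least_3_7=%d\n",
               v3_4, v3_7, v3_7 >= deviceApiVersion(3, 7));
        return 0;
    }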
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index d6bf83e..a556200 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -40,16 +40,17 @@
ATRACE_CALL();
int streamId = streamInfo.streamId;
- int streamSetId = streamInfo.streamSetId;
+ StreamSetKey streamSetKey = {streamInfo.streamSetId, streamInfo.isMultiRes};
- if (streamId == CAMERA3_STREAM_ID_INVALID || streamSetId == CAMERA3_STREAM_SET_ID_INVALID) {
+ if (streamId == CAMERA3_STREAM_ID_INVALID ||
+ streamSetKey.id == CAMERA3_STREAM_SET_ID_INVALID) {
ALOGE("%s: Stream id (%d) or stream set id (%d) is invalid",
- __FUNCTION__, streamId, streamSetId);
+ __FUNCTION__, streamId, streamSetKey.id);
return BAD_VALUE;
}
if (streamInfo.totalBufferCount > kMaxBufferCount || streamInfo.totalBufferCount == 0) {
ALOGE("%s: Stream id (%d) with stream set id (%d) total buffer count %zu is invalid",
- __FUNCTION__, streamId, streamSetId, streamInfo.totalBufferCount);
+ __FUNCTION__, streamId, streamSetKey.id, streamInfo.totalBufferCount);
return BAD_VALUE;
}
if (!streamInfo.isConfigured) {
@@ -75,7 +76,8 @@
for (size_t i = 0; i < mStreamSetMap.size(); i++) {
ssize_t streamIdx = mStreamSetMap[i].streamInfoMap.indexOfKey(streamId);
if (streamIdx != NAME_NOT_FOUND &&
- mStreamSetMap[i].streamInfoMap[streamIdx].streamSetId != streamInfo.streamSetId) {
+ mStreamSetMap[i].streamInfoMap[streamIdx].streamSetId != streamInfo.streamSetId &&
+ mStreamSetMap[i].streamInfoMap[streamIdx].isMultiRes != streamInfo.isMultiRes) {
ALOGE("%s: It is illegal to register the same stream id with different stream set",
__FUNCTION__);
return BAD_VALUE;
@@ -83,20 +85,20 @@
}
// Check if there is an existing stream set registered; if not, create one; otherwise, add this
// stream info to the existing stream set entry.
- ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetId);
+ ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetKey);
if (setIdx == NAME_NOT_FOUND) {
- ALOGV("%s: stream set %d is not registered to stream set map yet, create it.",
- __FUNCTION__, streamSetId);
+ ALOGV("%s: stream set %d(%d) is not registered to stream set map yet, create it.",
+ __FUNCTION__, streamSetKey.id, streamSetKey.isMultiRes);
// Create stream info map, then add to mStreamsetMap.
StreamSet newStreamSet;
- setIdx = mStreamSetMap.add(streamSetId, newStreamSet);
+ setIdx = mStreamSetMap.add(streamSetKey, newStreamSet);
}
// Update stream set map and water mark.
StreamSet& currentStreamSet = mStreamSetMap.editValueAt(setIdx);
ssize_t streamIdx = currentStreamSet.streamInfoMap.indexOfKey(streamId);
if (streamIdx != NAME_NOT_FOUND) {
- ALOGW("%s: stream %d was already registered with stream set %d",
- __FUNCTION__, streamId, streamSetId);
+ ALOGW("%s: stream %d was already registered with stream set %d(%d)",
+ __FUNCTION__, streamId, streamSetKey.id, streamSetKey.isMultiRes);
return OK;
}
currentStreamSet.streamInfoMap.add(streamId, streamInfo);
@@ -113,21 +115,22 @@
return OK;
}
-status_t Camera3BufferManager::unregisterStream(int streamId, int streamSetId) {
+status_t Camera3BufferManager::unregisterStream(int streamId, int streamSetId, bool isMultiRes) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
- ALOGV("%s: unregister stream %d with stream set %d", __FUNCTION__,
- streamId, streamSetId);
+ ALOGV("%s: unregister stream %d with stream set %d(%d)", __FUNCTION__,
+ streamId, streamSetId, isMultiRes);
- if (!checkIfStreamRegisteredLocked(streamId, streamSetId)){
- ALOGE("%s: stream %d with set id %d wasn't properly registered to this buffer manager!",
- __FUNCTION__, streamId, streamSetId);
+ StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+ if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)){
+ ALOGE("%s: stream %d with set %d(%d) wasn't properly registered to this"
+ " buffer manager!", __FUNCTION__, streamId, streamSetId, isMultiRes);
return BAD_VALUE;
}
// De-list all the buffers associated with this stream first.
- StreamSet& currentSet = mStreamSetMap.editValueFor(streamSetId);
+ StreamSet& currentSet = mStreamSetMap.editValueFor(streamSetKey);
BufferCountMap& handOutBufferCounts = currentSet.handoutBufferCountMap;
BufferCountMap& attachedBufferCounts = currentSet.attachedBufferCountMap;
InfoMap& infoMap = currentSet.streamInfoMap;
@@ -150,26 +153,28 @@
// Remove this stream set if all its streams have been removed.
if (handOutBufferCounts.size() == 0 && infoMap.size() == 0) {
- mStreamSetMap.removeItem(streamSetId);
+ mStreamSetMap.removeItem(streamSetKey);
}
return OK;
}
-void Camera3BufferManager::notifyBufferRemoved(int streamId, int streamSetId) {
+void Camera3BufferManager::notifyBufferRemoved(int streamId, int streamSetId, bool isMultiRes) {
Mutex::Autolock l(mLock);
- StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetId);
+ StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+ StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetKey);
size_t& attachedBufferCount =
streamSet.attachedBufferCountMap.editValueFor(streamId);
attachedBufferCount--;
}
status_t Camera3BufferManager::checkAndFreeBufferOnOtherStreamsLocked(
- int streamId, int streamSetId) {
+ int streamId, StreamSetKey streamSetKey) {
StreamId firstOtherStreamId = CAMERA3_STREAM_ID_INVALID;
- StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetId);
+ StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetKey);
if (streamSet.streamInfoMap.size() == 1) {
- ALOGV("StreamSet %d has no other stream available to free", streamSetId);
+ ALOGV("StreamSet %d(%d) has no other stream available to free",
+ streamSetKey.id, streamSetKey.isMultiRes);
return OK;
}
@@ -190,7 +195,8 @@
firstOtherStreamId = CAMERA3_STREAM_ID_INVALID;
}
if (firstOtherStreamId == CAMERA3_STREAM_ID_INVALID || !freeBufferIsAttached) {
- ALOGV("StreamSet %d has no buffer available to free", streamSetId);
+ ALOGV("StreamSet %d(%d) has no buffer available to free",
+ streamSetKey.id, streamSetKey.isMultiRes);
return OK;
}
@@ -237,20 +243,21 @@
}
status_t Camera3BufferManager::getBufferForStream(int streamId, int streamSetId,
- sp<GraphicBuffer>* gb, int* fenceFd, bool noFreeBufferAtConsumer) {
+ bool isMultiRes, sp<GraphicBuffer>* gb, int* fenceFd, bool noFreeBufferAtConsumer) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
- ALOGV("%s: get buffer for stream %d with stream set %d", __FUNCTION__,
- streamId, streamSetId);
+ ALOGV("%s: get buffer for stream %d with stream set %d(%d)", __FUNCTION__,
+ streamId, streamSetId, isMultiRes);
- if (!checkIfStreamRegisteredLocked(streamId, streamSetId)) {
- ALOGE("%s: stream %d is not registered with stream set %d yet!!!",
- __FUNCTION__, streamId, streamSetId);
+ StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+ if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)) {
+ ALOGE("%s: stream %d is not registered with stream set %d(%d) yet!!!",
+ __FUNCTION__, streamId, streamSetId, isMultiRes);
return BAD_VALUE;
}
- StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetId);
+ StreamSet &streamSet = mStreamSetMap.editValueFor(streamSetKey);
BufferCountMap& handOutBufferCounts = streamSet.handoutBufferCountMap;
size_t& bufferCount = handOutBufferCounts.editValueFor(streamId);
BufferCountMap& attachedBufferCounts = streamSet.attachedBufferCountMap;
@@ -272,7 +279,8 @@
bufferCount++;
return ALREADY_EXISTS;
}
- ALOGV("Stream %d set %d: Get buffer for stream: Allocate new", streamId, streamSetId);
+ ALOGV("Stream %d set %d(%d): Get buffer for stream: Allocate new",
+ streamId, streamSetId, isMultiRes);
if (mGrallocVersion < HARDWARE_DEVICE_API_VERSION(1,0)) {
const StreamInfo& info = streamSet.streamInfoMap.valueFor(streamId);
@@ -313,13 +321,13 @@
// in returnBufferForStream() if we want to free buffers more quickly.
// TODO: probably should find all the inactive stream IDs, and free the first-found
// buffers for them.
- res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetId);
+ res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetKey);
if (res != OK) {
return res;
}
// Since we just allocated one new buffer above, try free one more buffer from other streams
// to prevent total buffer count from growing
- res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetId);
+ res = checkAndFreeBufferOnOtherStreamsLocked(streamId, streamSetKey);
if (res != OK) {
return res;
}
@@ -332,7 +340,7 @@
}
status_t Camera3BufferManager::onBufferReleased(
- int streamId, int streamSetId, bool* shouldFreeBuffer) {
+ int streamId, int streamSetId, bool isMultiRes, bool* shouldFreeBuffer) {
ATRACE_CALL();
if (shouldFreeBuffer == nullptr) {
@@ -341,22 +349,24 @@
}
Mutex::Autolock l(mLock);
- ALOGV("Stream %d set %d: Buffer released", streamId, streamSetId);
+ ALOGV("Stream %d set %d(%d): Buffer released", streamId, streamSetId, isMultiRes);
*shouldFreeBuffer = false;
- if (!checkIfStreamRegisteredLocked(streamId, streamSetId)){
+ StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+ if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)){
ALOGV("%s: signaling buffer release for an already unregistered stream "
- "(stream %d with set id %d)", __FUNCTION__, streamId, streamSetId);
+ "(stream %d with set id %d(%d))", __FUNCTION__, streamId, streamSetId,
+ isMultiRes);
return OK;
}
if (mGrallocVersion < HARDWARE_DEVICE_API_VERSION(1,0)) {
- StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetId);
+ StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetKey);
BufferCountMap& handOutBufferCounts = streamSet.handoutBufferCountMap;
size_t& bufferCount = handOutBufferCounts.editValueFor(streamId);
bufferCount--;
- ALOGV("%s: Stream %d set %d: Buffer count now %zu", __FUNCTION__, streamId, streamSetId,
- bufferCount);
+ ALOGV("%s: Stream %d set %d(%d): Buffer count now %zu", __FUNCTION__, streamId,
+ streamSetId, isMultiRes, bufferCount);
size_t totalAllocatedBufferCount = 0;
size_t totalHandOutBufferCount = 0;
@@ -371,8 +381,9 @@
// BufferManager got more than enough buffers, so decrease watermark
// to trigger more buffers free operation.
streamSet.allocatedBufferWaterMark = newWaterMark;
- ALOGV("%s: Stream %d set %d: watermark--; now %zu",
- __FUNCTION__, streamId, streamSetId, streamSet.allocatedBufferWaterMark);
+ ALOGV("%s: Stream %d set %d(%d): watermark--; now %zu",
+ __FUNCTION__, streamId, streamSetId, isMultiRes,
+ streamSet.allocatedBufferWaterMark);
}
size_t attachedBufferCount = streamSet.attachedBufferCountMap.valueFor(streamId);
@@ -395,20 +406,22 @@
return OK;
}
-status_t Camera3BufferManager::onBuffersRemoved(int streamId, int streamSetId, size_t count) {
+status_t Camera3BufferManager::onBuffersRemoved(int streamId, int streamSetId,
+ bool isMultiRes, size_t count) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
- ALOGV("Stream %d set %d: Buffer removed", streamId, streamSetId);
+ ALOGV("Stream %d set %d(%d): Buffer removed", streamId, streamSetId, isMultiRes);
- if (!checkIfStreamRegisteredLocked(streamId, streamSetId)){
+ StreamSetKey streamSetKey = {streamSetId, isMultiRes};
+ if (!checkIfStreamRegisteredLocked(streamId, streamSetKey)){
ALOGV("%s: signaling buffer removal for an already unregistered stream "
- "(stream %d with set id %d)", __FUNCTION__, streamId, streamSetId);
+ "(stream %d with set id %d(%d))", __FUNCTION__, streamId, streamSetId, isMultiRes);
return OK;
}
if (mGrallocVersion < HARDWARE_DEVICE_API_VERSION(1,0)) {
- StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetId);
+ StreamSet& streamSet = mStreamSetMap.editValueFor(streamSetKey);
BufferCountMap& handOutBufferCounts = streamSet.handoutBufferCountMap;
size_t& totalHandoutCount = handOutBufferCounts.editValueFor(streamId);
BufferCountMap& attachedBufferCounts = streamSet.attachedBufferCountMap;
@@ -427,8 +440,9 @@
totalHandoutCount -= count;
totalAttachedCount -= count;
- ALOGV("%s: Stream %d set %d: Buffer count now %zu, attached buffer count now %zu",
- __FUNCTION__, streamId, streamSetId, totalHandoutCount, totalAttachedCount);
+ ALOGV("%s: Stream %d set %d(%d): Buffer count now %zu, attached buffer count now %zu",
+ __FUNCTION__, streamId, streamSetId, isMultiRes, totalHandoutCount,
+ totalAttachedCount);
} else {
// TODO: implement gralloc V1 support
return BAD_VALUE;
@@ -444,7 +458,8 @@
String8 lines;
lines.appendFormat(" Total stream sets: %zu\n", mStreamSetMap.size());
for (size_t i = 0; i < mStreamSetMap.size(); i++) {
- lines.appendFormat(" Stream set %d has below streams:\n", mStreamSetMap.keyAt(i));
+ lines.appendFormat(" Stream set %d(%d) has below streams:\n",
+ mStreamSetMap.keyAt(i).id, mStreamSetMap.keyAt(i).isMultiRes);
for (size_t j = 0; j < mStreamSetMap[i].streamInfoMap.size(); j++) {
lines.appendFormat(" Stream %d\n", mStreamSetMap[i].streamInfoMap[j].streamId);
}
@@ -470,11 +485,12 @@
write(fd, lines.string(), lines.size());
}
-bool Camera3BufferManager::checkIfStreamRegisteredLocked(int streamId, int streamSetId) const {
- ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetId);
+bool Camera3BufferManager::checkIfStreamRegisteredLocked(int streamId,
+ StreamSetKey streamSetKey) const {
+ ssize_t setIdx = mStreamSetMap.indexOfKey(streamSetKey);
if (setIdx == NAME_NOT_FOUND) {
- ALOGV("%s: stream set %d is not registered to stream set map yet!",
- __FUNCTION__, streamSetId);
+ ALOGV("%s: stream set %d(%d) is not registered to stream set map yet!",
+ __FUNCTION__, streamSetKey.id, streamSetKey.isMultiRes);
return false;
}
@@ -486,9 +502,10 @@
size_t bufferWaterMark = mStreamSetMap[setIdx].maxAllowedBufferCount;
if (bufferWaterMark == 0 || bufferWaterMark > kMaxBufferCount) {
- ALOGW("%s: stream %d with stream set %d is not registered correctly to stream set map,"
+ ALOGW("%s: stream %d with stream set %d(%d) is not registered correctly to stream set map,"
" as the water mark (%zu) is wrong!",
- __FUNCTION__, streamId, streamSetId, bufferWaterMark);
+ __FUNCTION__, streamId, streamSetKey.id, streamSetKey.isMultiRes,
+ bufferWaterMark);
return false;
}
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.h b/services/camera/libcameraservice/device3/Camera3BufferManager.h
index f0de1c1..64aaa230 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.h
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.h
@@ -99,7 +99,7 @@
* combination doesn't match what was registered, or this stream wasn't registered
* to this buffer manager before.
*/
- status_t unregisterStream(int streamId, int streamSetId);
+ status_t unregisterStream(int streamId, int streamSetId, bool isMultiRes);
/**
* This method obtains a buffer for a stream from this buffer manager.
@@ -127,8 +127,8 @@
* NO_MEMORY: Unable to allocate a buffer for this stream at this time.
*/
status_t getBufferForStream(
- int streamId, int streamSetId, sp<GraphicBuffer>* gb, int* fenceFd,
- bool noFreeBufferAtConsumer = false);
+ int streamId, int streamSetId, bool isMultiRes, sp<GraphicBuffer>* gb,
+ int* fenceFd, bool noFreeBufferAtConsumer = false);
/**
* This method notifies the manager that a buffer has been released by the consumer.
@@ -153,7 +153,8 @@
* combination doesn't match what was registered, or this stream wasn't registered
* to this buffer manager before, or shouldFreeBuffer is null.
*/
- status_t onBufferReleased(int streamId, int streamSetId, /*out*/bool* shouldFreeBuffer);
+ status_t onBufferReleased(int streamId, int streamSetId, bool isMultiRes,
+ /*out*/bool* shouldFreeBuffer);
/**
* This method notifies the manager that certain buffers have been removed from the
@@ -171,13 +172,13 @@
* to this buffer manager before, or the removed buffer count is larger than
* current total handoutCount or attachedCount.
*/
- status_t onBuffersRemoved(int streamId, int streamSetId, size_t count);
+ status_t onBuffersRemoved(int streamId, int streamSetId, bool isMultiRes, size_t count);
/**
* This method notifies the manager that a buffer is freed from the buffer queue, usually
* because onBufferReleased signals the caller to free a buffer via the shouldFreeBuffer flag.
*/
- void notifyBufferRemoved(int streamId, int streamSetId);
+ void notifyBufferRemoved(int streamId, int streamSetId, bool isMultiRes);
/**
* Dump the buffer manager statistics.
@@ -292,8 +293,20 @@
/**
* Stream set map managed by this buffer manager.
*/
- typedef int StreamSetId;
- KeyedVector<StreamSetId, StreamSet> mStreamSetMap;
+ struct StreamSetKey {
+ // The stream set ID
+ int id;
+ // Whether this stream set is for multi-resolution output streams. It's
+ // valid for two stream sets to share the same stream set ID if one is for
+ // multi-resolution output streams and the other is not.
+ bool isMultiRes;
+
+ inline bool operator<(const StreamSetKey& other) const {
+ return (isMultiRes < other.isMultiRes) ||
+ ((isMultiRes == other.isMultiRes) && (id < other.id));
+ }
+ };
+ KeyedVector<StreamSetKey, StreamSet> mStreamSetMap;
KeyedVector<StreamId, wp<Camera3OutputStream>> mStreamMap;
// TODO: There is no easy way to query the Gralloc version in this code yet, we have different
@@ -304,13 +317,13 @@
* Check if this stream was successfully registered already. This method needs to be called with
* mLock held.
*/
- bool checkIfStreamRegisteredLocked(int streamId, int streamSetId) const;
+ bool checkIfStreamRegisteredLocked(int streamId, StreamSetKey streamSetKey) const;
/**
* Check if other streams in the stream set have an extra buffer available to be freed, and
* free one if so.
*/
- status_t checkAndFreeBufferOnOtherStreamsLocked(int streamId, int streamSetId);
+ status_t checkAndFreeBufferOnOtherStreamsLocked(int streamId, StreamSetKey streamSetKey);
};
} // namespace camera3
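The StreamSetKey introduced above lets two stream sets share the same numeric ID as long as exactly one of them is multi-resolution, and its operator< supplies the strict weak ordering the keyed container needs. A small self-contained sketch of the same idea using std::map in place of KeyedVector (values are illustrative):

    #include <cstdio>
    #include <map>
    #include <string>

    struct StreamSetKey {
        int id;
        bool isMultiRes;
        bool operator<(const StreamSetKey& other) const {
            // Order by (isMultiRes, id), mirroring the comparison in the diff above.
            return (isMultiRes < other.isMultiRes) ||
                   (isMultiRes == other.isMultiRes && id < other.id);
        }
    };

    int main() {
        std::map<StreamSetKey, std::string> streamSets;
        streamSets[{7, false}] = "regular set 7";
        streamSets[{7, true}]  = "multi-res set 7";
        // Both entries coexist because the key also carries the multi-res flag.
        printf("entries: %zu\n", streamSets.size());  // prints 2
        return 0;
    }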
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 73a133f..18eb57e 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -313,6 +313,7 @@
mFakeStreamId = NO_STREAM;
mNeedConfig = true;
mPauseStateNotify = false;
+ mIsInputStreamMultiResolution = false;
// Measure the clock domain offset between camera and video/hw_composer
camera_metadata_entry timestampSource =
@@ -481,7 +482,7 @@
return gotLock;
}
-Camera3Device::Size Camera3Device::getMaxJpegResolution() const {
+camera3::Size Camera3Device::getMaxJpegResolution() const {
int32_t maxJpegWidth = 0, maxJpegHeight = 0;
const int STREAM_CONFIGURATION_SIZE = 4;
const int STREAM_FORMAT_OFFSET = 0;
@@ -492,7 +493,7 @@
mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
if (availableStreamConfigs.count == 0 ||
availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
- return Size(0, 0);
+ return camera3::Size(0, 0);
}
// Get max jpeg size (area-wise).
@@ -509,7 +510,7 @@
}
}
- return Size(maxJpegWidth, maxJpegHeight);
+ return camera3::Size(maxJpegWidth, maxJpegHeight);
}
nsecs_t Camera3Device::getMonoToBoottimeOffset() {
@@ -603,7 +604,7 @@
ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
// Get max jpeg size (area-wise).
- Size maxJpegResolution = getMaxJpegResolution();
+ camera3::Size maxJpegResolution = getMaxJpegResolution();
if (maxJpegResolution.width == 0) {
ALOGE("%s: Camera %s: Can't find valid available jpeg sizes in static metadata!",
__FUNCTION__, mId.string());
@@ -1252,7 +1253,7 @@
}
status_t Camera3Device::createInputStream(
- uint32_t width, uint32_t height, int format, int *id) {
+ uint32_t width, uint32_t height, int format, bool isMultiResolution, int *id) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
@@ -1299,6 +1300,7 @@
newStream->setStatusTracker(mStatusTracker);
mInputStream = newStream;
+ mIsInputStreamMultiResolution = isMultiResolution;
*id = mNextStreamId++;
@@ -1323,7 +1325,8 @@
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const String8& physicalCameraId,
- std::vector<int> *surfaceIds, int streamSetId, bool isShared, uint64_t consumerUsage) {
+ std::vector<int> *surfaceIds, int streamSetId, bool isShared,
+ bool isMultiResolution, uint64_t consumerUsage) {
ATRACE_CALL();
if (consumer == nullptr) {
@@ -1336,23 +1339,24 @@
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, surfaceIds, streamSetId,
- isShared, consumerUsage);
+ isShared, isMultiResolution, consumerUsage);
}
status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const String8& physicalCameraId,
- std::vector<int> *surfaceIds, int streamSetId, bool isShared, uint64_t consumerUsage) {
+ std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
+ uint64_t consumerUsage) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
Mutex::Autolock l(mLock);
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
- " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s", mId.string(),
- mNextStreamId, width, height, format, dataSpace, rotation, consumerUsage, isShared,
- physicalCameraId.string());
+ " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d",
+ mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
+ consumerUsage, isShared, physicalCameraId.string(), isMultiResolution);
status_t res;
bool wasActive = false;
@@ -1414,7 +1418,7 @@
}
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, blobBufferSize, format, dataSpace, rotation,
- mTimestampOffset, physicalCameraId, streamSetId);
+ mTimestampOffset, physicalCameraId, streamSetId, isMultiResolution);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
ssize_t rawOpaqueBufferSize = getRawOpaqueBufferSize(width, height);
if (rawOpaqueBufferSize <= 0) {
@@ -1423,20 +1427,19 @@
}
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
- mTimestampOffset, physicalCameraId, streamSetId);
+ mTimestampOffset, physicalCameraId, streamSetId, isMultiResolution);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
- mTimestampOffset, physicalCameraId, streamSetId,
- mUseHalBufManager);
+ mTimestampOffset, physicalCameraId, streamSetId, mUseHalBufManager);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
- mTimestampOffset, physicalCameraId, streamSetId);
+ mTimestampOffset, physicalCameraId, streamSetId, isMultiResolution);
} else {
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, format, dataSpace, rotation,
- mTimestampOffset, physicalCameraId, streamSetId);
+ mTimestampOffset, physicalCameraId, streamSetId, isMultiResolution);
}
size_t consumerCount = consumers.size();
@@ -2549,8 +2552,9 @@
if (mInputStream != NULL && notifyRequestThread) {
while (true) {
camera_stream_buffer_t inputBuffer;
+ camera3::Size inputBufferSize;
status_t res = mInputStream->getInputBuffer(&inputBuffer,
- /*respectHalLimit*/ false);
+ &inputBufferSize, /*respectHalLimit*/ false);
if (res != OK) {
// Exhausted acquiring all input buffers.
break;
@@ -2587,6 +2591,7 @@
camera_stream_configuration config;
config.operation_mode = mOperatingMode;
config.num_streams = (mInputStream != NULL) + mOutputStreams.size();
+ config.input_is_multi_resolution = false;
Vector<camera3::camera_stream_t*> streams;
streams.setCapacity(config.num_streams);
@@ -2602,6 +2607,8 @@
return INVALID_OPERATION;
}
streams.add(inputStream);
+
+ config.input_is_multi_resolution = mIsInputStreamMultiResolution;
}
for (size_t i = 0; i < mOutputStreams.size(); i++) {
@@ -2999,6 +3006,10 @@
mSupportOfflineProcessing(supportOfflineProcessing) {
// Check with hardware service manager if we can downcast these interfaces
// Somewhat expensive, so cache the results at startup
+ auto castResult_3_7 = device::V3_7::ICameraDeviceSession::castFrom(mHidlSession);
+ if (castResult_3_7.isOk()) {
+ mHidlSession_3_7 = castResult_3_7;
+ }
auto castResult_3_6 = device::V3_6::ICameraDeviceSession::castFrom(mHidlSession);
if (castResult_3_6.isOk()) {
mHidlSession_3_6 = castResult_3_6;
@@ -3032,6 +3043,7 @@
}
void Camera3Device::HalInterface::clear() {
+ mHidlSession_3_7.clear();
mHidlSession_3_6.clear();
mHidlSession_3_5.clear();
mHidlSession_3_4.clear();
@@ -3158,15 +3170,23 @@
if (!valid()) return INVALID_OPERATION;
status_t res = OK;
+ if (config->input_is_multi_resolution && mHidlSession_3_7 == nullptr) {
+ ALOGE("%s: Camera device doesn't support multi-resolution input stream", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
// Convert stream config to HIDL
std::set<int> activeStreams;
device::V3_2::StreamConfiguration requestedConfiguration3_2;
device::V3_4::StreamConfiguration requestedConfiguration3_4;
+ device::V3_7::StreamConfiguration requestedConfiguration3_7;
requestedConfiguration3_2.streams.resize(config->num_streams);
requestedConfiguration3_4.streams.resize(config->num_streams);
+ requestedConfiguration3_7.streams.resize(config->num_streams);
for (size_t i = 0; i < config->num_streams; i++) {
device::V3_2::Stream &dst3_2 = requestedConfiguration3_2.streams[i];
device::V3_4::Stream &dst3_4 = requestedConfiguration3_4.streams[i];
+ device::V3_7::Stream &dst3_7 = requestedConfiguration3_7.streams[i];
camera3::camera_stream_t *src = config->streams[i];
Camera3Stream* cam3stream = Camera3Stream::cast(src);
@@ -3207,6 +3227,8 @@
if (src->physical_camera_id != nullptr) {
dst3_4.physicalCameraId = src->physical_camera_id;
}
+ dst3_7.v3_4 = dst3_4;
+ dst3_7.groupId = cam3stream->getHalStreamGroupId();
activeStreams.insert(streamId);
// Create Buffer ID map if necessary
@@ -3227,6 +3249,10 @@
requestedConfiguration3_4.sessionParams.setToExternal(
reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
get_camera_metadata_size(sessionParams));
+ requestedConfiguration3_7.operationMode = operationMode;
+ requestedConfiguration3_7.sessionParams.setToExternal(
+ reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
+ get_camera_metadata_size(sessionParams));
// Invoke configureStreams
device::V3_3::HalStreamConfiguration finalConfiguration;
@@ -3273,7 +3299,17 @@
};
// See which version of HAL we have
- if (mHidlSession_3_6 != nullptr) {
+ if (mHidlSession_3_7 != nullptr) {
+ ALOGV("%s: v3.7 device found", __FUNCTION__);
+ requestedConfiguration3_7.streamConfigCounter = mNextStreamConfigCounter++;
+ requestedConfiguration3_7.multiResolutionInputImage = config->input_is_multi_resolution;
+ auto err = mHidlSession_3_7->configureStreams_3_7(
+ requestedConfiguration3_7, configStream36Cb);
+ res = postprocConfigStream36(err);
+ if (res != OK) {
+ return res;
+ }
+ } else if (mHidlSession_3_6 != nullptr) {
ALOGV("%s: v3.6 device found", __FUNCTION__);
device::V3_5::StreamConfiguration requestedConfiguration3_5;
requestedConfiguration3_5.v3_4 = requestedConfiguration3_4;
@@ -3531,6 +3567,11 @@
if (!valid()) return INVALID_OPERATION;
sp<device::V3_4::ICameraDeviceSession> hidlSession_3_4;
+ sp<device::V3_7::ICameraDeviceSession> hidlSession_3_7;
+ auto castResult_3_7 = device::V3_7::ICameraDeviceSession::castFrom(mHidlSession);
+ if (castResult_3_7.isOk()) {
+ hidlSession_3_7 = castResult_3_7;
+ }
auto castResult_3_4 = device::V3_4::ICameraDeviceSession::castFrom(mHidlSession);
if (castResult_3_4.isOk()) {
hidlSession_3_4 = castResult_3_4;
@@ -3538,8 +3579,11 @@
hardware::hidl_vec<device::V3_2::CaptureRequest> captureRequests;
hardware::hidl_vec<device::V3_4::CaptureRequest> captureRequests_3_4;
+ hardware::hidl_vec<device::V3_7::CaptureRequest> captureRequests_3_7;
size_t batchSize = requests.size();
- if (hidlSession_3_4 != nullptr) {
+ if (hidlSession_3_7 != nullptr) {
+ captureRequests_3_7.resize(batchSize);
+ } else if (hidlSession_3_4 != nullptr) {
captureRequests_3_4.resize(batchSize);
} else {
captureRequests.resize(batchSize);
@@ -3549,7 +3593,10 @@
status_t res = OK;
for (size_t i = 0; i < batchSize; i++) {
- if (hidlSession_3_4 != nullptr) {
+ if (hidlSession_3_7 != nullptr) {
+ res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_7[i].v3_4.v3_2,
+ /*out*/&handlesCreated, /*out*/&inflightBuffers);
+ } else if (hidlSession_3_4 != nullptr) {
res = wrapAsHidlRequest(requests[i], /*out*/&captureRequests_3_4[i].v3_2,
/*out*/&handlesCreated, /*out*/&inflightBuffers);
} else {
@@ -3582,7 +3629,9 @@
for (size_t i = 0; i < batchSize; i++) {
camera_capture_request_t* request = requests[i];
device::V3_2::CaptureRequest* captureRequest;
- if (hidlSession_3_4 != nullptr) {
+ if (hidlSession_3_7 != nullptr) {
+ captureRequest = &captureRequests_3_7[i].v3_4.v3_2;
+ } else if (hidlSession_3_4 != nullptr) {
captureRequest = &captureRequests_3_4[i].v3_2;
} else {
captureRequest = &captureRequests[i];
@@ -3609,33 +3658,42 @@
captureRequest->fmqSettingsSize = 0u;
}
- if (hidlSession_3_4 != nullptr) {
- captureRequests_3_4[i].physicalCameraSettings.resize(request->num_physcam_settings);
+ // hidl session 3.7 specific handling.
+ if (hidlSession_3_7 != nullptr) {
+ captureRequests_3_7[i].inputWidth = request->input_width;
+ captureRequests_3_7[i].inputHeight = request->input_height;
+ }
+
+ // hidl session 3.7 and 3.4 specific handling.
+ if (hidlSession_3_7 != nullptr || hidlSession_3_4 != nullptr) {
+ hardware::hidl_vec<device::V3_4::PhysicalCameraSetting>& physicalCameraSettings =
+ (hidlSession_3_7 != nullptr) ?
+ captureRequests_3_7[i].v3_4.physicalCameraSettings :
+ captureRequests_3_4[i].physicalCameraSettings;
+ physicalCameraSettings.resize(request->num_physcam_settings);
for (size_t j = 0; j < request->num_physcam_settings; j++) {
if (request->physcam_settings != nullptr) {
size_t settingsSize = get_camera_metadata_size(request->physcam_settings[j]);
if (mRequestMetadataQueue != nullptr && mRequestMetadataQueue->write(
reinterpret_cast<const uint8_t*>(request->physcam_settings[j]),
settingsSize)) {
- captureRequests_3_4[i].physicalCameraSettings[j].settings.resize(0);
- captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize =
- settingsSize;
+ physicalCameraSettings[j].settings.resize(0);
+ physicalCameraSettings[j].fmqSettingsSize = settingsSize;
} else {
if (mRequestMetadataQueue != nullptr) {
ALOGW("%s: couldn't utilize fmq, fallback to hwbinder", __FUNCTION__);
}
- captureRequests_3_4[i].physicalCameraSettings[j].settings.setToExternal(
+ physicalCameraSettings[j].settings.setToExternal(
reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(
request->physcam_settings[j])),
get_camera_metadata_size(request->physcam_settings[j]));
- captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize = 0u;
+ physicalCameraSettings[j].fmqSettingsSize = 0u;
}
} else {
captureRequests_3_4[i].physicalCameraSettings[j].fmqSettingsSize = 0u;
captureRequests_3_4[i].physicalCameraSettings[j].settings.resize(0);
}
- captureRequests_3_4[i].physicalCameraSettings[j].physicalCameraId =
- request->physcam_id[j];
+ physicalCameraSettings[j].physicalCameraId = request->physcam_id[j];
}
}
}
@@ -3646,7 +3704,10 @@
status = s;
*numRequestProcessed = n;
};
- if (hidlSession_3_4 != nullptr) {
+ if (hidlSession_3_7 != nullptr) {
+ err = hidlSession_3_7->processCaptureRequest_3_7(captureRequests_3_7, cachesToRemove,
+ resultCallback);
+ } else if (hidlSession_3_4 != nullptr) {
err = hidlSession_3_4->processCaptureRequest_3_4(captureRequests_3_4, cachesToRemove,
resultCallback);
} else {
@@ -4055,8 +4116,9 @@
// Abort the input buffers for reprocess requests.
if ((*it)->mInputStream != NULL) {
camera_stream_buffer_t inputBuffer;
+ camera3::Size inputBufferSize;
status_t res = (*it)->mInputStream->getInputBuffer(&inputBuffer,
- /*respectHalLimit*/ false);
+ &inputBufferSize, /*respectHalLimit*/ false);
if (res != OK) {
ALOGW("%s: %d: couldn't get input buffer while clearing the request "
"list: %s (%d)", __FUNCTION__, __LINE__, strerror(-res), res);
@@ -4262,33 +4324,34 @@
void Camera3Device::RequestThread::updateNextRequest(NextRequest& nextRequest) {
// Update the latest request sent to HAL
- if (nextRequest.halRequest.settings != NULL) { // Don't update if they were unchanged
+ camera_capture_request_t& halRequest = nextRequest.halRequest;
+ if (halRequest.settings != NULL) { // Don't update if they were unchanged
Mutex::Autolock al(mLatestRequestMutex);
- camera_metadata_t* cloned = clone_camera_metadata(nextRequest.halRequest.settings);
+ camera_metadata_t* cloned = clone_camera_metadata(halRequest.settings);
mLatestRequest.acquire(cloned);
mLatestPhysicalRequest.clear();
- for (uint32_t i = 0; i < nextRequest.halRequest.num_physcam_settings; i++) {
- cloned = clone_camera_metadata(nextRequest.halRequest.physcam_settings[i]);
- mLatestPhysicalRequest.emplace(nextRequest.halRequest.physcam_id[i],
+ for (uint32_t i = 0; i < halRequest.num_physcam_settings; i++) {
+ cloned = clone_camera_metadata(halRequest.physcam_settings[i]);
+ mLatestPhysicalRequest.emplace(halRequest.physcam_id[i],
CameraMetadata(cloned));
}
sp<Camera3Device> parent = mParent.promote();
if (parent != NULL) {
parent->monitorMetadata(TagMonitor::REQUEST,
- nextRequest.halRequest.frame_number,
+ halRequest.frame_number,
0, mLatestRequest, mLatestPhysicalRequest);
}
}
- if (nextRequest.halRequest.settings != NULL) {
+ if (halRequest.settings != NULL) {
nextRequest.captureRequest->mSettingsList.begin()->metadata.unlock(
- nextRequest.halRequest.settings);
+ halRequest.settings);
}
- cleanupPhysicalSettings(nextRequest.captureRequest, &nextRequest.halRequest);
+ cleanupPhysicalSettings(nextRequest.captureRequest, &halRequest);
}
bool Camera3Device::RequestThread::updateSessionParameters(const CameraMetadata& settings) {
@@ -4651,6 +4714,9 @@
// Fill in buffers
if (captureRequest->mInputStream != NULL) {
halRequest->input_buffer = &captureRequest->mInputBuffer;
+
+ halRequest->input_width = captureRequest->mInputBufferSize.width;
+ halRequest->input_height = captureRequest->mInputBufferSize.height;
totalNumBuffers += 1;
} else {
halRequest->input_buffer = NULL;
@@ -4754,13 +4820,7 @@
}
String8 physicalCameraId = outputStream->getPhysicalCameraId();
-
if (!physicalCameraId.isEmpty()) {
- // Physical stream isn't supported for input request.
- if (halRequest->input_buffer) {
- CLOGE("Physical stream is not supported for input request");
- return INVALID_OPERATION;
- }
requestedPhysicalCameras.insert(physicalCameraId);
}
halRequest->num_output_buffers++;
@@ -5237,7 +5297,8 @@
// Since RequestThread::clear() removes buffers from the input stream,
// get the right buffer here before unlocking mRequestLock
if (nextRequest->mInputStream != NULL) {
- res = nextRequest->mInputStream->getInputBuffer(&nextRequest->mInputBuffer);
+ res = nextRequest->mInputStream->getInputBuffer(&nextRequest->mInputBuffer,
+ &nextRequest->mInputBufferSize);
if (res != OK) {
// Can't get input buffer from gralloc queue - this could be due to
// disconnected queue or other producer misbehavior, so not a fatal
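In the request-building changes above, the 3.7 and 3.4 paths share one loop by first binding a reference to whichever physicalCameraSettings vector the selected HAL version owns. A stripped-down sketch of that pattern (the struct layout is invented; only the nesting mirrors the diff):

    #include <cstdio>
    #include <vector>

    // Invented per-version request wrappers; only the shared vector matters here.
    struct RequestV34 { std::vector<int> physicalCameraSettings; };
    struct RequestV37 { RequestV34 v3_4; int inputWidth = 0; int inputHeight = 0; };

    int main() {
        bool haveV37 = true;
        RequestV37 req37;
        RequestV34 req34;

        // Pick the target vector once, then fill it with a single loop.
        std::vector<int>& physical =
                haveV37 ? req37.v3_4.physicalCameraSettings : req34.physicalCameraSettings;
        physical.resize(3);
        for (size_t i = 0; i < physical.size(); i++) {
            physical[i] = static_cast<int>(i);  // placeholder for per-camera settings
        }
        printf("filled %zu physical settings\n", physical.size());
        return 0;
    }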
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 09fa30a..018dbe5 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -35,6 +35,7 @@
#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
#include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
#include <android/hardware/camera/device/3.6/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
#include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
@@ -52,6 +53,7 @@
#include "device3/InFlightRequest.h"
#include "device3/Camera3OutputInterface.h"
#include "device3/Camera3OfflineSession.h"
+#include "device3/Camera3StreamInterface.h"
#include "utils/TagMonitor.h"
#include "utils/LatencyHistogram.h"
#include <camera_metadata_hidden.h>
@@ -71,7 +73,6 @@
class Camera3Stream;
class Camera3ZslStream;
-class Camera3OutputStreamInterface;
class Camera3StreamInterface;
} // namespace camera3
@@ -133,17 +134,19 @@
const String8& physicalCameraId,
std::vector<int> *surfaceIds = nullptr,
int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
- bool isShared = false, uint64_t consumerUsage = 0) override;
+ bool isShared = false, bool isMultiResolution = false,
+ uint64_t consumerUsage = 0) override;
status_t createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const String8& physicalCameraId,
std::vector<int> *surfaceIds = nullptr,
int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
- bool isShared = false, uint64_t consumerUsage = 0) override;
+ bool isShared = false, bool isMultiResolution = false,
+ uint64_t consumerUsage = 0) override;
status_t createInputStream(
- uint32_t width, uint32_t height, int format,
+ uint32_t width, uint32_t height, int format, bool isMultiResolution,
int *id) override;
status_t getStreamInfo(int id, StreamInfo *streamInfo) override;
@@ -418,6 +421,8 @@
sp<hardware::camera::device::V3_5::ICameraDeviceSession> mHidlSession_3_5;
// Valid if ICameraDeviceSession is @3.6 or newer
sp<hardware::camera::device::V3_6::ICameraDeviceSession> mHidlSession_3_6;
+ // Valid if ICameraDeviceSession is @3.7 or newer
+ sp<hardware::camera::device::V3_7::ICameraDeviceSession> mHidlSession_3_7;
std::shared_ptr<RequestMetadataQueue> mRequestMetadataQueue;
@@ -490,6 +495,7 @@
camera3::StreamSet mOutputStreams;
sp<camera3::Camera3Stream> mInputStream;
+ bool mIsInputStreamMultiResolution;
SessionStatsBuilder mSessionStatsBuilder;
int mNextStreamId;
@@ -523,6 +529,7 @@
PhysicalCameraSettingsList mSettingsList;
sp<camera3::Camera3Stream> mInputStream;
camera_stream_buffer_t mInputBuffer;
+ camera3::Size mInputBufferSize;
Vector<sp<camera3::Camera3OutputStreamInterface> >
mOutputStreams;
SurfaceMap mOutputSurfaces;
@@ -748,7 +755,7 @@
* Helper function to get the largest Jpeg resolution (in area)
* Return Size(0, 0) if static metadata is invalid
*/
- Size getMaxJpegResolution() const;
+ camera3::Size getMaxJpegResolution() const;
/**
* Helper function to get the offset between MONOTONIC and BOOTTIME
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index f6acda8..a837900 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -32,10 +32,10 @@
Camera3IOStreamBase::Camera3IOStreamBase(int id, camera_stream_type_t type,
uint32_t width, uint32_t height, size_t maxSize, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation,
- const String8& physicalCameraId, int setId) :
+ const String8& physicalCameraId, int setId, bool isMultiResolution) :
Camera3Stream(id, type,
width, height, maxSize, format, dataSpace, rotation,
- physicalCameraId, setId),
+ physicalCameraId, setId, isMultiResolution),
mTotalBufferCount(0),
mHandoutTotalBufferCount(0),
mHandoutOutputBufferCount(0),
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 719fa14..2e744ee 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -36,7 +36,7 @@
uint32_t width, uint32_t height, size_t maxSize, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation,
const String8& physicalCameraId,
- int setId = CAMERA3_STREAM_SET_ID_INVALID);
+ int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
public:
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index ad70a3a..b00a963 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -46,10 +46,14 @@
}
status_t Camera3InputStream::getInputBufferLocked(
- camera_stream_buffer *buffer) {
+ camera_stream_buffer *buffer, Size *size) {
ATRACE_CALL();
status_t res;
+ if (size == nullptr) {
+ ALOGE("%s: size must not be null", __FUNCTION__);
+ return BAD_VALUE;
+ }
// FIXME: will not work in (re-)registration
if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) {
ALOGE("%s: Stream %d: Buffer registration for input streams"
@@ -77,10 +81,12 @@
return res;
}
+ size->width = bufferItem.mGraphicBuffer->getWidth();
+ size->height = bufferItem.mGraphicBuffer->getHeight();
+
anb = bufferItem.mGraphicBuffer->getNativeBuffer();
assert(anb != NULL);
fenceFd = bufferItem.mFence->dup();
-
/**
* FenceFD now owned by HAL except in case of error,
* in which case we reassign it to acquire_fence
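getInputBufferLocked now reports the acquired buffer's actual dimensions through an out-parameter, which the device later copies into the request's input_width/input_height so a multi-resolution reprocess request tells the HAL which input size it received. A minimal sketch of that contract (AcquiredBuffer stands in for the real GraphicBuffer, and the error codes are simplified):

    #include <cstdio>

    struct Size { unsigned width; unsigned height; };
    struct AcquiredBuffer { unsigned w; unsigned h; };  // stand-in for GraphicBuffer

    // The caller must pass a non-null Size so the actual input dimensions are recorded.
    int getInputBuffer(const AcquiredBuffer& acquired, Size* size) {
        if (size == nullptr) {
            return -1;               // BAD_VALUE in the real code
        }
        size->width = acquired.w;
        size->height = acquired.h;
        return 0;                    // OK
    }

    int main() {
        Size s{};
        if (getInputBuffer({1920, 1080}, &s) == 0) {
            printf("input buffer is %ux%u\n", s.width, s.height);
        }
        return 0;
    }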
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 03afa17..46221d1 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -70,7 +70,7 @@
* Camera3Stream interface
*/
- virtual status_t getInputBufferLocked(camera_stream_buffer *buffer);
+ virtual status_t getInputBufferLocked(camera_stream_buffer *buffer, Size *size);
virtual status_t returnInputBufferLocked(
const camera_stream_buffer &buffer);
virtual status_t getInputBufferProducerLocked(
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index c835f51..3ec3b6b 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -44,10 +44,10 @@
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation,
nsecs_t timestampOffset, const String8& physicalCameraId,
- int setId) :
+ int setId, bool isMultiResolution) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
- physicalCameraId, setId),
+ physicalCameraId, setId, isMultiResolution),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
@@ -70,9 +70,11 @@
sp<Surface> consumer,
uint32_t width, uint32_t height, size_t maxSize, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation,
- nsecs_t timestampOffset, const String8& physicalCameraId, int setId) :
+ nsecs_t timestampOffset, const String8& physicalCameraId, int setId,
+ bool isMultiResolution) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
- format, dataSpace, rotation, physicalCameraId, setId),
+ format, dataSpace, rotation, physicalCameraId, setId,
+ isMultiResolution),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
@@ -102,10 +104,10 @@
uint32_t width, uint32_t height, int format,
uint64_t consumerUsage, android_dataspace dataSpace,
camera_stream_rotation_t rotation, nsecs_t timestampOffset,
- const String8& physicalCameraId, int setId) :
+ const String8& physicalCameraId, int setId, bool isMultiResolution) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
- physicalCameraId, setId),
+ physicalCameraId, setId, isMultiResolution),
mConsumer(nullptr),
mTransform(0),
mTraceFirstBuffer(true),
@@ -141,11 +143,11 @@
camera_stream_rotation_t rotation,
const String8& physicalCameraId,
uint64_t consumerUsage, nsecs_t timestampOffset,
- int setId) :
+ int setId, bool isMultiResolution) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
format, dataSpace, rotation,
- physicalCameraId, setId),
+ physicalCameraId, setId, isMultiResolution),
mTransform(0),
mTraceFirstBuffer(true),
mUseMonoTimestamp(false),
@@ -570,10 +572,12 @@
!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
uint64_t consumerUsage = 0;
getEndpointUsage(&consumerUsage);
+ uint32_t width = (mMaxSize == 0) ? getWidth() : mMaxSize;
+ uint32_t height = (mMaxSize == 0) ? getHeight() : 1;
StreamInfo streamInfo(
- getId(), getStreamSetId(), getWidth(), getHeight(), getFormat(), getDataSpace(),
+ getId(), getStreamSetId(), width, height, getFormat(), getDataSpace(),
mUsage | consumerUsage, mTotalBufferCount,
- /*isConfigured*/true);
+ /*isConfigured*/true, isMultiResolution());
wp<Camera3OutputStream> weakThis(this);
res = mBufferManager->registerStream(weakThis,
streamInfo);
@@ -604,7 +608,8 @@
if (mUseBufferManager) {
sp<GraphicBuffer> gb;
- res = mBufferManager->getBufferForStream(getId(), getStreamSetId(), &gb, fenceFd);
+ res = mBufferManager->getBufferForStream(getId(), getStreamSetId(),
+ isMultiResolution(), &gb, fenceFd);
if (res == OK) {
// Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
// successful return.
@@ -693,7 +698,8 @@
sp<GraphicBuffer> gb;
res = mBufferManager->getBufferForStream(
- getId(), getStreamSetId(), &gb, fenceFd, /*noFreeBuffer*/true);
+ getId(), getStreamSetId(), isMultiResolution(),
+ &gb, fenceFd, /*noFreeBuffer*/true);
if (res == OK) {
// Attach this buffer to the bufferQueue: the buffer will be in dequeue state after
@@ -740,7 +746,8 @@
onBuffersRemovedLocked(removedBuffers);
if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
- mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), removedBuffers.size());
+ mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), isMultiResolution(),
+ removedBuffers.size());
}
}
}
@@ -802,7 +809,7 @@
// Since the device is already idle, there is no getBuffer call to the buffer manager, so unregistering the
// stream at this point should be safe.
if (mUseBufferManager) {
- res = mBufferManager->unregisterStream(getId(), getStreamSetId());
+ res = mBufferManager->unregisterStream(getId(), getStreamSetId(), isMultiResolution());
if (res != OK) {
ALOGE("%s: Unable to unregister stream %d from buffer manager "
"(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
@@ -914,7 +921,8 @@
ALOGV("Stream %d: Buffer released", stream->getId());
bool shouldFreeBuffer = false;
status_t res = stream->mBufferManager->onBufferReleased(
- stream->getId(), stream->getStreamSetId(), &shouldFreeBuffer);
+ stream->getId(), stream->getStreamSetId(), stream->isMultiResolution(),
+ &shouldFreeBuffer);
if (res != OK) {
ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
strerror(-res), res);
@@ -927,7 +935,7 @@
stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
if (buffer.get() != nullptr) {
stream->mBufferManager->notifyBufferRemoved(
- stream->getId(), stream->getStreamSetId());
+ stream->getId(), stream->getStreamSetId(), stream->isMultiResolution());
}
}
}
@@ -945,7 +953,7 @@
stream->onBuffersRemovedLocked(buffers);
if (stream->mUseBufferManager) {
stream->mBufferManager->onBuffersRemoved(stream->getId(),
- stream->getStreamSetId(), buffers.size());
+ stream->getStreamSetId(), stream->isMultiResolution(), buffers.size());
}
ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
}
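When registering with the buffer manager, the output stream above reports maxSize x 1 for blob-style streams, whose buffers are flat byte arrays, and the real width x height otherwise, alongside the new multi-resolution flag. A tiny sketch of that dimension selection (names are illustrative):

    #include <cstdio>

    struct EffectiveDims { unsigned width; unsigned height; };

    // Blob formats register as one row of maxSize bytes; pixel formats keep their geometry.
    EffectiveDims registrationDims(unsigned width, unsigned height, unsigned maxSize) {
        if (maxSize == 0) {
            return {width, height};
        }
        return {maxSize, 1};
    }

    int main() {
        EffectiveDims yuv  = registrationDims(1920, 1080, 0);
        EffectiveDims jpeg = registrationDims(1920, 1080, 3 * 1024 * 1024);
        printf("yuv %ux%u, jpeg %ux%u\n", yuv.width, yuv.height, jpeg.width, jpeg.height);
        return 0;
    }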
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 366d22a..c82f2a6 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -48,6 +48,7 @@
uint64_t combinedUsage;
size_t totalBufferCount;
bool isConfigured;
+ bool isMultiRes;
explicit StreamInfo(int id = CAMERA3_STREAM_ID_INVALID,
int setId = CAMERA3_STREAM_SET_ID_INVALID,
uint32_t w = 0,
@@ -56,7 +57,8 @@
android_dataspace ds = HAL_DATASPACE_UNKNOWN,
uint64_t usage = 0,
size_t bufferCount = 0,
- bool configured = false) :
+ bool configured = false,
+ bool multiRes = false) :
streamId(id),
streamSetId(setId),
width(w),
@@ -65,7 +67,8 @@
dataSpace(ds),
combinedUsage(usage),
totalBufferCount(bufferCount),
- isConfigured(configured){}
+ isConfigured(configured),
+ isMultiRes(multiRes) {}
};
/**
@@ -84,7 +87,7 @@
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation,
nsecs_t timestampOffset, const String8& physicalCameraId,
- int setId = CAMERA3_STREAM_SET_ID_INVALID);
+ int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
/**
* Set up a stream for formats that have a variable buffer size for the same
@@ -96,7 +99,7 @@
uint32_t width, uint32_t height, size_t maxSize, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation,
nsecs_t timestampOffset, const String8& physicalCameraId,
- int setId = CAMERA3_STREAM_SET_ID_INVALID);
+ int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
/**
* Set up a stream with deferred consumer for formats that have 2 dimensions, such as
@@ -107,7 +110,7 @@
uint64_t consumerUsage, android_dataspace dataSpace,
camera_stream_rotation_t rotation, nsecs_t timestampOffset,
const String8& physicalCameraId,
- int setId = CAMERA3_STREAM_SET_ID_INVALID);
+ int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
virtual ~Camera3OutputStream();
@@ -232,7 +235,7 @@
android_dataspace dataSpace, camera_stream_rotation_t rotation,
const String8& physicalCameraId,
uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
- int setId = CAMERA3_STREAM_SET_ID_INVALID);
+ int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false);
/**
* Note that we release the lock briefly in this function
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 772fe6e..142889a 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -46,6 +46,7 @@
uint32_t num_streams;
camera_stream_t **streams;
uint32_t operation_mode;
+ bool input_is_multi_resolution;
} camera_stream_configuration_t;
typedef struct camera_capture_request {
@@ -57,6 +58,8 @@
uint32_t num_physcam_settings;
const char **physcam_id;
const camera_metadata_t **physcam_settings;
+ int32_t input_width;
+ int32_t input_height;
} camera_capture_request_t;
typedef struct camera_capture_result {
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 4cb954e..c6e7002 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -49,7 +49,7 @@
camera_stream_type type,
uint32_t width, uint32_t height, size_t maxSize, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation,
- const String8& physicalCameraId, int setId) :
+ const String8& physicalCameraId, int setId, bool isMultiResolution) :
camera_stream(),
mId(id),
mSetId(setId),
@@ -73,7 +73,8 @@
mDataSpaceOverridden(false),
mOriginalDataSpace(dataSpace),
mPhysicalCameraId(physicalCameraId),
- mLastTimestamp(0) {
+ mLastTimestamp(0),
+ mIsMultiResolution(isMultiResolution) {
camera_stream::stream_type = type;
camera_stream::width = width;
@@ -99,6 +100,14 @@
return mSetId;
}
+int Camera3Stream::getHalStreamGroupId() const {
+ return mIsMultiResolution ? mSetId : -1;
+}
+
+bool Camera3Stream::isMultiResolution() const {
+ return mIsMultiResolution;
+}
+
uint32_t Camera3Stream::getWidth() const {
return camera_stream::width;
}
@@ -743,11 +752,16 @@
return res;
}
-status_t Camera3Stream::getInputBuffer(camera_stream_buffer *buffer, bool respectHalLimit) {
+status_t Camera3Stream::getInputBuffer(camera_stream_buffer *buffer,
+ Size* size, bool respectHalLimit) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
status_t res = OK;
+ if (size == nullptr) {
+ ALOGE("%s: size must not be null", __FUNCTION__);
+ return BAD_VALUE;
+ }
// This function should be only called when the stream is configured already.
if (mState != STATE_CONFIGURED) {
ALOGE("%s: Stream %d: Can't get input buffers if stream is not in CONFIGURED state %d",
@@ -769,7 +783,7 @@
}
}
- res = getInputBufferLocked(buffer);
+ res = getInputBufferLocked(buffer, size);
if (res == OK) {
fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/false);
if (buffer->buffer) {
@@ -918,7 +932,7 @@
ALOGE("%s: This type of stream does not support output", __FUNCTION__);
return INVALID_OPERATION;
}
-status_t Camera3Stream::getInputBufferLocked(camera_stream_buffer *) {
+status_t Camera3Stream::getInputBufferLocked(camera_stream_buffer *, Size *) {
ALOGE("%s: This type of stream does not support input", __FUNCTION__);
return INVALID_OPERATION;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 55ed2f2..45d8478 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -147,6 +147,14 @@
* Get the output stream set id.
*/
int getStreamSetId() const;
+ /**
+ * Is this stream part of a multi-resolution stream set
+ */
+ bool isMultiResolution() const;
+ /**
+ * Get the HAL stream group id for a multi-resolution stream set
+ */
+ int getHalStreamGroupId() const;
/**
* Get the stream's dimensions and format
@@ -356,10 +364,13 @@
* For bidirectional streams, this method applies to the input-side
* buffers.
*
+ * This method also returns the size of the returned input buffer.
+ *
* Normally this call will block until the handed out buffer count is less than the stream
* max buffer count; if respectHalLimit is set to false, this is ignored.
*/
- status_t getInputBuffer(camera_stream_buffer *buffer, bool respectHalLimit = true);
+ status_t getInputBuffer(camera_stream_buffer *buffer,
+ Size* size, bool respectHalLimit = true);
/**
* Return a buffer to the stream after use by the HAL.
@@ -487,7 +498,7 @@
Camera3Stream(int id, camera_stream_type type,
uint32_t width, uint32_t height, size_t maxSize, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation,
- const String8& physicalCameraId, int setId);
+ const String8& physicalCameraId, int setId, bool isMultiResolution);
wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
@@ -509,7 +520,7 @@
virtual status_t getBuffersLocked(std::vector<OutstandingBuffer>*);
- virtual status_t getInputBufferLocked(camera_stream_buffer *buffer);
+ virtual status_t getInputBufferLocked(camera_stream_buffer *buffer, Size* size);
virtual status_t returnInputBufferLocked(
const camera_stream_buffer &buffer);
@@ -608,6 +619,7 @@
String8 mPhysicalCameraId;
nsecs_t mLastTimestamp;
+ bool mIsMultiResolution = false;
bool mSupportOfflineProcessing = false;
}; // class Camera3Stream
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index c558b07..a567cb4 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -72,6 +72,12 @@
int release_fence;
} camera_stream_buffer_t;
+struct Size {
+ uint32_t width;
+ uint32_t height;
+ explicit Size(uint32_t w = 0, uint32_t h = 0) : width(w), height(h){}
+};
+
enum {
/**
* This stream set ID indicates that the set ID is invalid, and this stream doesn't intend to
@@ -352,7 +358,8 @@
* Normally this call will block until the handed out buffer count is less than the stream
* max buffer count; if respectHalLimit is set to false, this is ignored.
*/
- virtual status_t getInputBuffer(camera_stream_buffer *buffer, bool respectHalLimit = true) = 0;
+ virtual status_t getInputBuffer(camera_stream_buffer *buffer,
+ Size *size, bool respectHalLimit = true) = 0;
/**
* Return a buffer to the stream after use by the HAL.
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index d06b2c5..c7d7c4b 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -50,12 +50,14 @@
"android.hardware.camera.provider@2.4",
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
+ "android.hardware.camera.provider@2.7",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
"android.hardware.camera.device@3.4",
"android.hardware.camera.device@3.5",
"android.hardware.camera.device@3.6",
+ "android.hardware.camera.device@3.7",
],
fuzz_config: {
cc: [
diff --git a/services/camera/libcameraservice/tests/Android.mk b/services/camera/libcameraservice/tests/Android.mk
index b530342..0b5ad79 100644
--- a/services/camera/libcameraservice/tests/Android.mk
+++ b/services/camera/libcameraservice/tests/Android.mk
@@ -33,9 +33,11 @@
android.hardware.camera.provider@2.4 \
android.hardware.camera.provider@2.5 \
android.hardware.camera.provider@2.6 \
+ android.hardware.camera.provider@2.7 \
android.hardware.camera.device@1.0 \
android.hardware.camera.device@3.2 \
android.hardware.camera.device@3.4 \
+ android.hardware.camera.device@3.7 \
android.hidl.token@1.0-utils
LOCAL_STATIC_LIBRARIES := \
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index c28f427..8f42a85 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -262,23 +262,24 @@
void SessionConfigurationUtils::mapStreamInfo(const OutputStreamInfo &streamInfo,
camera3::camera_stream_rotation_t rotation, String8 physicalId,
- hardware::camera::device::V3_4::Stream *stream /*out*/) {
+ int32_t groupId, hardware::camera::device::V3_7::Stream *stream /*out*/) {
if (stream == nullptr) {
return;
}
- stream->v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
- stream->v3_2.width = streamInfo.width;
- stream->v3_2.height = streamInfo.height;
- stream->v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
+ stream->v3_4.v3_2.streamType = hardware::camera::device::V3_2::StreamType::OUTPUT;
+ stream->v3_4.v3_2.width = streamInfo.width;
+ stream->v3_4.v3_2.height = streamInfo.height;
+ stream->v3_4.v3_2.format = Camera3Device::mapToPixelFormat(streamInfo.format);
auto u = streamInfo.consumerUsage;
camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
- stream->v3_2.usage = Camera3Device::mapToConsumerUsage(u);
- stream->v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
- stream->v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
- stream->v3_2.id = -1; // Invalid stream id
- stream->physicalCameraId = std::string(physicalId.string());
- stream->bufferSize = 0;
+ stream->v3_4.v3_2.usage = Camera3Device::mapToConsumerUsage(u);
+ stream->v3_4.v3_2.dataSpace = Camera3Device::mapToHidlDataspace(streamInfo.dataSpace);
+ stream->v3_4.v3_2.rotation = Camera3Device::mapToStreamRotation(rotation);
+ stream->v3_4.v3_2.id = -1; // Invalid stream id
+ stream->v3_4.physicalCameraId = std::string(physicalId.string());
+ stream->v3_4.bufferSize = 0;
+ stream->groupId = groupId;
}
binder::Status SessionConfigurationUtils::checkPhysicalCameraId(
@@ -358,7 +359,7 @@
const SessionConfiguration& sessionConfiguration,
const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
- hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration, bool *earlyExit) {
+ hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration, bool *earlyExit) {
auto operatingMode = sessionConfiguration.getOperatingMode();
binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
@@ -393,14 +394,16 @@
streamConfiguration.streams.resize(streamCount);
size_t streamIdx = 0;
if (isInputValid) {
- streamConfiguration.streams[streamIdx++] = {{/*streamId*/0,
+ streamConfiguration.streams[streamIdx++] = {{{/*streamId*/0,
hardware::camera::device::V3_2::StreamType::INPUT,
static_cast<uint32_t> (sessionConfiguration.getInputWidth()),
static_cast<uint32_t> (sessionConfiguration.getInputHeight()),
Camera3Device::mapToPixelFormat(sessionConfiguration.getInputFormat()),
/*usage*/ 0, HAL_DATASPACE_UNKNOWN,
hardware::camera::device::V3_2::StreamRotation::ROTATION_0},
- /*physicalId*/ nullptr, /*bufferSize*/0};
+ /*physicalId*/ nullptr, /*bufferSize*/0}, /*groupId*/-1};
+ streamConfiguration.multiResolutionInputImage =
+ sessionConfiguration.inputIsMultiResolution();
}
for (const auto &it : outputConfigs) {
@@ -410,6 +413,7 @@
String8 physicalCameraId = String8(it.getPhysicalCameraId());
size_t numBufferProducers = bufferProducers.size();
bool isStreamInfoValid = false;
+ int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
OutputStreamInfo streamInfo;
res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
@@ -432,7 +436,7 @@
if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
}
- mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId,
+ mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
&streamConfiguration.streams[streamIdx++]);
isStreamInfoValid = true;
@@ -488,12 +492,13 @@
for (const auto& compositeStream : compositeStreams) {
mapStreamInfo(compositeStream,
static_cast<camera_stream_rotation_t> (it.getRotation()),
- physicalCameraId, &streamConfiguration.streams[streamIdx++]);
+ physicalCameraId, groupId,
+ &streamConfiguration.streams[streamIdx++]);
}
} else {
mapStreamInfo(streamInfo,
static_cast<camera_stream_rotation_t> (it.getRotation()),
- physicalCameraId, &streamConfiguration.streams[streamIdx++]);
+ physicalCameraId, groupId, &streamConfiguration.streams[streamIdx++]);
}
isStreamInfoValid = true;
}
@@ -503,4 +508,27 @@
}
+bool SessionConfigurationUtils::convertHALStreamCombinationFromV37ToV34(
+ hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
+ const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37) {
+ if (streamConfigV37.multiResolutionInputImage) {
+ // ICameraDevice older than 3.7 doesn't support multi-resolution input image.
+ return false;
+ }
+
+ streamConfigV34.streams.resize(streamConfigV37.streams.size());
+ for (size_t i = 0; i < streamConfigV37.streams.size(); i++) {
+ if (streamConfigV37.streams[i].groupId != -1) {
+ // ICameraDevice older than 3.7 doesn't support multi-resolution output
+ // image
+ return false;
+ }
+ streamConfigV34.streams[i] = streamConfigV37.streams[i].v3_4;
+ }
+ streamConfigV34.operationMode = streamConfigV37.operationMode;
+ streamConfigV34.sessionParams = streamConfigV37.sessionParams;
+
+ return true;
+}
+
}// namespace android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 6ac7ab4..36e1dd7 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -21,7 +21,7 @@
#include <camera/camera2/OutputConfiguration.h>
#include <camera/camera2/SessionConfiguration.h>
#include <camera/camera2/SubmitInfo.h>
-#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.7/types.h>
#include <device3/Camera3StreamInterface.h>
@@ -53,8 +53,8 @@
const String8 &cameraId, const CameraMetadata &physicalCameraMetadata);
static void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
- camera3::camera_stream_rotation_t rotation, String8 physicalId,
- hardware::camera::device::V3_4::Stream *stream /*out*/);
+ camera3::camera_stream_rotation_t rotation, String8 physicalId, int32_t groupId,
+ hardware::camera::device::V3_7::Stream *stream /*out*/);
// Check that the physicalCameraId passed in is supported by the camera
// device.
@@ -76,9 +76,16 @@
convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
const String8 &cameraId, const CameraMetadata &deviceInfo,
metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
- hardware::camera::device::V3_4::StreamConfiguration &streamConfiguration,
+ hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
bool *earlyExit);
+ // Utility function to convert a V3_7::StreamConfiguration to
+ // V3_4::StreamConfiguration. Returns false if the original V3_7 configuration cannot
+ // be used by an older-version HAL.
+ static bool convertHALStreamCombinationFromV37ToV34(
+ hardware::camera::device::V3_4::StreamConfiguration &streamConfigV34,
+ const hardware::camera::device::V3_7::StreamConfiguration &streamConfigV37);
+
static const int32_t MAX_SURFACES_PER_STREAM = 4;
static const int32_t ROUNDING_WIDTH_CAP = 1920;
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 9b7da0f..9630907 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -138,24 +138,53 @@
"connection_count",
};
-// static constexpr const char * const AAudioStreamFields[] {
-// "mediametrics_aaudiostream_reported",
-// "caller_name",
-// "path",
-// "direction",
-// "frames_per_burst",
-// "buffer_size",
-// "buffer_capacity",
-// "channel_count",
-// "total_frames_transferred",
-// "perf_mode_requested",
-// "perf_mode_actual",
-// "sharing",
-// "xrun_count",
-// "device_type",
-// "format_app",
-// "format_device",
-// };
+static constexpr const char * const AAudioStreamFields[] {
+ "mediametrics_aaudiostream_reported",
+ "caller_name",
+ "path",
+ "direction",
+ "frames_per_burst",
+ "buffer_size",
+ "buffer_capacity",
+ "channel_count",
+ "total_frames_transferred",
+ "perf_mode_requested",
+ "perf_mode_actual",
+ "sharing",
+ "xrun_count",
+ "device_type",
+ "format_app",
+ "format_device",
+ "log_session_id",
+};
+
+/**
+ * printFields is a helper method that prints the fields and corresponding values
+ * in a human readable style.
+ */
+template <size_t N, typename ...Types>
+std::string printFields(const char * const (& fields)[N], Types ... args)
+{
+ std::stringstream ss;
+ ss << " { ";
+ stringutils::fieldPrint(ss, fields, args...);
+ ss << "}";
+ return ss.str();
+}
+
+/**
+ * sendToStatsd is a helper method that sends the arguments to statsd
+ */
+template <typename ...Types>
+int sendToStatsd(Types ... args)
+{
+ int result = 0;
+
+#ifdef STATSD_ENABLE
+ result = android::util::stats_write(args...);
+#endif
+ return result;
+}
/**
* sendToStatsd is a helper method that sends the arguments to statsd
@@ -951,7 +980,7 @@
mAudioAnalytics.mAnalyticsState->timeMachine().get(
key, AMEDIAMETRICS_PROP_UNDERRUN, &xrunCount);
- std::string deviceType;
+ std::string serializedDeviceTypes;
// TODO: only routed device id is logged, but no device type
int32_t formatApp = 0;
@@ -962,6 +991,9 @@
key, AMEDIAMETRICS_PROP_ENCODING, &formatDeviceStr);
const auto formatDevice = types::lookup<types::ENCODING, int32_t>(formatDeviceStr);
+ std::string logSessionId;
+ // TODO: log logSessionId
+
LOG(LOG_LEVEL) << "key:" << key
<< " caller_name:" << callerName << "(" << callerNameStr << ")"
<< " path:" << path
@@ -975,33 +1007,59 @@
<< " perf_mode_actual:" << perfModeActual
<< " sharing:" << sharingMode << "(" << sharingModeStr << ")"
<< " xrun_count:" << xrunCount
- << " device_type:" << deviceType
+ << " device_type:" << serializedDeviceTypes
<< " format_app:" << formatApp
- << " format_device: " << formatDevice << "(" << formatDeviceStr << ")";
+ << " format_device: " << formatDevice << "(" << formatDeviceStr << ")"
+ << " log_session_id: " << logSessionId;
- // TODO: send the metric to statsd when the proto is ready
- // if (mAudioAnalytics.mDeliverStatistics) {
- // const auto [ result, str ] = sendToStatsd(AAudioStreamFields,
- // CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
- // , callerName
- // , path
- // , direction
- // , framesPerBurst
- // , bufferSizeInFrames
- // , bufferCapacityInFrames
- // , channelCount
- // , totalFramesTransferred
- // , perfModeRequested
- // , perfModeActual
- // , sharingMode
- // , xrunCount
- // , deviceType.c_str()
- // , formatApp
- // , formatDevice
- // );
- // ALOGV("%s: statsd %s", __func__, str.c_str());
- // mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
- // }
+ if (mAudioAnalytics.mDeliverStatistics) {
+ android::util::BytesField bf_serialized(
+ serializedDeviceTypes.c_str(), serializedDeviceTypes.size());
+ const auto result = sendToStatsd(
+ CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+ , callerName
+ , path
+ , direction
+ , framesPerBurst
+ , bufferSizeInFrames
+ , bufferCapacityInFrames
+ , channelCount
+ , totalFramesTransferred
+ , perfModeRequested
+ , perfModeActual
+ , sharingMode
+ , xrunCount
+ , bf_serialized
+ , formatApp
+ , formatDevice
+ , logSessionId.c_str()
+ );
+ std::stringstream ss;
+ ss << "result:" << result;
+ const auto fieldsStr = printFields(AAudioStreamFields,
+ CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+ , callerName
+ , path
+ , direction
+ , framesPerBurst
+ , bufferSizeInFrames
+ , bufferCapacityInFrames
+ , channelCount
+ , totalFramesTransferred
+ , perfModeRequested
+ , perfModeActual
+ , sharingMode
+ , xrunCount
+ , serializedDeviceTypes.c_str()
+ , formatApp
+ , formatDevice
+ , logSessionId.c_str()
+ );
+ ss << " " << fieldsStr;
+ std::string str = ss.str();
+ ALOGV("%s: statsd %s", __func__, str.c_str());
+ mAudioAnalytics.mStatsdLog.log("%s", str.c_str());
+ }
}
} // namespace android::mediametrics
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index d502b30..1c5ab77 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -186,6 +186,16 @@
metrics_proto.set_lifetime_millis(lifetimeMs);
}
+ // new for S; need to plumb through to westworld
+ // android.media.mediacodec.channelCount int32
+ // android.media.mediacodec.sampleRate int32
+
+ // new for S; need to plumb through to westworld
+ // TODO PWG may want these fuzzed up a bit to obscure some precision
+ // android.media.mediacodec.vencode.bytes int64
+ // android.media.mediacodec.vencode.frames int64
+ // android.media.mediacodec.vencode.durationUs int64
+
std::string serialized;
if (!metrics_proto.SerializeToString(&serialized)) {
ALOGE("Failed to serialize codec metrics");
diff --git a/services/mediametrics/tests/Android.bp b/services/mediametrics/tests/Android.bp
index 71193a2..3baf739 100644
--- a/services/mediametrics/tests/Android.bp
+++ b/services/mediametrics/tests/Android.bp
@@ -11,6 +11,10 @@
name: "mediametrics_tests",
test_suites: ["device-tests"],
+ // not all shared libraries are populated in the 2nd architecture in
+ // particular, libmediametricsservice we use to have a tame copy of the service
+ compile_multilib: "first",
+
cflags: [
"-Wall",
"-Werror",
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index cca36fb..8b64134 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -24,6 +24,7 @@
#include <cutils/properties.h>
#include <media/TranscoderWrapper.h>
#include <media/TranscodingClientManager.h>
+#include <media/TranscodingLogger.h>
#include <media/TranscodingResourcePolicy.h>
#include <media/TranscodingSessionController.h>
#include <media/TranscodingThermalPolicy.h>
@@ -41,20 +42,49 @@
errorCode, \
String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, ##__VA_ARGS__))
-MediaTranscodingService::MediaTranscodingService(bool simulated)
+static constexpr int64_t kTranscoderHeartBeatIntervalUs = 1000000LL;
+
+MediaTranscodingService::MediaTranscodingService()
: mUidPolicy(new TranscodingUidPolicy()),
mResourcePolicy(new TranscodingResourcePolicy()),
- mThermalPolicy(new TranscodingThermalPolicy()) {
+ mThermalPolicy(new TranscodingThermalPolicy()),
+ mLogger(new TranscodingLogger()) {
ALOGV("MediaTranscodingService is created");
- mSessionController.reset(new TranscodingSessionController(
- [simulated](const std::shared_ptr<TranscoderCallbackInterface>& cb,
- int64_t heartBeatUs) -> std::shared_ptr<TranscoderInterface> {
- if (simulated) {
- return std::make_shared<SimulatedTranscoder>(cb, heartBeatUs);
- }
- return std::make_shared<TranscoderWrapper>(cb, heartBeatUs);
- },
- mUidPolicy, mResourcePolicy, mThermalPolicy));
+ bool simulated = property_get_bool("debug.transcoding.simulated_transcoder", false);
+ if (simulated) {
+ // Override default config params with shorter values for testing.
+ TranscodingSessionController::ControllerConfig config = {
+ .pacerBurstThresholdMs = 500,
+ .pacerBurstCountQuota = 10,
+ .pacerBurstTimeQuotaSeconds = 3,
+ };
+ mSessionController.reset(new TranscodingSessionController(
+ [](const std::shared_ptr<TranscoderCallbackInterface>& cb)
+ -> std::shared_ptr<TranscoderInterface> {
+ return std::make_shared<SimulatedTranscoder>(cb);
+ },
+ mUidPolicy, mResourcePolicy, mThermalPolicy, &config));
+ } else {
+ int32_t overrideBurstCountQuota =
+ property_get_int32("persist.transcoding.burst_count_quota", -1);
+ int32_t pacerBurstTimeQuotaSeconds =
+ property_get_int32("persist.transcoding.burst_time_quota_seconds", -1);
+ // Override default config params with properties if present.
+ TranscodingSessionController::ControllerConfig config;
+ if (overrideBurstCountQuota > 0) {
+ config.pacerBurstCountQuota = overrideBurstCountQuota;
+ }
+ if (pacerBurstTimeQuotaSeconds > 0) {
+ config.pacerBurstTimeQuotaSeconds = pacerBurstTimeQuotaSeconds;
+ }
+ mSessionController.reset(new TranscodingSessionController(
+ [logger = mLogger](const std::shared_ptr<TranscoderCallbackInterface>& cb)
+ -> std::shared_ptr<TranscoderInterface> {
+ return std::make_shared<TranscoderWrapper>(cb, logger,
+ kTranscoderHeartBeatIntervalUs);
+ },
+ mUidPolicy, mResourcePolicy, mThermalPolicy, &config));
+ }
mClientManager.reset(new TranscodingClientManager(mSessionController));
mUidPolicy->setCallback(mSessionController);
mResourcePolicy->setCallback(mSessionController);
@@ -100,8 +130,7 @@
//static
void MediaTranscodingService::instantiate() {
std::shared_ptr<MediaTranscodingService> service =
- ::ndk::SharedRefBase::make<MediaTranscodingService>(
- property_get_bool("debug.transcoding.simulated_transcoder", false));
+ ::ndk::SharedRefBase::make<MediaTranscodingService>();
binder_status_t status =
AServiceManager_addService(service->asBinder().get(), getServiceName());
if (status != STATUS_OK) {
diff --git a/services/mediatranscoding/MediaTranscodingService.h b/services/mediatranscoding/MediaTranscodingService.h
index d024c54..12be131 100644
--- a/services/mediatranscoding/MediaTranscodingService.h
+++ b/services/mediatranscoding/MediaTranscodingService.h
@@ -29,6 +29,7 @@
using ::aidl::android::media::TranscodingRequestParcel;
using ::aidl::android::media::TranscodingSessionParcel;
class TranscodingClientManager;
+class TranscodingLogger;
class TranscodingSessionController;
class UidPolicyInterface;
class ResourcePolicyInterface;
@@ -39,7 +40,7 @@
static constexpr int32_t kInvalidSessionId = -1;
static constexpr int32_t kInvalidClientId = -1;
- MediaTranscodingService(bool simulated);
+ MediaTranscodingService();
virtual ~MediaTranscodingService();
static void instantiate();
@@ -62,6 +63,7 @@
std::shared_ptr<UidPolicyInterface> mUidPolicy;
std::shared_ptr<ResourcePolicyInterface> mResourcePolicy;
std::shared_ptr<ThermalPolicyInterface> mThermalPolicy;
+ std::shared_ptr<TranscodingLogger> mLogger;
std::shared_ptr<TranscodingSessionController> mSessionController;
std::shared_ptr<TranscodingClientManager> mClientManager;
};
diff --git a/services/mediatranscoding/SimulatedTranscoder.cpp b/services/mediatranscoding/SimulatedTranscoder.cpp
index db83ccb..e80dbc5 100644
--- a/services/mediatranscoding/SimulatedTranscoder.cpp
+++ b/services/mediatranscoding/SimulatedTranscoder.cpp
@@ -47,8 +47,7 @@
return "(unknown)";
}
-SimulatedTranscoder::SimulatedTranscoder(const std::shared_ptr<TranscoderCallbackInterface>& cb,
- int64_t heartBeatUs __unused)
+SimulatedTranscoder::SimulatedTranscoder(const std::shared_ptr<TranscoderCallbackInterface>& cb)
: mCallback(cb), mLooperReady(false) {
ALOGV("SimulatedTranscoder CTOR: %p", this);
}
@@ -59,6 +58,7 @@
void SimulatedTranscoder::start(
ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+ uid_t /*callingUid*/,
const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
{
auto lock = std::scoped_lock(mLock);
@@ -91,6 +91,7 @@
void SimulatedTranscoder::resume(
ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& /*request*/,
+ uid_t /*callingUid*/,
const std::shared_ptr<ITranscodingClientCallback>& /*clientCallback*/) {
queueEvent(Event::Resume, clientId, sessionId, [=] {
auto callback = mCallback.lock();
@@ -130,7 +131,7 @@
void SimulatedTranscoder::threadLoop() {
bool running = false;
- std::chrono::system_clock::time_point lastRunningTime;
+ std::chrono::steady_clock::time_point lastRunningTime;
Event lastRunningEvent;
std::unique_lock<std::mutex> lock(mLock);
@@ -162,8 +163,9 @@
// Advance last running time and remaining time. This is needed to guard
// against bad events (which will be ignored) or spurious wakeups, in that
// case we don't want to wait for the same time again.
- auto now = std::chrono::system_clock::now();
- mRemainingTimeMap[key] -= (now - lastRunningTime);
+ auto now = std::chrono::steady_clock::now();
+ mRemainingTimeMap[key] -= std::chrono::duration_cast<std::chrono::microseconds>(
+ now - lastRunningTime);
lastRunningTime = now;
}
}
@@ -182,7 +184,7 @@
SessionKeyType key = std::make_pair(event.clientId, event.sessionId);
if (!running && (event.type == Event::Start || event.type == Event::Resume)) {
running = true;
- lastRunningTime = std::chrono::system_clock::now();
+ lastRunningTime = std::chrono::steady_clock::now();
lastRunningEvent = event;
ALOGV("%s: session {%lld, %d}: remaining time: %lld", __FUNCTION__,
(long long)event.clientId, event.sessionId,
@@ -193,7 +195,8 @@
if (event.type == Event::Stop) {
mRemainingTimeMap.erase(key);
} else {
- mRemainingTimeMap[key] -= (std::chrono::system_clock::now() - lastRunningTime);
+ mRemainingTimeMap[key] -= std::chrono::duration_cast<std::chrono::microseconds>(
+ std::chrono::steady_clock::now() - lastRunningTime);
}
} else {
ALOGW("%s: discarding bad event: session {%lld, %d}: %s", __FUNCTION__,
diff --git a/services/mediatranscoding/SimulatedTranscoder.h b/services/mediatranscoding/SimulatedTranscoder.h
index 010f0f0..58e2e30 100644
--- a/services/mediatranscoding/SimulatedTranscoder.h
+++ b/services/mediatranscoding/SimulatedTranscoder.h
@@ -49,17 +49,16 @@
static constexpr int64_t kSessionDurationUs = 1000000;
- SimulatedTranscoder(const std::shared_ptr<TranscoderCallbackInterface>& cb,
- int64_t heartBeatUs);
+ SimulatedTranscoder(const std::shared_ptr<TranscoderCallbackInterface>& cb);
~SimulatedTranscoder();
// TranscoderInterface
void start(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& request,
+ const TranscodingRequestParcel& request, uid_t callingUid,
const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
void pause(ClientIdType clientId, SessionIdType sessionId) override;
void resume(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& request,
+ const TranscodingRequestParcel& request, uid_t callingUid,
const std::shared_ptr<ITranscodingClientCallback>& clientCallback) override;
void stop(ClientIdType clientId, SessionIdType sessionId, bool abandon = false) override;
// ~TranscoderInterface
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
index 5256a3f..3f7d8d6 100644
--- a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -208,7 +208,9 @@
std::unique_lock lock(mLock);
mEventQueue.push_back(event);
- mLastErr = err;
+ if (err != TranscodingErrorCode::kNoError) {
+ mLastErrQueue.push_back(err);
+ }
mCondition.notify_one();
}
@@ -226,7 +228,12 @@
TranscodingErrorCode getLastError() {
std::unique_lock lock(mLock);
- return mLastErr;
+ if (mLastErrQueue.empty()) {
+ return TranscodingErrorCode::kNoError;
+ }
+ TranscodingErrorCode err = mLastErrQueue.front();
+ mLastErrQueue.pop_front();
+ return err;
}
private:
@@ -234,7 +241,7 @@
std::condition_variable mCondition;
Event mPoppedEvent;
std::list<Event> mEventQueue;
- TranscodingErrorCode mLastErr;
+ std::list<TranscodingErrorCode> mLastErrQueue;
int mUpdateCount = 0;
int mLastProgress = -1;
};
diff --git a/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
index b8a6f76..c8994ac 100644
--- a/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
+++ b/services/mediatranscoding/tests/mediatranscodingservice_simulated_tests.cpp
@@ -54,6 +54,10 @@
constexpr int64_t kPaddingUs = 1000000;
constexpr int64_t kSessionWithPaddingUs = SimulatedTranscoder::kSessionDurationUs + kPaddingUs;
constexpr int64_t kWatchdogTimeoutUs = 3000000;
+// Pacer settings used for simulated tests. Listed here for reference.
+constexpr int32_t kSimulatedPacerBurstThresholdMs = 500;
+//constexpr int32_t kSimulatedPacerBurstCountQuota = 10;
+//constexpr int32_t kSimulatedPacerBurstTimeQuotaSec = 3;
constexpr const char* kClientOpPackageName = "TestClientPackage";
@@ -64,6 +68,25 @@
virtual ~MediaTranscodingServiceSimulatedTest() {
ALOGI("MediaTranscodingServiceSimulatedTest destroyed");
}
+
+ void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess) {
+ // Idle to clear out burst history.
+ usleep(kSimulatedPacerBurstThresholdMs * 2 * 1000);
+ for (int i = 0; i < numSubmits; i++) {
+ EXPECT_TRUE(mClient3->submit(i, "test_source_file_0", "test_destination_file_0",
+ TranscodingSessionPriority::kNormal, -1 /*bitrateBps*/,
+ -1 /*overridePid*/, -1 /*overrideUid*/,
+ sessionDurationMs));
+ }
+ for (int i = 0; i < expectedSuccess; i++) {
+ EXPECT_EQ(mClient3->pop(kPaddingUs), EventTracker::Start(CLIENT(3), i));
+ EXPECT_EQ(mClient3->pop(kSessionWithPaddingUs), EventTracker::Finished(CLIENT(3), i));
+ }
+ for (int i = expectedSuccess; i < numSubmits; i++) {
+ EXPECT_EQ(mClient3->pop(kPaddingUs), EventTracker::Failed(CLIENT(3), i));
+ EXPECT_EQ(mClient3->getLastError(), TranscodingErrorCode::kDroppedByService);
+ }
+ }
};
TEST_F(MediaTranscodingServiceSimulatedTest, TestRegisterNullClient) {
@@ -414,5 +437,36 @@
ALOGD("TestTranscodingWatchdog finished.");
}
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingPacerOverCountQuotaOnly) {
+ ALOGD("TestTranscodingPacerOverCountQuotaOnly starting...");
+
+ registerMultipleClients();
+ testPacerHelper(12 /*numSubmits*/, 100 /*sessionDurationMs*/, 12 /*expectedSuccess*/);
+ unregisterMultipleClients();
+
+ ALOGD("TestTranscodingPacerOverCountQuotaOnly finished.");
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingPacerOverTimeQuotaOnly) {
+ ALOGD("TestTranscodingPacerOverTimeQuotaOnly starting...");
+
+ registerMultipleClients();
+ testPacerHelper(5 /*numSubmits*/, 1000 /*sessionDurationMs*/, 5 /*expectedSuccess*/);
+ unregisterMultipleClients();
+
+ ALOGD("TestTranscodingPacerOverTimeQuotaOnly finished.");
+}
+
+TEST_F(MediaTranscodingServiceSimulatedTest, TestTranscodingPacerOverQuota) {
+ ALOGD("TestTranscodingPacerOverQuota starting...");
+
+ registerMultipleClients();
+ testPacerHelper(12 /*numSubmits*/, 400 /*sessionDurationMs*/, 10 /*expectedSuccess*/);
+ unregisterMultipleClients();
+
+ // Idle to clear out burst history. Since we expect it to actually fail, wait for cooldown.
+ ALOGD("TestTranscodingPacerOverQuota finished.");
+}
+
} // namespace media
} // namespace android
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
new file mode 100644
index 0000000..78ef3fc
--- /dev/null
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -0,0 +1,70 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_fuzz {
+ name: "oboeservice_fuzzer",
+ srcs: [
+ "oboeservice_fuzzer.cpp",
+ ],
+ shared_libs: [
+ "libaaudio_internal",
+ "libaudioclient",
+ "libaudioflinger",
+ "libaudioutils",
+ "libmedia_helper",
+ "libmediametrics",
+ "libmediautils",
+ "libbase",
+ "libbinder",
+ "libcutils",
+ "liblog",
+ "libutils",
+ "aaudio-aidl-cpp",
+ ],
+ static_libs: [
+ "libaaudioservice",
+ ],
+ include_dirs: [
+ "frameworks/av/services/oboeservice",
+ ],
+ header_libs: [
+ "libaudiohal_headers",
+ ],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wno-unused-parameter",
+ ],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/services/oboeservice/fuzzer/README.md b/services/oboeservice/fuzzer/README.md
new file mode 100644
index 0000000..00b85df
--- /dev/null
+++ b/services/oboeservice/fuzzer/README.md
@@ -0,0 +1,65 @@
+# Fuzzer for libaaudioservice
+
+## Plugin Design Considerations
+The fuzzer plugin for libaaudioservice is designed based on an
+understanding of the service and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+AAudio Service request contains the following parameters:
+1. AAudioFormat
+2. UserId
+3. ProcessId
+4. InService
+5. DeviceId
+6. SampleRate
+7. SamplesPerFrame
+8. Direction
+9. SharingMode
+10. Usage
+11. ContentType
+12. InputPreset
+13. BufferCapacity
+
+| Parameter| Valid Input Values| Configured Value|
+|------------- |-------------| ----- |
+| `AAudioFormat` | `AAUDIO_FORMAT_UNSPECIFIED`, `AAUDIO_FORMAT_PCM_I16`, `AAUDIO_FORMAT_PCM_FLOAT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `UserId` | `INT32_MIN` to `INT32_MAX` | Value obtained from getuid() |
+| `ProcessId` | `INT32_MIN` to `INT32_MAX` | Value obtained from getpid() |
+| `InService` | `bool` | Value obtained from FuzzedDataProvider |
+| `DeviceId` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `SampleRate` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `SamplesPerFrame` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+| `Direction` | `AAUDIO_DIRECTION_OUTPUT`, `AAUDIO_DIRECTION_INPUT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `SharingMode` | `AAUDIO_SHARING_MODE_EXCLUSIVE`, `AAUDIO_SHARING_MODE_SHARED` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `Usage` | `AAUDIO_USAGE_MEDIA`, `AAUDIO_USAGE_VOICE_COMMUNICATION`, `AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING`, `AAUDIO_USAGE_ALARM`, `AAUDIO_USAGE_NOTIFICATION`, `AAUDIO_USAGE_NOTIFICATION_RINGTONE`, `AAUDIO_USAGE_NOTIFICATION_EVENT`, `AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY`, `AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE`, `AAUDIO_USAGE_ASSISTANCE_SONIFICATION`, `AAUDIO_USAGE_GAME`, `AAUDIO_USAGE_ASSISTANT`, `AAUDIO_SYSTEM_USAGE_EMERGENCY`, `AAUDIO_SYSTEM_USAGE_SAFETY`, `AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS`, `AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `ContentType` | `AAUDIO_CONTENT_TYPE_SPEECH`, `AAUDIO_CONTENT_TYPE_MUSIC`, `AAUDIO_CONTENT_TYPE_MOVIE`, `AAUDIO_CONTENT_TYPE_SONIFICATION` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `InputPreset` | `AAUDIO_INPUT_PRESET_GENERIC`, `AAUDIO_INPUT_PRESET_CAMCORDER`, `AAUDIO_INPUT_PRESET_VOICE_RECOGNITION`, `AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION`, `AAUDIO_INPUT_PRESET_UNPROCESSED`, `AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE` | Value chosen from valid values by obtaining index from FuzzedDataProvider |
+| `BufferCapacity` | `INT32_MIN` to `INT32_MAX` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
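+
+As a minimal sketch of this selection pattern (assuming the `kAAudioFormats` table,
+`kNumAAudioFormats`, and the `FuzzedDataProvider` instance used in oboeservice_fuzzer.cpp):
+
+```
+// Half of the time feed a completely random value, otherwise pick one of the
+// valid formats, so that both valid and invalid configurations get exercised.
+aaudio_format_t pickFormat(FuzzedDataProvider &fdp) {
+    if (fdp.ConsumeBool()) {
+        return (aaudio_format_t)fdp.ConsumeIntegral<int32_t>();
+    }
+    return kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)];
+}
+```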
+
+## Build
+
+This describes the steps to build the oboeservice_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) oboeservice_fuzzer
+```
+
+#### Steps to run
+To run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/oboeservice_fuzzer/oboeservice_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
new file mode 100644
index 0000000..163eae8
--- /dev/null
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -0,0 +1,365 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+#include <fuzzer/FuzzedDataProvider.h>
+#include <stdio.h>
+
+#include <AAudioService.h>
+#include <aaudio/AAudio.h>
+#include "aaudio/BnAAudioClient.h"
+
+#define UNUSED_PARAM __attribute__((unused))
+
+using namespace android;
+using namespace aaudio;
+
+aaudio_format_t kAAudioFormats[] = {
+ AAUDIO_FORMAT_UNSPECIFIED,
+ AAUDIO_FORMAT_PCM_I16,
+ AAUDIO_FORMAT_PCM_FLOAT,
+};
+
+aaudio_usage_t kAAudioUsages[] = {
+ AAUDIO_USAGE_MEDIA,
+ AAUDIO_USAGE_VOICE_COMMUNICATION,
+ AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+ AAUDIO_USAGE_ALARM,
+ AAUDIO_USAGE_NOTIFICATION,
+ AAUDIO_USAGE_NOTIFICATION_RINGTONE,
+ AAUDIO_USAGE_NOTIFICATION_EVENT,
+ AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+ AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ AAUDIO_USAGE_ASSISTANCE_SONIFICATION,
+ AAUDIO_USAGE_GAME,
+ AAUDIO_USAGE_ASSISTANT,
+ AAUDIO_SYSTEM_USAGE_EMERGENCY,
+ AAUDIO_SYSTEM_USAGE_SAFETY,
+ AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS,
+ AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT,
+};
+
+aaudio_content_type_t kAAudioContentTypes[] = {
+ AAUDIO_CONTENT_TYPE_SPEECH,
+ AAUDIO_CONTENT_TYPE_MUSIC,
+ AAUDIO_CONTENT_TYPE_MOVIE,
+ AAUDIO_CONTENT_TYPE_SONIFICATION,
+};
+
+aaudio_input_preset_t kAAudioInputPresets[] = {
+ AAUDIO_INPUT_PRESET_GENERIC, AAUDIO_INPUT_PRESET_CAMCORDER,
+ AAUDIO_INPUT_PRESET_VOICE_RECOGNITION, AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION,
+ AAUDIO_INPUT_PRESET_UNPROCESSED, AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE,
+};
+
+const size_t kNumAAudioFormats = std::size(kAAudioFormats);
+const size_t kNumAAudioUsages = std::size(kAAudioUsages);
+const size_t kNumAAudioContentTypes = std::size(kAAudioContentTypes);
+const size_t kNumAAudioInputPresets = std::size(kAAudioInputPresets);
+
+class FuzzAAudioClient : public virtual RefBase, public AAudioServiceInterface {
+ public:
+ FuzzAAudioClient(sp<AAudioService> service);
+
+ virtual ~FuzzAAudioClient();
+
+ AAudioServiceInterface *getAAudioService();
+
+ void dropAAudioService();
+
+ void registerClient(const sp<IAAudioClient> &client UNUSED_PARAM) override {}
+
+ aaudio_handle_t openStream(const AAudioStreamRequest &request,
+ AAudioStreamConfiguration &configurationOutput) override;
+
+ aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+
+ aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+ AudioEndpointParcelable &parcelable) override;
+
+ aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+
+ aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+
+ aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+
+ aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+
+ aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle, pid_t clientThreadId,
+ int64_t periodNanoseconds) override;
+
+ aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+ pid_t clientThreadId) override;
+
+ aaudio_result_t startClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+ const AudioClient &client UNUSED_PARAM,
+ const audio_attributes_t *attr UNUSED_PARAM,
+ audio_port_handle_t *clientHandle UNUSED_PARAM) override {
+ return AAUDIO_ERROR_UNAVAILABLE;
+ }
+
+ aaudio_result_t stopClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+ audio_port_handle_t clientHandle UNUSED_PARAM) override {
+ return AAUDIO_ERROR_UNAVAILABLE;
+ }
+
+ void onStreamChange(aaudio_handle_t handle, int32_t opcode, int32_t value) {}
+
+ int getDeathCount() { return mDeathCount; }
+
+ void incDeathCount() { ++mDeathCount; }
+
+ class AAudioClient : public IBinder::DeathRecipient, public BnAAudioClient {
+ public:
+ AAudioClient(wp<FuzzAAudioClient> fuzzAAudioClient) : mBinderClient(fuzzAAudioClient) {}
+
+ virtual void binderDied(const wp<IBinder> &who UNUSED_PARAM) {
+ sp<FuzzAAudioClient> client = mBinderClient.promote();
+ if (client.get()) {
+ client->dropAAudioService();
+ client->incDeathCount();
+ }
+ }
+
+ android::binder::Status onStreamChange(int32_t handle, int32_t opcode, int32_t value) {
+ static_assert(std::is_same_v<aaudio_handle_t, int32_t>);
+ android::sp<FuzzAAudioClient> client = mBinderClient.promote();
+ if (client.get() != nullptr) {
+ client->onStreamChange(handle, opcode, value);
+ }
+ return android::binder::Status::ok();
+ }
+
+ private:
+ wp<FuzzAAudioClient> mBinderClient;
+ };
+
+ private:
+ sp<AAudioService> mAAudioService;
+ sp<AAudioClient> mAAudioClient;
+ AAudioServiceInterface *mAAudioServiceInterface;
+ int mDeathCount;
+};
+
+FuzzAAudioClient::FuzzAAudioClient(sp<AAudioService> service) : AAudioServiceInterface() {
+ mAAudioService = service;
+ mAAudioServiceInterface = &service->asAAudioServiceInterface();
+ mAAudioClient = new AAudioClient(this);
+ mDeathCount = 0;
+ if (mAAudioClient.get() && mAAudioService.get()) {
+ mAAudioService->linkToDeath(mAAudioClient);
+ mAAudioService->registerClient(mAAudioClient);
+ }
+}
+
+FuzzAAudioClient::~FuzzAAudioClient() { dropAAudioService(); }
+
+AAudioServiceInterface *FuzzAAudioClient::getAAudioService() {
+ if (!mAAudioServiceInterface && mAAudioService.get()) {
+ mAAudioServiceInterface = &mAAudioService->asAAudioServiceInterface();
+ }
+ return mAAudioServiceInterface;
+}
+
+void FuzzAAudioClient::dropAAudioService() {
+ mAAudioService.clear();
+}
+
+aaudio_handle_t FuzzAAudioClient::openStream(const AAudioStreamRequest &request,
+ AAudioStreamConfiguration &configurationOutput) {
+ aaudio_handle_t stream;
+ for (int i = 0; i < 2; ++i) {
+ AAudioServiceInterface *service = getAAudioService();
+ if (!service) {
+ return AAUDIO_ERROR_NO_SERVICE;
+ }
+
+ stream = service->openStream(request, configurationOutput);
+
+ if (stream == AAUDIO_ERROR_NO_SERVICE) {
+ dropAAudioService();
+ } else {
+ break;
+ }
+ }
+ return stream;
+}
+
+aaudio_result_t FuzzAAudioClient::closeStream(aaudio_handle_t streamHandle) {
+ AAudioServiceInterface *service = getAAudioService();
+ if (!service) {
+ return AAUDIO_ERROR_NO_SERVICE;
+ }
+ return service->closeStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::getStreamDescription(aaudio_handle_t streamHandle,
+ AudioEndpointParcelable &parcelable) {
+ AAudioServiceInterface *service = getAAudioService();
+ if (!service) {
+ return AAUDIO_ERROR_NO_SERVICE;
+ }
+ return service->getStreamDescription(streamHandle, parcelable);
+}
+
+aaudio_result_t FuzzAAudioClient::startStream(aaudio_handle_t streamHandle) {
+ AAudioServiceInterface *service = getAAudioService();
+ if (!service) {
+ return AAUDIO_ERROR_NO_SERVICE;
+ }
+ return service->startStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::pauseStream(aaudio_handle_t streamHandle) {
+ AAudioServiceInterface *service = getAAudioService();
+ if (!service) {
+ return AAUDIO_ERROR_NO_SERVICE;
+ }
+ return service->pauseStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::stopStream(aaudio_handle_t streamHandle) {
+ AAudioServiceInterface *service = getAAudioService();
+ if (!service) {
+ return AAUDIO_ERROR_NO_SERVICE;
+ }
+ return service->stopStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::flushStream(aaudio_handle_t streamHandle) {
+ AAudioServiceInterface *service = getAAudioService();
+ if (!service) {
+ return AAUDIO_ERROR_NO_SERVICE;
+ }
+ return service->flushStream(streamHandle);
+}
+
+aaudio_result_t FuzzAAudioClient::registerAudioThread(aaudio_handle_t streamHandle,
+ pid_t clientThreadId,
+ int64_t periodNanoseconds) {
+ AAudioServiceInterface *service = getAAudioService();
+ if (!service) {
+ return AAUDIO_ERROR_NO_SERVICE;
+ }
+ return service->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds);
+}
+
+aaudio_result_t FuzzAAudioClient::unregisterAudioThread(aaudio_handle_t streamHandle,
+ pid_t clientThreadId) {
+ AAudioServiceInterface *service = getAAudioService();
+ if (!service) {
+ return AAUDIO_ERROR_NO_SERVICE;
+ }
+ return service->unregisterAudioThread(streamHandle, clientThreadId);
+}
+
+class OboeserviceFuzzer {
+ public:
+ OboeserviceFuzzer();
+ ~OboeserviceFuzzer() = default;
+ void process(const uint8_t *data, size_t size);
+
+ private:
+ sp<FuzzAAudioClient> mClient;
+};
+
+OboeserviceFuzzer::OboeserviceFuzzer() {
+ sp<AAudioService> service = new AAudioService();
+ mClient = new FuzzAAudioClient(service);
+}
+
+void OboeserviceFuzzer::process(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ AAudioStreamRequest request;
+ AAudioStreamConfiguration configurationOutput;
+
+ // Initialize stream request
+ request.getConfiguration().setFormat((audio_format_t)(
+ fdp.ConsumeBool()
+ ? fdp.ConsumeIntegral<int32_t>()
+ : kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
+
+ request.setUserId(getuid());
+ request.setProcessId(getpid());
+ request.setInService(fdp.ConsumeBool());
+
+ request.getConfiguration().setDeviceId(fdp.ConsumeIntegral<int32_t>());
+ request.getConfiguration().setSampleRate(fdp.ConsumeIntegral<int32_t>());
+ request.getConfiguration().setSamplesPerFrame(fdp.ConsumeIntegral<int32_t>());
+ request.getConfiguration().setDirection(
+ fdp.ConsumeBool() ? fdp.ConsumeIntegral<int32_t>()
+ : (fdp.ConsumeBool() ? AAUDIO_DIRECTION_OUTPUT : AAUDIO_DIRECTION_INPUT));
+ request.getConfiguration().setSharingMode(
+ fdp.ConsumeBool()
+ ? fdp.ConsumeIntegral<int32_t>()
+ : (fdp.ConsumeBool() ? AAUDIO_SHARING_MODE_EXCLUSIVE : AAUDIO_SHARING_MODE_SHARED));
+
+ request.getConfiguration().setUsage(
+ fdp.ConsumeBool()
+ ? fdp.ConsumeIntegral<int32_t>()
+ : kAAudioUsages[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioUsages - 1)]);
+ request.getConfiguration().setContentType(
+ fdp.ConsumeBool() ? fdp.ConsumeIntegral<int32_t>()
+ : kAAudioContentTypes[fdp.ConsumeIntegralInRange<int32_t>(
+ 0, kNumAAudioContentTypes - 1)]);
+ request.getConfiguration().setInputPreset(
+ fdp.ConsumeBool() ? fdp.ConsumeIntegral<int32_t>()
+ : kAAudioInputPresets[fdp.ConsumeIntegralInRange<int32_t>(
+ 0, kNumAAudioInputPresets - 1)]);
+ request.getConfiguration().setPrivacySensitive(fdp.ConsumeBool());
+
+ request.getConfiguration().setBufferCapacity(fdp.ConsumeIntegral<int32_t>());
+
+ aaudio_handle_t stream = mClient->openStream(request, configurationOutput);
+ if (stream < 0) {
+ // invalid request, stream not opened.
+ return;
+ }
+ while (fdp.remaining_bytes()) {
+ AudioEndpointParcelable audioEndpointParcelable;
+ int action = fdp.ConsumeIntegralInRange<int32_t>(0, 4);
+ switch (action) {
+ case 0:
+ mClient->getStreamDescription(stream, audioEndpointParcelable);
+ break;
+ case 1:
+ mClient->startStream(stream);
+ break;
+ case 2:
+ mClient->pauseStream(stream);
+ break;
+ case 3:
+ mClient->stopStream(stream);
+ break;
+ case 4:
+ mClient->flushStream(stream);
+ break;
+ }
+ }
+ mClient->closeStream(stream);
+ assert(mClient->getDeathCount() == 0);
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+ if (size < 1) {
+ return 0;
+ }
+ OboeserviceFuzzer oboeserviceFuzzer;
+ oboeserviceFuzzer.process(data, size);
+ return 0;
+}