Look for prediction model in vendor partition

When loading the prediction model, check the vendor partition first and
fall back to the system partition if no model is found there. This
allows OEMs to customize the model.

Bug: 210158587
Test: loaded the model into /vendor, deleted it from /system, and
verified that prediction still works
Bug: 271455682
Test: atest libinput_tests inputflinger_tests
Change-Id: I0a369e5ec5cec8ac20b66fb4fcf265e7b1dde38a
diff --git a/libs/input/TfLiteMotionPredictor.cpp b/libs/input/TfLiteMotionPredictor.cpp
index 691e87c..3b061d1 100644
--- a/libs/input/TfLiteMotionPredictor.cpp
+++ b/libs/input/TfLiteMotionPredictor.cpp
@@ -61,8 +61,21 @@
 constexpr char OUTPUT_PHI[] = "phi";
 constexpr char OUTPUT_PRESSURE[] = "pressure";
 
+// Ideally, we would just use std::filesystem::exists here, but it requires libc++fs, which causes
+// build issues in other parts of the system.
+#if defined(__ANDROID__)
+bool fileExists(const char* filename) {
+    struct stat buffer;
+    return stat(filename, &buffer) == 0;
+}
+#endif
+
 std::string getModelPath() {
 #if defined(__ANDROID__)
+    static const char* oemModel = "/vendor/etc/motion_predictor_model.fb";
+    if (fileExists(oemModel)) {
+        return oemModel;
+    }
     return "/system/etc/motion_predictor_model.fb";
 #else
     return base::GetExecutableDirectory() + "/motion_predictor_model.fb";
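
Note that stat(2) requires <sys/stat.h>, which is outside this hunk's
context; the include is assumed to be added elsewhere in the change. As a
self-contained sketch of the vendor-first lookup above, with the paths taken
from the patch and a main() driver added purely for illustration:

    // Sketch of the vendor-first model lookup; assumes only POSIX.
    #include <iostream>
    #include <string>
    #include <sys/stat.h>

    // stat() returns 0 iff the path resolves, which doubles as an existence
    // check without pulling in libc++fs (the build issue noted in the hunk).
    bool fileExists(const char* filename) {
        struct stat buffer;
        return stat(filename, &buffer) == 0;
    }

    std::string getModelPath() {
        static const char* oemModel = "/vendor/etc/motion_predictor_model.fb";
        if (fileExists(oemModel)) {
            return oemModel;  // OEM-customized model takes precedence.
        }
        return "/system/etc/motion_predictor_model.fb";  // Stock model.
    }

    int main() {
        std::cout << "would load " << getModelPath() << '\n';
    }
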
@@ -217,7 +230,7 @@
 
 std::unique_ptr<TfLiteMotionPredictorModel> TfLiteMotionPredictorModel::create() {
     const std::string modelPath = getModelPath();
-    const int fd = open(modelPath.c_str(), O_RDONLY);
+    android::base::unique_fd fd(open(modelPath.c_str(), O_RDONLY));
     if (fd == -1) {
         PLOG(FATAL) << "Could not read model from " << modelPath;
     }
@@ -232,9 +245,6 @@
     if (!modelBuffer) {
         PLOG(FATAL) << "Failed to mmap model";
     }
-    if (close(fd) == -1) {
-        PLOG(FATAL) << "Failed to close model fd";
-    }
     return std::unique_ptr<TfLiteMotionPredictorModel>(
             new TfLiteMotionPredictorModel(std::move(modelBuffer)));
 }
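
The deleted close() block is subsumed by android::base::unique_fd (from
libbase's android-base/unique_fd.h): the descriptor is closed automatically
when fd goes out of scope, and the class converts implicitly to int, so the
fd == -1 check and the later mmap() call work unchanged. Closing after mmap()
is also safe, since the mapping outlives the descriptor. A minimal sketch of
the same RAII idea, using a hypothetical ScopedFd stand-in rather than the
real libbase class:

    #include <fcntl.h>
    #include <unistd.h>

    // Hypothetical stand-in for android::base::unique_fd, reduced to the two
    // behaviors this change relies on: close-on-scope-exit and conversion to
    // int. The real class also offers move semantics, reset(), and release().
    class ScopedFd {
      public:
        explicit ScopedFd(int fd) : fd_(fd) {}
        ~ScopedFd() {
            if (fd_ != -1) close(fd_);  // closed exactly once, even on early return
        }
        ScopedFd(const ScopedFd&) = delete;
        ScopedFd& operator=(const ScopedFd&) = delete;
        operator int() const { return fd_; }  // keeps `fd == -1` checks working
      private:
        int fd_;
    };

    int main() {
        ScopedFd fd(open("/system/etc/motion_predictor_model.fb", O_RDONLY));
        if (fd == -1) {
            return 1;  // open() failed; nothing to close.
        }
        // mmap(nullptr, size, PROT_READ, MAP_PRIVATE, fd, 0) would go here;
        // the mapping stays valid after the fd is closed at scope exit.
        return 0;
    }  // ~ScopedFd runs here; no manual close() or error check needed.
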