Add native MotionPredictor

The native MotionPredictor will call neural net code to create motion
predictions for the provided gesture stream.

In this CL, a simple linear extrapolation model is provided in order to
test the API.

To enable prediction:
adb shell setprop persist.input.enable_motion_prediction true
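
Illustrative call pattern (variable names here are placeholders; this CL
adds no callers):

  MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/0,
                            checkMotionPredictionEnabled);
  predictor.setExpectedPresentationTimeNanos(presentationTimeNanos);
  predictor.record(event);                 // once per incoming MotionEvent
  auto predictions = predictor.predict();  // empty when prediction is off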

Bug: 167946763
Test: atest InputTests inputflinger_tests libinput_tests
Change-Id: Id8a23b728aeb844288d5b8daae0829d61d4c1482
diff --git a/libs/input/Android.bp b/libs/input/Android.bp
index 34ef7b4..8ddd18d 100644
--- a/libs/input/Android.bp
+++ b/libs/input/Android.bp
@@ -50,6 +50,7 @@
         "Keyboard.cpp",
         "KeyCharacterMap.cpp",
         "KeyLayoutMap.cpp",
+        "MotionPredictor.cpp",
         "PrintTools.cpp",
         "PropertyMap.cpp",
         "TouchVideoFrame.cpp",
@@ -63,8 +64,9 @@
 
     shared_libs: [
         "libbase",
-        "liblog",
         "libcutils",
+        "liblog",
+        "libPlatformProperties",
         "libvintf",
     ],
 
diff --git a/libs/input/MotionPredictor.cpp b/libs/input/MotionPredictor.cpp
new file mode 100644
index 0000000..0719fe5
--- /dev/null
+++ b/libs/input/MotionPredictor.cpp
@@ -0,0 +1,156 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MotionPredictor"
+
+#include <input/MotionPredictor.h>
+#include <log/log.h>
+
+/**
+ * Log debug messages about predictions.
+ * Enable this via "adb shell setprop log.tag.MotionPredictor DEBUG"
+ */
+static bool isDebug() {
+    return __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG, ANDROID_LOG_INFO);
+}
+
+namespace android {
+
+// --- MotionPredictor ---
+
+MotionPredictor::MotionPredictor(nsecs_t predictionTimestampOffsetNanos,
+                                 std::function<bool()> checkMotionPredictionEnabled)
+      : mPredictionTimestampOffsetNanos(predictionTimestampOffsetNanos),
+        mCheckMotionPredictionEnabled(std::move(checkMotionPredictionEnabled)) {}
+
+void MotionPredictor::record(const MotionEvent& event) {
+    mEvents.push_back({});
+    mEvents.back().copyFrom(&event, /*keepHistory=*/true);
+    if (mEvents.size() > 2) {
+        // Only the last two samples are needed for extrapolation; drop the oldest.
+        mEvents.erase(mEvents.begin());
+    }
+}
+
+/**
+ * This is an example implementation that should be replaced with the actual prediction.
+ * The returned MotionEvent should be similar to the incoming MotionEvent, except for the
+ * fields that are predicted:
+ *
+ * 1) event.getEventTime
+ * 2) event.getPointerCoords
+ *
+ * The returned event should not contain any of the real, existing data. It should only
+ * contain the predicted samples.
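+ *
+ * Worked example for the placeholder linear model below: given the two most
+ * recent samples (t=20, x=2) and (t=30, x=3), a prediction requested for
+ * t=40 extrapolates to x=4 (this mirrors the LinearPrediction test).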
+ */
+std::vector<std::unique_ptr<MotionEvent>> MotionPredictor::predict() {
+    if (mEvents.size() < 2) {
+        return {};
+    }
+
+    const MotionEvent& event = mEvents.back();
+    if (!isPredictionAvailable(event.getDeviceId(), event.getSource())) {
+        return {};
+    }
+
+    std::unique_ptr<MotionEvent> prediction = std::make_unique<MotionEvent>();
+    std::vector<PointerCoords> futureCoords;
+    const int64_t futureTime = getExpectedPresentationTimeNanos() + mPredictionTimestampOffsetNanos;
+    const nsecs_t currentTime = event.getEventTime();
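+    // mEvents holds at most two events (see record()), so rbegin()[1] is the
+    // second-most-recent sample.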
+    const MotionEvent& previous = mEvents.rbegin()[1];
+    const nsecs_t oldTime = previous.getEventTime();
+    if (currentTime == oldTime) {
+        // This can happen if it's an ACTION_POINTER_DOWN event, for example.
+        return {}; // prevent division by zero.
+    }
+
+    for (size_t i = 0; i < event.getPointerCount(); i++) {
+        const int32_t pointerId = event.getPointerId(i);
+        PointerCoords coords;
+        coords.clear();
+
+        ssize_t index = previous.findPointerIndex(pointerId);
+        if (index >= 0) {
+            // We have old data for this pointer. Compute the prediction.
+            const float oldX = previous.getRawX(index);
+            const float oldY = previous.getRawY(index);
+            const float currentX = event.getRawX(i);
+            const float currentY = event.getRawY(i);
+
+            // Use linear extrapolation as a placeholder until a real model lands.
+            const float scale =
+                    static_cast<float>(futureTime - currentTime) / (currentTime - oldTime);
+            const float futureX = currentX + (currentX - oldX) * scale;
+            const float futureY = currentY + (currentY - oldY) * scale;
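+            // Equivalently: future = current + velocity * (futureTime -
+            // currentTime), where velocity = (current - old) /
+            // (currentTime - oldTime).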
+
+            coords.setAxisValue(AMOTION_EVENT_AXIS_X, futureX);
+            coords.setAxisValue(AMOTION_EVENT_AXIS_Y, futureY);
+        }
+
+        futureCoords.push_back(coords);
+    }
+
+    ALOGD_IF(isDebug(), "Prediction is %.1f ms away from the event",
+             (futureTime - event.getEventTime()) * 1E-6);
+    /**
+     * The process of adding samples is different for the first and subsequent samples:
+     * 1. Add the first sample via 'initialize' as below
+     * 2. Add subsequent samples via 'addSample'
+     */
+    prediction->initialize(event.getId(), event.getDeviceId(), event.getSource(),
+                           event.getDisplayId(), event.getHmac(), event.getAction(),
+                           event.getActionButton(), event.getFlags(), event.getEdgeFlags(),
+                           event.getMetaState(), event.getButtonState(), event.getClassification(),
+                           event.getTransform(), event.getXPrecision(), event.getYPrecision(),
+                           event.getRawXCursorPosition(), event.getRawYCursorPosition(),
+                           event.getRawTransform(), event.getDownTime(), futureTime,
+                           event.getPointerCount(), event.getPointerProperties(),
+                           futureCoords.data());
+
+    // To add more predicted samples, use 'addSample'. As a placeholder, the
+    // same coords are repeated 1 ns later to exercise the multi-sample path:
+    prediction->addSample(futureTime + 1, futureCoords.data());
+
+    std::vector<std::unique_ptr<MotionEvent>> out;
+    out.push_back(std::move(prediction));
+    return out;
+}
+
+bool MotionPredictor::isPredictionAvailable(int32_t /*deviceId*/, int32_t source) {
+    // Global flag override
+    if (!mCheckMotionPredictionEnabled()) {
+        ALOGD_IF(isDebug(), "Prediction not available due to flag override");
+        return false;
+    }
+
+    // Prediction is only supported for stylus sources.
+    if (!isFromSource(source, AINPUT_SOURCE_STYLUS)) {
+        ALOGD_IF(isDebug(), "Prediction not available for non-stylus source: %s",
+                 inputEventSourceToString(source).c_str());
+        return false;
+    }
+    return true;
+}
+
+int64_t MotionPredictor::getExpectedPresentationTimeNanos() {
+    std::scoped_lock lock(mLock);
+    return mExpectedPresentationTimeNanos;
+}
+
+void MotionPredictor::setExpectedPresentationTimeNanos(int64_t expectedPresentationTimeNanos) {
+    std::scoped_lock lock(mLock);
+    mExpectedPresentationTimeNanos = expectedPresentationTimeNanos;
+}
+
+} // namespace android
diff --git a/libs/input/tests/Android.bp b/libs/input/tests/Android.bp
index 5aae37d..e2c0860 100644
--- a/libs/input/tests/Android.bp
+++ b/libs/input/tests/Android.bp
@@ -17,6 +17,7 @@
         "InputDevice_test.cpp",
         "InputEvent_test.cpp",
         "InputPublisherAndConsumer_test.cpp",
+        "MotionPredictor_test.cpp",
         "TouchResampling_test.cpp",
         "TouchVideoFrame_test.cpp",
         "VelocityTracker_test.cpp",
@@ -37,6 +38,7 @@
         "libbinder",
         "libcutils",
         "liblog",
+        "libPlatformProperties",
         "libutils",
         "libvintf",
     ],
diff --git a/libs/input/tests/MotionPredictor_test.cpp b/libs/input/tests/MotionPredictor_test.cpp
new file mode 100644
index 0000000..0f39055
--- /dev/null
+++ b/libs/input/tests/MotionPredictor_test.cpp
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+#include <gui/constants.h>
+#include <input/Input.h>
+#include <input/MotionPredictor.h>
+
+namespace android {
+
+constexpr int32_t DOWN = AMOTION_EVENT_ACTION_DOWN;
+constexpr int32_t MOVE = AMOTION_EVENT_ACTION_MOVE;
+
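+// Builds a single-pointer stylus MotionEvent with the given action, raw
+// coordinates and event time; other fields use fixed test values.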
+static MotionEvent getMotionEvent(int32_t action, float x, float y, nsecs_t eventTime) {
+    MotionEvent event;
+    constexpr size_t pointerCount = 1;
+    std::vector<PointerProperties> pointerProperties;
+    std::vector<PointerCoords> pointerCoords;
+    for (size_t i = 0; i < pointerCount; i++) {
+        PointerProperties properties;
+        properties.clear();
+        properties.id = i;
+        pointerProperties.push_back(properties);
+        PointerCoords coords;
+        coords.clear();
+        coords.setAxisValue(AMOTION_EVENT_AXIS_X, x);
+        coords.setAxisValue(AMOTION_EVENT_AXIS_Y, y);
+        pointerCoords.push_back(coords);
+    }
+
+    ui::Transform identityTransform;
+    event.initialize(InputEvent::nextId(), /*deviceId=*/0, AINPUT_SOURCE_STYLUS,
+                     ADISPLAY_ID_DEFAULT, {0}, action, /*actionButton=*/0, /*flags=*/0,
+                     AMOTION_EVENT_EDGE_FLAG_NONE, AMETA_NONE, /*buttonState=*/0,
+                     MotionClassification::NONE, identityTransform, /*xPrecision=*/0.1,
+                     /*yPrecision=*/0.2, /*xCursorPosition=*/280, /*yCursorPosition=*/540,
+                     identityTransform, /*downTime=*/100, eventTime, pointerCount,
+                     pointerProperties.data(), pointerCoords.data());
+    return event;
+}
+
+/**
+ * Linear motion should be predicted to continue linearly into the future.
+ */
+TEST(MotionPredictorTest, LinearPrediction) {
+    MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/0,
+                              []() { return true /*enable prediction*/; });
+
+    predictor.record(getMotionEvent(DOWN, 0, 1, 0));
+    predictor.record(getMotionEvent(MOVE, 1, 3, 10));
+    predictor.record(getMotionEvent(MOVE, 2, 5, 20));
+    predictor.record(getMotionEvent(MOVE, 3, 7, 30));
+    predictor.setExpectedPresentationTimeNanos(40);
+    std::vector<std::unique_ptr<MotionEvent>> predicted = predictor.predict();
+    ASSERT_EQ(1u, predicted.size());
+    ASSERT_EQ(predicted[0]->getX(0), 4);
+    ASSERT_EQ(predicted[0]->getY(0), 9);
+}
+
+/**
+ * A stationary pointer should be predicted to remain stationary.
+ */
+TEST(MotionPredictorTest, StationaryPrediction) {
+    MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/0,
+                              []() { return true /*enable prediction*/; });
+
+    predictor.record(getMotionEvent(DOWN, 0, 1, 0));
+    predictor.record(getMotionEvent(MOVE, 0, 1, 10));
+    predictor.record(getMotionEvent(MOVE, 0, 1, 20));
+    predictor.record(getMotionEvent(MOVE, 0, 1, 30));
+    predictor.setExpectedPresentationTimeNanos(40);
+    std::vector<std::unique_ptr<MotionEvent>> predicted = predictor.predict();
+    ASSERT_EQ(1u, predicted.size());
+    ASSERT_EQ(predicted[0]->getX(0), 0);
+    ASSERT_EQ(predicted[0]->getY(0), 1);
+}
+
+TEST(MotionPredictorTest, IsPredictionAvailable) {
+    MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/0,
+                              []() { return true /*enable prediction*/; });
+    ASSERT_TRUE(predictor.isPredictionAvailable(/*deviceId=*/1, AINPUT_SOURCE_STYLUS));
+    ASSERT_FALSE(predictor.isPredictionAvailable(/*deviceId=*/1, AINPUT_SOURCE_TOUCHSCREEN));
+}
+
+TEST(MotionPredictorTest, Offset) {
+    MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/1,
+                              []() { return true /*enable prediction*/; });
+    predictor.setExpectedPresentationTimeNanos(40);
+    predictor.record(getMotionEvent(DOWN, 0, 1, 30));
+    predictor.record(getMotionEvent(MOVE, 0, 1, 35));
+    std::vector<std::unique_ptr<MotionEvent>> predicted = predictor.predict();
+    ASSERT_EQ(1u, predicted.size());
+    ASSERT_GE(predicted[0]->getEventTime(), 41);
+}
+
+TEST(MotionPredictorTest, FlagDisablesPrediction) {
+    MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/0,
+                              []() { return false /*disable prediction*/; });
+    predictor.setExpectedPresentationTimeNanos(40);
+    predictor.record(getMotionEvent(DOWN, 0, 1, 30));
+    predictor.record(getMotionEvent(MOVE, 0, 1, 35));
+    std::vector<std::unique_ptr<MotionEvent>> predicted = predictor.predict();
+    ASSERT_EQ(0u, predicted.size());
+    ASSERT_FALSE(predictor.isPredictionAvailable(/*deviceId=*/1, AINPUT_SOURCE_STYLUS));
+    ASSERT_FALSE(predictor.isPredictionAvailable(/*deviceId=*/1, AINPUT_SOURCE_TOUCHSCREEN));
+}
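+
+/**
+ * Illustrative check of the division-by-zero guard in predict(): two samples
+ * with identical timestamps (as can happen with ACTION_POINTER_DOWN) should
+ * yield no prediction.
+ */
+TEST(MotionPredictorTest, IdenticalTimestampsProduceNoPrediction) {
+    MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/0,
+                              []() { return true /*enable prediction*/; });
+    predictor.setExpectedPresentationTimeNanos(40);
+    predictor.record(getMotionEvent(DOWN, 0, 1, 30));
+    predictor.record(getMotionEvent(MOVE, 0, 1, 30));
+    std::vector<std::unique_ptr<MotionEvent>> predicted = predictor.predict();
+    ASSERT_EQ(0u, predicted.size());
+}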
+
+} // namespace android