/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "MotionPredictor"

#include <input/MotionPredictor.h>

#include <android/log.h>  // __android_log_is_loggable
#include <input/Input.h>  // MotionEvent, isFromSource, inputEventSourceToString
#include <log/log.h>      // ALOGD_IF

/**
 * Log debug messages about predictions.
 * Enable this via "adb shell setprop log.tag.MotionPredictor DEBUG"
 */
static bool isDebug() {
    return __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG, ANDROID_LOG_INFO);
}

namespace android {

// --- MotionPredictor ---

MotionPredictor::MotionPredictor(nsecs_t predictionTimestampOffsetNanos,
                                 std::function<bool()> checkMotionPredictionEnabled)
      : mPredictionTimestampOffsetNanos(predictionTimestampOffsetNanos),
        mCheckMotionPredictionEnabled(std::move(checkMotionPredictionEnabled)) {}

void MotionPredictor::record(const MotionEvent& event) {
    mEvents.push_back({});
    mEvents.back().copyFrom(&event, /*keepHistory=*/true);
    if (mEvents.size() > 2) {
        // Just need 2 samples in order to extrapolate
        mEvents.erase(mEvents.begin());
    }
}

/**
 * This is an example implementation that should be replaced with the actual prediction.
 * The returned MotionEvent should be similar to the incoming MotionEvent, except for the
 * fields that are predicted:
 *
 * 1) event.getEventTime
 * 2) event.getPointerCoords
 *
 * The returned event should not contain any of the real, existing data. It should only
 * contain the predicted samples.
 */
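// A minimal, hypothetical usage sketch (not part of this file); caller-side names such as
// 'nextFrameTimeNanos' and 'latestStylusEvent' are illustrative assumptions.
//
//   MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/10'000'000,
//                             []() { return true; /* assume prediction is globally enabled */ });
//   predictor.setExpectedPresentationTimeNanos(nextFrameTimeNanos); // e.g. the next vsync
//   predictor.record(latestStylusEvent); // call once per incoming event; two are needed to predict
//   std::vector<std::unique_ptr<MotionEvent>> predicted = predictor.predict();
//   for (const auto& p : predicted) {
//       // p contains only predicted samples, timestamped in the future.
//   }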
std::vector<std::unique_ptr<MotionEvent>> MotionPredictor::predict() {
    if (mEvents.size() < 2) {
        return {};
    }

    const MotionEvent& event = mEvents.back();
    if (!isPredictionAvailable(event.getDeviceId(), event.getSource())) {
        return {};
    }

    std::unique_ptr<MotionEvent> prediction = std::make_unique<MotionEvent>();
    std::vector<PointerCoords> futureCoords;
    const int64_t futureTime = getExpectedPresentationTimeNanos() + mPredictionTimestampOffsetNanos;
    const nsecs_t currentTime = event.getEventTime();
    const MotionEvent& previous = mEvents.rbegin()[1];
    const nsecs_t oldTime = previous.getEventTime();
    if (currentTime == oldTime) {
        // This can happen if it's an ACTION_POINTER_DOWN event, for example.
        return {}; // prevent division by zero.
    }

    for (size_t i = 0; i < event.getPointerCount(); i++) {
        const int32_t pointerId = event.getPointerId(i);
        PointerCoords coords;
        coords.clear();

        ssize_t index = previous.findPointerIndex(pointerId);
        if (index >= 0) {
            // We have old data for this pointer. Compute the prediction.
            const float oldX = previous.getRawX(index);
            const float oldY = previous.getRawY(index);
            const float currentX = event.getRawX(i);
            const float currentY = event.getRawY(i);

            // Let's do a linear interpolation while waiting for a real model
            const float scale =
                    static_cast<float>(futureTime - currentTime) / (currentTime - oldTime);
            const float futureX = currentX + (currentX - oldX) * scale;
            const float futureY = currentY + (currentY - oldY) * scale;
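            // Equivalently, constant-velocity extrapolation of the last two samples:
            //   velocity = (current - old) / (currentTime - oldTime)
            //   future   = current + velocity * (futureTime - currentTime)
            //            = current + (current - old) * scale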

            coords.setAxisValue(AMOTION_EVENT_AXIS_X, futureX);
            coords.setAxisValue(AMOTION_EVENT_AXIS_Y, futureY);
        }

        futureCoords.push_back(coords);
    }

    ALOGD_IF(isDebug(), "Prediction is %.1f ms away from the event",
             (futureTime - event.getEventTime()) * 1E-6);
    /**
     * The process of adding samples is different for the first and subsequent samples:
     * 1. Add the first sample via 'initialize' as below
     * 2. Add subsequent samples via 'addSample'
     */
    prediction->initialize(event.getId(), event.getDeviceId(), event.getSource(),
                           event.getDisplayId(), event.getHmac(), event.getAction(),
                           event.getActionButton(), event.getFlags(), event.getEdgeFlags(),
                           event.getMetaState(), event.getButtonState(), event.getClassification(),
                           event.getTransform(), event.getXPrecision(), event.getYPrecision(),
                           event.getRawXCursorPosition(), event.getRawYCursorPosition(),
                           event.getRawTransform(), event.getDownTime(), futureTime,
                           event.getPointerCount(), event.getPointerProperties(),
                           futureCoords.data());

    // To add more predicted samples, use 'addSample':
    prediction->addSample(futureTime + 1, futureCoords.data());

    std::vector<std::unique_ptr<MotionEvent>> out;
    out.push_back(std::move(prediction));
    return out;
}

bool MotionPredictor::isPredictionAvailable(int32_t /*deviceId*/, int32_t source) {
    // Global flag override
    if (!mCheckMotionPredictionEnabled()) {
        ALOGD_IF(isDebug(), "Prediction not available due to flag override");
        return false;
    }

    // Prediction is only supported for stylus sources.
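    // Note: isFromSource() checks bitmask containment, so a combined source such as
    // TOUCHSCREEN | STYLUS (e.g. a stylus drawing on a touchscreen) also qualifies.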
    if (!isFromSource(source, AINPUT_SOURCE_STYLUS)) {
        ALOGD_IF(isDebug(), "Prediction not available for non-stylus source: %s",
                 inputEventSourceToString(source).c_str());
        return false;
    }
    return true;
}

int64_t MotionPredictor::getExpectedPresentationTimeNanos() {
    std::scoped_lock lock(mLock);
    return mExpectedPresentationTimeNanos;
}

void MotionPredictor::setExpectedPresentationTimeNanos(int64_t expectedPresentationTimeNanos) {
    std::scoped_lock lock(mLock);
    mExpectedPresentationTimeNanos = expectedPresentationTimeNanos;
}

} // namespace android