/**
 * Copyright 2024 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdint>
#define LOG_TAG "InputTransport"
#define ATRACE_TAG ATRACE_TAG_INPUT

#include <errno.h>
#include <fcntl.h>
#include <inttypes.h>
#include <math.h>
#include <poll.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <unistd.h>

#include <android-base/logging.h>
#include <android-base/properties.h>
#include <android-base/stringprintf.h>
#include <binder/Parcel.h>
#include <cutils/properties.h>
#include <ftl/enum.h>
#include <log/log.h>
#include <utils/Trace.h>

#include <com_android_input_flags.h>
#include <input/InputConsumer.h>
#include <input/PrintTools.h>
#include <input/TraceTools.h>

namespace input_flags = com::android::input::flags;

namespace android {

namespace {

/**
 * Log debug messages relating to the consumer end of the transport channel.
 * Enable this via "adb shell setprop log.tag.InputTransportConsumer DEBUG" (requires restart)
 */

const bool DEBUG_TRANSPORT_CONSUMER =
        __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Consumer", ANDROID_LOG_INFO);

const bool IS_DEBUGGABLE_BUILD =
#if defined(__ANDROID__)
        android::base::GetBoolProperty("ro.debuggable", false);
#else
        true;
#endif

/**
 * Log debug messages about touch event resampling.
 *
 * Enable this via "adb shell setprop log.tag.InputTransportResampling DEBUG".
 * This requires a restart on non-debuggable (e.g. user) builds, but should take effect immediately
 * on debuggable builds (e.g. userdebug).
 */
bool debugResampling() {
    if (!IS_DEBUGGABLE_BUILD) {
        static const bool DEBUG_TRANSPORT_RESAMPLING =
                __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Resampling",
                                          ANDROID_LOG_INFO);
        return DEBUG_TRANSPORT_RESAMPLING;
    }
    return __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Resampling", ANDROID_LOG_INFO);
}

void initializeKeyEvent(KeyEvent& event, const InputMessage& msg) {
    event.initialize(msg.body.key.eventId, msg.body.key.deviceId, msg.body.key.source,
                     ui::LogicalDisplayId{msg.body.key.displayId}, msg.body.key.hmac,
                     msg.body.key.action, msg.body.key.flags, msg.body.key.keyCode,
                     msg.body.key.scanCode, msg.body.key.metaState, msg.body.key.repeatCount,
                     msg.body.key.downTime, msg.body.key.eventTime);
}

void initializeFocusEvent(FocusEvent& event, const InputMessage& msg) {
    event.initialize(msg.body.focus.eventId, msg.body.focus.hasFocus);
}

void initializeCaptureEvent(CaptureEvent& event, const InputMessage& msg) {
    event.initialize(msg.body.capture.eventId, msg.body.capture.pointerCaptureEnabled);
}

void initializeDragEvent(DragEvent& event, const InputMessage& msg) {
    event.initialize(msg.body.drag.eventId, msg.body.drag.x, msg.body.drag.y,
                     msg.body.drag.isExiting);
}

void initializeMotionEvent(MotionEvent& event, const InputMessage& msg) {
    uint32_t pointerCount = msg.body.motion.pointerCount;
    PointerProperties pointerProperties[pointerCount];
    PointerCoords pointerCoords[pointerCount];
    for (uint32_t i = 0; i < pointerCount; i++) {
        pointerProperties[i] = msg.body.motion.pointers[i].properties;
        pointerCoords[i] = msg.body.motion.pointers[i].coords;
    }

    ui::Transform transform;
    transform.set({msg.body.motion.dsdx, msg.body.motion.dtdx, msg.body.motion.tx,
                   msg.body.motion.dtdy, msg.body.motion.dsdy, msg.body.motion.ty, 0, 0, 1});
    ui::Transform displayTransform;
    displayTransform.set({msg.body.motion.dsdxRaw, msg.body.motion.dtdxRaw, msg.body.motion.txRaw,
                          msg.body.motion.dtdyRaw, msg.body.motion.dsdyRaw, msg.body.motion.tyRaw,
                          0, 0, 1});
    event.initialize(msg.body.motion.eventId, msg.body.motion.deviceId, msg.body.motion.source,
                     ui::LogicalDisplayId{msg.body.motion.displayId}, msg.body.motion.hmac,
                     msg.body.motion.action, msg.body.motion.actionButton, msg.body.motion.flags,
                     msg.body.motion.edgeFlags, msg.body.motion.metaState,
                     msg.body.motion.buttonState, msg.body.motion.classification, transform,
                     msg.body.motion.xPrecision, msg.body.motion.yPrecision,
                     msg.body.motion.xCursorPosition, msg.body.motion.yCursorPosition,
                     displayTransform, msg.body.motion.downTime, msg.body.motion.eventTime,
                     pointerCount, pointerProperties, pointerCoords);
}

void addSample(MotionEvent& event, const InputMessage& msg) {
    uint32_t pointerCount = msg.body.motion.pointerCount;
    PointerCoords pointerCoords[pointerCount];
    for (uint32_t i = 0; i < pointerCount; i++) {
        pointerCoords[i] = msg.body.motion.pointers[i].coords;
    }

    event.setMetaState(event.getMetaState() | msg.body.motion.metaState);
    event.addSample(msg.body.motion.eventTime, pointerCoords, msg.body.motion.eventId);
}

void initializeTouchModeEvent(TouchModeEvent& event, const InputMessage& msg) {
    event.initialize(msg.body.touchMode.eventId, msg.body.touchMode.isInTouchMode);
}

// Nanoseconds per millisecond.
constexpr nsecs_t NANOS_PER_MS = 1000000;

// Latency added during resampling. A few milliseconds doesn't hurt much but
// reduces the impact of mispredicted touch positions.
const std::chrono::duration RESAMPLE_LATENCY = 5ms;

// Minimum time difference between consecutive samples before attempting to resample.
const nsecs_t RESAMPLE_MIN_DELTA = 2 * NANOS_PER_MS;

// Maximum time difference between consecutive samples before attempting to resample
// by extrapolation.
const nsecs_t RESAMPLE_MAX_DELTA = 20 * NANOS_PER_MS;

// Maximum time to predict forward from the last known state, to avoid predicting too
// far into the future. This time is further bounded by 50% of the last time delta.
const nsecs_t RESAMPLE_MAX_PREDICTION = 8 * NANOS_PER_MS;
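// Illustrative arithmetic (added note, not part of the original sources): if the last two
// samples arrived 6 ms apart, resampleTouchState() below caps prediction at
// min(6 ms / 2, RESAMPLE_MAX_PREDICTION) = 3 ms past the newest sample.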

/**
 * System property for enabling / disabling touch resampling.
 * Resampling extrapolates / interpolates the reported touch event coordinates to better
 * align them to the VSYNC signal, thus resulting in smoother scrolling performance.
 * Resampling is not needed (and should be disabled) on hardware that already
 * has touch events triggered by VSYNC.
 * Set to "1" to enable resampling (default).
 * Set to "0" to disable resampling.
 * Resampling is enabled by default.
 */
const char* PROPERTY_RESAMPLING_ENABLED = "ro.input.resampling";
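// Added note (assumption, not from the original sources): the current value can be inspected
// with "adb shell getprop ro.input.resampling"; as a "ro." property it is normally fixed by the
// device's build configuration rather than changed at runtime.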

inline float lerp(float a, float b, float alpha) {
    return a + alpha * (b - a);
}
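// Worked example (added note, illustrative): resampleTouchState() below interpolates with
// alpha = (sampleTime - current->eventTime) / delta, so with samples at 100 ms and 110 ms and a
// sampleTime of 105 ms, alpha = 0.5 and lerp(x_100, x_110, 0.5) yields the midpoint of the two
// reported positions. Extrapolation uses a non-positive alpha to project past the newest sample.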

inline bool isPointerEvent(int32_t source) {
    return (source & AINPUT_SOURCE_CLASS_POINTER) == AINPUT_SOURCE_CLASS_POINTER;
}

bool shouldResampleTool(ToolType toolType) {
    return toolType == ToolType::FINGER || toolType == ToolType::MOUSE ||
            toolType == ToolType::STYLUS || toolType == ToolType::UNKNOWN;
}

} // namespace

using android::base::Result;
using android::base::StringPrintf;

// --- InputConsumer ---

InputConsumer::InputConsumer(const std::shared_ptr<InputChannel>& channel)
      : InputConsumer(channel, isTouchResamplingEnabled()) {}

InputConsumer::InputConsumer(const std::shared_ptr<InputChannel>& channel,
                             bool enableTouchResampling)
      : mResampleTouch(enableTouchResampling),
        mChannel(channel),
        mProcessingTraceTag(StringPrintf("InputConsumer processing on %s (%p)",
                                         mChannel->getName().c_str(), this)),
        mLifetimeTraceTag(StringPrintf("InputConsumer lifetime on %s (%p)",
                                       mChannel->getName().c_str(), this)),
        mLifetimeTraceCookie(
                static_cast<int32_t>(reinterpret_cast<std::uintptr_t>(this) & 0xFFFFFFFF)),
        mMsgDeferred(false) {
    ATRACE_ASYNC_BEGIN(mLifetimeTraceTag.c_str(), /*cookie=*/mLifetimeTraceCookie);
}

InputConsumer::~InputConsumer() {
    ATRACE_ASYNC_END(mLifetimeTraceTag.c_str(), /*cookie=*/mLifetimeTraceCookie);
}

bool InputConsumer::isTouchResamplingEnabled() {
    return property_get_bool(PROPERTY_RESAMPLING_ENABLED, true);
}

status_t InputConsumer::consume(InputEventFactoryInterface* factory, bool consumeBatches,
                                nsecs_t frameTime, uint32_t* outSeq, InputEvent** outEvent) {
    ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
             "channel '%s' consumer ~ consume: consumeBatches=%s, frameTime=%" PRId64,
             mChannel->getName().c_str(), toString(consumeBatches), frameTime);

    *outSeq = 0;
    *outEvent = nullptr;

    // Fetch the next input message.
    // Loop until an event can be returned or no additional events are received.
    while (!*outEvent) {
        if (mMsgDeferred) {
            // mMsg contains a valid input message from the previous call to consume
            // that has not yet been processed.
            mMsgDeferred = false;
        } else {
            // Receive a fresh message.
            android::base::Result<InputMessage> result = mChannel->receiveMessage();
            if (result.ok()) {
                mMsg = std::move(result.value());
                const auto [_, inserted] =
                        mConsumeTimes.emplace(mMsg.header.seq, systemTime(SYSTEM_TIME_MONOTONIC));
                LOG_ALWAYS_FATAL_IF(!inserted, "Already have a consume time for seq=%" PRIu32,
                                    mMsg.header.seq);

                // Trace the event processing timeline - event was just read from the socket
                ATRACE_ASYNC_BEGIN(mProcessingTraceTag.c_str(), /*cookie=*/mMsg.header.seq);
            } else {
                // Consume the next batched event unless batches are being held for later.
                if (consumeBatches || result.error().code() != WOULD_BLOCK) {
                    result = android::base::Error(
                            consumeBatch(factory, frameTime, outSeq, outEvent));
                    if (*outEvent) {
                        ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
                                 "channel '%s' consumer ~ consumed batch event, seq=%u",
                                 mChannel->getName().c_str(), *outSeq);
                        break;
                    }
                }
                return result.error().code();
            }
        }

        switch (mMsg.header.type) {
            case InputMessage::Type::KEY: {
                KeyEvent* keyEvent = factory->createKeyEvent();
                if (!keyEvent) return NO_MEMORY;

                initializeKeyEvent(*keyEvent, mMsg);
                *outSeq = mMsg.header.seq;
                *outEvent = keyEvent;
                ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
                         "channel '%s' consumer ~ consumed key event, seq=%u",
                         mChannel->getName().c_str(), *outSeq);
                break;
            }

            case InputMessage::Type::MOTION: {
                ssize_t batchIndex = findBatch(mMsg.body.motion.deviceId, mMsg.body.motion.source);
                if (batchIndex >= 0) {
                    Batch& batch = mBatches[batchIndex];
                    if (canAddSample(batch, &mMsg)) {
                        batch.samples.push_back(mMsg);
                        ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
                                 "channel '%s' consumer ~ appended to batch event",
                                 mChannel->getName().c_str());
                        break;
                    } else if (isPointerEvent(mMsg.body.motion.source) &&
                               mMsg.body.motion.action == AMOTION_EVENT_ACTION_CANCEL) {
                        // No need to process events that we are going to cancel anyway
                        const size_t count = batch.samples.size();
                        for (size_t i = 0; i < count; i++) {
                            const InputMessage& msg = batch.samples[i];
                            sendFinishedSignal(msg.header.seq, false);
                        }
                        batch.samples.erase(batch.samples.begin(), batch.samples.begin() + count);
                        mBatches.erase(mBatches.begin() + batchIndex);
                    } else {
                        // We cannot append to the batch in progress, so we need to consume
                        // the previous batch right now and defer the new message until later.
                        mMsgDeferred = true;
                        status_t result = consumeSamples(factory, batch, batch.samples.size(),
                                                         outSeq, outEvent);
                        mBatches.erase(mBatches.begin() + batchIndex);
                        if (result) {
                            return result;
                        }
                        ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
                                 "channel '%s' consumer ~ consumed batch event and "
                                 "deferred current event, seq=%u",
                                 mChannel->getName().c_str(), *outSeq);
                        break;
                    }
                }

                // Start a new batch if needed.
                if (mMsg.body.motion.action == AMOTION_EVENT_ACTION_MOVE ||
                    mMsg.body.motion.action == AMOTION_EVENT_ACTION_HOVER_MOVE) {
                    Batch batch;
                    batch.samples.push_back(mMsg);
                    mBatches.push_back(batch);
                    ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
                             "channel '%s' consumer ~ started batch event",
                             mChannel->getName().c_str());
                    break;
                }

                MotionEvent* motionEvent = factory->createMotionEvent();
                if (!motionEvent) return NO_MEMORY;

                updateTouchState(mMsg);
                initializeMotionEvent(*motionEvent, mMsg);
                *outSeq = mMsg.header.seq;
                *outEvent = motionEvent;

                ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
                         "channel '%s' consumer ~ consumed motion event, seq=%u",
                         mChannel->getName().c_str(), *outSeq);
                break;
            }

            case InputMessage::Type::FINISHED:
            case InputMessage::Type::TIMELINE: {
                LOG(FATAL) << "Consumed a " << ftl::enum_string(mMsg.header.type)
                           << " message, which should never be seen by "
                              "InputConsumer on "
                           << mChannel->getName();
                break;
            }

            case InputMessage::Type::FOCUS: {
                FocusEvent* focusEvent = factory->createFocusEvent();
                if (!focusEvent) return NO_MEMORY;

                initializeFocusEvent(*focusEvent, mMsg);
                *outSeq = mMsg.header.seq;
                *outEvent = focusEvent;
                break;
            }

            case InputMessage::Type::CAPTURE: {
                CaptureEvent* captureEvent = factory->createCaptureEvent();
                if (!captureEvent) return NO_MEMORY;

                initializeCaptureEvent(*captureEvent, mMsg);
                *outSeq = mMsg.header.seq;
                *outEvent = captureEvent;
                break;
            }

            case InputMessage::Type::DRAG: {
                DragEvent* dragEvent = factory->createDragEvent();
                if (!dragEvent) return NO_MEMORY;

                initializeDragEvent(*dragEvent, mMsg);
                *outSeq = mMsg.header.seq;
                *outEvent = dragEvent;
                break;
            }

            case InputMessage::Type::TOUCH_MODE: {
                TouchModeEvent* touchModeEvent = factory->createTouchModeEvent();
                if (!touchModeEvent) return NO_MEMORY;

                initializeTouchModeEvent(*touchModeEvent, mMsg);
                *outSeq = mMsg.header.seq;
                *outEvent = touchModeEvent;
                break;
            }
        }
    }
    return OK;
}

status_t InputConsumer::consumeBatch(InputEventFactoryInterface* factory, nsecs_t frameTime,
                                     uint32_t* outSeq, InputEvent** outEvent) {
    status_t result;
    for (size_t i = mBatches.size(); i > 0;) {
        i--;
        Batch& batch = mBatches[i];
        if (frameTime < 0) {
            result = consumeSamples(factory, batch, batch.samples.size(), outSeq, outEvent);
            mBatches.erase(mBatches.begin() + i);
            return result;
        }

        nsecs_t sampleTime = frameTime;
        if (mResampleTouch) {
            sampleTime -= std::chrono::nanoseconds(RESAMPLE_LATENCY).count();
        }
        ssize_t split = findSampleNoLaterThan(batch, sampleTime);
        if (split < 0) {
            continue;
        }

        result = consumeSamples(factory, batch, split + 1, outSeq, outEvent);
        const InputMessage* next;
        if (batch.samples.empty()) {
            mBatches.erase(mBatches.begin() + i);
            next = nullptr;
        } else {
            next = &batch.samples[0];
        }
        if (!result && mResampleTouch) {
            resampleTouchState(sampleTime, static_cast<MotionEvent*>(*outEvent), next);
        }
        return result;
    }

    return WOULD_BLOCK;
}

status_t InputConsumer::consumeSamples(InputEventFactoryInterface* factory, Batch& batch,
                                       size_t count, uint32_t* outSeq, InputEvent** outEvent) {
    MotionEvent* motionEvent = factory->createMotionEvent();
    if (!motionEvent) return NO_MEMORY;

    uint32_t chain = 0;
    for (size_t i = 0; i < count; i++) {
        InputMessage& msg = batch.samples[i];
        updateTouchState(msg);
        if (i) {
            SeqChain seqChain;
            seqChain.seq = msg.header.seq;
            seqChain.chain = chain;
            mSeqChains.push_back(seqChain);
            addSample(*motionEvent, msg);
        } else {
            initializeMotionEvent(*motionEvent, msg);
        }
        chain = msg.header.seq;
    }
    batch.samples.erase(batch.samples.begin(), batch.samples.begin() + count);

    *outSeq = chain;
    *outEvent = motionEvent;
    return OK;
}

void InputConsumer::updateTouchState(InputMessage& msg) {
    if (!mResampleTouch || !isPointerEvent(msg.body.motion.source)) {
        return;
    }

    int32_t deviceId = msg.body.motion.deviceId;
    int32_t source = msg.body.motion.source;

    // Update the touch state history to incorporate the new input message.
    // If the message is in the past relative to the most recently produced resampled
    // touch, then use the resampled time and coordinates instead.
    switch (msg.body.motion.action & AMOTION_EVENT_ACTION_MASK) {
        case AMOTION_EVENT_ACTION_DOWN: {
            ssize_t index = findTouchState(deviceId, source);
            if (index < 0) {
                mTouchStates.push_back({});
                index = mTouchStates.size() - 1;
            }
            TouchState& touchState = mTouchStates[index];
            touchState.initialize(deviceId, source);
            touchState.addHistory(msg);
            break;
        }

        case AMOTION_EVENT_ACTION_MOVE: {
            ssize_t index = findTouchState(deviceId, source);
            if (index >= 0) {
                TouchState& touchState = mTouchStates[index];
                touchState.addHistory(msg);
                rewriteMessage(touchState, msg);
            }
            break;
        }

        case AMOTION_EVENT_ACTION_POINTER_DOWN: {
            ssize_t index = findTouchState(deviceId, source);
            if (index >= 0) {
                TouchState& touchState = mTouchStates[index];
                touchState.lastResample.idBits.clearBit(msg.body.motion.getActionId());
                rewriteMessage(touchState, msg);
            }
            break;
        }

        case AMOTION_EVENT_ACTION_POINTER_UP: {
            ssize_t index = findTouchState(deviceId, source);
            if (index >= 0) {
                TouchState& touchState = mTouchStates[index];
                rewriteMessage(touchState, msg);
                touchState.lastResample.idBits.clearBit(msg.body.motion.getActionId());
            }
            break;
        }

        case AMOTION_EVENT_ACTION_SCROLL: {
            ssize_t index = findTouchState(deviceId, source);
            if (index >= 0) {
                TouchState& touchState = mTouchStates[index];
                rewriteMessage(touchState, msg);
            }
            break;
        }

        case AMOTION_EVENT_ACTION_UP:
        case AMOTION_EVENT_ACTION_CANCEL: {
            ssize_t index = findTouchState(deviceId, source);
            if (index >= 0) {
                TouchState& touchState = mTouchStates[index];
                rewriteMessage(touchState, msg);
                mTouchStates.erase(mTouchStates.begin() + index);
            }
            break;
        }
    }
}

/**
 * Replace the coordinates in msg with the coordinates in lastResample, if necessary.
 *
 * If lastResample is no longer valid for a specific pointer (i.e. the lastResample time
 * is in the past relative to msg and the past two events do not contain identical coordinates),
 * then invalidate the lastResample data for that pointer.
 * If the two past events have identical coordinates, then lastResample data for that pointer will
 * remain valid, and will be used to replace these coordinates. Thus, if a certain coordinate x0 is
 * resampled to the new value x1, then x1 will always be used to replace x0 until some new value
 * not equal to x0 is received.
 */
void InputConsumer::rewriteMessage(TouchState& state, InputMessage& msg) {
    nsecs_t eventTime = msg.body.motion.eventTime;
    for (uint32_t i = 0; i < msg.body.motion.pointerCount; i++) {
        uint32_t id = msg.body.motion.pointers[i].properties.id;
        if (state.lastResample.idBits.hasBit(id)) {
            if (eventTime < state.lastResample.eventTime ||
                state.recentCoordinatesAreIdentical(id)) {
                PointerCoords& msgCoords = msg.body.motion.pointers[i].coords;
                const PointerCoords& resampleCoords = state.lastResample.getPointerById(id);
                ALOGD_IF(debugResampling(), "[%d] - rewrite (%0.3f, %0.3f), old (%0.3f, %0.3f)", id,
                         resampleCoords.getX(), resampleCoords.getY(), msgCoords.getX(),
                         msgCoords.getY());
                msgCoords.setAxisValue(AMOTION_EVENT_AXIS_X, resampleCoords.getX());
                msgCoords.setAxisValue(AMOTION_EVENT_AXIS_Y, resampleCoords.getY());
                msgCoords.isResampled = true;
            } else {
                state.lastResample.idBits.clearBit(id);
            }
        }
    }
}

void InputConsumer::resampleTouchState(nsecs_t sampleTime, MotionEvent* event,
                                       const InputMessage* next) {
    if (!mResampleTouch || !(isPointerEvent(event->getSource())) ||
        event->getAction() != AMOTION_EVENT_ACTION_MOVE) {
        return;
    }

    ssize_t index = findTouchState(event->getDeviceId(), event->getSource());
    if (index < 0) {
        ALOGD_IF(debugResampling(), "Not resampled, no touch state for device.");
        return;
    }

    TouchState& touchState = mTouchStates[index];
    if (touchState.historySize < 1) {
        ALOGD_IF(debugResampling(), "Not resampled, no history for device.");
        return;
    }

    // Ensure that the current sample has all of the pointers that need to be reported.
    const History* current = touchState.getHistory(0);
    size_t pointerCount = event->getPointerCount();
    for (size_t i = 0; i < pointerCount; i++) {
        uint32_t id = event->getPointerId(i);
        if (!current->idBits.hasBit(id)) {
            ALOGD_IF(debugResampling(), "Not resampled, missing id %d", id);
            return;
        }
        if (!shouldResampleTool(event->getToolType(i))) {
            ALOGD_IF(debugResampling(),
                     "Not resampled, containing unsupported tool type at pointer %d", id);
            return;
        }
    }

    // Find the data to use for resampling.
    const History* other;
    History future;
    float alpha;
    if (next) {
        // Interpolate between current sample and future sample.
        // So current->eventTime <= sampleTime <= future.eventTime.
        future.initializeFrom(*next);
        other = &future;
        nsecs_t delta = future.eventTime - current->eventTime;
        if (delta < RESAMPLE_MIN_DELTA) {
            ALOGD_IF(debugResampling(), "Not resampled, delta time is too small: %" PRId64 " ns.",
                     delta);
            return;
        }
        alpha = float(sampleTime - current->eventTime) / delta;
    } else if (touchState.historySize >= 2) {
        // Extrapolate future sample using current sample and past sample.
        // So other->eventTime <= current->eventTime <= sampleTime.
        other = touchState.getHistory(1);
        nsecs_t delta = current->eventTime - other->eventTime;
        if (delta < RESAMPLE_MIN_DELTA) {
            ALOGD_IF(debugResampling(), "Not resampled, delta time is too small: %" PRId64 " ns.",
                     delta);
            return;
        } else if (delta > RESAMPLE_MAX_DELTA) {
            ALOGD_IF(debugResampling(), "Not resampled, delta time is too large: %" PRId64 " ns.",
                     delta);
            return;
        }
        nsecs_t maxPredict = current->eventTime + std::min(delta / 2, RESAMPLE_MAX_PREDICTION);
        if (sampleTime > maxPredict) {
            ALOGD_IF(debugResampling(),
                     "Sample time is too far in the future, adjusting prediction "
                     "from %" PRId64 " to %" PRId64 " ns.",
                     sampleTime - current->eventTime, maxPredict - current->eventTime);
            sampleTime = maxPredict;
        }
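        // Added note: sampleTime is at or beyond current->eventTime in this branch, so the alpha
        // below is <= 0 and lerp() projects past the newest sample, away from the older one.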
        alpha = float(current->eventTime - sampleTime) / delta;
    } else {
        ALOGD_IF(debugResampling(), "Not resampled, insufficient data.");
        return;
    }

    if (current->eventTime == sampleTime) {
        ALOGD_IF(debugResampling(), "Not resampled, 2 events with identical times.");
        return;
    }

    for (size_t i = 0; i < pointerCount; i++) {
        uint32_t id = event->getPointerId(i);
        if (!other->idBits.hasBit(id)) {
            ALOGD_IF(debugResampling(), "Not resampled, the other doesn't have pointer id %d.", id);
            return;
        }
    }

    // Resample touch coordinates.
    History oldLastResample;
    oldLastResample.initializeFrom(touchState.lastResample);
    touchState.lastResample.eventTime = sampleTime;
    touchState.lastResample.idBits.clear();
    for (size_t i = 0; i < pointerCount; i++) {
        uint32_t id = event->getPointerId(i);
        touchState.lastResample.idToIndex[id] = i;
        touchState.lastResample.idBits.markBit(id);
        if (oldLastResample.hasPointerId(id) && touchState.recentCoordinatesAreIdentical(id)) {
            // We maintain the previously resampled value for this pointer (stored in
            // oldLastResample) when the coordinates for this pointer haven't changed since then.
            // This way we don't introduce artificial jitter when pointers haven't actually moved.
            // The isResampled flag isn't cleared as the values don't reflect what the device is
            // actually reporting.

            // We know here that the coordinates for the pointer haven't changed because we
            // would've cleared the resampled bit in rewriteMessage if they had. We can't modify
            // lastResample in place because the mapping from pointer ID to index may have changed.
            touchState.lastResample.pointers[i] = oldLastResample.getPointerById(id);
            continue;
        }

        PointerCoords& resampledCoords = touchState.lastResample.pointers[i];
        const PointerCoords& currentCoords = current->getPointerById(id);
        resampledCoords = currentCoords;
        resampledCoords.isResampled = true;
        const PointerCoords& otherCoords = other->getPointerById(id);
        resampledCoords.setAxisValue(AMOTION_EVENT_AXIS_X,
                                     lerp(currentCoords.getX(), otherCoords.getX(), alpha));
        resampledCoords.setAxisValue(AMOTION_EVENT_AXIS_Y,
                                     lerp(currentCoords.getY(), otherCoords.getY(), alpha));
        ALOGD_IF(debugResampling(),
                 "[%d] - out (%0.3f, %0.3f), cur (%0.3f, %0.3f), "
                 "other (%0.3f, %0.3f), alpha %0.3f",
                 id, resampledCoords.getX(), resampledCoords.getY(), currentCoords.getX(),
                 currentCoords.getY(), otherCoords.getX(), otherCoords.getY(), alpha);
    }

    event->addSample(sampleTime, touchState.lastResample.pointers, event->getId());
}

status_t InputConsumer::sendFinishedSignal(uint32_t seq, bool handled) {
    ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
             "channel '%s' consumer ~ sendFinishedSignal: seq=%u, handled=%s",
             mChannel->getName().c_str(), seq, toString(handled));

    if (!seq) {
        ALOGE("Attempted to send a finished signal with sequence number 0.");
        return BAD_VALUE;
    }

    // Send finished signals for the batch sequence chain first.
    size_t seqChainCount = mSeqChains.size();
    if (seqChainCount) {
        uint32_t currentSeq = seq;
        uint32_t chainSeqs[seqChainCount];
        size_t chainIndex = 0;
        for (size_t i = seqChainCount; i > 0;) {
            i--;
            const SeqChain& seqChain = mSeqChains[i];
            if (seqChain.seq == currentSeq) {
                currentSeq = seqChain.chain;
                chainSeqs[chainIndex++] = currentSeq;
                mSeqChains.erase(mSeqChains.begin() + i);
            }
        }
        status_t status = OK;
        while (!status && chainIndex > 0) {
            chainIndex--;
            status = sendUnchainedFinishedSignal(chainSeqs[chainIndex], handled);
        }
        if (status) {
            // An error occurred so at least one signal was not sent, reconstruct the chain.
            for (;;) {
                SeqChain seqChain;
                seqChain.seq = chainIndex != 0 ? chainSeqs[chainIndex - 1] : seq;
                seqChain.chain = chainSeqs[chainIndex];
                mSeqChains.push_back(seqChain);
                if (!chainIndex) break;
                chainIndex--;
            }
            return status;
        }
    }

    // Send finished signal for the last message in the batch.
    return sendUnchainedFinishedSignal(seq, handled);
}

status_t InputConsumer::sendTimeline(int32_t inputEventId,
                                     std::array<nsecs_t, GraphicsTimeline::SIZE> graphicsTimeline) {
    ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
             "channel '%s' consumer ~ sendTimeline: inputEventId=%" PRId32
             ", gpuCompletedTime=%" PRId64 ", presentTime=%" PRId64,
             mChannel->getName().c_str(), inputEventId,
             graphicsTimeline[GraphicsTimeline::GPU_COMPLETED_TIME],
             graphicsTimeline[GraphicsTimeline::PRESENT_TIME]);

    InputMessage msg;
    msg.header.type = InputMessage::Type::TIMELINE;
    msg.header.seq = 0;
    msg.body.timeline.eventId = inputEventId;
    msg.body.timeline.graphicsTimeline = std::move(graphicsTimeline);
    return mChannel->sendMessage(&msg);
}

nsecs_t InputConsumer::getConsumeTime(uint32_t seq) const {
    auto it = mConsumeTimes.find(seq);
    // Consume time will be missing if either 'finishInputEvent' is called twice, or if it was
    // called for the wrong (synthetic?) input event. Either way, it is a bug that should be fixed.
    LOG_ALWAYS_FATAL_IF(it == mConsumeTimes.end(), "Could not find consume time for seq=%" PRIu32,
                        seq);
    return it->second;
}

void InputConsumer::popConsumeTime(uint32_t seq) {
    mConsumeTimes.erase(seq);
}

status_t InputConsumer::sendUnchainedFinishedSignal(uint32_t seq, bool handled) {
    InputMessage msg;
    msg.header.type = InputMessage::Type::FINISHED;
    msg.header.seq = seq;
    msg.body.finished.handled = handled;
    msg.body.finished.consumeTime = getConsumeTime(seq);
    status_t result = mChannel->sendMessage(&msg);
    if (result == OK) {
        // Remove the consume time if the socket write succeeded. We will not need to ack this
        // message anymore. If the socket write did not succeed, we will try again and will still
        // need consume time.
        popConsumeTime(seq);

        // Trace the event processing timeline - event was just finished
        ATRACE_ASYNC_END(mProcessingTraceTag.c_str(), /*cookie=*/seq);
    }
    return result;
}

bool InputConsumer::hasPendingBatch() const {
    return !mBatches.empty();
}

int32_t InputConsumer::getPendingBatchSource() const {
    if (mBatches.empty()) {
        return AINPUT_SOURCE_CLASS_NONE;
    }

    const Batch& batch = mBatches[0];
    const InputMessage& head = batch.samples[0];
    return head.body.motion.source;
}

bool InputConsumer::probablyHasInput() const {
    return hasPendingBatch() || mChannel->probablyHasInput();
}

ssize_t InputConsumer::findBatch(int32_t deviceId, int32_t source) const {
    for (size_t i = 0; i < mBatches.size(); i++) {
        const Batch& batch = mBatches[i];
        const InputMessage& head = batch.samples[0];
        if (head.body.motion.deviceId == deviceId && head.body.motion.source == source) {
            return i;
        }
    }
    return -1;
}

ssize_t InputConsumer::findTouchState(int32_t deviceId, int32_t source) const {
    for (size_t i = 0; i < mTouchStates.size(); i++) {
        const TouchState& touchState = mTouchStates[i];
        if (touchState.deviceId == deviceId && touchState.source == source) {
            return i;
        }
    }
    return -1;
}

bool InputConsumer::canAddSample(const Batch& batch, const InputMessage* msg) {
    const InputMessage& head = batch.samples[0];
    uint32_t pointerCount = msg->body.motion.pointerCount;
    if (head.body.motion.pointerCount != pointerCount ||
        head.body.motion.action != msg->body.motion.action) {
        return false;
    }
    for (size_t i = 0; i < pointerCount; i++) {
        if (head.body.motion.pointers[i].properties != msg->body.motion.pointers[i].properties) {
            return false;
        }
    }
    return true;
}

ssize_t InputConsumer::findSampleNoLaterThan(const Batch& batch, nsecs_t time) {
    size_t numSamples = batch.samples.size();
    size_t index = 0;
    while (index < numSamples && batch.samples[index].body.motion.eventTime <= time) {
        index += 1;
    }
    return ssize_t(index) - 1;
}

std::string InputConsumer::dump() const {
    std::string out;
    out = out + "mResampleTouch = " + toString(mResampleTouch) + "\n";
    out = out + "mChannel = " + mChannel->getName() + "\n";
    out = out + "mMsgDeferred: " + toString(mMsgDeferred) + "\n";
    if (mMsgDeferred) {
        out = out + "mMsg : " + ftl::enum_string(mMsg.header.type) + "\n";
    }
    out += "Batches:\n";
    for (const Batch& batch : mBatches) {
        out += " Batch:\n";
        for (const InputMessage& msg : batch.samples) {
            out += android::base::StringPrintf(" Message %" PRIu32 ": %s ", msg.header.seq,
                                               ftl::enum_string(msg.header.type).c_str());
            switch (msg.header.type) {
                case InputMessage::Type::KEY: {
                    out += android::base::StringPrintf("action=%s keycode=%" PRId32,
                                                       KeyEvent::actionToString(
                                                               msg.body.key.action),
                                                       msg.body.key.keyCode);
                    break;
                }
                case InputMessage::Type::MOTION: {
                    out = out + "action=" + MotionEvent::actionToString(msg.body.motion.action);
                    for (uint32_t i = 0; i < msg.body.motion.pointerCount; i++) {
                        const float x = msg.body.motion.pointers[i].coords.getX();
                        const float y = msg.body.motion.pointers[i].coords.getY();
                        out += android::base::StringPrintf("\n Pointer %" PRIu32
                                                           " : x=%.1f y=%.1f",
                                                           i, x, y);
                    }
                    break;
                }
                case InputMessage::Type::FINISHED: {
                    out += android::base::StringPrintf("handled=%s, consumeTime=%" PRId64,
                                                       toString(msg.body.finished.handled),
                                                       msg.body.finished.consumeTime);
                    break;
                }
                case InputMessage::Type::FOCUS: {
                    out += android::base::StringPrintf("hasFocus=%s",
                                                       toString(msg.body.focus.hasFocus));
                    break;
                }
                case InputMessage::Type::CAPTURE: {
                    out += android::base::StringPrintf("hasCapture=%s",
                                                       toString(msg.body.capture
                                                                        .pointerCaptureEnabled));
                    break;
                }
                case InputMessage::Type::DRAG: {
                    out += android::base::StringPrintf("x=%.1f y=%.1f, isExiting=%s",
                                                       msg.body.drag.x, msg.body.drag.y,
                                                       toString(msg.body.drag.isExiting));
                    break;
                }
                case InputMessage::Type::TIMELINE: {
                    const nsecs_t gpuCompletedTime =
                            msg.body.timeline
                                    .graphicsTimeline[GraphicsTimeline::GPU_COMPLETED_TIME];
                    const nsecs_t presentTime =
                            msg.body.timeline.graphicsTimeline[GraphicsTimeline::PRESENT_TIME];
                    out += android::base::StringPrintf("inputEventId=%" PRId32
                                                       ", gpuCompletedTime=%" PRId64
                                                       ", presentTime=%" PRId64,
                                                       msg.body.timeline.eventId, gpuCompletedTime,
                                                       presentTime);
                    break;
                }
                case InputMessage::Type::TOUCH_MODE: {
                    out += android::base::StringPrintf("isInTouchMode=%s",
                                                       toString(msg.body.touchMode.isInTouchMode));
                    break;
                }
            }
            out += "\n";
        }
    }
    if (mBatches.empty()) {
        out += " <empty>\n";
    }
    out += "mSeqChains:\n";
    for (const SeqChain& chain : mSeqChains) {
        out += android::base::StringPrintf(" chain: seq = %" PRIu32 " chain=%" PRIu32, chain.seq,
                                           chain.chain);
    }
    if (mSeqChains.empty()) {
        out += " <empty>\n";
    }
    out += "mConsumeTimes:\n";
    for (const auto& [seq, consumeTime] : mConsumeTimes) {
        out += android::base::StringPrintf(" seq = %" PRIu32 " consumeTime = %" PRId64, seq,
                                           consumeTime);
    }
    if (mConsumeTimes.empty()) {
        out += " <empty>\n";
    }
    return out;
}

} // namespace android