Skip to main content

agg_gui/
touch_state.rs

1//! Multi-touch gesture recogniser.
2//!
3//! The platform shells (web JS, native winit) forward raw touch events
4//! to [`App::on_touch_start/move/end/cancel`].  [`TouchState`] maintains
5//! the set of active touches and, once two or more fingers are down,
6//! aggregates them each frame into a [`MultiTouchInfo`] describing zoom,
7//! rotation, pan, and average pressure relative to the previous frame.
8//!
9//! Widgets that want to react to gestures read the current frame's
10//! aggregate via [`current_multi_touch`], a thread-local written by
11//! [`App::publish_multi_touch`] at the start of each paint.  Single-
12//! finger touches continue to flow through the regular mouse-emulation
13//! path, so existing widgets keep working with no changes.
14//!
15//! The API shape deliberately mirrors egui's (`zoom_delta`,
16//! `rotation_delta`, `translation_delta`, `num_touches`, `center_pos`)
17//! so ports from egui code read cleanly.
18
19use std::cell::RefCell;
20use std::collections::BTreeMap;
21
22use crate::geometry::Point;
23
24// ---------------------------------------------------------------------------
25// Identifier newtypes
26// ---------------------------------------------------------------------------
27
/// Stable per-device identifier.  Different physical input surfaces
/// (e.g. a laptop's built-in touchscreen and a connected tablet) hash
/// to different values.  The web shell always uses `0` (the browser
/// doesn't expose multiple touch devices to pages); winit passes
/// through its device id.
///
/// Ordered/hashable so it can key the `(device, touch)` map in
/// [`TouchState`].
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct TouchDeviceId(pub u64);
35
/// Per-finger identifier, stable from Start through End/Cancel.  Re-
/// used after lift — browsers and winit both guarantee identifiers
/// are unique only for the lifetime of the touch.
///
/// Ordered/hashable so it can key the `(device, touch)` map in
/// [`TouchState`].
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct TouchId(pub u64);
41
/// Which phase of the gesture this touch event represents.
///
/// Maps 1:1 onto the lifecycle events the platform shells forward
/// (`touchstart` / `touchmove` / `touchend` / `touchcancel` on the
/// web; the equivalent winit phases natively).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum TouchPhase {
    /// Finger first made contact.
    Start,
    /// Finger moved while in contact.
    Move,
    /// Finger lifted normally.
    End,
    /// Touch was cancelled by the platform (phone call, gesture
    /// hand-off to browser, etc.).
    Cancel,
}
55
56// ---------------------------------------------------------------------------
57// MultiTouchInfo — the per-frame aggregate
58// ---------------------------------------------------------------------------
59
/// Gesture aggregate for the current frame, produced when two or more
/// fingers are on the same device.  All deltas are relative to the
/// previous frame's positions — the widget just accumulates them into
/// its own angle / scale / translation state (see `LionView` for the
/// canonical consumer).
///
/// `Copy` on purpose: this is a small value published through a
/// thread-local every frame.
#[derive(Copy, Clone, Debug)]
pub struct MultiTouchInfo {
    /// Device that owns these touches.  Useful only when the host
    /// actually distinguishes multiple touchscreens; most apps ignore.
    pub device_id: TouchDeviceId,
    /// Number of fingers currently down (always ≥ 2 — a single-finger
    /// frame produces `None` instead of a [`MultiTouchInfo`]).
    pub num_touches: usize,
    /// Multiplicative zoom factor since the last frame.  `1.0` means
    /// "no pinch this frame"; `1.1` means the fingers spread by 10 %.
    pub zoom_delta: f32,
    /// Rotation in radians since the last frame.  Positive = CCW in
    /// widget-local (Y-up) space, i.e. visually counter-clockwise on
    /// screen.
    pub rotation_delta: f32,
    /// Translation of the centroid since the last frame, in widget-
    /// local pixels.  Widgets that want the gesture to orbit the pinch
    /// centre should combine this with `zoom_delta` / `rotation_delta`.
    pub translation_delta: Point,
    /// Average `force` across active touches, or `0.0` when the
    /// platform doesn't report pressure.
    pub force: f32,
    /// Centroid of the active touches in app-local coordinates this
    /// frame.  Widgets that want to hit-test "is the gesture over me?"
    /// compare this against their own absolute bounds.
    pub center_pos: Point,
}
92
93// ---------------------------------------------------------------------------
94// TouchState — per-frame gesture recogniser
95// ---------------------------------------------------------------------------
96
/// One finger's tracked position, updated every Move event.
///
/// Internal bookkeeping only — widgets see the aggregated
/// [`MultiTouchInfo`], never individual touches.
#[derive(Copy, Clone, Debug)]
struct ActiveTouch {
    /// Latest position reported by the platform.
    pos: Point,
    /// Position at the last `update_gesture` call — used as the basis
    /// for the next delta.
    prev_pos: Point,
    /// Latest force (0.0 when unsupported).
    force: f32,
}
108
/// Tracks every active touch across every known device.  Lives on
/// `App`; widgets never see this directly.
///
/// `BTreeMap` (not `HashMap`) keeps iteration order deterministic
/// across frames, which keeps the aggregate stable.
#[derive(Default)]
pub struct TouchState {
    /// Every finger currently down, keyed by owning device + touch id.
    active: BTreeMap<(TouchDeviceId, TouchId), ActiveTouch>,
    /// Result of the most recent `update_gesture` call — `None` while
    /// fewer than two fingers are down on any one device.  Published
    /// to the thread-local so widgets can read it during paint.
    last: Option<MultiTouchInfo>,
    /// Set by Start / End / Cancel so `update_gesture` can reseed
    /// `prev_pos` on the frame after a finger count change — without
    /// this, newly-arrived fingers contribute a spurious delta equal
    /// to their full spread on their first move.
    topology_changed: bool,
}
124
125impl TouchState {
126    pub fn new() -> Self {
127        Self::default()
128    }
129
130    pub fn on_start(&mut self, device: TouchDeviceId, id: TouchId, pos: Point, force: Option<f32>) {
131        self.active.insert(
132            (device, id),
133            ActiveTouch {
134                pos,
135                prev_pos: pos,
136                force: force.unwrap_or(0.0),
137            },
138        );
139        self.topology_changed = true;
140    }
141
142    pub fn on_move(&mut self, device: TouchDeviceId, id: TouchId, pos: Point, force: Option<f32>) {
143        if let Some(t) = self.active.get_mut(&(device, id)) {
144            t.pos = pos;
145            if let Some(f) = force {
146                t.force = f;
147            }
148        }
149    }
150
151    pub fn on_end_or_cancel(&mut self, device: TouchDeviceId, id: TouchId) {
152        if self.active.remove(&(device, id)).is_some() {
153            self.topology_changed = true;
154        }
155        if self.active.len() < 2 {
156            self.last = None;
157        }
158    }
159
160    /// Recompute the per-frame aggregate.  Called by `App` right before
161    /// the multi-touch value is published, so every `paint` / `on_event`
162    /// in the same frame sees consistent deltas.
163    pub fn update_gesture(&mut self) {
164        // Only the most-populated device contributes — the common case
165        // is a single touchscreen, and cross-device gestures aren't a
166        // useful abstraction.
167        let device = self.active.keys().next().map(|(d, _)| *d);
168        let Some(device) = device else {
169            self.last = None;
170            return;
171        };
172        let touches: Vec<ActiveTouch> = self
173            .active
174            .iter()
175            .filter(|((d, _), _)| *d == device)
176            .map(|(_, t)| *t)
177            .collect();
178        if touches.len() < 2 {
179            self.last = None;
180            return;
181        }
182
183        // Centroid (previous vs current) drives the translation delta.
184        let n = touches.len() as f64;
185        let (mut cx, mut cy) = (0.0, 0.0);
186        let (mut pcx, mut pcy) = (0.0, 0.0);
187        for t in &touches {
188            cx += t.pos.x;
189            cy += t.pos.y;
190            pcx += t.prev_pos.x;
191            pcy += t.prev_pos.y;
192        }
193        cx /= n;
194        cy /= n;
195        pcx /= n;
196        pcy /= n;
197
198        // Average pinch + rotation across pairs.  Using every
199        // (touch, centroid) ray means the signal scales sensibly with
200        // finger count; egui does the same.
201        let mut zoom_sum = 0.0_f32;
202        let mut rotation_sum = 0.0_f32;
203        let mut force_sum = 0.0_f32;
204        let mut zoom_count = 0;
205        for t in &touches {
206            force_sum += t.force;
207            let dx = (t.pos.x - cx) as f32;
208            let dy = (t.pos.y - cy) as f32;
209            let pdx = (t.prev_pos.x - pcx) as f32;
210            let pdy = (t.prev_pos.y - pcy) as f32;
211            let r = (dx * dx + dy * dy).sqrt();
212            let pr = (pdx * pdx + pdy * pdy).sqrt();
213            if pr > 1.0 && r > 1.0 {
214                zoom_sum += r / pr;
215                rotation_sum += dy.atan2(dx) - pdy.atan2(pdx);
216                zoom_count += 1;
217            }
218        }
219        // Skip producing a frame-delta when topology just changed —
220        // the jump from "no prev_pos" to "current pos" would otherwise
221        // read as a huge one-frame zoom.  We still emit an info entry
222        // so widgets can react to finger count; just with zeroed
223        // deltas.
224        let (zoom_delta, rotation_delta) = if self.topology_changed || zoom_count == 0 {
225            (1.0, 0.0)
226        } else {
227            // Normalise rotation to `[-pi, pi]` so wrap-around at the
228            // ±pi seam doesn't flip sign of the delta.
229            let mut rot = rotation_sum / zoom_count as f32;
230            use std::f32::consts::PI;
231            while rot > PI {
232                rot -= 2.0 * PI;
233            }
234            while rot < -PI {
235                rot += 2.0 * PI;
236            }
237            (zoom_sum / zoom_count as f32, rot)
238        };
239
240        let translation_delta = if self.topology_changed {
241            Point::new(0.0, 0.0)
242        } else {
243            Point::new(cx - pcx, cy - pcy)
244        };
245
246        self.last = Some(MultiTouchInfo {
247            device_id: device,
248            num_touches: touches.len(),
249            zoom_delta,
250            rotation_delta,
251            translation_delta,
252            force: force_sum / n as f32,
253            center_pos: Point::new(cx, cy),
254        });
255
256        // Latch current positions as the new baseline for the next
257        // frame, then clear the topology flag.
258        for t in self.active.values_mut() {
259            t.prev_pos = t.pos;
260        }
261        self.topology_changed = false;
262    }
263
264    pub fn current(&self) -> Option<MultiTouchInfo> {
265        self.last
266    }
267
268    /// Total number of fingers currently down (across all devices).
269    /// Useful as a lightweight "are we in a gesture?" probe when a
270    /// widget doesn't care about the per-delta aggregate.
271    pub fn active_count(&self) -> usize {
272        self.active.len()
273    }
274}
275
276// ---------------------------------------------------------------------------
277// Thread-local publish / read
278// ---------------------------------------------------------------------------
279
thread_local! {
    /// This frame's multi-touch aggregate — written by [`set_current`]
    /// (from `App::publish_multi_touch`) and read during paint via
    /// [`current_multi_touch`].  `None` outside a two-finger gesture.
    static CURRENT: RefCell<Option<MultiTouchInfo>> = RefCell::new(None);
    /// Wall-clock time of the most recent touch lifecycle event
    /// (`Start` / `Move` / `End` / `Cancel`).  Set by `App`'s touch
    /// entry points.  Mouse events the touch shell synthesises arrive
    /// within milliseconds of a touch event — widgets that need to
    /// distinguish a touch tap from a desktop click read
    /// [`last_touch_event_age`] and treat anything under a few tens
    /// of milliseconds as touch-synthesised.
    static LAST_TOUCH_EVENT_AT: std::cell::Cell<Option<web_time::Instant>> =
        const { std::cell::Cell::new(None) };
}
292
293/// Publish this frame's multi-touch aggregate.  Called by
294/// `App::paint` right before painting begins.
295pub fn set_current(info: Option<MultiTouchInfo>) {
296    CURRENT.with(|c| *c.borrow_mut() = info);
297}
298
299/// Fetch the current frame's multi-touch aggregate.  Returns `None`
300/// when fewer than two fingers are down on any device, so a widget
301/// writes: `if let Some(mt) = current_multi_touch() { … }`.
302pub fn current_multi_touch() -> Option<MultiTouchInfo> {
303    CURRENT.with(|c| *c.borrow())
304}
305
306/// Record that a touch lifecycle event just fired.  Called from
307/// `App::on_touch_start/move/end/cancel`.
308pub(crate) fn note_touch_event() {
309    LAST_TOUCH_EVENT_AT.with(|c| c.set(Some(web_time::Instant::now())));
310}
311
312/// Time elapsed since the most recent touch lifecycle event, or
313/// `None` if no touch event has ever fired.  Mouse events
314/// synthesised from a touchstart / touchend by the web shell arrive
315/// within a millisecond of the touch event — widgets needing to
316/// tell touch-synthesised mouse events apart from real desktop
317/// clicks check this against a small threshold.
318pub fn last_touch_event_age() -> Option<std::time::Duration> {
319    LAST_TOUCH_EVENT_AT.with(|c| c.get()).map(|t| t.elapsed())
320}
321
322/// Forget any prior touch event so the next mouse event reads as
323/// "from desktop" until [`note_touch_event`] runs again.  Tests use
324/// this to isolate desktop-mouse scenarios from a sibling test that
325/// just simulated a touch tap.
326#[doc(hidden)]
327pub fn clear_last_touch_event_for_testing() {
328    LAST_TOUCH_EVENT_AT.with(|c| c.set(None));
329}