// canvas_core/event.rs
1//! Input events for canvas interaction.
2
3use serde::{Deserialize, Serialize};
4
5use crate::ElementId;
6
/// Phase of a touch event.
///
/// Mirrors the lifecycle of a single finger contact. Serialized in
/// lowercase (`"start"`, `"move"`, `"end"`, `"cancel"`) via the
/// `rename_all` attribute below, matching browser-style event names.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum TouchPhase {
    /// Touch started (finger down).
    Start,
    /// Touch moved (finger dragging).
    Move,
    /// Touch ended (finger up).
    End,
    /// Touch cancelled (e.g., palm rejection).
    Cancel,
}
20
/// A single touch point.
///
/// One entry per active finger; `id` distinguishes fingers across
/// multi-touch frames. No `Eq`/`Hash` because of the `f32` fields.
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub struct TouchPoint {
    /// Touch identifier (for multi-touch). Stable for the lifetime of a
    /// single contact so moves can be correlated across events.
    pub id: u32,
    /// X position in canvas coordinates.
    pub x: f32,
    /// Y position in canvas coordinates.
    pub y: f32,
    /// Pressure (0.0 to 1.0, if available). `None` when the input
    /// device does not report pressure.
    pub pressure: Option<f32>,
    /// Touch radius in pixels (if available). `None` when the input
    /// device does not report contact size.
    pub radius: Option<f32>,
}
35
/// A touch event with one or more touch points.
///
/// Carries a snapshot of all currently-active touch points plus the
/// phase transition that triggered the event.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct TouchEvent {
    /// Phase of this touch event.
    pub phase: TouchPhase,
    /// All current touch points. The first entry is treated as the
    /// primary touch (see `TouchEvent::primary_touch`).
    pub touches: Vec<TouchPoint>,
    /// Timestamp in milliseconds since canvas start.
    pub timestamp_ms: u64,
    /// Element ID that was touched (if any). `None` until hit-testing
    /// resolves a target; `TouchEvent::new` leaves it unset.
    pub target_element: Option<ElementId>,
}
48
49impl TouchEvent {
50 /// Create a new touch event.
51 #[must_use]
52 pub fn new(phase: TouchPhase, touches: Vec<TouchPoint>, timestamp_ms: u64) -> Self {
53 Self {
54 phase,
55 touches,
56 timestamp_ms,
57 target_element: None,
58 }
59 }
60
61 /// Get the primary (first) touch point.
62 #[must_use]
63 pub fn primary_touch(&self) -> Option<&TouchPoint> {
64 self.touches.first()
65 }
66
67 /// Check if this is a multi-touch event.
68 #[must_use]
69 pub fn is_multi_touch(&self) -> bool {
70 self.touches.len() > 1
71 }
72}
73
/// A voice input event from speech recognition.
///
/// Represents transcribed speech from Web Speech API or similar
/// speech recognition systems. No `Eq` because `confidence` is `f32`.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct VoiceEvent {
    /// The recognized speech transcript.
    pub transcript: String,
    /// Confidence score (0.0 to 1.0).
    pub confidence: f32,
    /// Whether this is a final (committed) result.
    ///
    /// Interim results may change as speech recognition continues.
    /// Final results are stable and ready for processing.
    pub is_final: bool,
    /// Timestamp when the speech was recognized (ms since epoch).
    ///
    /// NOTE(review): this is epoch-relative, unlike
    /// `TouchEvent::timestamp_ms`, which is canvas-relative — do not
    /// compare the two directly.
    pub timestamp_ms: u64,
}
92
93impl VoiceEvent {
94 /// Create a new voice event.
95 #[must_use]
96 pub fn new(transcript: String, confidence: f32, is_final: bool, timestamp_ms: u64) -> Self {
97 Self {
98 transcript,
99 confidence,
100 is_final,
101 timestamp_ms,
102 }
103 }
104
105 /// Create an interim (non-final) voice event.
106 #[must_use]
107 pub fn interim(transcript: String, confidence: f32, timestamp_ms: u64) -> Self {
108 Self::new(transcript, confidence, false, timestamp_ms)
109 }
110
111 /// Create a final voice event.
112 #[must_use]
113 pub fn final_result(transcript: String, confidence: f32, timestamp_ms: u64) -> Self {
114 Self::new(transcript, confidence, true, timestamp_ms)
115 }
116}
117
118/// Recognized gestures from touch input.
119#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
120#[serde(tag = "gesture", content = "data")]
121#[allow(missing_docs)] // Enum variant fields documented at variant level
122pub enum Gesture {
123 /// Single tap at a point (x, y coordinates).
124 Tap {
125 /// X coordinate.
126 x: f32,
127 /// Y coordinate.
128 y: f32,
129 },
130
131 /// Double tap at a point (x, y coordinates).
132 DoubleTap {
133 /// X coordinate.
134 x: f32,
135 /// Y coordinate.
136 y: f32,
137 },
138
139 /// Long press at a point with duration.
140 LongPress {
141 /// X coordinate.
142 x: f32,
143 /// Y coordinate.
144 y: f32,
145 /// Duration in milliseconds.
146 duration_ms: u64,
147 },
148
149 /// Drag from one point to another.
150 Drag {
151 /// Starting X coordinate.
152 start_x: f32,
153 /// Starting Y coordinate.
154 start_y: f32,
155 /// Current X coordinate.
156 current_x: f32,
157 /// Current Y coordinate.
158 current_y: f32,
159 /// Delta X from last position.
160 delta_x: f32,
161 /// Delta Y from last position.
162 delta_y: f32,
163 },
164
165 /// Pinch to zoom gesture.
166 Pinch {
167 /// Center X coordinate.
168 center_x: f32,
169 /// Center Y coordinate.
170 center_y: f32,
171 /// Scale factor (1.0 = no change).
172 scale: f32,
173 },
174
175 /// Two-finger rotate gesture.
176 Rotate {
177 /// Center X coordinate.
178 center_x: f32,
179 /// Center Y coordinate.
180 center_y: f32,
181 /// Rotation angle in radians.
182 angle_radians: f32,
183 },
184}
185
/// All input events the canvas can receive.
///
/// Serialized as an adjacently-tagged enum: `{"type": "...",
/// "data": {...}}` via the `tag`/`content` attributes below.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "type", content = "data")]
pub enum InputEvent {
    /// Raw touch event.
    Touch(TouchEvent),

    /// Recognized gesture.
    Gesture(Gesture),

    /// Pointer (mouse) event.
    Pointer {
        /// X coordinate.
        x: f32,
        /// Y coordinate.
        y: f32,
        /// Mouse button (0 = left, 1 = middle, 2 = right).
        button: u8,
        /// Whether the button is pressed (`true` = down, `false` = up).
        pressed: bool,
    },

    /// Keyboard event.
    Key {
        /// Key name or code.
        ///
        /// NOTE(review): free-form string — presumably a DOM
        /// `KeyboardEvent.key` value; confirm against the producer.
        key: String,
        /// Whether the key is pressed (`true` = down, `false` = up).
        pressed: bool,
        /// Active modifier keys.
        modifiers: KeyModifiers,
    },

    /// Voice input from speech recognition.
    Voice(VoiceEvent),
}
221
/// Keyboard modifiers.
///
/// `Default` yields all-false (no modifiers held).
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)]
// The four bools are genuinely independent flags, not an encoded state
// machine, so the excessive-bools lint does not apply here.
#[allow(clippy::struct_excessive_bools)]
pub struct KeyModifiers {
    /// Shift key pressed.
    pub shift: bool,
    /// Control key pressed.
    pub ctrl: bool,
    /// Alt/Option key pressed.
    pub alt: bool,
    /// Meta/Command key pressed.
    pub meta: bool,
}