//! Token stream animation: size-classed particles drifting across a
//! scrolling sine-wave backdrop, with a throughput stats line.

use super::{colors, Animation};
use ratatui::{
    buffer::Buffer,
    layout::Rect,
    style::{Color, Modifier, Style},
    widgets::Widget,
};
use std::collections::VecDeque;
16
/// Visual size class for a token particle; buckets are derived from raw
/// token counts via [`TokenSize::from_count`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TokenSize {
    /// Fewer than 10 000 tokens.
    Small,
    /// 10 000 up to (but not including) 100 000 tokens.
    Medium,
    /// 100 000 up to (but not including) 500 000 tokens.
    Large,
    /// 500 000 tokens or more.
    Massive,
}
29
30impl TokenSize {
31 pub fn symbol(&self) -> &'static str {
33 match self {
34 TokenSize::Small => "●",
35 TokenSize::Medium => "◆",
36 TokenSize::Large => "▲",
37 TokenSize::Massive => "★",
38 }
39 }
40
41 pub fn color(&self) -> Color {
43 match self {
44 TokenSize::Small => colors::SECONDARY, TokenSize::Medium => colors::ACCENT, TokenSize::Large => colors::WARNING, TokenSize::Massive => colors::PRIMARY, }
49 }
50
51 pub fn from_count(count: u64) -> Self {
53 if count >= 500_000 {
54 TokenSize::Massive
55 } else if count >= 100_000 {
56 TokenSize::Large
57 } else if count >= 10_000 {
58 TokenSize::Medium
59 } else {
60 TokenSize::Small
61 }
62 }
63}
64
/// One drifting glyph in the stream.
#[derive(Debug, Clone)]
struct TokenParticle {
    /// Horizontal progress across the widget: 0.0 at the left edge; culled
    /// once it exceeds 1.5 (i.e. well past the right edge).
    position: f32,
    /// Horizontal speed in widget-widths per second.
    speed: f32,
    /// Size class controlling the rendered glyph and color.
    size: TokenSize,
    /// Per-particle phase offset into the shared sine wave, used for
    /// vertical bobbing during rendering.
    wave_offset: f32,
}
77
/// Animated stream of token particles over a scrolling wave background.
pub struct TokenStream {
    /// Live particles, oldest at the front (evicted first when full).
    particles: VecDeque<TokenParticle>,
    /// Hard cap on the number of concurrent particles.
    max_particles: usize,
    /// Current phase of the shared sine wave, in radians (wrapped to 2π).
    wave_phase: f32,
    /// Running token total, shown in the stats line and used to pick
    /// spawn sizes.
    total_tokens: u64,
    /// Reported throughput; drives auto-spawn and the stats line.
    tokens_per_second: f64,
    /// When true, `update` spawns particles based on `tokens_per_second`.
    auto_spawn: bool,
    /// Time accumulated toward the next auto-spawn, in seconds.
    spawn_timer: f32,
}
95
96impl TokenStream {
97 pub fn new(max_particles: usize) -> Self {
98 Self {
99 particles: VecDeque::new(),
100 max_particles,
101 wave_phase: 0.0,
102 total_tokens: 0,
103 tokens_per_second: 0.0,
104 auto_spawn: true,
105 spawn_timer: 0.0,
106 }
107 }
108
109 pub fn with_auto_spawn(mut self, auto: bool) -> Self {
110 self.auto_spawn = auto;
111 self
112 }
113
114 pub fn add_token(&mut self, size: TokenSize) {
116 if self.particles.len() >= self.max_particles {
117 self.particles.pop_front();
118 }
119
120 let base_speed = 0.3;
122 let speed_variation = (self.particles.len() as f32 * 0.1) % 0.2;
123 let speed = base_speed + speed_variation;
124
125 let wave_offset = (self.particles.len() as f32 * 0.5) % std::f32::consts::PI;
127
128 self.particles.push_back(TokenParticle {
129 position: 0.0,
130 speed,
131 size,
132 wave_offset,
133 });
134 }
135
136 pub fn set_rate(&mut self, rate: f64) {
138 self.tokens_per_second = rate;
139 }
140
141 pub fn set_total(&mut self, total: u64) {
143 self.total_tokens = total;
144 }
145
146 pub fn total_tokens(&self) -> u64 {
148 self.total_tokens
149 }
150
151 pub fn rate(&self) -> f64 {
153 self.tokens_per_second
154 }
155
156 pub fn particle_count(&self) -> usize {
158 self.particles.len()
159 }
160}
161
162impl Animation for TokenStream {
163 fn update(&mut self, delta_time: f32) {
164 self.wave_phase += delta_time * 3.0;
166 if self.wave_phase > std::f32::consts::PI * 2.0 {
167 self.wave_phase -= std::f32::consts::PI * 2.0;
168 }
169
170 for particle in &mut self.particles {
172 particle.position += particle.speed * delta_time;
173 }
174
175 self.particles.retain(|p| p.position < 1.5);
177
178 if self.auto_spawn && self.tokens_per_second > 0.0 {
180 self.spawn_timer += delta_time;
181 let spawn_interval = 1.0 / (self.tokens_per_second as f32 / 1000.0).max(0.1);
182
183 if self.spawn_timer >= spawn_interval {
184 self.spawn_timer = 0.0;
185 let size = TokenSize::from_count(self.total_tokens / 10);
187 self.add_token(size);
188 }
189 }
190 }
191
192 fn is_complete(&self) -> bool {
193 false }
195}
196
impl Widget for &TokenStream {
    /// Render the wave backdrop, each particle with a faint dot trail, and a
    /// stats line on the bottom row of `area`.
    fn render(self, area: Rect, buf: &mut Buffer) {
        // Too small to draw anything meaningful.
        if area.width < 5 || area.height < 3 {
            return;
        }

        // Dim scrolling wave texture filling the whole area.
        let wave_symbols = ["≋", "≈", "∿", "~"];

        for x in area.x..area.x + area.width {
            // Phase-shift the symbol choice by column and animation phase so
            // the texture appears to scroll as wave_phase advances.
            let wave_idx = ((x as f32 + self.wave_phase * 5.0) as usize) % wave_symbols.len();
            let wave_color = Color::Rgb(0x25, 0x25, 0x3D);

            for y in area.y..area.y + area.height {
                let symbol_idx = (wave_idx + (y - area.y) as usize) % wave_symbols.len();
                buf[(x, y)]
                    .set_symbol(wave_symbols[symbol_idx])
                    .set_style(Style::default().fg(wave_color));
            }
        }

        let center_y = area.y + area.height / 2;

        for particle in &self.particles {
            // position is normalized (0.0..~1.5); map it to a column. Values
            // past the right edge are skipped by the bounds check below.
            let x = area.x + (particle.position * area.width as f32) as u16;

            if x >= area.x && x < area.x + area.width {
                // Vertical bobbing: sine of the shared phase plus this
                // particle's offset, scaled to a quarter of the area height
                // and clamped inside the drawable rows.
                let wave = (self.wave_phase + particle.wave_offset).sin();
                let y_offset = (wave * (area.height as f32 / 4.0)) as i16;
                let y = (center_y as i16 + y_offset)
                    .clamp(area.y as i16, (area.y + area.height - 1) as i16)
                    as u16;

                let symbol = particle.size.symbol();
                let color = particle.size.color();

                buf[(x, y)]
                    .set_symbol(symbol)
                    .set_style(Style::default().fg(color).add_modifier(Modifier::BOLD));

                // Faint one-cell trail on either side of the particle.
                if x > area.x {
                    buf[(x - 1, y)]
                        .set_symbol("·")
                        .set_style(Style::default().fg(color));
                }
                if x < area.x + area.width - 1 {
                    buf[(x + 1, y)]
                        .set_symbol("·")
                        .set_style(Style::default().fg(color));
                }
            }
        }

        // Stats line overwrites the bottom row of the wave backdrop.
        if area.height > 2 {
            let stats = format!(
                "💫 {} tok/s │ {} total",
                self.tokens_per_second as u64,
                if self.total_tokens >= 1_000_000 {
                    format!("{:.1}M", self.total_tokens as f64 / 1_000_000.0)
                } else if self.total_tokens >= 1_000 {
                    format!("{}K", self.total_tokens / 1_000)
                } else {
                    format!("{}", self.total_tokens)
                }
            );

            let stats_y = area.y + area.height - 1;
            // NOTE(review): chars().enumerate() assumes one cell per char;
            // wide glyphs like 💫 occupy two terminal cells — confirm the
            // stats line renders without column drift.
            for (i, ch) in stats.chars().enumerate() {
                let x = area.x + i as u16;
                if x < area.x + area.width {
                    buf[(x, stats_y)]
                        .set_symbol(&ch.to_string())
                        .set_style(Style::default().fg(Color::Gray));
                }
            }
        }
    }
}
282
#[cfg(test)]
mod tests {
    use super::*;

    /// Representative counts map to the expected size classes.
    #[test]
    fn test_token_size_from_count() {
        for (count, want) in [
            (500u64, TokenSize::Small),
            (50_000, TokenSize::Medium),
            (200_000, TokenSize::Large),
            (1_000_000, TokenSize::Massive),
        ] {
            assert_eq!(TokenSize::from_count(count), want);
        }
    }

    /// A fresh stream starts empty with a zero total.
    #[test]
    fn test_token_stream_new() {
        let fresh = TokenStream::new(100);
        assert_eq!(fresh.particle_count(), 0);
        assert_eq!(fresh.total_tokens(), 0);
    }

    /// The particle cap holds when the stream is overfilled.
    #[test]
    fn test_token_stream_add_token() {
        let mut stream = TokenStream::new(5);

        stream.add_token(TokenSize::Small);
        assert_eq!(stream.particle_count(), 1);

        (0..10).for_each(|_| stream.add_token(TokenSize::Medium));
        assert_eq!(stream.particle_count(), 5);
    }

    /// Particles are culled once they drift past the right edge.
    #[test]
    fn test_token_stream_update() {
        let mut stream = TokenStream::new(10).with_auto_spawn(false);
        stream.add_token(TokenSize::Small);

        // 11 half-second steps: plenty of simulated time to drift off-screen.
        for _ in 0..11 {
            stream.update(0.5);
        }
        assert_eq!(stream.particle_count(), 0);
    }

    /// Every size class has its distinct glyph.
    #[test]
    fn test_token_size_symbol() {
        for (size, glyph) in [
            (TokenSize::Small, "●"),
            (TokenSize::Medium, "◆"),
            (TokenSize::Large, "▲"),
            (TokenSize::Massive, "★"),
        ] {
            assert_eq!(size.symbol(), glyph);
        }
    }

    /// Smoke test: each variant yields a color without panicking.
    #[test]
    fn test_token_size_color() {
        for size in [
            TokenSize::Small,
            TokenSize::Medium,
            TokenSize::Large,
            TokenSize::Massive,
        ] {
            let _ = size.color();
        }
    }

    /// The rate setter round-trips through the getter.
    #[test]
    fn test_set_rate_and_rate() {
        let mut stream = TokenStream::new(10);
        stream.set_rate(500.0);
        assert!((stream.rate() - 500.0).abs() < 0.01);
    }

    /// The total setter round-trips through the getter.
    #[test]
    fn test_set_total_and_total_tokens() {
        let mut stream = TokenStream::new(10);
        stream.set_total(42000);
        assert_eq!(stream.total_tokens(), 42000);
    }

    /// The animation never reports completion.
    #[test]
    fn test_is_complete() {
        assert!(!TokenStream::new(10).is_complete());
    }

    /// A high configured rate produces particles via auto-spawn.
    #[test]
    fn test_auto_spawn_with_high_rate() {
        let mut stream = TokenStream::new(100).with_auto_spawn(true);
        stream.set_rate(10000.0);
        stream.set_total(50000);
        for _ in 0..20 {
            stream.update(0.1);
        }
        assert!(stream.particle_count() > 0);
    }

    /// Exact bucket boundaries for count-to-size mapping.
    #[test]
    fn test_token_size_boundary_values() {
        for (count, want) in [
            (0u64, TokenSize::Small),
            (9_999, TokenSize::Small),
            (10_000, TokenSize::Medium),
            (99_999, TokenSize::Medium),
            (100_000, TokenSize::Large),
            (499_999, TokenSize::Large),
            (500_000, TokenSize::Massive),
        ] {
            assert_eq!(TokenSize::from_count(count), want);
        }
    }
}