mockforge_chaos/latency_metrics.rs

//! Latency metrics tracking for real-time visualization

use parking_lot::RwLock;
use serde::{Deserialize, Serialize};
use std::collections::VecDeque;
use std::sync::Arc;
use std::time::SystemTime;

/// Single latency sample
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LatencySample {
    /// Timestamp in milliseconds since epoch
    pub timestamp: u64,
    /// Latency in milliseconds
    pub latency_ms: u64,
}

/// Latency metrics tracker
///
/// Tracks recent latency samples for real-time visualization.
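///
/// Example (illustrative sketch; assumes this module is exposed as
/// `mockforge_chaos::latency_metrics`, so the doctest is marked `ignore`):
///
/// ```rust,ignore
/// use mockforge_chaos::latency_metrics::LatencyMetricsTracker;
///
/// let tracker = LatencyMetricsTracker::new();
/// tracker.record_latency(42);
/// tracker.record_latency(120);
///
/// // Samples are kept in insertion order and pruned by age/count.
/// assert_eq!(tracker.get_samples().len(), 2);
///
/// // Aggregate statistics over the retained window.
/// let stats = tracker.get_stats();
/// assert_eq!(stats.min_ms, 42);
/// assert_eq!(stats.max_ms, 120);
/// ```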
#[derive(Debug, Clone)]
pub struct LatencyMetricsTracker {
    /// Recent latency samples (max 1000 samples or 5 minutes)
    samples: Arc<RwLock<VecDeque<LatencySample>>>,
    /// Maximum number of samples to keep
    max_samples: usize,
    /// Maximum age of samples in seconds (5 minutes)
    max_age_seconds: u64,
}

impl LatencyMetricsTracker {
    /// Create a new latency metrics tracker
    pub fn new() -> Self {
        Self {
            samples: Arc::new(RwLock::new(VecDeque::new())),
            max_samples: 1000,
            max_age_seconds: 300, // 5 minutes
        }
    }

    /// Record a latency sample
    pub fn record_latency(&self, latency_ms: u64) {
        let now = SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_millis() as u64;

        let sample = LatencySample {
            timestamp: now,
            latency_ms,
        };

        let mut samples = self.samples.write();
        samples.push_back(sample);

        // Clean up old samples
        self.cleanup_old_samples(&mut samples);
    }

    /// Get all latency samples within the time window
    pub fn get_samples(&self) -> Vec<LatencySample> {
        let mut samples = self.samples.write();
        self.cleanup_old_samples(&mut samples);
        samples.iter().cloned().collect()
    }

    /// Get samples within a time range
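    ///
    /// `start_ms` and `end_ms` are inclusive bounds in milliseconds since the
    /// Unix epoch, matching the `timestamp` field of [`LatencySample`].
    ///
    /// Illustrative sketch (doctest marked `ignore`; assumes the module path
    /// `mockforge_chaos::latency_metrics`):
    ///
    /// ```rust,ignore
    /// use std::time::{SystemTime, UNIX_EPOCH};
    /// use mockforge_chaos::latency_metrics::LatencyMetricsTracker;
    ///
    /// let tracker = LatencyMetricsTracker::new();
    /// tracker.record_latency(75);
    ///
    /// let now_ms = SystemTime::now()
    ///     .duration_since(UNIX_EPOCH)
    ///     .unwrap_or_default()
    ///     .as_millis() as u64;
    ///
    /// // Query the last minute of samples.
    /// let recent = tracker.get_samples_in_range(now_ms.saturating_sub(60_000), now_ms);
    /// assert_eq!(recent.len(), 1);
    /// ```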
    pub fn get_samples_in_range(&self, start_ms: u64, end_ms: u64) -> Vec<LatencySample> {
        let samples = self.samples.read();
        samples
            .iter()
            .filter(|s| s.timestamp >= start_ms && s.timestamp <= end_ms)
            .cloned()
            .collect()
    }

    /// Clean up old samples: drop samples older than `max_age_seconds`,
    /// then trim to `max_samples`, discarding the oldest first
    fn cleanup_old_samples(&self, samples: &mut VecDeque<LatencySample>) {
        let now = SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_millis() as u64;
        let cutoff = now.saturating_sub(self.max_age_seconds * 1000);

        // Remove samples older than cutoff
        while samples.front().map(|s| s.timestamp < cutoff).unwrap_or(false) {
            samples.pop_front();
        }

        // Limit to max_samples
        while samples.len() > self.max_samples {
            samples.pop_front();
        }
    }

    /// Clear all samples
    pub fn clear(&self) {
        let mut samples = self.samples.write();
        samples.clear();
    }

    /// Get statistics about current samples
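    ///
    /// Returns all-zero stats when no samples are retained. Percentiles are
    /// computed by sorting the retained samples and indexing at
    /// `count * p / 100` (a simple nearest-rank style estimate).
    ///
    /// Illustrative sketch (doctest marked `ignore`; assumes the module path
    /// `mockforge_chaos::latency_metrics`):
    ///
    /// ```rust,ignore
    /// use mockforge_chaos::latency_metrics::LatencyMetricsTracker;
    ///
    /// let tracker = LatencyMetricsTracker::new();
    /// for latency in [100, 200, 300] {
    ///     tracker.record_latency(latency);
    /// }
    ///
    /// let stats = tracker.get_stats();
    /// assert_eq!(stats.count, 3);
    /// assert_eq!(stats.avg_ms, 200.0);
    /// assert_eq!(stats.p50_ms, 200); // median of [100, 200, 300]
    /// ```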
    pub fn get_stats(&self) -> LatencyStats {
        let samples = self.get_samples();
        if samples.is_empty() {
            return LatencyStats {
                count: 0,
                min_ms: 0,
                max_ms: 0,
                avg_ms: 0.0,
                p50_ms: 0,
                p95_ms: 0,
                p99_ms: 0,
            };
        }

        let mut latencies: Vec<u64> = samples.iter().map(|s| s.latency_ms).collect();
        latencies.sort();

        let count = latencies.len();
        let min_ms = latencies[0];
        let max_ms = latencies[count - 1];
        let sum: u64 = latencies.iter().sum();
        let avg_ms = sum as f64 / count as f64;

        let p50_ms = latencies[count / 2];
        let p95_ms = latencies[(count * 95) / 100];
        let p99_ms = latencies[(count * 99) / 100];

        LatencyStats {
            count,
            min_ms,
            max_ms,
            avg_ms,
            p50_ms,
            p95_ms,
            p99_ms,
        }
    }
}

impl Default for LatencyMetricsTracker {
    fn default() -> Self {
        Self::new()
    }
}

/// Latency statistics
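///
/// A minimal serialization sketch (doctest marked `ignore`; assumes the module
/// path `mockforge_chaos::latency_metrics` and that `serde_json` is available,
/// as it is in this crate's tests):
///
/// ```rust,ignore
/// use mockforge_chaos::latency_metrics::LatencyStats;
///
/// let stats = LatencyStats {
///     count: 100,
///     min_ms: 10,
///     max_ms: 500,
///     avg_ms: 150.5,
///     p50_ms: 140,
///     p95_ms: 450,
///     p99_ms: 490,
/// };
///
/// // Serializes to a flat JSON object with the field names below.
/// let json = serde_json::to_value(&stats).unwrap();
/// assert_eq!(json["p95_ms"], 450);
/// ```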
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LatencyStats {
    /// Number of samples
    pub count: usize,
    /// Minimum latency in ms
    pub min_ms: u64,
    /// Maximum latency in ms
    pub max_ms: u64,
    /// Average latency in ms
    pub avg_ms: f64,
    /// 50th percentile (median) latency in ms
    pub p50_ms: u64,
    /// 95th percentile latency in ms
    pub p95_ms: u64,
    /// 99th percentile latency in ms
    pub p99_ms: u64,
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::thread;
    use std::time::Duration;

    #[test]
    fn test_latency_metrics_tracker_new() {
        let tracker = LatencyMetricsTracker::new();
        let samples = tracker.get_samples();
        assert_eq!(samples.len(), 0);
    }

    #[test]
    fn test_latency_metrics_tracker_default() {
        let tracker = LatencyMetricsTracker::default();
        let samples = tracker.get_samples();
        assert_eq!(samples.len(), 0);
    }

    #[test]
    fn test_record_single_latency() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(100);

        let samples = tracker.get_samples();
        assert_eq!(samples.len(), 1);
        assert_eq!(samples[0].latency_ms, 100);
    }

    #[test]
    fn test_record_multiple_latencies() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(100);
        tracker.record_latency(200);
        tracker.record_latency(150);

        let samples = tracker.get_samples();
        assert_eq!(samples.len(), 3);
        assert_eq!(samples[0].latency_ms, 100);
        assert_eq!(samples[1].latency_ms, 200);
        assert_eq!(samples[2].latency_ms, 150);
    }

    #[test]
    fn test_clear_samples() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(100);
        tracker.record_latency(200);

        assert_eq!(tracker.get_samples().len(), 2);

        tracker.clear();
        assert_eq!(tracker.get_samples().len(), 0);
    }

    #[test]
    fn test_get_stats_empty() {
        let tracker = LatencyMetricsTracker::new();
        let stats = tracker.get_stats();

        assert_eq!(stats.count, 0);
        assert_eq!(stats.min_ms, 0);
        assert_eq!(stats.max_ms, 0);
        assert_eq!(stats.avg_ms, 0.0);
        assert_eq!(stats.p50_ms, 0);
        assert_eq!(stats.p95_ms, 0);
        assert_eq!(stats.p99_ms, 0);
    }

    #[test]
    fn test_get_stats_single_sample() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(100);

        let stats = tracker.get_stats();
        assert_eq!(stats.count, 1);
        assert_eq!(stats.min_ms, 100);
        assert_eq!(stats.max_ms, 100);
        assert_eq!(stats.avg_ms, 100.0);
        assert_eq!(stats.p50_ms, 100);
        assert_eq!(stats.p95_ms, 100);
        assert_eq!(stats.p99_ms, 100);
    }

    #[test]
    fn test_get_stats_multiple_samples() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(100);
        tracker.record_latency(200);
        tracker.record_latency(150);
        tracker.record_latency(300);
        tracker.record_latency(50);

        let stats = tracker.get_stats();
        assert_eq!(stats.count, 5);
        assert_eq!(stats.min_ms, 50);
        assert_eq!(stats.max_ms, 300);
        assert_eq!(stats.avg_ms, 160.0);
    }

    #[test]
    fn test_get_stats_percentiles() {
        let tracker = LatencyMetricsTracker::new();
        // Add 100 samples from 1 to 100
        for i in 1..=100 {
            tracker.record_latency(i);
        }

        let stats = tracker.get_stats();
        assert_eq!(stats.count, 100);
        assert_eq!(stats.min_ms, 1);
        assert_eq!(stats.max_ms, 100);
        // For 100 samples [1..=100], p50 = arr[50] = 51 (0-indexed)
        // p95 = arr[95] = 96, p99 = arr[99] = 100
        assert_eq!(stats.p50_ms, 51); // Median (index 50)
        assert_eq!(stats.p95_ms, 96); // 95th percentile (index 95)
        assert_eq!(stats.p99_ms, 100); // 99th percentile (index 99)
    }

    #[test]
    fn test_latency_sample_serialize() {
        let sample = LatencySample {
            timestamp: 1234567890,
            latency_ms: 100,
        };

        let json = serde_json::to_value(&sample).unwrap();
        assert_eq!(json["timestamp"], 1234567890u64);
        assert_eq!(json["latency_ms"], 100);
    }

    #[test]
    fn test_latency_sample_deserialize() {
        let json = serde_json::json!({
            "timestamp": 1234567890u64,
            "latency_ms": 200
        });

        let sample: LatencySample = serde_json::from_value(json).unwrap();
        assert_eq!(sample.timestamp, 1234567890);
        assert_eq!(sample.latency_ms, 200);
    }

    #[test]
    fn test_latency_stats_serialize() {
        let stats = LatencyStats {
            count: 100,
            min_ms: 10,
            max_ms: 500,
            avg_ms: 150.5,
            p50_ms: 140,
            p95_ms: 450,
            p99_ms: 490,
        };

        let json = serde_json::to_value(&stats).unwrap();
        assert_eq!(json["count"], 100);
        assert_eq!(json["min_ms"], 10);
        assert_eq!(json["max_ms"], 500);
        assert_eq!(json["avg_ms"], 150.5);
        assert_eq!(json["p50_ms"], 140);
        assert_eq!(json["p95_ms"], 450);
        assert_eq!(json["p99_ms"], 490);
    }

    #[test]
    fn test_latency_stats_deserialize() {
        let json = serde_json::json!({
            "count": 50,
            "min_ms": 20,
            "max_ms": 300,
            "avg_ms": 120.3,
            "p50_ms": 110,
            "p95_ms": 280,
            "p99_ms": 295
        });

        let stats: LatencyStats = serde_json::from_value(json).unwrap();
        assert_eq!(stats.count, 50);
        assert_eq!(stats.min_ms, 20);
        assert_eq!(stats.max_ms, 300);
        assert_eq!(stats.avg_ms, 120.3);
        assert_eq!(stats.p50_ms, 110);
        assert_eq!(stats.p95_ms, 280);
        assert_eq!(stats.p99_ms, 295);
    }

    #[test]
    fn test_get_samples_in_range() {
        let tracker = LatencyMetricsTracker::new();

        let now =
            SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap().as_millis() as u64;

        // Record samples with different timestamps
        tracker.record_latency(100);
        thread::sleep(Duration::from_millis(10));
        tracker.record_latency(200);
        thread::sleep(Duration::from_millis(10));
        tracker.record_latency(300);

        let all_samples = tracker.get_samples();
        assert_eq!(all_samples.len(), 3);

        // Get samples in a range that should include all
        let start = now - 1000;
        let end = now + 1000;
        let range_samples = tracker.get_samples_in_range(start, end);
        assert_eq!(range_samples.len(), 3);
    }

    #[test]
    fn test_get_samples_in_range_empty() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(100);

        // Query a range in the past that shouldn't include any samples
        let samples = tracker.get_samples_in_range(0, 1000);
        assert_eq!(samples.len(), 0);
    }

    #[test]
    fn test_tracker_clone() {
        let tracker1 = LatencyMetricsTracker::new();
        tracker1.record_latency(100);

        let tracker2 = tracker1.clone();
        let samples = tracker2.get_samples();
        assert_eq!(samples.len(), 1);
        assert_eq!(samples[0].latency_ms, 100);

        // Both trackers should share the same underlying data
        tracker2.record_latency(200);
        let samples1 = tracker1.get_samples();
        assert_eq!(samples1.len(), 2);
    }

    #[test]
    fn test_concurrent_access() {
        use std::sync::Arc;

        let tracker = Arc::new(LatencyMetricsTracker::new());
        let mut handles = vec![];

        // Spawn multiple threads that record latencies
        for i in 0..5 {
            let tracker_clone = tracker.clone();
            let handle = thread::spawn(move || {
                for j in 0..10 {
                    tracker_clone.record_latency((i * 10 + j) as u64);
                }
            });
            handles.push(handle);
        }

        // Wait for all threads
        for handle in handles {
            handle.join().unwrap();
        }

        // Should have 50 samples total
        let samples = tracker.get_samples();
        assert_eq!(samples.len(), 50);
    }

    #[test]
    fn test_edge_case_zero_latency() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(0);

        let stats = tracker.get_stats();
        assert_eq!(stats.min_ms, 0);
        assert_eq!(stats.max_ms, 0);
        assert_eq!(stats.avg_ms, 0.0);
    }

    #[test]
    fn test_edge_case_large_latency() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(u64::MAX);

        let stats = tracker.get_stats();
        assert_eq!(stats.min_ms, u64::MAX);
        assert_eq!(stats.max_ms, u64::MAX);
    }

    #[test]
    fn test_avg_calculation_precision() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(100);
        tracker.record_latency(200);
        tracker.record_latency(300);

        let stats = tracker.get_stats();
        assert_eq!(stats.avg_ms, 200.0);
    }

    #[test]
    fn test_percentile_calculation_small_dataset() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(100);
        tracker.record_latency(200);

        let stats = tracker.get_stats();
        assert_eq!(stats.count, 2);
        assert!(stats.p50_ms >= 100 && stats.p50_ms <= 200);
    }

    #[test]
    fn test_samples_ordering() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(300);
        tracker.record_latency(100);
        tracker.record_latency(200);

        let samples = tracker.get_samples();
        // Samples should be returned in the order they were recorded
        assert_eq!(samples[0].latency_ms, 300);
        assert_eq!(samples[1].latency_ms, 100);
        assert_eq!(samples[2].latency_ms, 200);
    }

    #[test]
    fn test_stats_sorted_internally() {
        let tracker = LatencyMetricsTracker::new();
        tracker.record_latency(300);
        tracker.record_latency(100);
        tracker.record_latency(200);

        let stats = tracker.get_stats();
        // Stats should use sorted values
        assert_eq!(stats.min_ms, 100);
        assert_eq!(stats.max_ms, 300);
        assert_eq!(stats.p50_ms, 200); // Median of [100, 200, 300]
    }

    #[test]
    fn test_serialize_deserialize_roundtrip_sample() {
        let original = LatencySample {
            timestamp: 1234567890,
            latency_ms: 150,
        };

        let json = serde_json::to_value(&original).unwrap();
        let deserialized: LatencySample = serde_json::from_value(json).unwrap();

        assert_eq!(original.timestamp, deserialized.timestamp);
        assert_eq!(original.latency_ms, deserialized.latency_ms);
    }

    #[test]
    fn test_serialize_deserialize_roundtrip_stats() {
        let original = LatencyStats {
            count: 100,
            min_ms: 10,
            max_ms: 500,
            avg_ms: 150.5,
            p50_ms: 140,
            p95_ms: 450,
            p99_ms: 490,
        };

        let json = serde_json::to_value(&original).unwrap();
        let deserialized: LatencyStats = serde_json::from_value(json).unwrap();

        assert_eq!(original.count, deserialized.count);
        assert_eq!(original.min_ms, deserialized.min_ms);
        assert_eq!(original.max_ms, deserialized.max_ms);
        assert_eq!(original.avg_ms, deserialized.avg_ms);
    }
}