// netspeed_cli/test_runner.rs
//! Test runner orchestration for download and upload bandwidth tests.
//!
//! This module provides a reusable template for running bandwidth tests,
//! eliminating the duplication between download and upload test orchestration
//! in `main.rs`. Both tests follow the same pattern:
//! 1. Set up progress tracking
//! 2. Spawn latency-under-load monitoring in background
//! 3. Run the actual bandwidth test
//! 4. Stop latency monitoring
//! 5. Aggregate results

use crate::config::Config;
use crate::error::SpeedtestError;
use crate::http;
use crate::progress::SpeedProgress;
use crate::servers::measure_latency_under_load;
use crate::types::Server;
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::atomic::AtomicBool;

22/// Result from a bandwidth test (download or upload).
23#[derive(PartialEq, Debug)]
24pub struct TestRunResult {
25    /// Average speed in bits per second
26    pub avg_bps: f64,
27    /// Peak speed in bits per second
28    pub peak_bps: f64,
29    /// Total bytes transferred
30    pub total_bytes: u64,
31    /// Duration of the test in seconds
32    pub duration_secs: f64,
33    /// Speed samples over time
34    pub speed_samples: Vec<f64>,
35    /// Average latency under load (ms), if measured
36    pub latency_under_load: Option<f64>,
37}
39impl Default for TestRunResult {
40    fn default() -> Self {
41        Self {
42            avg_bps: 0.0,
43            peak_bps: 0.0,
44            total_bytes: 0,
45            duration_secs: 0.0,
46            speed_samples: Vec::new(),
47            latency_under_load: None,
48        }
49    }
50}
52/// Run a bandwidth test with latency-under-load monitoring.
53///
54/// This is a template method that handles:
55/// - Progress bar setup
56/// - Background latency monitoring
57/// - Test execution via the provided closure
58/// - Result aggregation
59///
60/// # Arguments
61///
62/// * `config` - Configuration for HTTP client creation
63/// * `server` - Server to test against
64/// * `test_label` - Label for progress display (e.g., "Download", "Upload")
65/// * `is_verbose` - Whether to show visible progress
66/// * `test_fn` - Async closure that runs the actual bandwidth test
67///
68/// # Errors
69///
70/// Returns [`SpeedtestError`] if the test fails.
71pub async fn run_bandwidth_test<F, Fut>(
72    config: &Config,
73    server: &Server,
74    test_label: &str,
75    is_verbose: bool,
76    test_fn: F,
77) -> Result<TestRunResult, SpeedtestError>
78where
79    F: FnOnce(Arc<SpeedProgress>) -> Fut,
80    Fut: std::future::Future<Output = Result<(f64, f64, u64, Vec<f64>), SpeedtestError>>,
81{
82    let progress = Arc::new(if is_verbose {
83        SpeedProgress::new(test_label)
84    } else {
85        SpeedProgress::with_target(test_label, indicatif::ProgressDrawTarget::hidden())
86    });
87
88    // Set up latency-under-load monitoring
89    let latency_samples = Arc::new(Mutex::new(Vec::new()));
90    let stop_signal = Arc::new(AtomicBool::new(false));
91
92    let ping_client = http::create_client(config)?;
93    let ping_url = server.url.clone();
94    let samples_clone = Arc::clone(&latency_samples);
95    let stop_clone = Arc::clone(&stop_signal);
96    let ping_handle = tokio::spawn(async move {
97        measure_latency_under_load(ping_client, ping_url, samples_clone, stop_clone).await;
98    });
99
100    // Run the actual test
101    let test_start = std::time::Instant::now();
102    let (avg, peak, total_bytes, speed_samples) = test_fn(progress).await?;
103    let duration = test_start.elapsed().as_secs_f64();
104
105    // Stop latency monitoring
106    stop_signal.store(true, std::sync::atomic::Ordering::Relaxed);
107    let _ = ping_handle.await;
108
109    // Calculate average latency under load
110    let latency_under_load = {
111        let lock = latency_samples.lock().unwrap();
112        if lock.is_empty() {
113            None
114        } else {
115            Some(lock.iter().sum::<f64>() / lock.len() as f64)
116        }
117    };
118
119    Ok(TestRunResult {
120        avg_bps: avg,
121        peak_bps: peak,
122        total_bytes,
123        duration_secs: duration,
124        speed_samples,
125        latency_under_load,
126    })
127}
129#[cfg(test)]
130mod tests {
131    use super::*;
132
133    #[test]
134    fn test_test_run_result_structure() {
135        let result = TestRunResult {
136            avg_bps: 100_000_000.0,
137            peak_bps: 120_000_000.0,
138            total_bytes: 10_000_000,
139            duration_secs: 1.0,
140            speed_samples: vec![100_000_000.0],
141            latency_under_load: Some(15.0),
142        };
143        assert_eq!(result.avg_bps, 100_000_000.0);
144        assert_eq!(result.peak_bps, 120_000_000.0);
145    }
146
147    #[test]
148    fn test_test_run_result_default_values() {
149        let result = TestRunResult::default();
150        assert_eq!(result.avg_bps, 0.0);
151        assert_eq!(result.peak_bps, 0.0);
152        assert_eq!(result.total_bytes, 0);
153        assert_eq!(result.duration_secs, 0.0);
154        assert!(result.speed_samples.is_empty());
155        assert!(result.latency_under_load.is_none());
156    }
157
158    #[test]
159    fn test_test_run_result_default_explicit() {
160        let result = TestRunResult {
161            avg_bps: 0.0,
162            peak_bps: 0.0,
163            total_bytes: 0,
164            duration_secs: 0.0,
165            speed_samples: Vec::new(),
166            latency_under_load: None,
167        };
168        assert_eq!(result, TestRunResult::default());
169    }
170
171    #[test]
172    fn test_test_run_result_with_samples() {
173        let samples = vec![50_000_000.0, 75_000_000.0, 100_000_000.0];
174        let result = TestRunResult {
175            avg_bps: 75_000_000.0,
176            peak_bps: 100_000_000.0,
177            total_bytes: 5_000_000,
178            duration_secs: 0.5,
179            speed_samples: samples.clone(),
180            latency_under_load: Some(12.0),
181        };
182        assert_eq!(result.speed_samples, samples);
183        assert_eq!(result.speed_samples.len(), 3);
184    }
185
186    #[test]
187    fn test_test_run_result_peak_greater_than_average() {
188        let result = TestRunResult {
189            avg_bps: 100_000_000.0,
190            peak_bps: 150_000_000.0,
191            total_bytes: 8_000_000,
192            duration_secs: 0.8,
193            speed_samples: vec![100_000_000.0],
194            latency_under_load: None,
195        };
196        assert!(result.peak_bps > result.avg_bps);
197    }
198}