//! A minimal, synchronous load-testing harness: configure a run with
//! [`LoadTestConfig`], execute it with [`LoadTest::run`], and inspect the
//! resulting [`LoadTestReport`].

use std::fmt;
use std::time::{Duration, Instant};
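
/// Configuration for a [`LoadTest`] run, built with chained setters.
///
/// ```ignore
/// // Illustrative usage (crate path omitted): 500 measured requests
/// // issued in batches of 10, preceded by 5 unmeasured warmup calls.
/// let config = LoadTestConfig::new()
///     .total_requests(500)
///     .concurrency(10)
///     .warmup(5);
/// ```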
#[derive(Debug, Clone)]
pub struct LoadTestConfig {
    pub total_requests: usize,
    pub concurrency: usize,
    pub warmup: usize,
}

impl Default for LoadTestConfig {
    fn default() -> Self {
        Self {
            total_requests: 1000,
            concurrency: 1,
            warmup: 0,
        }
    }
}

impl LoadTestConfig {
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Sets the number of measured requests to issue.
    #[must_use]
    pub fn total_requests(mut self, n: usize) -> Self {
        self.total_requests = n;
        self
    }

    /// Sets the batch size used by the runner; clamped to a minimum of 1.
    #[must_use]
    pub fn concurrency(mut self, n: usize) -> Self {
        self.concurrency = n.max(1);
        self
    }

    /// Sets the number of unmeasured warmup calls made before timing begins.
    #[must_use]
    pub fn warmup(mut self, n: usize) -> Self {
        self.warmup = n;
        self
    }
}
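
/// Outcome of a single timed request.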
#[derive(Debug)]
struct RequestResult {
    latency: Duration,
    success: bool,
}
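
/// Aggregated results of a load test run.
///
/// `latencies` is kept sorted ascending; `percentile`, `min_latency`, and
/// `max_latency` all rely on that invariant.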
#[derive(Debug, Clone)]
pub struct LoadTestReport {
    pub total: usize,
    pub successes: usize,
    pub failures: usize,
    pub elapsed: Duration,
    latencies: Vec<Duration>,
}

impl LoadTestReport {
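    /// Fraction of requests that succeeded, in `0.0..=1.0`; `0.0` when the
    /// report is empty.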
    #[must_use]
    #[allow(clippy::cast_precision_loss)]
    pub fn success_rate(&self) -> f64 {
        if self.total == 0 {
            return 0.0;
        }
        self.successes as f64 / self.total as f64
    }

    /// Complement of [`Self::success_rate`].
    #[must_use]
    pub fn error_rate(&self) -> f64 {
        1.0 - self.success_rate()
    }

    /// Mean throughput in requests per second over the measured window;
    /// `0.0` if no time elapsed.
    #[must_use]
    #[allow(clippy::cast_precision_loss)]
    pub fn rps(&self) -> f64 {
        if self.elapsed.is_zero() {
            return 0.0;
        }
        self.total as f64 / self.elapsed.as_secs_f64()
    }
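
    /// Returns the latency at quantile `p` (e.g. `0.95` for p95), or `None`
    /// if no requests were recorded. The sorted latency list is indexed at
    /// `floor(p * len)`, clamped to the last element, so `p = 1.0` is safe.
    ///
    /// ```ignore
    /// // Illustrative (crate path omitted): with 100 recorded samples,
    /// // p = 0.95 selects index 95 of the sorted latencies.
    /// let p95 = report.percentile(0.95);
    /// ```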
    #[must_use]
    pub fn percentile(&self, p: f64) -> Option<Duration> {
        if self.latencies.is_empty() {
            return None;
        }
        // Index the sorted list at floor(p * len); the clamp keeps
        // p = 1.0 from indexing one past the end.
        #[allow(
            clippy::cast_precision_loss,
            clippy::cast_possible_truncation,
            clippy::cast_sign_loss
        )]
        let idx = ((p * self.latencies.len() as f64) as usize).min(self.latencies.len() - 1);
        Some(self.latencies[idx])
    }

    /// Fastest recorded latency; `None` if no requests were recorded.
    #[must_use]
    pub fn min_latency(&self) -> Option<Duration> {
        self.latencies.first().copied()
    }

    /// Slowest recorded latency; `None` if no requests were recorded.
    #[must_use]
    pub fn max_latency(&self) -> Option<Duration> {
        self.latencies.last().copied()
    }

    /// Arithmetic mean latency; `None` if no requests were recorded.
    #[must_use]
    #[allow(clippy::cast_possible_truncation)]
    pub fn mean_latency(&self) -> Option<Duration> {
        if self.latencies.is_empty() {
            return None;
        }
        let sum: Duration = self.latencies.iter().sum();
        Some(sum / self.latencies.len() as u32)
    }
}

impl fmt::Display for LoadTestReport {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        writeln!(f, "Load Test Report")?;
        writeln!(f, " Total: {}", self.total)?;
        writeln!(
            f,
            " Success: {} ({:.1}%)",
            self.successes,
            self.success_rate() * 100.0
        )?;
        writeln!(
            f,
            " Failures: {} ({:.1}%)",
            self.failures,
            self.error_rate() * 100.0
        )?;
        writeln!(f, " Elapsed: {:.2?}", self.elapsed)?;
        writeln!(f, " RPS: {:.1}", self.rps())?;
        if let Some(p50) = self.percentile(0.50) {
            writeln!(f, " p50: {p50:.2?}")?;
        }
        if let Some(p95) = self.percentile(0.95) {
            writeln!(f, " p95: {p95:.2?}")?;
        }
        if let Some(p99) = self.percentile(0.99) {
            writeln!(f, " p99: {p99:.2?}")?;
        }
        Ok(())
    }
}
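
/// Synchronous load-test runner.
///
/// Requests are issued sequentially even when `concurrency > 1`; the
/// setting only controls how requests are grouped into batches, so it
/// shapes bookkeeping rather than adding parallelism.
///
/// ```ignore
/// // Illustrative usage (crate path omitted). The closure stands in for
/// // real work such as an HTTP call.
/// let config = LoadTestConfig::new().total_requests(100).concurrency(10);
/// let report = LoadTest::run(&config, |_i| {
///     std::thread::sleep(std::time::Duration::from_micros(50));
///     Ok(())
/// });
/// println!("{report}");
/// ```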
pub struct LoadTest;

impl LoadTest {
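    /// Issues `config.warmup` unmeasured warmup calls, then
    /// `config.total_requests` measured calls, and aggregates the results.
    /// `handler` receives a zero-based request index; the warmup and
    /// measured phases each count from 0.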
    #[must_use]
    pub fn run<F>(config: &LoadTestConfig, mut handler: F) -> LoadTestReport
    where
        F: FnMut(usize) -> Result<(), Box<dyn std::error::Error>>,
    {
        // Warmup calls are neither timed nor counted in the report.
        for i in 0..config.warmup {
            let _ = handler(i);
        }
        let mut results = Vec::with_capacity(config.total_requests);
        let start = Instant::now();
        let mut remaining = config.total_requests;
        let mut req_index = 0;
        while remaining > 0 {
            // Take requests in batches of `concurrency`; calls within a
            // batch still execute sequentially on this thread.
            let batch_size = remaining.min(config.concurrency);
            for _ in 0..batch_size {
                let req_start = Instant::now();
                let success = handler(req_index).is_ok();
                results.push(RequestResult {
                    latency: req_start.elapsed(),
                    success,
                });
                req_index += 1;
            }
            remaining -= batch_size;
        }
        let elapsed = start.elapsed();
        let successes = results.iter().filter(|r| r.success).count();
        let failures = results.len() - successes;
        // Sort ascending so percentile, min, and max can index directly.
        let mut latencies: Vec<Duration> = results.iter().map(|r| r.latency).collect();
        latencies.sort_unstable();
        LoadTestReport {
            total: results.len(),
            successes,
            failures,
            elapsed,
            latencies,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn basic_load_test() {
        let config = LoadTestConfig::new().total_requests(100).concurrency(5);
        let report = LoadTest::run(&config, |_| Ok(()));
        assert_eq!(report.total, 100);
        assert_eq!(report.successes, 100);
        assert_eq!(report.failures, 0);
        assert!((report.success_rate() - 1.0).abs() < f64::EPSILON);
    }

    #[test]
    fn load_test_with_failures() {
        let config = LoadTestConfig::new().total_requests(100).concurrency(1);
        // Every 10th request (indices 0, 10, ..., 90) fails: 10 failures total.
        let report = LoadTest::run(&config, |i| {
            if i % 10 == 0 {
                Err("fail".into())
            } else {
                Ok(())
            }
        });
        assert_eq!(report.total, 100);
        assert_eq!(report.failures, 10);
        assert_eq!(report.successes, 90);
        assert!((report.error_rate() - 0.1).abs() < f64::EPSILON);
    }

    #[test]
    fn load_test_percentiles() {
        let config = LoadTestConfig::new().total_requests(100).concurrency(1);
        let report = LoadTest::run(&config, |_| Ok(()));
        assert!(report.percentile(0.50).is_some());
        assert!(report.percentile(0.95).is_some());
        assert!(report.percentile(0.99).is_some());
        assert!(report.min_latency().is_some());
        assert!(report.max_latency().is_some());
        assert!(report.mean_latency().is_some());
    }

    #[test]
    fn load_test_rps() {
        let config = LoadTestConfig::new().total_requests(50).concurrency(10);
        let report = LoadTest::run(&config, |_| Ok(()));
        assert!(report.rps() > 0.0);
    }

    #[test]
    fn load_test_with_warmup() {
        let config = LoadTestConfig::new()
            .total_requests(50)
            .warmup(10)
            .concurrency(1);
        // Warmup calls must not be counted toward the report total.
        let report = LoadTest::run(&config, |_| Ok(()));
        assert_eq!(report.total, 50);
    }

    #[test]
    fn load_test_display() {
        let config = LoadTestConfig::new().total_requests(10).concurrency(1);
        let report = LoadTest::run(&config, |_| Ok(()));
        let display = format!("{report}");
        assert!(display.contains("Load Test Report"));
        assert!(display.contains("RPS:"));
    }

    #[test]
    #[allow(clippy::float_cmp)]
    fn empty_report() {
        let config = LoadTestConfig::new().total_requests(0).concurrency(1);
        let report = LoadTest::run(&config, |_| Ok(()));
        assert_eq!(report.total, 0);
        assert_eq!(report.success_rate(), 0.0);
        assert_eq!(report.rps(), 0.0);
        assert!(report.percentile(0.50).is_none());
    }

    #[test]
    fn config_defaults() {
        let config = LoadTestConfig::default();
        assert_eq!(config.total_requests, 1000);
        assert_eq!(config.concurrency, 1);
        assert_eq!(config.warmup, 0);
    }

    #[test]
    fn concurrency_minimum_is_one() {
        let config = LoadTestConfig::new().concurrency(0);
        assert_eq!(config.concurrency, 1);
    }
}