use super::BenchmarkResults;
use std::process::Command;
use std::time::Instant;
/// Benchmarks a shell command by executing it repeatedly via `sh -c` and
/// timing each run.
///
/// Runs `warmup` untimed iterations first, then `iterations` timed runs.
/// A timed run that exits non-zero is counted in `failed_requests` and its
/// duration is discarded, so `request_times` only holds successful samples.
///
/// NOTE(review): this shells out through `sh`, so it is Unix-only as
/// written — confirm whether Windows support is needed.
///
/// # Errors
///
/// Returns `Err` if the shell process cannot be spawned at any point, or if
/// the command exits non-zero during the warmup phase (warmup failures abort
/// the whole benchmark, since timing a broken command would be meaningless).
pub fn benchmark_cli(
    command: &str,
    iterations: usize,
    warmup: usize,
) -> Result<BenchmarkResults, String> {
    // Warmup phase: untimed runs; any failure here aborts immediately.
    for _ in 0..warmup {
        let output = run_shell(command, "warmup command")?;
        if !output.status.success() {
            return Err(format!(
                "Command failed during warmup: {}",
                String::from_utf8_lossy(&output.stderr)
            ));
        }
    }
    let mut request_times = Vec::with_capacity(iterations);
    let mut successful = 0;
    let mut failed = 0;
    let start = Instant::now();
    for _ in 0..iterations {
        let iteration_start = Instant::now();
        let output = run_shell(command, "command")?;
        let elapsed = iteration_start.elapsed();
        // Only successful runs contribute a timing sample; failed runs are
        // tallied but their (likely misleading) durations are dropped.
        if output.status.success() {
            request_times.push(elapsed);
            successful += 1;
        } else {
            failed += 1;
        }
    }
    let total_duration = start.elapsed();
    Ok(BenchmarkResults {
        total_requests: iterations,
        successful_requests: successful,
        failed_requests: failed,
        total_duration,
        request_times,
    })
}

/// Executes `command` via `sh -c`, capturing its output.
///
/// `what` names the phase for the spawn-failure message (e.g. "warmup
/// command"), keeping the error strings identical to the two call sites'
/// originals. A non-zero exit status is NOT an error here — the caller
/// decides how to treat it.
fn run_shell(command: &str, what: &str) -> Result<std::process::Output, String> {
    Command::new("sh")
        .arg("-c")
        .arg(command)
        .output()
        .map_err(|e| format!("Failed to execute {what}: {e}"))
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::time::Duration;

    // Happy path: a trivial command succeeds on every timed iteration.
    #[test]
    fn test_cli_benchmark_echo() {
        let res = benchmark_cli("echo 'test'", 5, 2).expect("operation should succeed in test");
        assert_eq!(res.total_requests, 5);
        assert_eq!(res.successful_requests, 5);
        assert_eq!(res.failed_requests, 0);
        assert!(!res.request_times.is_empty());
    }

    // A command that exits non-zero during warmup must surface an error
    // whose message mentions the warmup phase.
    #[test]
    fn test_cli_benchmark_failure() {
        let outcome = benchmark_cli("exit 1", 3, 1);
        assert!(outcome.is_err());
        assert!(outcome.unwrap_err().contains("warmup"));
    }

    // Each timed run of `sleep 0.01` should register at least ~10ms.
    #[test]
    fn test_cli_benchmark_timing() {
        let res = benchmark_cli("sleep 0.01", 3, 1).expect("operation should succeed in test");
        assert_eq!(res.total_requests, 3);
        assert_eq!(res.successful_requests, 3);
        for elapsed in &res.request_times {
            assert!(elapsed.as_millis() >= 10, "Each sleep should take ≥10ms");
        }
    }

    // Warmup iterations are untimed and must not change how many timed
    // samples get recorded.
    #[test]
    fn test_cli_benchmark_warmup() {
        let warmed = benchmark_cli("echo 'test'", 10, 5).expect("operation should succeed in test");
        let cold = benchmark_cli("echo 'test'", 10, 0).expect("operation should succeed in test");
        assert_eq!(warmed.total_requests, 10);
        assert_eq!(cold.total_requests, 10);
        assert_eq!(warmed.request_times.len(), 10);
        assert_eq!(cold.request_times.len(), 10);
    }

    // Property: every iteration is recorded as a success, and the sum of
    // per-run durations never exceeds the measured wall-clock total.
    #[test]
    fn prop_all_iterations_recorded() {
        use proptest::prelude::*;
        proptest!(|(
            iterations in 1usize..20,
            warmup in 0usize..5,
        )| {
            let res = benchmark_cli("echo 'test'", iterations, warmup).expect("operation should succeed in test");
            prop_assert_eq!(res.successful_requests, iterations);
            prop_assert_eq!(res.failed_requests, 0);
            prop_assert_eq!(res.request_times.len(), iterations);
            let summed: Duration = res.request_times.iter().sum();
            prop_assert!(res.total_duration >= summed);
        });
    }

    // Sanity check on the result struct's fields after a short run.
    #[test]
    fn test_benchmark_results_fields() {
        let res = benchmark_cli("echo 'hello'", 3, 0).expect("operation should succeed in test");
        assert_eq!(res.total_requests, 3);
        assert!(res.total_duration > Duration::ZERO);
        assert!(!res.request_times.is_empty());
    }

    // warmup == 0 is valid input and simply skips the warmup phase.
    #[test]
    fn test_benchmark_zero_warmup() {
        let res = benchmark_cli("echo 'no warmup'", 2, 0).expect("operation should succeed in test");
        assert_eq!(res.total_requests, 2);
        assert_eq!(res.successful_requests, 2);
        assert_eq!(res.failed_requests, 0);
    }

    // Command stdout is captured and discarded; producing output must not
    // affect the success count.
    #[test]
    fn test_benchmark_command_output_ignored() {
        let res = benchmark_cli("echo 'lots of output'; echo 'more output'", 2, 0)
            .expect("should succeed");
        assert_eq!(res.successful_requests, 2);
    }
}