use colored::Colorize;
use super::metrics::PerfMetrics;
/// Zero-sized namespace type grouping the report-rendering functions
/// (`print_text`, `print_json`, `print`) for a `PerfMetrics` run.
pub struct PerfReport;
impl PerfReport {
pub fn print_text(metrics: &PerfMetrics) {
println!();
println!("{}", "═══════════════════════════════════════════════════════════".cyan());
println!("{}", " PERFORMANCE RESULTS ".cyan().bold());
println!("{}", "═══════════════════════════════════════════════════════════".cyan());
println!();
Self::print_metrics_details(metrics);
if !metrics.endpoints.is_empty() {
println!();
println!("{}", "═══════════════════════════════════════════════════════════".cyan());
println!("{}", " ENDPOINT BREAKDOWN ".cyan().bold());
println!("{}", "═══════════════════════════════════════════════════════════".cyan());
let mut sorted_endpoints: Vec<_> = metrics.endpoints.iter().collect();
sorted_endpoints.sort_by_key(|(k, _)| *k);
for (label, stats) in sorted_endpoints {
println!();
println!("📍 {}", label.magenta().bold());
println!("{}", "───────────────────────────────────────────────────────────".dimmed());
Self::print_metrics_details(stats);
}
}
println!();
println!("{}", "═══════════════════════════════════════════════════════════".cyan());
}
fn print_metrics_details(metrics: &PerfMetrics) {
println!("{}", "📊 Request Summary".white().bold());
println!(" Total Requests: {}", metrics.total_requests.to_string().cyan());
println!(" Successful: {}", metrics.successful_requests.to_string().green());
println!(" Failed: {}",
if metrics.failed_requests > 0 {
metrics.failed_requests.to_string().red()
} else {
metrics.failed_requests.to_string().green()
}
);
println!(" Error Rate: {:.2}%", metrics.error_rate_percent);
println!();
println!("{}", "⏱️ Timing".white().bold());
println!(" Total Duration: {:.2} ms", metrics.total_duration_ms);
println!(" Requests/sec: {}", format!("{:.2}", metrics.requests_per_second).yellow().bold());
println!();
println!("{}", "📈 Latency Distribution".white().bold());
println!(" Min: {:.2} ms", metrics.latency_min_ms);
println!(" Max: {:.2} ms", metrics.latency_max_ms);
println!(" Avg: {:.2} ms", metrics.latency_avg_ms);
println!(" p50 (Median): {:.2} ms", metrics.latency_p50_ms);
println!(" p95: {:.2} ms", metrics.latency_p95_ms);
println!(" p99: {:.2} ms", metrics.latency_p99_ms);
}
pub fn print_json(metrics: &PerfMetrics) {
match serde_json::to_string_pretty(metrics) {
Ok(json) => println!("{}", json),
Err(e) => eprintln!("Failed to serialize metrics: {}", e),
}
}
pub fn print(metrics: &PerfMetrics, format: &str) {
match format.to_lowercase().as_str() {
"json" => Self::print_json(metrics),
_ => Self::print_text(metrics),
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;

    /// Fixed fixture: 100 requests, 5 failures, 5% error rate, no endpoints.
    fn sample_metrics() -> PerfMetrics {
        PerfMetrics {
            total_requests: 100,
            successful_requests: 95,
            failed_requests: 5,
            error_rate_percent: 5.0,
            total_duration_ms: 1000.0,
            requests_per_second: 100.0,
            latency_min_ms: 10.0,
            latency_max_ms: 100.0,
            latency_avg_ms: 50.0,
            latency_p50_ms: 45.0,
            latency_p95_ms: 90.0,
            latency_p99_ms: 98.0,
            endpoints: HashMap::new(),
        }
    }

    /// The serialized form must expose field names and values.
    #[test]
    fn test_json_serialization() {
        let serialized = serde_json::to_string(&sample_metrics()).unwrap();
        assert!(serialized.contains("total_requests"));
        assert!(serialized.contains("100"));
    }

    /// Sanity-check the fixture's counters and derived error rate.
    #[test]
    fn test_metrics_fields() {
        let m = sample_metrics();
        assert_eq!(m.total_requests, 100);
        assert_eq!(m.failed_requests, 5);
        assert!((m.error_rate_percent - 5.0).abs() < 0.01);
    }
}