use std::time::{Duration, Instant};
use std::collections::HashMap;
use tracing::{info, debug};
/// Timing statistics for one named operation, optionally annotated
/// with a peak-memory figure.
#[derive(Debug, Clone)]
pub struct PerformanceMetrics {
    /// Label identifying the measured operation.
    pub name: String,
    /// Total wall-clock time across all iterations.
    pub duration: Duration,
    /// How many iterations `duration` covers.
    pub iterations: u64,
    /// Per-iteration average; `Duration::ZERO` when `iterations` is zero.
    pub avg_duration: Duration,
    /// Peak memory in bytes, if it was recorded.
    pub peak_memory: Option<u64>,
}

impl PerformanceMetrics {
    /// Builds a metric from a total duration and an iteration count,
    /// deriving the per-iteration average (zero iterations yield a zero
    /// average rather than dividing by zero).
    pub fn new(name: String, duration: Duration, iterations: u64) -> Self {
        let avg_duration = match iterations {
            0 => Duration::ZERO,
            n => Duration::from_nanos(duration.as_nanos() as u64 / n),
        };
        Self {
            name,
            duration,
            iterations,
            avg_duration,
            peak_memory: None,
        }
    }

    /// Builder-style setter attaching a peak-memory reading (bytes).
    pub fn with_peak_memory(mut self, peak_memory: u64) -> Self {
        self.peak_memory = Some(peak_memory);
        self
    }

    /// Whether the average duration is within `target`.
    pub fn meets_target(&self, target: Duration) -> bool {
        self.avg_duration <= target
    }

    /// Renders a one-line human-readable summary, appending the peak
    /// memory (converted bytes -> MB) when present.
    pub fn format(&self) -> String {
        let base = format!(
            "{}: {:.2}ms avg ({:.2}ms total, {} iterations)",
            self.name,
            self.avg_duration.as_secs_f64() * 1000.0,
            self.duration.as_secs_f64() * 1000.0,
            self.iterations
        );
        match self.peak_memory {
            Some(bytes) => format!("{}, peak memory: {:.2}MB", base, bytes as f64 / 1024.0 / 1024.0),
            None => base,
        }
    }
}
/// Accumulates `PerformanceMetrics` samples grouped by operation name,
/// so repeated recordings of the same operation can later be averaged.
pub struct PerformanceProfiler {
    // One vector of samples per operation name; each record call appends.
    metrics: HashMap<String, Vec<PerformanceMetrics>>,
}
impl PerformanceProfiler {
    /// Creates an empty profiler with no recorded samples.
    pub fn new() -> Self {
        Self {
            metrics: HashMap::new(),
        }
    }

    /// Records a single-run sample for `name`.
    pub fn record(&mut self, name: String, duration: Duration) {
        // A single measurement is the one-iteration case; delegate so the
        // construction/insertion logic lives in exactly one place.
        self.record_iterations(name, duration, 1);
    }

    /// Records a sample covering `iterations` runs totalling `total_duration`.
    pub fn record_iterations(&mut self, name: String, total_duration: Duration, iterations: u64) {
        let metrics = PerformanceMetrics::new(name.clone(), total_duration, iterations);
        self.metrics.entry(name).or_default().push(metrics);
    }

    /// All recorded samples, keyed by operation name.
    pub fn metrics(&self) -> &HashMap<String, Vec<PerformanceMetrics>> {
        &self.metrics
    }

    /// Collapses every sample recorded under `name` into one aggregate
    /// metric (summed duration and iterations, max of reported peak memory).
    ///
    /// Returns `None` when no samples exist for `name`.
    pub fn average_metrics(&self, name: &str) -> Option<PerformanceMetrics> {
        let metrics = self.metrics.get(name)?;
        if metrics.is_empty() {
            return None;
        }
        let total_duration: Duration = metrics.iter().map(|m| m.duration).sum();
        let total_iterations: u64 = metrics.iter().map(|m| m.iterations).sum();
        let peak_memory = metrics.iter().filter_map(|m| m.peak_memory).max();
        let mut avg = PerformanceMetrics::new(name.to_string(), total_duration, total_iterations);
        if let Some(peak_mem) = peak_memory {
            avg = avg.with_peak_memory(peak_mem);
        }
        Some(avg)
    }

    /// Logs one aggregated summary line per operation via `tracing`.
    pub fn print_summary(&self) {
        info!("=== Performance Summary ===");
        // Iterate keys only: the per-name aggregation re-reads the map,
        // and binding the value here previously left it unused.
        for name in self.metrics.keys() {
            if let Some(avg) = self.average_metrics(name) {
                info!("{}", avg.format());
            }
        }
    }

    /// Discards every recorded sample.
    pub fn clear(&mut self) {
        self.metrics.clear();
    }
}
impl Default for PerformanceProfiler {
    /// Equivalent to [`PerformanceProfiler::new`]: an empty profiler.
    fn default() -> Self {
        Self::new()
    }
}
/// Simple named stopwatch: captures an `Instant` at construction and
/// reports/logs the elapsed time when queried or stopped.
pub struct Timer {
    // Moment the timer was created.
    start: Instant,
    // Label used in the log lines emitted by `stop`/`stop_with_target`.
    name: String,
}
impl Timer {
    /// Starts a timer with the given label, logging the start at debug level.
    pub fn new(name: impl Into<String>) -> Self {
        let name = name.into();
        debug!("Starting timer: {}", name);
        let start = Instant::now();
        Self { start, name }
    }

    /// How long the timer has been running.
    pub fn elapsed(&self) -> Duration {
        self.start.elapsed()
    }

    /// Elapsed time expressed in fractional milliseconds.
    pub fn elapsed_ms(&self) -> f64 {
        1000.0 * self.elapsed().as_secs_f64()
    }

    /// Consumes the timer, logs the elapsed time at info level, and
    /// returns it.
    pub fn stop(self) -> Duration {
        let elapsed = self.elapsed();
        let ms = elapsed.as_secs_f64() * 1000.0;
        info!("{}: {:.2}ms", self.name, ms);
        elapsed
    }

    /// Like [`Timer::stop`], but also compares against `target` and
    /// returns whether the target was met alongside the elapsed time.
    pub fn stop_with_target(self, target: Duration) -> (Duration, bool) {
        let elapsed = self.elapsed();
        let meets_target = elapsed <= target;
        let status = match meets_target {
            true => "✓",
            false => "✗",
        };
        info!(
            "{}: {:.2}ms {} (target: {:.2}ms)",
            self.name,
            elapsed.as_secs_f64() * 1000.0,
            status,
            target.as_secs_f64() * 1000.0
        );
        (elapsed, meets_target)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Average derivation from total duration and iteration count.
    #[test]
    fn test_performance_metrics() {
        let m = PerformanceMetrics::new("test_op".to_string(), Duration::from_millis(100), 10);
        assert_eq!(m.name, "test_op");
        assert_eq!(m.duration, Duration::from_millis(100));
        assert_eq!(m.iterations, 10);
        assert_eq!(m.avg_duration, Duration::from_millis(10));
    }

    // Target comparison is against the per-iteration average.
    #[test]
    fn test_performance_metrics_meets_target() {
        let m = PerformanceMetrics::new("test_op".to_string(), Duration::from_millis(100), 10);
        assert!(m.meets_target(Duration::from_millis(20)));
        assert!(!m.meets_target(Duration::from_millis(5)));
    }

    // Two single-run recordings aggregate into a two-iteration metric.
    #[test]
    fn test_profiler_record() {
        let mut profiler = PerformanceProfiler::new();
        profiler.record("op1".to_string(), Duration::from_millis(10));
        profiler.record("op1".to_string(), Duration::from_millis(20));
        let averaged = profiler
            .average_metrics("op1")
            .expect("samples were recorded for op1");
        assert_eq!(averaged.iterations, 2);
    }

    // Elapsed time must be at least the slept duration.
    #[test]
    fn test_timer() {
        let timer = Timer::new("test_timer");
        std::thread::sleep(Duration::from_millis(10));
        assert!(timer.elapsed() >= Duration::from_millis(10));
    }
}