use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::{Duration, Instant};

/// Aggregated timing and health statistics for a single named operation.
#[derive(Debug, Clone)]
pub struct PerformanceMetrics {
    pub operation_count: u64,
    pub total_duration: Duration,
    pub min_duration: Duration,
    pub max_duration: Duration,
    pub avg_duration: Duration,
    pub p50_duration: Duration,
    pub p95_duration: Duration,
    pub p99_duration: Duration,
    pub error_count: u64,
    pub cache_hit_rate: f64,
    pub memory_usage_mb: f64,
}

impl Default for PerformanceMetrics {
    fn default() -> Self {
        Self {
            operation_count: 0,
            total_duration: Duration::ZERO,
            // Start at MAX so the first recorded sample becomes the minimum.
            min_duration: Duration::MAX,
            max_duration: Duration::ZERO,
            avg_duration: Duration::ZERO,
            p50_duration: Duration::ZERO,
            p95_duration: Duration::ZERO,
            p99_duration: Duration::ZERO,
            error_count: 0,
            cache_hit_rate: 0.0,
            memory_usage_mb: 0.0,
        }
    }
}
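
/// Process-wide profiler shared by all callers.
///
/// A minimal usage sketch; the profiled call (`send_request`) and the
/// operation name are illustrative assumptions, not part of this module:
///
/// ```ignore
/// let timer = PROFILER.start_operation("api.completion");
/// let result = send_request();
/// timer.complete(result.is_ok());
/// ```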
pub static PROFILER: Lazy<Arc<PerformanceProfiler>> =
    Lazy::new(|| Arc::new(PerformanceProfiler::new()));

pub struct PerformanceProfiler {
    /// Aggregated metrics keyed by operation name.
    metrics: Arc<dashmap::DashMap<String, PerformanceMetrics>>,
    /// Start times of operations that have begun but not yet completed.
    active_operations: Arc<dashmap::DashMap<String, Instant>>,
}

impl PerformanceProfiler {
    pub fn new() -> Self {
        Self {
            metrics: Arc::new(dashmap::DashMap::new()),
            active_operations: Arc::new(dashmap::DashMap::new()),
        }
    }

    /// Begin timing `operation` and return a guard that records the result
    /// when completed or dropped.
    pub fn start_operation(&self, operation: &str) -> OperationTimer {
        let start_time = Instant::now();
        // Entries are keyed by operation name, so restarting an operation
        // overwrites its previous start time rather than adding a new entry.
        self.active_operations
            .insert(operation.to_string(), start_time);
        OperationTimer {
            operation: operation.to_string(),
            start_time,
            profiler: Arc::clone(&self.metrics),
            completed: false,
        }
    }
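
    /// Record one completed operation directly, without a timer guard.
    ///
    /// Sketch of a call site; the timing code and operation name are
    /// illustrative assumptions:
    ///
    /// ```ignore
    /// let started = Instant::now();
    /// let ok = run_tool();
    /// PROFILER.record_operation("tool.search", started.elapsed(), ok);
    /// ```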
    pub fn record_operation(&self, operation: &str, duration: Duration, success: bool) {
        // Compute the memory estimate before taking a write guard on the map:
        // `get_current_memory_mb` reads the map's length, and doing that while
        // an entry guard is held can deadlock on the shard lock.
        let memory_usage_mb = self.get_current_memory_mb();
        let mut entry = self
            .metrics
            .entry(operation.to_string())
            .or_insert_with(PerformanceMetrics::default);
        entry.operation_count += 1;
        entry.total_duration += duration;
        entry.min_duration = entry.min_duration.min(duration);
        entry.max_duration = entry.max_duration.max(duration);
        if !success {
            entry.error_count += 1;
        }
        entry.avg_duration = entry.total_duration / entry.operation_count as u32;
        // p50/p95/p99 are left untouched; deriving them would require storing
        // individual samples rather than running aggregates.
        entry.memory_usage_mb = memory_usage_mb;
    }

    /// Rough, heuristic estimate of this process's memory footprint in MB.
    pub fn get_current_memory_mb(&self) -> f64 {
        // Heuristic model: a fixed baseline plus small per-entry costs.
        let base_memory = 35.0;
        let operation_count = self.metrics.len() as f64;
        let active_count = self.active_operations.len() as f64;
        let metrics_memory = operation_count * 0.3;
        let active_memory = active_count * 0.1;
        let cache_memory = self.estimate_cache_memory();
        let total = base_memory + metrics_memory + active_memory + cache_memory;
        // Cap the estimate so a runaway entry count cannot skew reports.
        total.min(150.0)
    }

    fn estimate_cache_memory(&self) -> f64 {
        let cache_entries = self.metrics.len() as f64;
        let avg_entry_size_kb = 5.0;
        (cache_entries * avg_entry_size_kb) / 1024.0
    }
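
    /// Human-readable suggestions based on the current memory estimate.
    ///
    /// Sketch of use (the logging choice is an assumption):
    ///
    /// ```ignore
    /// for hint in PROFILER.get_memory_recommendations() {
    ///     eprintln!("{hint}");
    /// }
    /// ```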
    pub fn get_memory_recommendations(&self) -> Vec<String> {
        let mut recommendations = Vec::new();
        let current_memory = self.get_current_memory_mb();
        if current_memory > 100.0 {
            recommendations.push("Memory usage exceeds 100MB target".to_string());
            recommendations.push("Consider using ClientConfig::low_memory()".to_string());
        }
        if current_memory > 50.0 {
            recommendations.push("Memory usage > 50MB, monitor closely".to_string());
        }
        let active_ops = self.active_operations.len();
        if active_ops > 10 {
            recommendations.push(format!(
                "{} active operations may impact memory",
                active_ops
            ));
        }
        let total_ops = self
            .metrics
            .iter()
            .map(|entry| entry.value().operation_count)
            .sum::<u64>();
        if total_ops > 1000 {
            recommendations.push("Consider clearing old metrics to reduce memory".to_string());
        }
        if recommendations.is_empty() {
            recommendations.push("Memory usage is within acceptable limits".to_string());
        }
        recommendations
    }

    pub fn get_metrics(&self, operation: &str) -> Option<PerformanceMetrics> {
        self.metrics.get(operation).map(|m| m.value().clone())
    }

    pub fn get_all_metrics(&self) -> HashMap<String, PerformanceMetrics> {
        self.metrics
            .iter()
            .map(|entry| (entry.key().clone(), entry.value().clone()))
            .collect()
    }
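
    /// Compare an operation's metrics against fixed health thresholds
    /// (500ms average latency, 10% error rate, 100MB memory).
    ///
    /// Sketch of a call site (the operation name is an assumption):
    ///
    /// ```ignore
    /// if let PerformanceStatus::Slow(avg) = PROFILER.check_performance_targets("api.completion") {
    ///     eprintln!("completion averaging {avg:?}, consider caching");
    /// }
    /// ```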
    pub fn check_performance_targets(&self, operation: &str) -> PerformanceStatus {
        let metrics = match self.get_metrics(operation) {
            Some(m) => m,
            None => return PerformanceStatus::Unknown,
        };
        let target_duration = Duration::from_millis(500);
        let error_rate = metrics.error_count as f64 / metrics.operation_count as f64;
        if metrics.avg_duration > target_duration {
            PerformanceStatus::Slow(metrics.avg_duration)
        } else if error_rate > 0.1 {
            PerformanceStatus::HighErrorRate(error_rate)
        } else if metrics.memory_usage_mb > 100.0 {
            PerformanceStatus::HighMemoryUsage(metrics.memory_usage_mb)
        } else {
            PerformanceStatus::Good
        }
    }

    pub fn generate_report(&self) -> String {
        let mut report = String::new();
        report.push_str("Performance Report\n");
        report.push_str("====================\n\n");
        // Iterate over a snapshot so no map guards are held while calling back
        // into `check_performance_targets`.
        for (operation, metrics) in self.get_all_metrics() {
            let status = self.check_performance_targets(&operation);
            let status_icon = match status {
                PerformanceStatus::Good => "[OK]",
                PerformanceStatus::Slow(_) => "[SLOW]",
                PerformanceStatus::HighErrorRate(_) => "[ERR]",
                PerformanceStatus::HighMemoryUsage(_) => "[MEM]",
                PerformanceStatus::Unknown => "[?]",
            };
            report.push_str(&format!(
                "{} {}: {:.1}ms avg ({} ops, {:.1}% errors)\n",
                status_icon,
                operation,
                metrics.avg_duration.as_secs_f64() * 1000.0,
                metrics.operation_count,
                (metrics.error_count as f64 / metrics.operation_count as f64) * 100.0
            ));
            if matches!(
                status,
                PerformanceStatus::Slow(_)
                    | PerformanceStatus::HighErrorRate(_)
                    | PerformanceStatus::HighMemoryUsage(_)
            ) {
                report.push_str("    Needs optimization\n");
            }
        }
        report.push_str(&format!(
            "\nMemory Usage: {:.1}MB\n",
            self.get_current_memory_mb()
        ));
        report
    }
}

#[derive(Debug, Clone)]
pub enum PerformanceStatus {
    Good,
    Slow(Duration),
    HighErrorRate(f64),
    HighMemoryUsage(f64),
    Unknown,
}
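
/// RAII guard returned by [`PerformanceProfiler::start_operation`].
///
/// Call [`OperationTimer::complete`] with an explicit success flag; if the
/// guard is simply dropped (for example on early return), the operation is
/// still recorded and counted as successful.
///
/// A minimal sketch; the wrapped call is an assumption:
///
/// ```ignore
/// let timer = PROFILER.start_operation("index.rebuild");
/// let outcome = rebuild_index();
/// timer.complete(outcome.is_ok());
/// ```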
pub struct OperationTimer {
    operation: String,
    start_time: Instant,
    profiler: Arc<dashmap::DashMap<String, PerformanceMetrics>>,
    // Set by `complete` so the Drop impl does not record the same operation twice.
    completed: bool,
}

impl OperationTimer {
    /// Finish timing and record the result with an explicit success flag.
    pub fn complete(mut self, success: bool) {
        // Mark as completed so Drop does not record this operation a second time.
        self.completed = true;
        let duration = self.start_time.elapsed();
        let mut metrics = self
            .profiler
            .entry(self.operation.clone())
            .or_insert_with(PerformanceMetrics::default);
        metrics.operation_count += 1;
        metrics.total_duration += duration;
        metrics.min_duration = metrics.min_duration.min(duration);
        metrics.max_duration = metrics.max_duration.max(duration);
        if !success {
            metrics.error_count += 1;
        }
        metrics.avg_duration = metrics.total_duration / metrics.operation_count as u32;
    }
}

impl Drop for OperationTimer {
    fn drop(&mut self) {
        // `complete` already recorded this operation; avoid double counting.
        if self.completed {
            return;
        }
        let duration = self.start_time.elapsed();
        let mut metrics = self
            .profiler
            .entry(self.operation.clone())
            .or_insert_with(PerformanceMetrics::default);
        metrics.operation_count += 1;
        metrics.total_duration += duration;
        metrics.min_duration = metrics.min_duration.min(duration);
        metrics.max_duration = metrics.max_duration.max(duration);
        metrics.avg_duration = metrics.total_duration / metrics.operation_count as u32;
    }
}

pub struct PerformanceTargets {
    pub response_time_target_ms: u64,
    pub context_accuracy_target: f64,
    pub completion_acceptance_target: f64,
    pub memory_target_mb: f64,
    pub cache_hit_target: f64,
    pub error_recovery_target: f64,
}

impl Default for PerformanceTargets {
    fn default() -> Self {
        Self {
            response_time_target_ms: 500,
            context_accuracy_target: 0.8,
            completion_acceptance_target: 0.7,
            memory_target_mb: 100.0,
            cache_hit_target: 0.6,
            error_recovery_target: 0.9,
        }
    }
}

impl PerformanceTargets {
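    /// Evaluate the recorded metrics against these targets.
    ///
    /// Sketch of use with the global profiler (the call site is an assumption):
    ///
    /// ```ignore
    /// let status = PerformanceTargets::default().check_targets(&PROFILER);
    /// if !status.all_met() {
    ///     println!("{}", status.generate_report());
    /// }
    /// ```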
    pub fn check_targets(&self, profiler: &PerformanceProfiler) -> TargetsStatus {
        // Start with every target assumed met and flip to false on violation.
        // (The derived `Default` would start everything at `false`, so
        // `all_met` could never succeed.)
        let mut status = TargetsStatus {
            response_time_met: true,
            context_accuracy_met: true,
            completion_acceptance_met: true,
            memory_target_met: true,
            cache_hit_met: true,
            error_recovery_met: true,
        };
        // Context accuracy, completion acceptance, and cache hit rate are not
        // measured by this module, so those flags keep their initial value.
        let all_metrics = profiler.get_all_metrics();
        for (operation, metrics) in &all_metrics {
            if (operation.contains("api") || operation.contains("tool"))
                && metrics.avg_duration > Duration::from_millis(self.response_time_target_ms)
            {
                status.response_time_met = false;
            }
        }
        if profiler.get_current_memory_mb() > self.memory_target_mb {
            status.memory_target_met = false;
        }
        for metrics in all_metrics.values() {
            let error_rate = metrics.error_count as f64 / metrics.operation_count as f64;
            if error_rate > (1.0 - self.error_recovery_target) {
                status.error_recovery_met = false;
                break;
            }
        }
        status
    }
}

#[derive(Debug, Default)]
pub struct TargetsStatus {
    pub response_time_met: bool,
    pub context_accuracy_met: bool,
    pub completion_acceptance_met: bool,
    pub memory_target_met: bool,
    pub cache_hit_met: bool,
    pub error_recovery_met: bool,
}

impl TargetsStatus {
    pub fn all_met(&self) -> bool {
        self.response_time_met
            && self.context_accuracy_met
            && self.completion_acceptance_met
            && self.memory_target_met
            && self.cache_hit_met
            && self.error_recovery_met
    }

    pub fn generate_report(&self) -> String {
        let mut report = String::new();
        report.push_str("Performance Targets Status\n");
        report.push_str("==============================\n");
        let targets = [
            ("Response Time < 500ms", self.response_time_met),
            ("Context Accuracy > 80%", self.context_accuracy_met),
            (
                "Completion Acceptance > 70%",
                self.completion_acceptance_met,
            ),
            ("Memory Usage < 100MB", self.memory_target_met),
            ("Cache Hit Rate > 60%", self.cache_hit_met),
            ("Error Recovery > 90%", self.error_recovery_met),
        ];
        for (target, met) in &targets {
            let icon = if *met { "[MET]" } else { "[MISSED]" };
            report.push_str(&format!("{} {}\n", icon, target));
        }
        let overall_status = if self.all_met() {
            "All targets met!"
        } else {
            "Some targets need improvement"
        };
        report.push_str(&format!("\n{}", overall_status));
        report
    }
}
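
#[cfg(test)]
mod tests {
    use super::*;

    // A small sketch of the expected bookkeeping; the operation names and the
    // sample durations are arbitrary test values, not taken from real usage.
    #[test]
    fn records_basic_metrics() {
        let profiler = PerformanceProfiler::new();
        profiler.record_operation("unit.op", Duration::from_millis(10), true);
        profiler.record_operation("unit.op", Duration::from_millis(30), false);

        let metrics = profiler.get_metrics("unit.op").expect("metrics recorded");
        assert_eq!(metrics.operation_count, 2);
        assert_eq!(metrics.error_count, 1);
        assert_eq!(metrics.min_duration, Duration::from_millis(10));
        assert_eq!(metrics.max_duration, Duration::from_millis(30));
        assert_eq!(metrics.avg_duration, Duration::from_millis(20));
    }

    // Completing a timer must record the operation exactly once, even though
    // the guard is also dropped afterwards.
    #[test]
    fn timer_records_once() {
        let profiler = PerformanceProfiler::new();
        let timer = profiler.start_operation("unit.timer");
        timer.complete(true);

        let metrics = profiler.get_metrics("unit.timer").expect("metrics recorded");
        assert_eq!(metrics.operation_count, 1);
        assert_eq!(metrics.error_count, 0);
    }
}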