use lazy_static::lazy_static;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;

/// A point-in-time snapshot of the collected performance metrics.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct PerformanceMetrics {
    /// Recent operation durations, capped at the 1,000 most recent samples.
    pub response_times: Vec<Duration>,
    /// Fraction of cache lookups that hit, in the range 0.0..=1.0.
    pub cache_hit_rate: f64,
    /// Resident memory usage in megabytes.
    pub memory_usage: usize,
    /// Fraction of requests that ended in an error, in the range 0.0..=1.0.
    pub error_rate: f64,
    /// Requests processed per second.
    pub throughput: usize,
    /// Fraction of context predictions that were correct, in the range 0.0..=1.0.
    pub context_accuracy: f64,
}
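
// Serialization sketch (illustrative, not part of the original API): the serde
// derives on `PerformanceMetrics` let a snapshot be exported as JSON, e.g. for a
// metrics endpoint. This assumes `serde_json` is available as a dependency;
// `Duration` values serialize as `{ "secs": .., "nanos": .. }` objects.
#[allow(dead_code)]
pub fn metrics_to_json(metrics: &PerformanceMetrics) -> serde_json::Result<String> {
    serde_json::to_string_pretty(metrics)
}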

/// Collects timing, success, and context-prediction statistics. All state is
/// behind `tokio` `RwLock`s so the monitor can be shared across async tasks.
pub struct PerformanceMonitor {
    metrics: Arc<RwLock<PerformanceMetrics>>,
    /// Start instants of in-flight operations, keyed by operation id.
    operation_start_times: Arc<RwLock<HashMap<String, Instant>>>,
    total_requests: Arc<RwLock<usize>>,
    successful_requests: Arc<RwLock<usize>>,
    context_predictions: Arc<RwLock<usize>>,
    context_correct: Arc<RwLock<usize>>,
}

impl PerformanceMonitor {
    /// Creates a monitor with empty metrics and zeroed counters.
    pub fn new() -> Self {
        Self {
            metrics: Arc::new(RwLock::new(PerformanceMetrics {
                response_times: Vec::new(),
                cache_hit_rate: 0.0,
                memory_usage: 0,
                error_rate: 0.0,
                throughput: 0,
                context_accuracy: 0.0,
            })),
            operation_start_times: Arc::new(RwLock::new(HashMap::new())),
            total_requests: Arc::new(RwLock::new(0)),
            successful_requests: Arc::new(RwLock::new(0)),
            context_predictions: Arc::new(RwLock::new(0)),
            context_correct: Arc::new(RwLock::new(0)),
        }
    }

    /// Marks the start of an operation and counts it toward the request total.
    pub async fn start_operation(&self, operation_id: String) {
        let mut start_times = self.operation_start_times.write().await;
        start_times.insert(operation_id, Instant::now());
        let mut total = self.total_requests.write().await;
        *total += 1;
    }

    /// Marks the end of an operation: records its duration, updates the
    /// success counter, and recomputes the overall error rate.
    pub async fn end_operation(&self, operation_id: String, success: bool) {
        // Take the start time out of the map and release that lock immediately.
        let start_time = {
            let mut start_times = self.operation_start_times.write().await;
            start_times.remove(&operation_id)
        };
        if let Some(start) = start_time {
            let duration = start.elapsed();
            let mut metrics = self.metrics.write().await;
            metrics.response_times.push(duration);
            // Keep only the most recent 1,000 samples to bound memory use.
            if metrics.response_times.len() > 1000 {
                metrics.response_times.remove(0);
            }
            if success {
                let mut successful = self.successful_requests.write().await;
                *successful += 1;
            }
            let total = *self.total_requests.read().await;
            let successful = *self.successful_requests.read().await;
            metrics.error_rate = if total > 0 {
                (total - successful) as f64 / total as f64
            } else {
                0.0
            };
        }
    }

    /// Records one context prediction and recomputes the running accuracy.
    pub async fn record_context_prediction(&self, correct: bool) {
        {
            let mut predictions = self.context_predictions.write().await;
            *predictions += 1;
        }
        if correct {
            let mut correct_predictions = self.context_correct.write().await;
            *correct_predictions += 1;
        }
        // The write guards above must be released before re-reading the counters;
        // re-locking the same `RwLock` while its write guard is still held by this
        // task would deadlock.
        let predictions = *self.context_predictions.read().await;
        let correct = *self.context_correct.read().await;
        let mut metrics = self.metrics.write().await;
        metrics.context_accuracy = if predictions > 0 {
            correct as f64 / predictions as f64
        } else {
            0.0
        };
    }

    /// Overwrites the cache hit rate (expected as a fraction, 0.0..=1.0).
    pub async fn update_cache_hit_rate(&self, hit_rate: f64) {
        let mut metrics = self.metrics.write().await;
        metrics.cache_hit_rate = hit_rate;
    }

    /// Overwrites the current memory usage, in megabytes.
    pub async fn update_memory_usage(&self, memory_mb: usize) {
        let mut metrics = self.metrics.write().await;
        metrics.memory_usage = memory_mb;
    }

    /// Returns a clone of the current metrics snapshot.
    pub async fn get_metrics(&self) -> PerformanceMetrics {
        let metrics = self.metrics.read().await;
        metrics.clone()
    }

    /// Mean of the recorded response times, or zero if none were recorded.
    pub async fn average_response_time(&self) -> Duration {
        let metrics = self.metrics.read().await;
        if metrics.response_times.is_empty() {
            Duration::ZERO
        } else {
            let total: Duration = metrics.response_times.iter().sum();
            total / metrics.response_times.len() as u32
        }
    }

    /// 95th-percentile response time, or zero if none were recorded.
    pub async fn percentile_95_response_time(&self) -> Duration {
        let metrics = self.metrics.read().await;
        if metrics.response_times.is_empty() {
            Duration::ZERO
        } else {
            let mut times = metrics.response_times.clone();
            times.sort_unstable();
            // Index of the 95th-percentile sample, clamped so it never runs
            // past the end of the buffer.
            let index = (times.len() as f64 * 0.95) as usize;
            let safe_index = index.min(times.len() - 1);
            times[safe_index]
        }
    }

    /// Compares the current metrics against the Phase 1 targets.
    pub async fn check_phase1_targets(&self) -> Phase1Status {
        let avg_response = self.average_response_time().await;
        let p95_response = self.percentile_95_response_time().await;
        let metrics = self.get_metrics().await;
        Phase1Status {
            response_time_target: avg_response < Duration::from_millis(500),
            p95_response_time: p95_response,
            memory_target: metrics.memory_usage < 100,
            cache_target: metrics.cache_hit_rate >= 0.6,
            error_recovery_target: metrics.error_rate <= 0.1,
            context_target: metrics.context_accuracy >= 0.8,
        }
    }

    /// Renders a human-readable report of the current metrics against the
    /// Phase 1 targets.
    pub async fn generate_report(&self) -> String {
        let avg_response = self.average_response_time().await;
        let p95_response = self.percentile_95_response_time().await;
        let metrics = self.get_metrics().await;
        let status = self.check_phase1_targets().await;
        format!(
            "Performance Report - Phase 1 Targets\n\n\
             Response Times:\n\
             • Average: {:.2}ms (Target: <500ms) {}\n\
             • 95th percentile: {:.2}ms\n\n\
             💾 Resource Usage:\n\
             • Memory: {}MB (Target: <100MB) {}\n\
             • Cache Hit Rate: {:.1}% (Target: ≥60%) {}\n\n\
             System Health:\n\
             • Error Rate: {:.1}% (Target: ≤10%) {}\n\
             • Context Accuracy: {:.1}% (Target: ≥80%) {}\n\n\
             Throughput: {} req/sec",
            avg_response.as_secs_f64() * 1000.0,
            if status.response_time_target { "✅" } else { "❌" },
            p95_response.as_secs_f64() * 1000.0,
            metrics.memory_usage,
            if status.memory_target { "✅" } else { "❌" },
            metrics.cache_hit_rate * 100.0,
            if status.cache_target { "✅" } else { "❌" },
            metrics.error_rate * 100.0,
            if status.error_recovery_target { "✅" } else { "❌" },
            metrics.context_accuracy * 100.0,
            if status.context_target { "✅" } else { "❌" },
            metrics.throughput
        )
    }
}
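
// Usage sketch (illustrative, not part of the original module): a typical caller
// wraps an async step between `start_operation` and `end_operation` on the global
// monitor. The operation id and the sleep standing in for real work are
// hypothetical; this also assumes tokio's `time` feature is enabled.
#[allow(dead_code)]
async fn monitored_example_operation() {
    PERFORMANCE_MONITOR
        .start_operation("example_op".to_string())
        .await;
    // ... real work would happen here ...
    tokio::time::sleep(Duration::from_millis(10)).await;
    let success = true;
    PERFORMANCE_MONITOR
        .end_operation("example_op".to_string(), success)
        .await;
}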

/// Pass/fail view of the Phase 1 performance targets.
#[derive(Debug, Clone)]
pub struct Phase1Status {
    /// Average response time is under 500ms.
    pub response_time_target: bool,
    /// Informational: the measured 95th-percentile response time.
    pub p95_response_time: Duration,
    /// Memory usage is under 100MB.
    pub memory_target: bool,
    /// Cache hit rate is at least 60%.
    pub cache_target: bool,
    /// Error rate is at most 10%.
    pub error_recovery_target: bool,
    /// Context prediction accuracy is at least 80%.
    pub context_target: bool,
}

impl Phase1Status {
    /// True only when every boolean target is met.
    pub fn all_targets_met(&self) -> bool {
        self.response_time_target
            && self.memory_target
            && self.cache_target
            && self.error_recovery_target
            && self.context_target
    }

    /// Fraction of the five targets that are met, in the range 0.0..=1.0.
    pub fn completion_percentage(&self) -> f64 {
        let targets = [
            self.response_time_target,
            self.memory_target,
            self.cache_target,
            self.error_recovery_target,
            self.context_target,
        ];
        let met = targets.iter().filter(|&&t| t).count();
        met as f64 / targets.len() as f64
    }
}

lazy_static! {
    /// Process-wide monitor instance shared by all callers.
    pub static ref PERFORMANCE_MONITOR: PerformanceMonitor = PerformanceMonitor::new();
}
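
// Test sketch (illustrative, not part of the original module): exercises the
// derived error-rate and accuracy bookkeeping. It assumes tokio's `macros` and
// `rt` features are available for `#[tokio::test]` in dev builds.
#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn rates_are_derived_from_counters() {
        let monitor = PerformanceMonitor::new();

        // One successful and one failed operation -> 50% error rate.
        monitor.start_operation("op-ok".to_string()).await;
        monitor.end_operation("op-ok".to_string(), true).await;
        monitor.start_operation("op-err".to_string()).await;
        monitor.end_operation("op-err".to_string(), false).await;

        // Three predictions, two correct -> accuracy of 2/3.
        monitor.record_context_prediction(true).await;
        monitor.record_context_prediction(true).await;
        monitor.record_context_prediction(false).await;

        let metrics = monitor.get_metrics().await;
        assert_eq!(metrics.response_times.len(), 2);
        assert!((metrics.error_rate - 0.5).abs() < 1e-9);
        assert!((metrics.context_accuracy - 2.0 / 3.0).abs() < 1e-9);
    }
}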