pub mod cache;
pub mod parallel;
use std::sync::Arc;
use std::time::Duration;
/// Tunable knobs controlling how validation work is scheduled and cached.
#[derive(Debug, Clone)]
pub struct PerformanceConfig {
    /// Whether work may be spread across multiple threads.
    pub parallel_enabled: bool,
    /// Worker thread count; 0 presumably means "let the runtime decide" — confirm with consumers.
    pub thread_count: usize,
    /// Whether result caching is active.
    pub cache_enabled: bool,
    /// How long cached entries remain valid.
    pub cache_ttl: Duration,
    /// Whether parsing is deferred until results are actually needed.
    pub lazy_parsing: bool,
    /// Memory pool size; units (entries vs. bytes) are not established here — confirm with consumers.
    pub memory_pool_size: usize,
}

impl Default for PerformanceConfig {
    /// Defaults: parallelism and caching on, 5-minute cache TTL, lazy
    /// parsing on, pool size 100, and `thread_count = 0`.
    fn default() -> Self {
        Self {
            parallel_enabled: true,
            thread_count: 0,
            cache_enabled: true,
            cache_ttl: Duration::from_secs(300),
            lazy_parsing: true,
            memory_pool_size: 100,
        }
    }
}
/// Snapshot of measurements recorded for a single monitored task.
#[derive(Debug, Clone, Default)]
pub struct PerformanceMetrics {
    /// Wall-clock validation duration in milliseconds.
    pub validation_time_ms: u64,
    /// Estimated memory footprint in bytes.
    pub memory_usage_bytes: u64,
    /// Number of files handled during the task.
    pub files_processed: usize,
    /// Cache hit rate as a fraction in [0, 1] (`report` renders it as a percentage).
    pub cache_hit_rate: f64,
    /// Speedup factor versus serial execution (1.0 = no speedup).
    pub parallel_speedup: f64,
}

impl PerformanceMetrics {
    /// Maximum acceptable validation time, in milliseconds (exclusive bound).
    pub const MAX_VALIDATION_TIME_MS: u64 = 2000;
    /// Maximum acceptable memory usage, in bytes (100 MiB, exclusive bound).
    pub const MAX_MEMORY_USAGE_BYTES: u64 = 100 * 1024 * 1024;

    /// Returns `true` when both validation time and memory usage are
    /// strictly below their target thresholds.
    pub fn meets_targets(&self) -> bool {
        self.validation_time_ms < Self::MAX_VALIDATION_TIME_MS
            && self.memory_usage_bytes < Self::MAX_MEMORY_USAGE_BYTES
    }

    /// Renders a human-readable, multi-line summary of every metric.
    /// Memory is converted from bytes to MiB; the hit rate is shown as a percentage.
    pub fn report(&self) -> String {
        format!(
            "Performance Metrics:\n\
             - Validation time: {}ms\n\
             - Memory usage: {:.2}MB\n\
             - Files processed: {}\n\
             - Cache hit rate: {:.1}%\n\
             - Parallel speedup: {:.2}x",
            self.validation_time_ms,
            self.memory_usage_bytes as f64 / (1024.0 * 1024.0),
            self.files_processed,
            self.cache_hit_rate * 100.0,
            self.parallel_speedup
        )
    }
}
/// Collects per-task `PerformanceMetrics`, keyed by task name.
pub struct PerformanceMonitor {
    // NOTE(review): unused today; presumably retained for future tuning — confirm before removing.
    #[allow(dead_code)]
    config: Arc<PerformanceConfig>,
    metrics: Arc<dashmap::DashMap<String, PerformanceMetrics>>,
}

impl PerformanceMonitor {
    /// Builds a monitor around the supplied configuration with no
    /// recorded metrics yet.
    pub fn new(config: PerformanceConfig) -> Self {
        Self {
            config: Arc::new(config),
            metrics: Arc::new(dashmap::DashMap::new()),
        }
    }

    /// Begins timing a task; call `TaskMonitor::complete` to record it.
    pub fn start_task(&self, task_name: &str) -> TaskMonitor {
        TaskMonitor::new(task_name.to_string(), Arc::clone(&self.metrics))
    }

    /// Looks up the recorded metrics for `task_name`, if any.
    pub fn get_metrics(&self, task_name: &str) -> Option<PerformanceMetrics> {
        self.metrics.get(task_name).map(|entry| entry.clone())
    }

    /// Returns a snapshot of every recorded `(task name, metrics)` pair.
    pub fn all_metrics(&self) -> Vec<(String, PerformanceMetrics)> {
        self.metrics
            .iter()
            .map(|e| (e.key().clone(), e.value().clone()))
            .collect()
    }
}
/// Timing handle for one task: created by `PerformanceMonitor::start_task`,
/// records its metrics only when `complete` is called (dropping it records nothing).
pub struct TaskMonitor {
    // Key under which the results are stored.
    task_name: String,
    // Moment the task began.
    start_time: std::time::Instant,
    // Shared map the final metrics are written into.
    metrics_map: Arc<dashmap::DashMap<String, PerformanceMetrics>>,
}

impl TaskMonitor {
    /// Starts the clock for `task_name`.
    fn new(
        task_name: String,
        metrics_map: Arc<dashmap::DashMap<String, PerformanceMetrics>>,
    ) -> Self {
        Self {
            task_name,
            start_time: std::time::Instant::now(),
            metrics_map,
        }
    }

    /// Stops the clock and stores this task's metrics, consuming the handle.
    ///
    /// `cache_hit_rate` and `parallel_speedup` are recorded as fixed
    /// placeholders (0.0 and 1.0) — the caller cannot supply them here.
    pub fn complete(self, files_processed: usize) {
        let recorded = PerformanceMetrics {
            validation_time_ms: self.start_time.elapsed().as_millis() as u64,
            memory_usage_bytes: estimate_memory_usage(),
            files_processed,
            cache_hit_rate: 0.0,
            parallel_speedup: 1.0,
        };
        self.metrics_map.insert(self.task_name, recorded);
    }
}
/// Rough heuristic for memory use: a fixed baseline plus a fixed amount
/// per rayon worker thread. This is an estimate, not a measurement.
fn estimate_memory_usage() -> u64 {
    const BASE_BYTES: u64 = 20 * 1024 * 1024; // 20 MiB baseline
    const PER_THREAD_BYTES: u64 = 5 * 1024 * 1024; // 5 MiB per worker
    BASE_BYTES + rayon::current_num_threads() as u64 * PER_THREAD_BYTES
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Defaults should enable parallelism and caching with thread count 0.
    #[test]
    fn test_performance_config_default() {
        let cfg = PerformanceConfig::default();
        assert!(cfg.parallel_enabled);
        assert!(cfg.cache_enabled);
        assert_eq!(cfg.thread_count, 0);
    }

    /// Metrics inside both limits pass; an over-time run fails.
    #[test]
    fn test_performance_metrics() {
        let within_limits = PerformanceMetrics {
            validation_time_ms: 1500,
            memory_usage_bytes: 50 * 1024 * 1024,
            ..Default::default()
        };
        assert!(within_limits.meets_targets());

        let too_slow = PerformanceMetrics {
            validation_time_ms: 3000,
            memory_usage_bytes: 50 * 1024 * 1024,
            ..Default::default()
        };
        assert!(!too_slow.meets_targets());
    }

    /// Completing a task makes its metrics retrievable by name.
    #[test]
    fn test_performance_monitor() {
        let monitor = PerformanceMonitor::new(PerformanceConfig::default());
        monitor.start_task("test_task").complete(10);
        assert!(monitor.get_metrics("test_task").is_some());
    }
}