use crate::error::Result;
use lru::LruCache;
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::atomic::{AtomicU64, Ordering};
use std::time::{Duration, Instant};

/// A single timed operation recorded by the optimizer.
#[derive(Debug, Clone)]
pub struct PerformanceMetric {
    pub operation: String,
    pub duration: Duration,
    pub timestamp: Instant,
    pub success: bool,
}

/// One recorded error occurrence, keyed by error type.
#[derive(Debug, Clone)]
pub struct ErrorMetric {
    pub error_type: String,
    pub error_message: String,
    pub timestamp: Instant,
    pub count: u64,
}

/// Usage statistics tracked per class pattern.
#[derive(Debug, Clone)]
pub struct UsageMetric {
    pub class_pattern: String,
    pub usage_count: u64,
    pub last_used: Instant,
    pub average_generation_time: Duration,
}

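/// An async, LRU-backed cache for generated strings that also tracks hit and
/// miss counters for reporting.
///
/// A minimal usage sketch (not compiled here; assumes a Tokio runtime and that
/// this module's items are in scope):
///
/// ```ignore
/// let cache = ClassCache::new(100);
/// cache.put("px-4 py-2".to_string(), "px-4 py-2".to_string()).await;
/// assert_eq!(cache.get("px-4 py-2").await.as_deref(), Some("px-4 py-2"));
/// assert_eq!(cache.hit_rate(), 1.0);
/// ```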
#[derive(Debug)]
pub struct ClassCache {
    cache: Arc<tokio::sync::RwLock<LruCache<String, String>>>,
    hits: AtomicU64,
    misses: AtomicU64,
    total_requests: AtomicU64,
}

impl ClassCache {
    pub fn new(capacity: usize) -> Self {
        // `LruCache` requires a non-zero capacity, so clamp to at least 1;
        // this keeps `OptimizationLevel::None` (capacity 0) from panicking.
        let capacity = std::num::NonZeroUsize::new(capacity.max(1)).unwrap();
        Self {
            cache: Arc::new(tokio::sync::RwLock::new(LruCache::new(capacity))),
            hits: AtomicU64::new(0),
            misses: AtomicU64::new(0),
            total_requests: AtomicU64::new(0),
        }
    }

    /// Looks up `key`, recording a hit or miss.
    pub async fn get(&self, key: &str) -> Option<String> {
        self.total_requests.fetch_add(1, Ordering::Relaxed);

        let mut cache = self.cache.write().await;
        if let Some(value) = cache.get(key) {
            self.hits.fetch_add(1, Ordering::Relaxed);
            Some(value.clone())
        } else {
            self.misses.fetch_add(1, Ordering::Relaxed);
            None
        }
    }

    pub async fn put(&self, key: String, value: String) {
        let mut cache = self.cache.write().await;
        cache.put(key, value);
    }

    /// Fraction of requests served from the cache (0.0 before any requests).
    pub fn hit_rate(&self) -> f64 {
        let hits = self.hits.load(Ordering::Relaxed) as f64;
        let total = self.total_requests.load(Ordering::Relaxed) as f64;

        if total == 0.0 { 0.0 } else { hits / total }
    }

    /// Fraction of requests that missed the cache (0.0 before any requests).
    pub fn miss_rate(&self) -> f64 {
        let misses = self.misses.load(Ordering::Relaxed) as f64;
        let total = self.total_requests.load(Ordering::Relaxed) as f64;

        if total == 0.0 { 0.0 } else { misses / total }
    }

    pub fn total_requests(&self) -> u64 {
        self.total_requests.load(Ordering::Relaxed)
    }

    pub async fn clear(&self) {
        let mut cache = self.cache.write().await;
        cache.clear();
    }

    pub async fn len(&self) -> usize {
        let cache = self.cache.read().await;
        cache.len()
    }

    pub async fn is_empty(&self) -> bool {
        let cache = self.cache.read().await;
        cache.is_empty()
    }
}

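/// Controls how aggressively classes and CSS are optimized; higher levels use
/// larger caches and more optimization passes.
///
/// A small sketch of how a level maps to its settings (not compiled here;
/// assumes this module's items are in scope):
///
/// ```ignore
/// let level = OptimizationLevel::High;
/// assert_eq!(level.cache_capacity(), 1000);
/// assert_eq!(level.optimization_factor(), 2.0);
/// ```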
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OptimizationLevel {
    None,
    Low,
    Medium,
    High,
    Maximum,
}

impl OptimizationLevel {
    pub fn cache_capacity(&self) -> usize {
        match self {
            OptimizationLevel::None => 0,
            OptimizationLevel::Low => 100,
            OptimizationLevel::Medium => 500,
            OptimizationLevel::High => 1000,
            OptimizationLevel::Maximum => 5000,
        }
    }

    pub fn optimization_factor(&self) -> f64 {
        match self {
            OptimizationLevel::None => 1.0,
            OptimizationLevel::Low => 1.2,
            OptimizationLevel::Medium => 1.5,
            OptimizationLevel::High => 2.0,
            OptimizationLevel::Maximum => 3.0,
        }
    }
}

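/// Caches generated class strings and CSS, and records performance, error,
/// and usage metrics.
///
/// A minimal usage sketch (not compiled here; assumes a Tokio runtime and an
/// async context that can propagate the crate's `Result`):
///
/// ```ignore
/// let mut optimizer = PerformanceOptimizer::new();
/// let classes = vec!["p-4".to_string(), "p-4".to_string(), "m-2".to_string()];
/// // At Medium level duplicates are removed and the result is sorted.
/// let generated = optimizer.optimize_class_generation(&classes).await?;
/// assert_eq!(generated, "m-2 p-4");
///
/// let stats = optimizer.get_cache_stats().await;
/// assert_eq!(stats.class_cache_total_requests, 1);
/// ```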
#[derive(Debug)]
pub struct PerformanceOptimizer {
    class_cache: ClassCache,
    css_cache: ClassCache,
    optimization_level: OptimizationLevel,
    performance_metrics: Arc<tokio::sync::RwLock<Vec<PerformanceMetric>>>,
    error_metrics: Arc<tokio::sync::RwLock<Vec<ErrorMetric>>>,
    usage_metrics: Arc<tokio::sync::RwLock<HashMap<String, UsageMetric>>>,
}

impl PerformanceOptimizer {
    pub fn new() -> Self {
        Self::with_optimization_level(OptimizationLevel::Medium)
    }

    pub fn with_optimization_level(level: OptimizationLevel) -> Self {
        let capacity = level.cache_capacity();
        Self {
            class_cache: ClassCache::new(capacity),
            css_cache: ClassCache::new(capacity),
            optimization_level: level,
            performance_metrics: Arc::new(tokio::sync::RwLock::new(Vec::new())),
            error_metrics: Arc::new(tokio::sync::RwLock::new(Vec::new())),
            usage_metrics: Arc::new(tokio::sync::RwLock::new(HashMap::new())),
        }
    }

    /// Generates an optimized class string, serving repeated inputs from the
    /// class cache.
    pub async fn optimize_class_generation(&mut self, classes: &[String]) -> Result<String> {
        let start = Instant::now();
        let cache_key = self.generate_cache_key(classes);

        // Serve from the cache when the same class list was seen before.
        if let Some(cached_result) = self.class_cache.get(&cache_key).await {
            self.record_performance("class_generation_cached", start.elapsed(), true)
                .await;
            return Ok(cached_result);
        }

        let result = self.generate_classes(classes).await?;

        // Cache the freshly generated result for subsequent calls.
        self.class_cache.put(cache_key, result.clone()).await;

        self.record_performance("class_generation", start.elapsed(), true)
            .await;
        Ok(result)
    }

    /// Minifies CSS, serving repeated inputs from the CSS cache.
    pub async fn optimize_css_generation(&mut self, css: &str) -> Result<String> {
        let start = Instant::now();
        let cache_key = format!("css:{}", css);

        if let Some(cached_result) = self.css_cache.get(&cache_key).await {
            self.record_performance("css_generation_cached", start.elapsed(), true)
                .await;
            return Ok(cached_result);
        }

        let result = self.optimize_css(css).await?;

        self.css_cache.put(cache_key, result.clone()).await;

        self.record_performance("css_generation", start.elapsed(), true)
            .await;
        Ok(result)
    }

    /// Builds a cache key by joining the class names with `|`.
    fn generate_cache_key(&self, classes: &[String]) -> String {
        classes.join("|")
    }

    /// Applies the class-level optimizations configured by the current level.
    async fn generate_classes(&self, classes: &[String]) -> Result<String> {
        let optimized_classes = match self.optimization_level {
            OptimizationLevel::None => classes.to_vec(),
            OptimizationLevel::Low => self.optimize_classes_low(classes).await,
            OptimizationLevel::Medium => self.optimize_classes_medium(classes).await,
            OptimizationLevel::High => self.optimize_classes_high(classes).await,
            OptimizationLevel::Maximum => self.optimize_classes_maximum(classes).await,
        };

        Ok(optimized_classes.join(" "))
    }

    /// Low: sort and de-duplicate the class list.
    async fn optimize_classes_low(&self, classes: &[String]) -> Vec<String> {
        let mut unique_classes: Vec<String> = classes.to_vec();
        unique_classes.sort();
        unique_classes.dedup();
        unique_classes
    }

    /// Medium: additionally drop conflicting classes within the same group.
    async fn optimize_classes_medium(&self, classes: &[String]) -> Vec<String> {
        let mut optimized = self.optimize_classes_low(classes).await;

        optimized = self.remove_conflicting_classes(optimized).await;

        optimized
    }

    /// High: additionally merge similar classes.
    async fn optimize_classes_high(&self, classes: &[String]) -> Vec<String> {
        let mut optimized = self.optimize_classes_medium(classes).await;

        optimized = self.merge_similar_classes(optimized).await;

        optimized
    }

    /// Maximum: additionally apply the advanced optimization pass.
    async fn optimize_classes_maximum(&self, classes: &[String]) -> Vec<String> {
        let mut optimized = self.optimize_classes_high(classes).await;

        optimized = self.apply_advanced_optimizations(optimized).await;

        optimized
    }

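    /// Keeps at most one class per group (background, text, border, padding,
    /// margin), preferring the more specific class when two collide.
    ///
    /// A behavioural sketch (not compiled here; assumes `optimizer` is a
    /// `PerformanceOptimizer` in scope):
    ///
    /// ```ignore
    /// // Both classes are in the "background" group; the longer one is
    /// // treated as more specific and wins.
    /// let kept = optimizer
    ///     .remove_conflicting_classes(vec!["bg-b".to_string(), "bg-red-500".to_string()])
    ///     .await;
    /// assert_eq!(kept, vec!["bg-red-500".to_string()]);
    /// ```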
    async fn remove_conflicting_classes(&self, classes: Vec<String>) -> Vec<String> {
        let mut result = Vec::new();
        let mut seen_groups: HashMap<String, String> = HashMap::new();

        for class in classes {
            let group = self.get_class_group(&class);
            if let Some(existing) = seen_groups.get(&group) {
                if self.is_more_specific(&class, existing) {
                    if let Some(pos) = result.iter().position(|c| c == existing) {
                        result.remove(pos);
                    }
                    result.push(class.clone());
                    seen_groups.insert(group, class);
                }
            } else {
                result.push(class.clone());
                seen_groups.insert(group, class);
            }
        }

        result
    }

    /// Placeholder pass: currently returns the classes unchanged.
    async fn merge_similar_classes(&self, classes: Vec<String>) -> Vec<String> {
        classes
    }

    /// Placeholder pass: currently returns the classes unchanged.
    async fn apply_advanced_optimizations(&self, classes: Vec<String>) -> Vec<String> {
        classes
    }

    /// Maps a class name to its conflict group based on its prefix.
    fn get_class_group(&self, class: &str) -> String {
        if class.starts_with("bg-") {
            "background".to_string()
        } else if class.starts_with("text-") {
            "text".to_string()
        } else if class.starts_with("border-") {
            "border".to_string()
        } else if class.starts_with("p-") || class.starts_with("px-") || class.starts_with("py-") {
            "padding".to_string()
        } else if class.starts_with("m-") || class.starts_with("mx-") || class.starts_with("my-") {
            "margin".to_string()
        } else {
            "other".to_string()
        }
    }

    /// Heuristic: treats the longer class name as the more specific one.
    fn is_more_specific(&self, class1: &str, class2: &str) -> bool {
        class1.len() > class2.len()
    }

    /// Minifies CSS by stripping newlines and tabs, collapsing runs of spaces,
    /// and dropping semicolons that precede a closing brace.
    async fn optimize_css(&self, css: &str) -> Result<String> {
        let mut optimized = css.to_string();

        optimized = optimized.replace('\n', "");
        optimized = optimized.replace('\t', "");
        // Collapse any runs of spaces left behind by the removals above.
        while optimized.contains("  ") {
            optimized = optimized.replace("  ", " ");
        }

        optimized = optimized.replace(";}", "}");

        Ok(optimized)
    }

    /// Records a timed operation, keeping only the most recent 1000 entries.
    pub async fn record_performance(&self, operation: &str, duration: Duration, success: bool) {
        let metric = PerformanceMetric {
            operation: operation.to_string(),
            duration,
            timestamp: Instant::now(),
            success,
        };

        let mut metrics = self.performance_metrics.write().await;
        metrics.push(metric);

        // Cap the buffer so long-running processes do not grow without bound.
        let len = metrics.len();
        if len > 1000 {
            metrics.drain(0..len - 1000);
        }
    }

    /// Records an error, keeping only the most recent 1000 entries.
    pub async fn record_error(&self, error_type: &str, error: &dyn std::error::Error) {
        let metric = ErrorMetric {
            error_type: error_type.to_string(),
            error_message: error.to_string(),
            timestamp: Instant::now(),
            count: 1,
        };

        let mut metrics = self.error_metrics.write().await;
        metrics.push(metric);

        let len = metrics.len();
        if len > 1000 {
            metrics.drain(0..len - 1000);
        }
    }

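    /// Records one use of `class_pattern` and folds `generation_time` into a
    /// running average.
    ///
    /// A behavioural sketch (not compiled here; assumes `optimizer` is a
    /// `PerformanceOptimizer` in scope):
    ///
    /// ```ignore
    /// optimizer.record_usage("bg-*", Duration::from_millis(100)).await;
    /// optimizer.record_usage("bg-*", Duration::from_millis(200)).await;
    /// let metrics = optimizer.get_usage_metrics().await;
    /// let usage = &metrics["bg-*"];
    /// assert_eq!(usage.usage_count, 2);
    /// // The average blends the previous value and the new sample 50/50.
    /// assert_eq!(usage.average_generation_time, Duration::from_millis(150));
    /// ```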
    pub async fn record_usage(&self, class_pattern: &str, generation_time: Duration) {
        let mut metrics = self.usage_metrics.write().await;

        if let Some(usage) = metrics.get_mut(class_pattern) {
            usage.usage_count += 1;
            usage.last_used = Instant::now();
            // Simple running blend: the previous average and the newest sample
            // are weighted equally rather than computing a true mean.
            usage.average_generation_time = Duration::from_nanos(
                ((usage.average_generation_time.as_nanos() + generation_time.as_nanos()) / 2)
                    as u64,
            );
        } else {
            metrics.insert(
                class_pattern.to_string(),
                UsageMetric {
                    class_pattern: class_pattern.to_string(),
                    usage_count: 1,
                    last_used: Instant::now(),
                    average_generation_time: generation_time,
                },
            );
        }
    }

    pub async fn get_performance_metrics(&self) -> Vec<PerformanceMetric> {
        let metrics = self.performance_metrics.read().await;
        metrics.clone()
    }

    pub async fn get_error_metrics(&self) -> Vec<ErrorMetric> {
        let metrics = self.error_metrics.read().await;
        metrics.clone()
    }

    pub async fn get_usage_metrics(&self) -> HashMap<String, UsageMetric> {
        let metrics = self.usage_metrics.read().await;
        metrics.clone()
    }

    pub async fn get_cache_stats(&self) -> CacheStats {
        CacheStats {
            class_cache_hit_rate: self.class_cache.hit_rate(),
            class_cache_miss_rate: self.class_cache.miss_rate(),
            class_cache_total_requests: self.class_cache.total_requests(),
            class_cache_size: self.class_cache.len().await,
            css_cache_hit_rate: self.css_cache.hit_rate(),
            css_cache_miss_rate: self.css_cache.miss_rate(),
            css_cache_total_requests: self.css_cache.total_requests(),
            css_cache_size: self.css_cache.len().await,
        }
    }

    pub fn set_optimization_level(&mut self, level: OptimizationLevel) {
        self.optimization_level = level;
    }

    pub fn optimization_level(&self) -> OptimizationLevel {
        self.optimization_level
    }
}

impl Default for PerformanceOptimizer {
    fn default() -> Self {
        Self::new()
    }
}

/// A point-in-time snapshot of both caches' hit/miss statistics and sizes.
#[derive(Debug, Clone)]
pub struct CacheStats {
    pub class_cache_hit_rate: f64,
    pub class_cache_miss_rate: f64,
    pub class_cache_total_requests: u64,
    pub class_cache_size: usize,
    pub css_cache_hit_rate: f64,
    pub css_cache_miss_rate: f64,
    pub css_cache_total_requests: u64,
    pub css_cache_size: usize,
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_optimization_level() {
        assert_eq!(OptimizationLevel::None.cache_capacity(), 0);
        assert_eq!(OptimizationLevel::Low.cache_capacity(), 100);
        assert_eq!(OptimizationLevel::Medium.cache_capacity(), 500);
        assert_eq!(OptimizationLevel::High.cache_capacity(), 1000);
        assert_eq!(OptimizationLevel::Maximum.cache_capacity(), 5000);

        assert_eq!(OptimizationLevel::None.optimization_factor(), 1.0);
        assert_eq!(OptimizationLevel::Low.optimization_factor(), 1.2);
        assert_eq!(OptimizationLevel::Medium.optimization_factor(), 1.5);
        assert_eq!(OptimizationLevel::High.optimization_factor(), 2.0);
        assert_eq!(OptimizationLevel::Maximum.optimization_factor(), 3.0);
    }

    #[test]
    fn test_performance_optimizer_creation() {
        let optimizer = PerformanceOptimizer::new();
        assert_eq!(optimizer.optimization_level(), OptimizationLevel::Medium);
    }

    #[test]
    fn test_performance_optimizer_with_level() {
        let optimizer = PerformanceOptimizer::with_optimization_level(OptimizationLevel::High);
        assert_eq!(optimizer.optimization_level(), OptimizationLevel::High);
    }
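
    // The async tests below are a sketch of expected behaviour; they assume
    // tokio's `rt` and `macros` features are enabled for tests so that
    // `#[tokio::test]` is available.
    #[tokio::test]
    async fn test_class_cache_hit_and_miss_counters() {
        let cache = ClassCache::new(10);
        assert!(cache.get("missing").await.is_none());
        cache.put("key".to_string(), "value".to_string()).await;
        assert_eq!(cache.get("key").await.as_deref(), Some("value"));
        assert_eq!(cache.total_requests(), 2);
        assert_eq!(cache.hit_rate(), 0.5);
        assert_eq!(cache.miss_rate(), 0.5);
    }

    #[tokio::test]
    async fn test_optimize_class_generation_deduplicates_and_caches() {
        let mut optimizer = PerformanceOptimizer::new();
        let classes = vec!["p-4".to_string(), "p-4".to_string(), "m-2".to_string()];

        // Medium level sorts, de-duplicates, and resolves group conflicts.
        let generated = optimizer.optimize_class_generation(&classes).await.unwrap();
        assert_eq!(generated, "m-2 p-4");

        // A second call with the same input is served from the class cache.
        let cached = optimizer.optimize_class_generation(&classes).await.unwrap();
        assert_eq!(cached, generated);
        let stats = optimizer.get_cache_stats().await;
        assert_eq!(stats.class_cache_total_requests, 2);
        assert_eq!(stats.class_cache_hit_rate, 0.5);
    }

    #[tokio::test]
    async fn test_optimize_css_generation_minifies() {
        let mut optimizer = PerformanceOptimizer::new();
        let css = ".btn {\n\tcolor: red;\n}";
        let minified = optimizer.optimize_css_generation(css).await.unwrap();
        assert_eq!(minified, ".btn {color: red}");
    }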
}