1use anyhow::Result;
4use serde::{Deserialize, Serialize};
5use std::time::{Duration, Instant};
6
/// Collects performance samples during parsing/analysis sessions and turns
/// them into [`ProfilingReport`]s (summary, bottlenecks, trends, advice).
#[derive(Debug, Clone)]
pub struct PerformanceProfiler {
    // Switches and limits controlling what gets recorded.
    config: ProfilingConfig,
    // Recorded samples, oldest first; capped at `config.max_samples`
    // (oldest entries are evicted by `record_metric`).
    metrics: Vec<PerformanceMetric>,
}
13
/// Configuration switches and limits for [`PerformanceProfiler`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProfilingConfig {
    /// Intended to gate memory-usage snapshots.
    pub track_memory_usage: bool,
    /// Intended to gate parse-time samples.
    pub track_parse_time: bool,
    /// Gates node-creation counts (checked by `record_node_creation`).
    pub track_node_creation: bool,
    /// Gates edge-creation counts (checked by `record_edge_creation`).
    pub track_edge_creation: bool,
    /// Sampling interval in milliseconds.
    /// NOTE(review): not read anywhere in this file — confirm external use.
    pub sample_interval_ms: u64,
    /// Maximum number of samples retained before the oldest are evicted.
    pub max_samples: usize,
    /// Opt-in fine-grained timing.
    /// NOTE(review): not read anywhere in this file — confirm external use.
    pub enable_detailed_timing: bool,
}
25
26impl Default for ProfilingConfig {
27 fn default() -> Self {
28 Self {
29 track_memory_usage: true,
30 track_parse_time: true,
31 track_node_creation: true,
32 track_edge_creation: true,
33 sample_interval_ms: 100,
34 max_samples: 1000,
35 enable_detailed_timing: false,
36 }
37 }
38}
39
/// Category tag for a recorded [`PerformanceMetric`] sample.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum MetricType {
    /// Wall-clock parse duration (recorded in ms).
    ParseTime,
    /// Estimated memory usage snapshot (recorded in MB).
    MemoryUsage,
    /// Number of nodes created (recorded as a count).
    NodeCreation,
    /// Number of edges created (recorded as a count).
    EdgeCreation,
    /// Input file size (recorded in KB by `record_file_size`).
    FileSize,
    // NOTE(review): the variants below are never recorded in this file —
    // presumably used by other modules; confirm before removing.
    TreeDepth,
    NodeCount,
    EdgeCount,
    ValidationTime,
    VisualizationTime,
}
54
/// A single recorded performance sample.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceMetric {
    /// Category of this sample.
    pub metric_type: MetricType,
    /// Numeric value; interpretation depends on `unit`.
    pub value: f64,
    /// Unit label, e.g. "ms", "MB", "KB", "nodes", "edges".
    pub unit: String,
    /// Offset since session start (synthetic — see `get_session_time`).
    pub timestamp: Duration,
    /// Optional free-form description of what produced the sample.
    pub context: Option<String>,
}
64
/// Full output of [`PerformanceProfiler::generate_report`].
#[derive(Debug, Clone)]
pub struct ProfilingReport {
    /// Copy of every raw sample in the session.
    pub metrics: Vec<PerformanceMetric>,
    /// Aggregated totals/averages/rates.
    pub summary: PerformanceSummary,
    /// Bottlenecks, trends, and the efficiency score.
    pub analysis: PerformanceAnalysis,
    /// Human-readable advice derived from the analysis; never empty.
    pub recommendations: Vec<String>,
}
73
/// Aggregate statistics computed from the recorded samples.
#[derive(Debug, Clone, Default)]
pub struct PerformanceSummary {
    /// Sum of all ParseTime samples (ms).
    pub total_parse_time_ms: f64,
    /// Mean of ParseTime samples (ms); 0.0 when none exist.
    pub average_parse_time_ms: f64,
    /// Maximum MemoryUsage snapshot (MB); 0.0 when none exist.
    pub peak_memory_usage_mb: f64,
    /// Sum of NodeCreation sample values.
    pub total_nodes_created: u64,
    /// Sum of EdgeCreation sample values.
    pub total_edges_created: u64,
    /// Total FileSize (KB) divided by total parse time (s).
    pub parse_throughput_kb_per_sec: f64,
    /// Nodes created divided by total parse time (s).
    pub nodes_per_second: f64,
    /// NOTE(review): never written in this file — stays at its `Default`
    /// of 0.0; confirm whether another module fills it in.
    pub validation_overhead_percent: f64,
}
86
/// Derived analysis attached to a [`ProfilingReport`].
#[derive(Debug, Clone, Default)]
pub struct PerformanceAnalysis {
    /// Threshold-based problem areas (see `identify_bottlenecks`).
    pub bottlenecks: Vec<PerformanceBottleneck>,
    /// Detected metric trends (currently parse time only).
    pub trends: Vec<PerformanceTrend>,
    /// NOTE(review): never populated in this file — `analyze_performance`
    /// leaves it at the empty default; confirm intended producers.
    pub comparisons: Vec<PerformanceComparison>,
    /// 0–100 score; 100 minus fixed per-bottleneck deductions.
    pub efficiency_score: f64,
}
95
/// One identified performance problem area.
#[derive(Debug, Clone)]
pub struct PerformanceBottleneck {
    /// Short label, e.g. "Parse Time" or "Memory Usage".
    pub area: String,
    /// How badly this bottleneck hurts (drives score deductions).
    pub severity: BottleneckSeverity,
    /// Human-readable description including the measured value.
    pub description: String,
    /// Estimated impact (hard-coded heuristic per bottleneck kind).
    pub impact_percent: f64,
    /// Suggested remediation, surfaced in recommendations.
    pub suggestion: String,
}
105
/// Severity scale for bottlenecks; maps to efficiency-score deductions of
/// 5/10/20/30 points respectively (see `calculate_efficiency_score`).
#[derive(Debug, Clone, Copy)]
pub enum BottleneckSeverity {
    Low,
    Medium,
    High,
    Critical,
}
114
/// A detected change over time for one metric type.
#[derive(Debug, Clone)]
pub struct PerformanceTrend {
    /// Which metric the trend applies to.
    pub metric_type: MetricType,
    /// Improving / stable / degrading classification.
    pub trend_direction: TrendDirection,
    /// Absolute change in percent between early and late samples.
    pub rate_of_change: f64,
    /// Heuristic confidence in [0, 1]; currently a fixed 0.8.
    pub confidence: f64,
}
123
/// Direction of a metric trend. For parse time, lower is better, so a
/// falling average is `Improving` and a rising one is `Degrading`.
#[derive(Debug, Clone, Copy)]
pub enum TrendDirection {
    Improving,
    Stable,
    Degrading,
}
131
/// Baseline-vs-current comparison for a metric.
/// NOTE(review): nothing in this file constructs this type — presumably a
/// cross-session comparison feature elsewhere; confirm before changing.
#[derive(Debug, Clone)]
pub struct PerformanceComparison {
    /// What is being compared.
    pub description: String,
    /// Reference value.
    pub baseline_value: f64,
    /// Newly measured value.
    pub current_value: f64,
    /// Relative change from baseline, in percent.
    pub change_percent: f64,
    /// Whether the change is considered an improvement.
    pub is_improvement: bool,
}
141
142impl PerformanceProfiler {
143 pub fn new() -> Self {
145 Self {
146 config: ProfilingConfig::default(),
147 metrics: Vec::new(),
148 }
149 }
150
151 pub fn with_config(config: ProfilingConfig) -> Self {
153 Self {
154 config,
155 metrics: Vec::new(),
156 }
157 }
158
159 pub fn start_session(&mut self) {
161 self.metrics.clear();
162 self.record_metric(
163 MetricType::ParseTime,
164 0.0,
165 "ms",
166 Some("Session started".to_string()),
167 );
168 }
169
170 pub fn record_metric(
172 &mut self,
173 metric_type: MetricType,
174 value: f64,
175 unit: &str,
176 context: Option<String>,
177 ) {
178 if self.metrics.len() >= self.config.max_samples {
179 self.metrics.remove(0);
181 }
182
183 let metric = PerformanceMetric {
184 metric_type,
185 value,
186 unit: unit.to_string(),
187 timestamp: self.get_session_time(),
188 context,
189 };
190
191 self.metrics.push(metric);
192 }
193
194 pub fn time_parse_operation<F, R>(&mut self, operation: F) -> Result<R>
196 where
197 F: FnOnce() -> Result<R>,
198 {
199 let start_time = Instant::now();
200 let result = operation()?;
201 let elapsed = start_time.elapsed();
202
203 self.record_metric(
204 MetricType::ParseTime,
205 elapsed.as_millis() as f64,
206 "ms",
207 Some("Parse operation".to_string()),
208 );
209
210 Ok(result)
211 }
212
213 pub fn profile_memory_usage(&mut self) {
215 let estimated_memory_mb = self.estimate_memory_usage();
218
219 self.record_metric(
220 MetricType::MemoryUsage,
221 estimated_memory_mb,
222 "MB",
223 Some("Memory snapshot".to_string()),
224 );
225 }
226
227 fn estimate_memory_usage(&self) -> f64 {
229 let metrics_size = self.metrics.len() * std::mem::size_of::<PerformanceMetric>();
232 metrics_size as f64 / (1024.0 * 1024.0) }
234
235 pub fn record_node_creation(&mut self, count: u64) {
237 if self.config.track_node_creation {
238 self.record_metric(
239 MetricType::NodeCreation,
240 count as f64,
241 "nodes",
242 Some("Nodes created".to_string()),
243 );
244 }
245 }
246
247 pub fn record_edge_creation(&mut self, count: u64) {
249 if self.config.track_edge_creation {
250 self.record_metric(
251 MetricType::EdgeCreation,
252 count as f64,
253 "edges",
254 Some("Edges created".to_string()),
255 );
256 }
257 }
258
259 pub fn record_file_size(&mut self, size_bytes: u64) {
261 self.record_metric(
262 MetricType::FileSize,
263 size_bytes as f64 / 1024.0, "KB",
265 Some("File size".to_string()),
266 );
267 }
268
269 pub fn generate_report(&self) -> ProfilingReport {
271 let summary = self.calculate_summary();
272 let analysis = self.analyze_performance();
273 let recommendations = self.generate_recommendations(&analysis);
274
275 ProfilingReport {
276 metrics: self.metrics.clone(),
277 summary,
278 analysis,
279 recommendations,
280 }
281 }
282
283 fn calculate_summary(&self) -> PerformanceSummary {
285 let mut summary = PerformanceSummary::default();
286
287 let parse_times: Vec<f64> = self
288 .metrics
289 .iter()
290 .filter(|m| matches!(m.metric_type, MetricType::ParseTime))
291 .map(|m| m.value)
292 .collect();
293
294 if !parse_times.is_empty() {
295 summary.total_parse_time_ms = parse_times.iter().sum();
296 summary.average_parse_time_ms = summary.total_parse_time_ms / parse_times.len() as f64;
297 }
298
299 summary.peak_memory_usage_mb = self
301 .metrics
302 .iter()
303 .filter(|m| matches!(m.metric_type, MetricType::MemoryUsage))
304 .map(|m| m.value)
305 .fold(0.0, f64::max);
306
307 summary.total_nodes_created = self
309 .metrics
310 .iter()
311 .filter(|m| matches!(m.metric_type, MetricType::NodeCreation))
312 .map(|m| m.value as u64)
313 .sum();
314
315 summary.total_edges_created = self
316 .metrics
317 .iter()
318 .filter(|m| matches!(m.metric_type, MetricType::EdgeCreation))
319 .map(|m| m.value as u64)
320 .sum();
321
322 let total_file_size_kb: f64 = self
324 .metrics
325 .iter()
326 .filter(|m| matches!(m.metric_type, MetricType::FileSize))
327 .map(|m| m.value)
328 .sum();
329
330 if summary.total_parse_time_ms > 0.0 {
331 summary.parse_throughput_kb_per_sec =
332 total_file_size_kb / (summary.total_parse_time_ms / 1000.0);
333 summary.nodes_per_second =
334 summary.total_nodes_created as f64 / (summary.total_parse_time_ms / 1000.0);
335 }
336
337 summary
338 }
339
340 fn analyze_performance(&self) -> PerformanceAnalysis {
342 PerformanceAnalysis {
343 bottlenecks: self.identify_bottlenecks(),
344 trends: self.analyze_trends(),
345 efficiency_score: self.calculate_efficiency_score(),
346 ..Default::default()
347 }
348 }
349
350 fn identify_bottlenecks(&self) -> Vec<PerformanceBottleneck> {
352 let mut bottlenecks = Vec::new();
353
354 let avg_parse_time = self
356 .metrics
357 .iter()
358 .filter(|m| matches!(m.metric_type, MetricType::ParseTime))
359 .map(|m| m.value)
360 .sum::<f64>()
361 / self.metrics.len().max(1) as f64;
362
363 if avg_parse_time > 1000.0 {
364 bottlenecks.push(PerformanceBottleneck {
366 area: "Parse Time".to_string(),
367 severity: BottleneckSeverity::High,
368 description: format!("Average parse time is {avg_parse_time:.1}ms"),
369 impact_percent: 80.0,
370 suggestion: "Consider optimizing parser grammar or using incremental parsing"
371 .to_string(),
372 });
373 }
374
375 let peak_memory = self
377 .metrics
378 .iter()
379 .filter(|m| matches!(m.metric_type, MetricType::MemoryUsage))
380 .map(|m| m.value)
381 .fold(0.0, f64::max);
382
383 if peak_memory > 100.0 {
384 bottlenecks.push(PerformanceBottleneck {
386 area: "Memory Usage".to_string(),
387 severity: BottleneckSeverity::Medium,
388 description: format!("Peak memory usage is {peak_memory:.1}MB"),
389 impact_percent: 40.0,
390 suggestion: "Consider streaming parsing or memory pooling".to_string(),
391 });
392 }
393
394 bottlenecks
395 }
396
397 fn analyze_trends(&self) -> Vec<PerformanceTrend> {
399 let mut trends = Vec::new();
400
401 let parse_times: Vec<f64> = self
403 .metrics
404 .iter()
405 .filter(|m| matches!(m.metric_type, MetricType::ParseTime))
406 .map(|m| m.value)
407 .collect();
408
409 if parse_times.len() >= 3 {
410 let trend = self.calculate_trend(&parse_times);
411 trends.push(PerformanceTrend {
412 metric_type: MetricType::ParseTime,
413 trend_direction: trend.0,
414 rate_of_change: trend.1,
415 confidence: 0.8,
416 });
417 }
418
419 trends
420 }
421
422 fn calculate_trend(&self, values: &[f64]) -> (TrendDirection, f64) {
424 if values.len() < 2 {
425 return (TrendDirection::Stable, 0.0);
426 }
427
428 let first_half_avg =
429 values[..values.len() / 2].iter().sum::<f64>() / (values.len() / 2) as f64;
430 let second_half_avg = values[values.len() / 2..].iter().sum::<f64>()
431 / (values.len() - values.len() / 2) as f64;
432
433 let change_percent = ((second_half_avg - first_half_avg) / first_half_avg) * 100.0;
434
435 let direction = if change_percent > 5.0 {
436 TrendDirection::Degrading } else if change_percent < -5.0 {
438 TrendDirection::Improving
439 } else {
440 TrendDirection::Stable
441 };
442
443 (direction, change_percent.abs())
444 }
445
446 fn calculate_efficiency_score(&self) -> f64 {
448 let mut score: f64 = 100.0;
449
450 let bottlenecks = self.identify_bottlenecks();
452 for bottleneck in bottlenecks {
453 let deduction = match bottleneck.severity {
454 BottleneckSeverity::Critical => 30.0,
455 BottleneckSeverity::High => 20.0,
456 BottleneckSeverity::Medium => 10.0,
457 BottleneckSeverity::Low => 5.0,
458 };
459 score -= deduction;
460 }
461
462 score.max(0.0)
463 }
464
465 fn generate_recommendations(&self, analysis: &PerformanceAnalysis) -> Vec<String> {
467 let mut recommendations = Vec::new();
468
469 if analysis.efficiency_score < 70.0 {
470 recommendations.push(
471 "Overall performance needs improvement. Consider profiling specific operations."
472 .to_string(),
473 );
474 }
475
476 for bottleneck in &analysis.bottlenecks {
477 recommendations.push(format!("{}: {}", bottleneck.area, bottleneck.suggestion));
478 }
479
480 for trend in &analysis.trends {
481 if matches!(trend.trend_direction, TrendDirection::Degrading) {
482 recommendations.push(format!(
483 "Performance degradation detected in {:?}. Monitor and optimize.",
484 trend.metric_type
485 ));
486 }
487 }
488
489 if recommendations.is_empty() {
490 recommendations
491 .push("Performance looks good! Continue monitoring for regressions.".to_string());
492 }
493
494 recommendations
495 }
496
497 fn get_session_time(&self) -> Duration {
499 Duration::from_millis(self.metrics.len() as u64 * 10)
501 }
502
503 pub fn export_csv(&self) -> String {
505 let mut csv = String::new();
506 csv.push_str("timestamp_ms,metric_type,value,unit,context\n");
507
508 for metric in &self.metrics {
509 csv.push_str(&format!(
510 "{},{:?},{},{},{}\n",
511 metric.timestamp.as_millis(),
512 metric.metric_type,
513 metric.value,
514 metric.unit,
515 metric.context.as_deref().unwrap_or("")
516 ));
517 }
518
519 csv
520 }
521}
522
523impl Default for PerformanceProfiler {
524 fn default() -> Self {
525 Self::new()
526 }
527}
528
529impl ProfilingReport {
530 pub fn format_summary(&self) -> String {
532 let mut output = String::new();
533
534 output.push_str("=== Performance Profile Summary ===\n");
535 output.push_str(&format!(
536 "Total Parse Time: {:.1}ms\n",
537 self.summary.total_parse_time_ms
538 ));
539 output.push_str(&format!(
540 "Average Parse Time: {:.1}ms\n",
541 self.summary.average_parse_time_ms
542 ));
543 output.push_str(&format!(
544 "Peak Memory Usage: {:.1}MB\n",
545 self.summary.peak_memory_usage_mb
546 ));
547 output.push_str(&format!(
548 "Nodes Created: {}\n",
549 self.summary.total_nodes_created
550 ));
551 output.push_str(&format!(
552 "Edges Created: {}\n",
553 self.summary.total_edges_created
554 ));
555 output.push_str(&format!(
556 "Parse Throughput: {:.1} KB/s\n",
557 self.summary.parse_throughput_kb_per_sec
558 ));
559 output.push_str(&format!(
560 "Efficiency Score: {:.1}/100\n",
561 self.analysis.efficiency_score
562 ));
563
564 if !self.analysis.bottlenecks.is_empty() {
565 output.push_str("\n## Performance Issues:\n");
566 for bottleneck in &self.analysis.bottlenecks {
567 output.push_str(&format!(
568 "- {}: {}\n",
569 bottleneck.area, bottleneck.description
570 ));
571 }
572 }
573
574 if !self.recommendations.is_empty() {
575 output.push_str("\n## Recommendations:\n");
576 for rec in &self.recommendations {
577 output.push_str(&format!("- {rec}\n"));
578 }
579 }
580
581 output
582 }
583}
584
#[cfg(test)]
mod tests {
    use super::*;

    /// A fresh profiler uses the default config and starts with no samples.
    #[test]
    fn test_profiler_creation() {
        let profiler = PerformanceProfiler::new();
        assert!(profiler.config.track_parse_time);
        assert!(profiler.config.track_memory_usage);
        assert!(profiler.metrics.is_empty(), "new profiler should start empty");
    }

    /// `record_metric` stores the value and unit verbatim.
    #[test]
    fn test_record_metric() {
        let mut profiler = PerformanceProfiler::new();
        profiler.record_metric(MetricType::ParseTime, 100.0, "ms", None);

        assert_eq!(profiler.metrics.len(), 1);
        assert_eq!(profiler.metrics[0].value, 100.0);
        assert_eq!(profiler.metrics[0].unit, "ms");
    }

    /// Once `max_samples` is reached, the oldest sample is evicted and the
    /// newest ones are kept in insertion order.
    #[test]
    fn test_record_metric_respects_max_samples() {
        let mut profiler = PerformanceProfiler::with_config(ProfilingConfig {
            max_samples: 2,
            ..Default::default()
        });
        profiler.record_metric(MetricType::ParseTime, 1.0, "ms", None);
        profiler.record_metric(MetricType::ParseTime, 2.0, "ms", None);
        profiler.record_metric(MetricType::ParseTime, 3.0, "ms", None);

        assert_eq!(profiler.metrics.len(), 2);
        assert_eq!(profiler.metrics[0].value, 2.0);
        assert_eq!(profiler.metrics[1].value, 3.0);
    }

    /// Summary aggregates parse time and peak memory from raw samples.
    #[test]
    fn test_generate_report() {
        let mut profiler = PerformanceProfiler::new();
        profiler.record_metric(MetricType::ParseTime, 50.0, "ms", None);
        profiler.record_metric(MetricType::MemoryUsage, 10.0, "MB", None);

        let report = profiler.generate_report();
        assert_eq!(report.summary.total_parse_time_ms, 50.0);
        assert_eq!(report.summary.peak_memory_usage_mb, 10.0);
    }

    /// With no samples there are no bottlenecks, so the score is perfect.
    #[test]
    fn test_efficiency_score_calculation() {
        let profiler = PerformanceProfiler::new();
        assert_eq!(profiler.calculate_efficiency_score(), 100.0);
    }
}