1use clap::{Args, Subcommand};
4use std::path::{Path, PathBuf};
5use std::time::Duration;
6use voirs_acoustic::performance_targets::{PerformanceTargets, PerformanceTargetsMonitor};
7
// Top-level `performance` command: wraps the subcommand enum so clap can
// route to the individual performance tools (test/monitor/status/report/profile).
#[derive(Debug, Clone, Args)]
pub struct PerformanceCommand {
    #[command(subcommand)]
    pub command: PerformanceSubcommand,
}
14
// Available performance tooling subcommands; each variant carries its own
// argument struct and is dispatched by `execute_performance_command`.
#[derive(Debug, Clone, Subcommand)]
pub enum PerformanceSubcommand {
    // Run a one-shot performance-targets test.
    Test(TestPerformanceArgs),
    // Continuously sample performance metrics at an interval.
    Monitor(MonitorPerformanceArgs),
    // Print the current performance status (text or JSON).
    Status(StatusArgs),
    // Generate a performance report (text/json/html).
    Report(ReportArgs),
    // Profile the synthesis pipeline component-by-component.
    Profile(ProfileArgs),
}
29
// CLI arguments for `performance test`.
//
// The latency/memory/throughput limits map directly onto `PerformanceTargets`;
// the remaining target fields use fixed defaults (see `run_performance_test`).
#[derive(Debug, Clone, Args)]
pub struct TestPerformanceArgs {
    // Name of the performance test to execute.
    #[arg(short, long, default_value = "comprehensive_performance_test")]
    pub test_name: String,

    // Maximum acceptable latency, in milliseconds.
    #[arg(long, default_value = "1.0")]
    pub max_latency_ms: f32,

    // Maximum acceptable memory per model, in megabytes.
    #[arg(long, default_value = "100.0")]
    pub max_memory_mb: f32,

    // Minimum acceptable batch throughput, in sentences per second.
    #[arg(long, default_value = "1000.0")]
    pub min_throughput_sps: f32,

    // Where JSON results are written; results are only saved when this is
    // changed from the default path (see `run_performance_test`).
    #[arg(short, long, default_value = "/tmp/voirs_performance_test")]
    pub output_dir: PathBuf,

    // Print the test configuration and per-violation remediation hints.
    #[arg(short, long)]
    pub verbose: bool,
}
57
// CLI arguments for `performance monitor`.
#[derive(Debug, Clone, Args)]
pub struct MonitorPerformanceArgs {
    // Sampling interval between status updates, in seconds.
    #[arg(short, long, default_value = "5")]
    pub interval_seconds: u64,

    // Total monitoring duration in seconds; 0 means run until Ctrl+C
    // (see `run_performance_monitor`).
    #[arg(short, long, default_value = "60")]
    pub duration_seconds: u64,

    // Optional file to write the plain-text monitoring summary to.
    #[arg(short, long)]
    pub output_file: Option<PathBuf>,

    // Redraw a live status view in the terminal on every interval.
    #[arg(long)]
    pub live_display: bool,
}
77
// CLI arguments for `performance status`.
#[derive(Debug, Clone, Args)]
pub struct StatusArgs {
    // Include summary, latency-optimizer and memory-pool statistics.
    #[arg(long)]
    pub detailed: bool,

    // Output format: "json", or anything else for plain text.
    #[arg(long, default_value = "text")]
    pub format: String,
}
89
// CLI arguments for `performance report`.
#[derive(Debug, Clone, Args)]
pub struct ReportArgs {
    // Reporting window, in minutes.
    #[arg(short, long, default_value = "10")]
    pub duration_minutes: u64,

    // Optional output file; the report is printed to stdout when absent.
    #[arg(short, long)]
    pub output: Option<PathBuf>,

    // Output format: "json", "html", or anything else for plain text.
    #[arg(long, default_value = "text")]
    pub format: String,
}
105
// CLI arguments for `performance profile`.
#[derive(Debug, Clone, Args)]
pub struct ProfileArgs {
    // Text to synthesize for each profiling iteration.
    #[arg(
        short,
        long,
        default_value = "The quick brown fox jumps over the lazy dog."
    )]
    pub text: String,

    // Voice to profile with; the default voice is used when absent.
    #[arg(short, long)]
    pub voice: Option<String>,

    // Number of profiling iterations to run.
    #[arg(short = 'n', long, default_value = "10")]
    pub iterations: usize,

    // Optional JSON output file for the full profile results.
    #[arg(short, long)]
    pub output: Option<PathBuf>,

    // Print per-component min/max/avg/stddev statistics.
    #[arg(long)]
    pub detailed: bool,

    // Print instructions for generating a flamegraph (external tool).
    #[arg(long)]
    pub flamegraph: bool,

    // Collect memory-usage samples during profiling.
    #[arg(long)]
    pub memory: bool,

    // Collect I/O operation counts during profiling.
    #[arg(long)]
    pub io: bool,
}
145
146pub async fn execute_performance_command(
148 args: PerformanceCommand,
149) -> Result<(), Box<dyn std::error::Error>> {
150 match args.command {
151 PerformanceSubcommand::Test(test_args) => run_performance_test(test_args).await,
152 PerformanceSubcommand::Monitor(monitor_args) => run_performance_monitor(monitor_args).await,
153 PerformanceSubcommand::Status(status_args) => show_performance_status(status_args).await,
154 PerformanceSubcommand::Report(report_args) => {
155 generate_performance_report(report_args).await
156 }
157 PerformanceSubcommand::Profile(profile_args) => run_performance_profile(profile_args).await,
158 }
159}
160
/// Run a one-shot performance-targets test and print a human-readable summary.
///
/// Builds a `PerformanceTargets` spec from the CLI arguments, executes the
/// named test via `PerformanceTargetsMonitor`, then prints the summary,
/// target violations, and optimization recommendations.
///
/// NOTE: this function terminates the process via `std::process::exit`
/// (0 when targets are met, 1 otherwise or on test failure), so the `Ok(())`
/// path of the returned `Result` is never reached on completed runs.
async fn run_performance_test(args: TestPerformanceArgs) -> Result<(), Box<dyn std::error::Error>> {
    println!("🎯 VoiRS Performance Targets Test");
    println!("=================================");

    if args.verbose {
        println!("Test configuration:");
        println!("  • Max latency: {:.1}ms", args.max_latency_ms);
        println!("  • Max memory per model: {:.0}MB", args.max_memory_mb);
        println!(
            "  • Min batch throughput: {:.0} sentences/sec",
            args.min_throughput_sps
        );
        println!("  • Output directory: {}", args.output_dir.display());
        println!();
    }

    // Targets come from CLI args; CPU / allocation-rate / cache-hit limits
    // are fixed defaults not exposed on the command line.
    let targets = PerformanceTargets {
        max_latency_ms: args.max_latency_ms,
        max_memory_per_model_mb: args.max_memory_mb,
        min_batch_throughput_sps: args.min_throughput_sps,
        max_cpu_usage_percent: 80.0,
        max_memory_alloc_rate: 500.0,
        min_cache_hit_rate: 85.0,
    };

    let mut monitor = PerformanceTargetsMonitor::new(targets);

    println!("🚀 Running performance test: {}", args.test_name);
    let start_time = std::time::Instant::now();

    match monitor.run_performance_test(&args.test_name).await {
        Ok(test_result) => {
            let elapsed = start_time.elapsed();

            println!("✅ Performance test completed in {:?}", elapsed);
            println!();

            println!("📊 Performance Test Results");
            println!("==========================");
            println!("Test Name: {}", test_result.test_name);
            println!("Duration: {:?}", test_result.duration);
            println!(
                "Targets Met: {}",
                if test_result.meets_targets {
                    "✅ YES"
                } else {
                    "❌ NO"
                }
            );
            println!("Total Measurements: {}", test_result.measurements.len());
            println!();

            // Aggregated metrics versus the configured targets.
            let summary = &test_result.summary;
            println!("Performance Summary:");
            println!(
                "  • Average Latency: {:.2}ms (target: <{:.1}ms)",
                summary.avg_latency_ms, args.max_latency_ms
            );
            println!("  • P95 Latency: {:.2}ms", summary.p95_latency_ms);
            println!(
                "  • Peak Memory: {:.1}MB (target: <{:.0}MB)",
                summary.peak_memory_mb, args.max_memory_mb
            );
            println!(
                "  • Average Throughput: {:.1} ops/sec (target: >{:.0} ops/sec)",
                summary.avg_throughput_ops, args.min_throughput_sps
            );
            println!("  • Success Rate: {:.1}%", summary.success_rate);
            println!();

            if !test_result.violations.is_empty() {
                println!("⚠️  Target Violations:");
                for violation in &test_result.violations {
                    println!(
                        "  • {}: {} (severity: {}/10)",
                        violation.target_type, violation.description, violation.severity
                    );
                    // Remediation hints only shown in verbose mode.
                    if args.verbose {
                        println!("    Remediation: {}", violation.remediation);
                    }
                }
                println!();
            }

            if !test_result.recommendations.is_empty() {
                println!("💡 Optimization Recommendations:");
                for (i, recommendation) in test_result.recommendations.iter().enumerate() {
                    println!("  {}. {}", i + 1, recommendation);
                }
                println!();
            }

            // Persist JSON results only when the user overrode the default
            // output directory.
            if args.output_dir.as_path() != Path::new("/tmp/voirs_performance_test") {
                std::fs::create_dir_all(&args.output_dir)?;
                let results_file = args.output_dir.join("performance_test_results.json");
                let json_content = serde_json::to_string_pretty(&test_result)?;
                std::fs::write(&results_file, json_content)?;
                println!("📁 Results saved to: {}", results_file.display());
            }

            // The process exit code doubles as the pass/fail signal (e.g. for CI).
            if test_result.meets_targets {
                println!("🎉 All performance targets achieved!");
                std::process::exit(0);
            } else {
                println!("⚠️  Some performance targets not met. See recommendations above.");
                std::process::exit(1);
            }
        }
        Err(e) => {
            eprintln!("❌ Performance test failed: {}", e);
            std::process::exit(1);
        }
    }
}
283
/// Continuously monitor performance at a fixed interval, optionally with a
/// live (screen-clearing) terminal display, then print — and optionally
/// save — a final report.
async fn run_performance_monitor(
    args: MonitorPerformanceArgs,
) -> Result<(), Box<dyn std::error::Error>> {
    println!("📊 VoiRS Performance Monitor");
    println!("============================");
    println!("Monitoring interval: {}s", args.interval_seconds);

    // duration_seconds == 0 means "run until interrupted".
    if args.duration_seconds > 0 {
        println!("Duration: {}s", args.duration_seconds);
    } else {
        println!("Duration: Indefinite (Ctrl+C to stop)");
    }
    println!();

    let targets = PerformanceTargets::default();
    let mut monitor = PerformanceTargetsMonitor::new(targets);

    let monitoring_interval = Duration::from_secs(args.interval_seconds);
    monitor.start_monitoring(monitoring_interval).await?;

    println!("🚀 Performance monitoring started...");

    let start_time = std::time::Instant::now();
    let max_duration = if args.duration_seconds > 0 {
        Some(Duration::from_secs(args.duration_seconds))
    } else {
        None
    };

    loop {
        // Stop once the requested duration has elapsed (when one was given).
        if let Some(max_dur) = max_duration {
            if start_time.elapsed() >= max_dur {
                break;
            }
        }

        if args.live_display {
            let status = monitor.get_performance_status();

            // ANSI escape: clear screen and move the cursor to the top-left.
            print!("\x1b[2J\x1b[H");

            println!("📊 VoiRS Performance Monitor - Live View");
            println!("========================================");
            println!("Monitoring time: {:?}", start_time.elapsed());
            println!(
                "Targets met: {}",
                if status.targets_met {
                    "✅ YES"
                } else {
                    "❌ NO"
                }
            );
            println!(
                "Active monitoring: {}",
                if status.monitoring_active {
                    "✅"
                } else {
                    "❌"
                }
            );
            println!("Measurements collected: {}", status.measurement_count);
            println!();

            let summary = &status.current_summary;
            println!("Current Performance:");
            println!(
                "  • Latency: avg {:.2}ms, p95 {:.2}ms",
                summary.avg_latency_ms, summary.p95_latency_ms
            );
            println!(
                "  • Memory: avg {:.1}MB, peak {:.1}MB",
                summary.avg_memory_mb, summary.peak_memory_mb
            );
            println!("  • Throughput: {:.1} ops/sec", summary.avg_throughput_ops);
            println!(
                "  • CPU: avg {:.1}%, peak {:.1}%",
                summary.avg_cpu_usage, summary.peak_cpu_usage
            );

            if !status.active_violations.is_empty() {
                println!();
                println!("⚠️  Active Violations:");
                for violation in &status.active_violations {
                    println!("  • {}: {}", violation.target_type, violation.description);
                }
            }

            println!();
            println!("Press Ctrl+C to stop monitoring...");
        }

        // Sleep between samples; also paces the live display refresh.
        tokio::time::sleep(monitoring_interval).await;
    }

    monitor.stop_monitoring();
    println!("\n📊 Performance monitoring completed.");

    let report = monitor.generate_performance_report(start_time.elapsed());
    println!("\n📊 Final Performance Report:");
    println!("Target Compliance: {:.1}%", report.target_compliance);

    // Optional plain-text log of the monitoring session.
    if let Some(output_file) = args.output_file {
        let report_content = format!(
            "VoiRS Performance Monitoring Report\n\
             ===================================\n\
             Duration: {:?}\n\
             Target Compliance: {:.1}%\n\
             Targets Met: {}\n\
             Measurements: {}\n",
            start_time.elapsed(),
            report.target_compliance,
            report.performance_status.targets_met,
            report.performance_status.measurement_count
        );

        std::fs::write(&output_file, report_content)?;
        println!("📁 Monitoring log saved to: {}", output_file.display());
    }

    Ok(())
}
411
/// Print the current performance status in text (default) or JSON format.
///
/// NOTE(review): the monitor is freshly constructed here, so the reported
/// status reflects a brand-new monitoring session rather than any persistent
/// process state — confirm whether a shared/long-lived monitor was intended.
async fn show_performance_status(args: StatusArgs) -> Result<(), Box<dyn std::error::Error>> {
    println!("📊 VoiRS Performance Status");
    println!("===========================");

    let targets = PerformanceTargets::default();
    let monitor = PerformanceTargetsMonitor::new(targets);
    let status = monitor.get_performance_status();

    match args.format.as_str() {
        "json" => {
            let json_output = serde_json::to_string_pretty(&status)?;
            println!("{}", json_output);
        }
        // Any format other than "json" falls back to plain text.
        _ => {
            println!(
                "Targets Met: {}",
                if status.targets_met {
                    "✅ YES"
                } else {
                    "❌ NO"
                }
            );
            println!(
                "Monitoring Active: {}",
                if status.monitoring_active {
                    "✅"
                } else {
                    "❌"
                }
            );
            println!("Measurements Collected: {}", status.measurement_count);
            println!();

            if args.detailed {
                // Aggregated summary of all collected measurements.
                let summary = &status.current_summary;
                println!("Performance Summary:");
                println!("  • Total Operations: {}", summary.total_operations);
                println!("  • Success Rate: {:.1}%", summary.success_rate);
                println!("  • Average Latency: {:.2}ms", summary.avg_latency_ms);
                println!("  • P95 Latency: {:.2}ms", summary.p95_latency_ms);
                println!("  • Max Latency: {:.2}ms", summary.max_latency_ms);
                println!("  • Average Memory: {:.1}MB", summary.avg_memory_mb);
                println!("  • Peak Memory: {:.1}MB", summary.peak_memory_mb);
                println!(
                    "  • Average Throughput: {:.1} ops/sec",
                    summary.avg_throughput_ops
                );
                println!(
                    "  • Min Throughput: {:.1} ops/sec",
                    summary.min_throughput_ops
                );
                println!("  • Average CPU: {:.1}%", summary.avg_cpu_usage);
                println!("  • Peak CPU: {:.1}%", summary.peak_cpu_usage);
                println!();

                // Latency-optimizer internals.
                let latency_stats = &status.latency_stats;
                println!("Latency Optimizer:");
                println!("  • Average Latency: {:.2}ms", latency_stats.avg_latency_ms);
                println!(
                    "  • Target Latency: {:.2}ms",
                    latency_stats.target_latency_ms
                );
                println!("  • Meeting Target: {}", latency_stats.is_meeting_target);
                println!(
                    "  • Optimal Chunk Size: {}",
                    latency_stats.optimal_chunk_size
                );
                println!("  • Measurements: {}", latency_stats.measurements_count);
                println!();

                // Buffer-pool cache statistics.
                let pool_stats = &status.memory_pool_stats;
                println!("Memory Pool:");
                println!("  • Cache Hits: {}", pool_stats.hits);
                println!("  • Cache Misses: {}", pool_stats.misses);
                println!("  • Returns: {}", pool_stats.returns);
                println!("  • Total Pooled: {}", pool_stats.total_pooled);
                // Avoid division by zero before any pool activity has occurred.
                if pool_stats.hits + pool_stats.misses > 0 {
                    let hit_rate = pool_stats.hits as f64
                        / (pool_stats.hits + pool_stats.misses) as f64
                        * 100.0;
                    println!("  • Hit Rate: {:.1}%", hit_rate);
                }
            }

            if !status.active_violations.is_empty() {
                println!("⚠️  Active Violations:");
                for violation in &status.active_violations {
                    println!("  • {}: {}", violation.target_type, violation.description);
                    // Expected/actual detail and remediation only in --detailed mode.
                    if args.detailed {
                        println!(
                            "    Expected: {:.2}, Actual: {:.2}, Severity: {}/10",
                            violation.expected, violation.actual, violation.severity
                        );
                        println!("    Remediation: {}", violation.remediation);
                    }
                }
            }
        }
    }

    Ok(())
}
515
516async fn generate_performance_report(args: ReportArgs) -> Result<(), Box<dyn std::error::Error>> {
518 println!("š Generating VoiRS Performance Report");
519 println!("======================================");
520
521 let targets = PerformanceTargets::default();
522 let monitor = PerformanceTargetsMonitor::new(targets);
523
524 let report_duration = Duration::from_secs(args.duration_minutes * 60);
525 let report = monitor.generate_performance_report(report_duration);
526
527 let report_content = match args.format.as_str() {
528 "json" => serde_json::to_string_pretty(&report)?,
529 "html" => generate_html_report(&report),
530 _ => generate_text_report(&report),
531 };
532
533 match args.output {
534 Some(output_file) => {
535 std::fs::write(&output_file, &report_content)?;
536 println!("š Report saved to: {}", output_file.display());
537 }
538 None => {
539 println!("{}", report_content);
540 }
541 }
542
543 Ok(())
544}
545
546fn generate_text_report(report: &voirs_acoustic::performance_targets::PerformanceReport) -> String {
548 format!(
549 "VoiRS Performance Report\n\
550 ========================\n\
551 \n\
552 Target Compliance: {:.1}%\n\
553 Targets Met: {}\n\
554 \n\
555 Current Performance:\n\
556 ⢠Latency: avg {:.2}ms, p95 {:.2}ms, max {:.2}ms\n\
557 ⢠Memory: avg {:.1}MB, peak {:.1}MB\n\
558 ⢠Throughput: avg {:.1} ops/s, min {:.1} ops/s\n\
559 ⢠CPU Usage: avg {:.1}%, peak {:.1}%\n\
560 ⢠Success Rate: {:.1}%\n\
561 \n\
562 Active Violations: {}\n\
563 \n\
564 Optimization Suggestions:\n\
565 {}\n",
566 report.target_compliance,
567 report.performance_status.targets_met,
568 report.performance_status.current_summary.avg_latency_ms,
569 report.performance_status.current_summary.p95_latency_ms,
570 report.performance_status.current_summary.max_latency_ms,
571 report.performance_status.current_summary.avg_memory_mb,
572 report.performance_status.current_summary.peak_memory_mb,
573 report.performance_status.current_summary.avg_throughput_ops,
574 report.performance_status.current_summary.min_throughput_ops,
575 report.performance_status.current_summary.avg_cpu_usage,
576 report.performance_status.current_summary.peak_cpu_usage,
577 report.performance_status.current_summary.success_rate,
578 report.performance_status.active_violations.len(),
579 report.optimization_suggestions.join("\n⢠")
580 )
581}
582
/// Render a performance report as a standalone HTML page.
///
/// The compliance figure is styled with the "success" class (green) at
/// >= 80% and "violation" (red) below that threshold.
///
/// NOTE(review): suggestion strings are interpolated into the HTML without
/// escaping; acceptable for internally-generated text, but revisit if
/// suggestions could ever contain markup characters.
fn generate_html_report(report: &voirs_acoustic::performance_targets::PerformanceReport) -> String {
    format!(
        "<!DOCTYPE html>\n\
         <html>\n\
         <head>\n\
         <title>VoiRS Performance Report</title>\n\
         <style>\n\
         body {{ font-family: Arial, sans-serif; margin: 40px; }}\n\
         .header {{ background: #f0f0f0; padding: 20px; border-radius: 5px; }}\n\
         .metric {{ margin: 10px 0; padding: 10px; background: #f9f9f9; border-radius: 3px; }}\n\
         .violation {{ color: #d32f2f; font-weight: bold; }}\n\
         .success {{ color: #388e3c; font-weight: bold; }}\n\
         </style>\n\
         </head>\n\
         <body>\n\
         <div class=\"header\">\n\
         <h1>🎯 VoiRS Performance Report</h1>\n\
         <p>Target Compliance: <span class=\"{}\">{:.1}%</span></p>\n\
         </div>\n\
         \n\
         <h2>Current Performance</h2>\n\
         <div class=\"metric\">Average Latency: {:.2}ms</div>\n\
         <div class=\"metric\">Peak Memory: {:.1}MB</div>\n\
         <div class=\"metric\">Average Throughput: {:.1} ops/s</div>\n\
         <div class=\"metric\">Success Rate: {:.1}%</div>\n\
         \n\
         <h2>Optimization Suggestions</h2>\n\
         <ul>\n\
         {}\n\
         </ul>\n\
         \n\
         </body>\n\
         </html>",
        // CSS class for the compliance badge (>= 80% counts as success).
        if report.target_compliance >= 80.0 {
            "success"
        } else {
            "violation"
        },
        report.target_compliance,
        report.performance_status.current_summary.avg_latency_ms,
        report.performance_status.current_summary.peak_memory_mb,
        report.performance_status.current_summary.avg_throughput_ops,
        report.performance_status.current_summary.success_rate,
        report
            .optimization_suggestions
            .iter()
            .map(|s| format!("<li>{}</li>", s))
            .collect::<Vec<_>>()
            .join("\n")
    )
}
635
/// Profile the synthesis pipeline stage-by-stage and print timing statistics.
///
/// NOTE(review): the G2P / acoustic / vocoder stages are currently *simulated*
/// with fixed `tokio::time::sleep` delays (2ms / 5ms / 3ms) rather than real
/// synthesis calls, and the memory / I/O figures are synthetic as well —
/// confirm before relying on reported numbers.
async fn run_performance_profile(args: ProfileArgs) -> Result<(), Box<dyn std::error::Error>> {
    use serde::{Deserialize, Serialize};
    use std::time::Instant;

    println!("🔍 VoiRS Performance Profiler");
    println!("============================");
    println!();
    println!("Configuration:");
    println!("  • Text: \"{}\"", args.text);
    println!("  • Voice: {}", args.voice.as_deref().unwrap_or("default"));
    println!("  • Iterations: {}", args.iterations);
    println!("  • Detailed: {}", if args.detailed { "yes" } else { "no" });
    println!(
        "  • Memory profiling: {}",
        if args.memory { "yes" } else { "no" }
    );
    println!("  • I/O profiling: {}", if args.io { "yes" } else { "no" });
    println!();

    // Flamegraph generation is delegated to the external cargo-flamegraph tool.
    if args.flamegraph {
        println!("⚠️  Flamegraph generation requires cargo-flamegraph to be installed.");
        println!("   Install with: cargo install flamegraph");
        println!("   Run with: cargo flamegraph --bin voirs -- performance profile");
        println!();
    }

    // Per-iteration wall-clock timing for each pipeline stage, in milliseconds.
    #[derive(Debug, Clone, Serialize, Deserialize)]
    struct ComponentTiming {
        g2p_ms: f64,
        acoustic_ms: f64,
        vocoder_ms: f64,
        total_ms: f64,
    }

    // Full profiling output; serialized to JSON when --output is given.
    #[derive(Debug, Clone, Serialize, Deserialize)]
    struct ProfileResult {
        iterations: usize,
        timings: Vec<ComponentTiming>,
        average: ComponentTiming,
        min: ComponentTiming,
        max: ComponentTiming,
        std_dev: ComponentTiming,
        memory_usage_mb: Option<f64>,
        io_operations: Option<u64>,
    }

    let mut timings = Vec::new();
    let mut memory_samples = Vec::new();

    println!("🚀 Running profiling iterations...");
    let overall_start = Instant::now();

    for i in 0..args.iterations {
        let iter_start = Instant::now();

        // Simulated G2P stage.
        let g2p_start = Instant::now();
        tokio::time::sleep(Duration::from_millis(2)).await;
        let g2p_duration = g2p_start.elapsed();

        // Simulated acoustic-model stage.
        let acoustic_start = Instant::now();
        tokio::time::sleep(Duration::from_millis(5)).await;
        let acoustic_duration = acoustic_start.elapsed();

        // Simulated vocoder stage.
        let vocoder_start = Instant::now();
        tokio::time::sleep(Duration::from_millis(3)).await;
        let vocoder_duration = vocoder_start.elapsed();

        let total_duration = iter_start.elapsed();

        timings.push(ComponentTiming {
            g2p_ms: g2p_duration.as_secs_f64() * 1000.0,
            acoustic_ms: acoustic_duration.as_secs_f64() * 1000.0,
            vocoder_ms: vocoder_duration.as_secs_f64() * 1000.0,
            total_ms: total_duration.as_secs_f64() * 1000.0,
        });

        if args.memory {
            // Synthetic memory figure: 50MB baseline plus 0.1MB per iteration.
            let memory_mb = 50.0 + (i as f64 * 0.1);
            memory_samples.push(memory_mb);
        }

        // Progress line rewritten in place every 10 iterations and at the end.
        if (i + 1) % 10 == 0 || i == args.iterations - 1 {
            print!("\r  Progress: {}/{} iterations", i + 1, args.iterations);
            std::io::Write::flush(&mut std::io::stdout())?;
        }
    }

    println!();
    let overall_duration = overall_start.elapsed();
    println!("✅ Profiling completed in {:?}", overall_duration);
    println!();

    // Aggregate statistics across all iterations.
    let count = timings.len() as f64;
    let average = ComponentTiming {
        g2p_ms: timings.iter().map(|t| t.g2p_ms).sum::<f64>() / count,
        acoustic_ms: timings.iter().map(|t| t.acoustic_ms).sum::<f64>() / count,
        vocoder_ms: timings.iter().map(|t| t.vocoder_ms).sum::<f64>() / count,
        total_ms: timings.iter().map(|t| t.total_ms).sum::<f64>() / count,
    };

    let min = ComponentTiming {
        g2p_ms: timings.iter().map(|t| t.g2p_ms).fold(f64::MAX, f64::min),
        acoustic_ms: timings
            .iter()
            .map(|t| t.acoustic_ms)
            .fold(f64::MAX, f64::min),
        vocoder_ms: timings
            .iter()
            .map(|t| t.vocoder_ms)
            .fold(f64::MAX, f64::min),
        total_ms: timings.iter().map(|t| t.total_ms).fold(f64::MAX, f64::min),
    };

    let max = ComponentTiming {
        g2p_ms: timings.iter().map(|t| t.g2p_ms).fold(f64::MIN, f64::max),
        acoustic_ms: timings
            .iter()
            .map(|t| t.acoustic_ms)
            .fold(f64::MIN, f64::max),
        vocoder_ms: timings
            .iter()
            .map(|t| t.vocoder_ms)
            .fold(f64::MIN, f64::max),
        total_ms: timings.iter().map(|t| t.total_ms).fold(f64::MIN, f64::max),
    };

    // Population variance (divide by N, not N-1) for each stage.
    let variance_g2p = timings
        .iter()
        .map(|t| (t.g2p_ms - average.g2p_ms).powi(2))
        .sum::<f64>()
        / count;
    let variance_acoustic = timings
        .iter()
        .map(|t| (t.acoustic_ms - average.acoustic_ms).powi(2))
        .sum::<f64>()
        / count;
    let variance_vocoder = timings
        .iter()
        .map(|t| (t.vocoder_ms - average.vocoder_ms).powi(2))
        .sum::<f64>()
        / count;
    let variance_total = timings
        .iter()
        .map(|t| (t.total_ms - average.total_ms).powi(2))
        .sum::<f64>()
        / count;

    let std_dev = ComponentTiming {
        g2p_ms: variance_g2p.sqrt(),
        acoustic_ms: variance_acoustic.sqrt(),
        vocoder_ms: variance_vocoder.sqrt(),
        total_ms: variance_total.sqrt(),
    };

    let memory_usage_mb = if args.memory {
        Some(memory_samples.iter().sum::<f64>() / memory_samples.len() as f64)
    } else {
        None
    };

    // Synthetic I/O count: assume 3 operations per iteration.
    let io_operations = if args.io {
        Some((args.iterations * 3) as u64)
    } else {
        None
    };

    let result = ProfileResult {
        iterations: args.iterations,
        timings: timings.clone(),
        average,
        min,
        max,
        std_dev,
        memory_usage_mb,
        io_operations,
    };

    println!("📊 Profile Results");
    println!("==================");
    println!();
    println!("Component Breakdown (Average):");
    println!(
        "  • G2P:      {:>8.2}ms ({:>5.1}%)",
        result.average.g2p_ms,
        (result.average.g2p_ms / result.average.total_ms) * 100.0
    );
    println!(
        "  • Acoustic: {:>8.2}ms ({:>5.1}%)",
        result.average.acoustic_ms,
        (result.average.acoustic_ms / result.average.total_ms) * 100.0
    );
    println!(
        "  • Vocoder:  {:>8.2}ms ({:>5.1}%)",
        result.average.vocoder_ms,
        (result.average.vocoder_ms / result.average.total_ms) * 100.0
    );
    println!("  • Total:    {:>8.2}ms", result.average.total_ms);
    println!();

    if args.detailed {
        println!("Detailed Statistics:");
        println!("  Component  │ Min (ms)  │ Max (ms)  │ Avg (ms)  │ StdDev (ms)");
        println!("  ───────────┼───────────┼───────────┼───────────┼───────────");
        println!(
            "  G2P        │ {:>9.2} │ {:>9.2} │ {:>9.2} │ {:>11.2}",
            result.min.g2p_ms, result.max.g2p_ms, result.average.g2p_ms, result.std_dev.g2p_ms
        );
        println!(
            "  Acoustic   │ {:>9.2} │ {:>9.2} │ {:>9.2} │ {:>11.2}",
            result.min.acoustic_ms,
            result.max.acoustic_ms,
            result.average.acoustic_ms,
            result.std_dev.acoustic_ms
        );
        println!(
            "  Vocoder    │ {:>9.2} │ {:>9.2} │ {:>9.2} │ {:>11.2}",
            result.min.vocoder_ms,
            result.max.vocoder_ms,
            result.average.vocoder_ms,
            result.std_dev.vocoder_ms
        );
        println!(
            "  Total      │ {:>9.2} │ {:>9.2} │ {:>9.2} │ {:>11.2}",
            result.min.total_ms,
            result.max.total_ms,
            result.average.total_ms,
            result.std_dev.total_ms
        );
        println!();
    }

    if let Some(memory) = result.memory_usage_mb {
        println!("Memory Usage:");
        println!("  • Average: {:.1} MB", memory);
        println!();
    }

    if let Some(io_ops) = result.io_operations {
        println!("I/O Operations:");
        println!("  • Total: {} operations", io_ops);
        println!(
            "  • Avg per iteration: {:.1}",
            io_ops as f64 / result.iterations as f64
        );
        println!();
    }

    println!("💡 Performance Insights:");
    // The stage with the largest average time is reported as the bottleneck.
    let bottleneck = if result.average.acoustic_ms > result.average.g2p_ms
        && result.average.acoustic_ms > result.average.vocoder_ms
    {
        "Acoustic model"
    } else if result.average.vocoder_ms > result.average.g2p_ms {
        "Vocoder"
    } else {
        "G2P conversion"
    };
    println!("  • Bottleneck: {}", bottleneck);

    // NOTE(review): this RTF assumes each iteration corresponds to 1.0s of
    // generated audio (total_ms / 1000ms) — confirm once real synthesis
    // replaces the simulated stages above.
    let rtf = result.average.total_ms / 1000.0;
    println!("  • Real-Time Factor: {:.2}x", rtf);

    if rtf < 0.1 {
        println!("  • ✅ Excellent performance (RTF < 0.1)");
    } else if rtf < 0.5 {
        println!("  • ✅ Good performance (RTF < 0.5)");
    } else if rtf < 1.0 {
        println!("  • ⚠️  Acceptable performance (RTF < 1.0)");
    } else {
        println!("  • ❌ Poor performance (RTF >= 1.0) - optimization needed");
    }
    println!();

    // Persist the full result set as pretty-printed JSON when requested.
    if let Some(output_path) = args.output {
        let json_output = serde_json::to_string_pretty(&result)?;
        std::fs::write(&output_path, json_output)?;
        println!("✅ Profile results saved to: {}", output_path.display());
    }

    Ok(())
}
928
#[cfg(test)]
mod tests {
    use super::*;

    // Field values round-trip through the ProfileArgs struct unchanged.
    #[test]
    fn test_profile_args_defaults() {
        let profile_args = ProfileArgs {
            text: String::from("test"),
            voice: None,
            iterations: 10,
            output: None,
            detailed: false,
            flamegraph: false,
            memory: false,
            io: false,
        };

        assert_eq!(profile_args.text, "test");
        assert_eq!(profile_args.iterations, 10);
        assert!(profile_args.voice.is_none());
        assert!(!profile_args.detailed);
        assert!(!profile_args.flamegraph);
        assert!(!profile_args.memory);
        assert!(!profile_args.io);
    }

    // End-to-end profiling run with all optional collectors enabled.
    #[tokio::test]
    async fn test_profile_execution() {
        let profile_args = ProfileArgs {
            text: "Hello world".to_string(),
            voice: Some("test-voice".to_string()),
            iterations: 5,
            output: None,
            detailed: true,
            flamegraph: false,
            memory: true,
            io: true,
        };

        let outcome = run_performance_profile(profile_args).await;
        assert!(outcome.is_ok(), "Profile execution should succeed");
    }

    // Profiling with --output writes a JSON file containing the key fields.
    #[tokio::test]
    async fn test_profile_with_output_file() {
        let output_file = std::env::temp_dir().join("profile_test_output.json");

        let profile_args = ProfileArgs {
            text: "Test profiling".to_string(),
            voice: None,
            iterations: 3,
            output: Some(output_file.clone()),
            detailed: false,
            flamegraph: false,
            memory: false,
            io: false,
        };

        assert!(
            run_performance_profile(profile_args).await.is_ok(),
            "Profile with output file should succeed"
        );
        assert!(output_file.exists(), "Output file should be created");

        let content = std::fs::read_to_string(&output_file).unwrap();
        assert!(
            content.contains("iterations"),
            "Output should contain iterations field"
        );
        assert!(
            content.contains("average"),
            "Output should contain average field"
        );

        // Best-effort cleanup of the temp artifact.
        let _ = std::fs::remove_file(output_file);
    }

    // A fully-populated ProfileArgs keeps every flag and value intact.
    #[test]
    fn test_profile_args_validation() {
        let profile_args = ProfileArgs {
            text: String::from("The quick brown fox"),
            voice: Some(String::from("kokoro-en")),
            iterations: 100,
            output: Some(PathBuf::from("/tmp/profile.json")),
            detailed: true,
            flamegraph: true,
            memory: true,
            io: true,
        };

        assert_eq!(profile_args.iterations, 100);
        assert!(profile_args.detailed);
        assert!(profile_args.flamegraph);
        assert!(profile_args.memory);
        assert!(profile_args.io);
    }

    // Percentage math used for the component-breakdown display.
    #[test]
    fn test_component_timing_calculation() {
        // A 10ms total split 2/5/3 across the three stages.
        let (total_ms, g2p_ms, acoustic_ms, vocoder_ms) = (10.0, 2.0, 5.0, 3.0);

        let g2p_percent = (g2p_ms / total_ms) * 100.0;
        let acoustic_percent = (acoustic_ms / total_ms) * 100.0;
        let vocoder_percent = (vocoder_ms / total_ms) * 100.0;

        assert_eq!(g2p_percent, 20.0);
        assert_eq!(acoustic_percent, 50.0);
        assert_eq!(vocoder_percent, 30.0);

        // The three shares must account for the whole.
        let total_percent = g2p_percent + acoustic_percent + vocoder_percent;
        assert!((total_percent - 100.0_f64).abs() < 0.001);
    }
}