//! Performance regression tests for the `pmat` complexity analysis pipeline.
//! Targets come from SPECIFICATION.md (Sections 1.4 and 30); see
//! `PerformanceTargets` below for the concrete numbers.

use std::fs;
use std::time::{Duration, Instant};

use anyhow::Result;
use tempfile::tempdir;

use pmat::cli::handlers::complexity_handlers;
use pmat::services::complexity::ComplexityAnalysisService;
/// Performance targets from SPECIFICATION.md Section 1.4.
#[derive(Debug, Clone)]
pub struct PerformanceTargets {
    /// Cold startup budget in milliseconds.
    pub startup_cold_ms: u64,
    /// Hot (warm-cache) startup budget in milliseconds.
    pub startup_hot_ms: u64,
    /// Single-threaded throughput floor in lines of code per second.
    pub loc_per_sec_st: u64,
    /// Multi-threaded throughput floor in lines of code per second.
    pub loc_per_sec_mt: u64,
    /// Baseline resident set size in megabytes.
    pub base_rss_mb: u64,
    /// Incremental memory cost in kilobytes per thousand lines of code.
    pub per_kloc_kb: u64,
}
impl Default for PerformanceTargets {
    fn default() -> Self {
        Self {
            startup_cold_ms: 127,
            startup_hot_ms: 4,
            loc_per_sec_st: 487_000,
            loc_per_sec_mt: 3_921_000,
            base_rss_mb: 47,
            per_kloc_kb: 312,
        }
    }
}
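/// Generates a synthetic Rust source file of roughly `lines` lines: a small
/// fixed header followed by simple numbered functions. The output is purely a
/// parser workload, not meaningful code.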
fn generate_test_code(lines: usize) -> String {
    let mut code = String::with_capacity(lines * 50);
    code.push_str("// Generated test code for performance testing\n");
    code.push_str("use std::collections::HashMap;\n\n");
    code.push_str("pub struct TestStruct {\n");
    code.push_str("    data: HashMap<String, i32>,\n");
    code.push_str("}\n\n");
    // The header above is 7 lines; each generated function adds 8 more
    // (7 source lines plus a trailing blank), so divide to keep the output
    // close to the requested `lines` total.
    let functions = lines.saturating_sub(7) / 8;
    for i in 0..functions {
        code.push_str(&format!("pub fn test_function_{i}() -> i32 {{\n"));
        code.push_str("    let mut sum = 0;\n");
        code.push_str(&format!("    for j in 0..{i} {{\n"));
        code.push_str(&format!("        sum += j * {i};\n"));
        code.push_str("    }\n");
        code.push_str("    sum\n");
        code.push_str("}\n\n");
    }
    code
}
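/// Approximates cold startup by timing the first in-process construction of
/// the analysis service. This is a proxy: a true cold start would spawn a
/// fresh `pmat` process, which this test deliberately avoids for speed.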
#[test]
fn test_cold_startup_performance() -> Result<()> {
    let targets = PerformanceTargets::default();
    let start = Instant::now();
    let _service = ComplexityAnalysisService::new();
    let duration = start.elapsed();
    let duration_ms = duration.as_millis() as u64;
    assert!(
        duration_ms <= targets.startup_cold_ms,
        "Cold startup took {}ms, expected ≤{}ms (SPECIFICATION.md target)",
        duration_ms,
        targets.startup_cold_ms
    );
    println!("✅ Cold startup: {}ms (target: ≤{}ms)", duration_ms, targets.startup_cold_ms);
    Ok(())
}
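/// Approximates hot startup by constructing the service once to warm any
/// lazily initialized state, then timing a second construction.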
#[test]
fn test_hot_startup_performance() -> Result<()> {
    let targets = PerformanceTargets::default();
    let _service1 = ComplexityAnalysisService::new();
    let start = Instant::now();
    let _service2 = ComplexityAnalysisService::new();
    let duration = start.elapsed();
    let duration_ms = duration.as_millis() as u64;
    assert!(
        duration_ms <= targets.startup_hot_ms,
        "Hot startup took {}ms, expected ≤{}ms (SPECIFICATION.md target)",
        duration_ms,
        targets.startup_hot_ms
    );
    println!("✅ Hot startup: {}ms (target: ≤{}ms)", duration_ms, targets.startup_hot_ms);
    Ok(())
}
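/// Measures single-threaded analysis throughput over a generated 10K-line
/// file. The assertion allows 20% slack below the specification target to
/// absorb machine-to-machine variance.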
#[tokio::test]
async fn test_single_threaded_throughput() -> Result<()> {
    let targets = PerformanceTargets::default();
    let test_lines = 10_000;
    let temp_dir = tempdir()?;
    let test_file = temp_dir.path().join("test.rs");
    let test_code = generate_test_code(test_lines);
    fs::write(&test_file, &test_code)?;
    let start = Instant::now();
    complexity_handlers::handle_analyze_complexity(
        &temp_dir.path().to_path_buf(),
        None,
        20,
        15,
        &pmat::cli::enums::ComplexityOutputFormat::Json,
        Some(test_file.clone()),
        None,
        None,
        None,
        false,
    )
    .await?;
    let duration = start.elapsed();
    let actual_throughput = (test_lines as f64) / duration.as_secs_f64();
    assert!(
        actual_throughput >= targets.loc_per_sec_st as f64 * 0.8,
        "Single-threaded throughput: {:.0} LOC/s, expected ≥80% of {} LOC/s",
        actual_throughput,
        targets.loc_per_sec_st
    );
    println!(
        "✅ Single-threaded throughput: {:.0} LOC/s (target: ≥{} LOC/s)",
        actual_throughput, targets.loc_per_sec_st
    );
    Ok(())
}
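/// Spreads 50K lines across ten modules under `src/` to approximate a
/// realistic multi-file project, then checks aggregate analysis throughput
/// against a deliberately conservative floor.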
#[tokio::test]
async fn test_realistic_project_analysis() -> Result<()> {
    let test_lines = 50_000;
    let temp_dir = tempdir()?;
    let src_dir = temp_dir.path().join("src");
    fs::create_dir(&src_dir)?;
    for i in 0..10 {
        let file_path = src_dir.join(format!("module_{i}.rs"));
        let file_code = generate_test_code(test_lines / 10);
        fs::write(&file_path, &file_code)?;
    }
    let start = Instant::now();
    complexity_handlers::handle_analyze_complexity(
        &temp_dir.path().to_path_buf(),
        None,
        20,
        15,
        &pmat::cli::enums::ComplexityOutputFormat::Summary,
        None,
        None,
        None,
        None,
        false,
    )
    .await?;
    let duration = start.elapsed();
    let actual_throughput = (test_lines as f64) / duration.as_secs_f64();
    // Conservative floor, well below the single-threaded specification
    // target, so the test stays stable on slower CI hardware.
    let min_throughput = 100_000;
    assert!(
        actual_throughput >= min_throughput as f64,
        "Multi-file analysis throughput: {:.0} LOC/s, expected ≥{} LOC/s",
        actual_throughput,
        min_throughput
    );
    println!(
        "✅ Multi-file analysis: {:.0} LOC/s, duration: {:?}",
        actual_throughput, duration
    );
    Ok(())
}
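/// Compares resident set size before and after analyzing a 20K-line file.
/// RSS deltas are coarse (allocator and OS effects are included), so the
/// budget is intentionally loose; on non-Linux hosts the probe returns 0 and
/// the test passes trivially.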
#[tokio::test]
async fn test_memory_usage_patterns() -> Result<()> {
    let test_lines = 20_000;
    let temp_dir = tempdir()?;
    let test_file = temp_dir.path().join("memory_test.rs");
    let test_code = generate_test_code(test_lines);
    fs::write(&test_file, &test_code)?;
    let initial_memory = get_memory_usage_mb();
    complexity_handlers::handle_analyze_complexity(
        &temp_dir.path().to_path_buf(),
        None,
        20,
        15,
        &pmat::cli::enums::ComplexityOutputFormat::Json,
        Some(test_file),
        None,
        None,
        None,
        false,
    )
    .await?;
    let final_memory = get_memory_usage_mb();
    let memory_used = final_memory.saturating_sub(initial_memory);
    let expected_memory_mb = 10;
    assert!(
        memory_used <= expected_memory_mb,
        "Memory usage: {}MB for {}K LOC, expected ≤{}MB",
        memory_used,
        test_lines / 1000,
        expected_memory_mb
    );
    println!("✅ Memory usage: {}MB for {}K LOC", memory_used, test_lines / 1000);
    Ok(())
}
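/// Runs the same analysis five times and fails if the slowest run takes more
/// than twice as long as the fastest, which would indicate unstable
/// performance rather than a simple slowdown.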
#[tokio::test]
async fn test_performance_regression_detection() -> Result<()> {
    const ITERATIONS: usize = 5;
    let test_lines = 5_000;
    let temp_dir = tempdir()?;
    let test_file = temp_dir.path().join("regression_test.rs");
    let test_code = generate_test_code(test_lines);
    fs::write(&test_file, &test_code)?;
    let mut durations = Vec::with_capacity(ITERATIONS);
    for _ in 0..ITERATIONS {
        let start = Instant::now();
        complexity_handlers::handle_analyze_complexity(
            &temp_dir.path().to_path_buf(),
            None,
            20,
            15,
            &pmat::cli::enums::ComplexityOutputFormat::Json,
            Some(test_file.clone()),
            None,
            None,
            None,
            false,
        )
        .await?;
        durations.push(start.elapsed());
    }
    let avg_duration = durations.iter().sum::<Duration>() / ITERATIONS as u32;
    let max_duration = durations.iter().max().unwrap();
    let min_duration = durations.iter().min().unwrap();
    // Use fractional seconds with a 1ns floor: `as_millis()` truncates
    // sub-millisecond runs to zero, which would make the ratio NaN/infinite.
    let variance_ratio = max_duration.as_secs_f64() / min_duration.as_secs_f64().max(1e-9);
    assert!(
        variance_ratio <= 2.0,
        "High performance variance: min={}ms, max={}ms, ratio={:.2}",
        min_duration.as_millis(),
        max_duration.as_millis(),
        variance_ratio
    );
    println!(
        "✅ Performance consistency: avg={}ms, min={}ms, max={}ms",
        avg_duration.as_millis(),
        min_duration.as_millis(),
        max_duration.as_millis()
    );
    Ok(())
}
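/// Analyzes a single 100K-line file and bounds total wall-clock time, since
/// one very large file can stress different paths than many small files.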
#[tokio::test]
async fn test_large_file_performance() -> Result<()> {
    let test_lines = 100_000;
    let temp_dir = tempdir()?;
    let test_file = temp_dir.path().join("large_file.rs");
    let test_code = generate_test_code(test_lines);
    fs::write(&test_file, &test_code)?;
    let start = Instant::now();
    complexity_handlers::handle_analyze_complexity(
        &temp_dir.path().to_path_buf(),
        None,
        20,
        15,
        &pmat::cli::enums::ComplexityOutputFormat::Summary,
        Some(test_file),
        None,
        None,
        None,
        false,
    )
    .await?;
    let duration = start.elapsed();
    let max_duration_secs = 5;
    assert!(
        duration.as_secs() <= max_duration_secs,
        "Large file analysis took {}s, expected ≤{}s for 100K LOC",
        duration.as_secs(),
        max_duration_secs
    );
    let throughput = (test_lines as f64) / duration.as_secs_f64();
    println!("✅ Large file performance: {:.0} LOC/s, duration: {:?}", throughput, duration);
    Ok(())
}
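/// Returns the current resident set size in MB by parsing the `VmRSS:` line
/// of `/proc/self/status`. Only implemented for Linux; any other platform
/// (or any parse failure) yields 0.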
fn get_memory_usage_mb() -> u64 {
    #[cfg(target_os = "linux")]
    {
        if let Ok(status) = fs::read_to_string("/proc/self/status") {
            for line in status.lines() {
                if line.starts_with("VmRSS:") {
                    if let Some(kb_str) = line.split_whitespace().nth(1) {
                        if let Ok(kb) = kb_str.parse::<u64>() {
                            return kb / 1024;
                        }
                    }
                }
            }
        }
    }
    0
}
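/// Toggles for the aggregate suite in `run_performance_test_suite`, so CI
/// profiles can skip categories (e.g. memory checks on non-Linux runners).
/// `test_iterations` is currently informational; the regression test uses
/// its own `ITERATIONS` constant.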
pub struct PerformanceTestConfig {
    pub enable_regression_tests: bool,
    pub enable_memory_tests: bool,
    pub enable_throughput_tests: bool,
    pub test_iterations: usize,
}
impl Default for PerformanceTestConfig {
    fn default() -> Self {
        Self {
            enable_regression_tests: true,
            enable_memory_tests: true,
            enable_throughput_tests: true,
            test_iterations: 3,
        }
    }
}
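/// Runs the enabled test groups in sequence. This is a plain synchronous
/// function: each `#[tokio::test]` above expands to a blocking function that
/// builds its own runtime, so the suite must not itself run inside one.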
pub fn run_performance_test_suite(config: PerformanceTestConfig) -> Result<()> {
    println!("🏃 Running PMAT Performance Test Suite (SPECIFICATION.md Section 30)");
    println!("================================================================");
    if config.enable_throughput_tests {
        println!("\n📊 Throughput Tests:");
        test_single_threaded_throughput()?;
        test_realistic_project_analysis()?;
        test_large_file_performance()?;
    }
    if config.enable_regression_tests {
        println!("\n🔍 Regression Tests:");
        test_performance_regression_detection()?;
    }
    if config.enable_memory_tests {
        println!("\n💾 Memory Tests:");
        test_memory_usage_patterns()?;
    }
    println!("\n✅ All performance tests passed!");
    println!("Performance characteristics meet SPECIFICATION.md Section 1.4 requirements");
    Ok(())
}
#[cfg(test)]
mod performance_specification_tests {
    use super::*;
    // Plain `#[test]`, not `#[tokio::test]`: the suite's inner tests each
    // block on their own runtime and must not be nested inside another one.
    #[test]
    fn test_specification_performance_suite() -> Result<()> {
        let config = PerformanceTestConfig::default();
        run_performance_test_suite(config)
    }
}