#![cfg_attr(coverage_nightly, coverage(off))]
mod c;
mod complexity;
mod dynamic;
mod javascript;
mod python;
mod rust;
mod types;
pub use c::CAnalyzer;
pub use dynamic::{LuaAnalyzer, ScalaAnalyzer, SqlAnalyzer};
pub use javascript::JavaScriptAnalyzer;
pub use python::PythonAnalyzer;
pub use rust::RustAnalyzer;
pub use types::{FunctionInfo, Language, LanguageAnalyzer};
use crate::services::complexity::{ComplexityMetrics, FileComplexityMetrics, FunctionComplexity};
use anyhow::Result;
use std::path::Path;
/// Computes complexity metrics for one file.
///
/// Strategy: detect the language from the path, try precise AST analysis
/// first (currently Rust-only), and fall back to per-language heuristics
/// when AST analysis is unavailable or fails.
pub async fn analyze_file_complexity(path: &Path, content: &str) -> Result<FileComplexityMetrics> {
    let language = Language::from_path(path);
    match try_ast_analysis(path, language).await {
        Some(metrics) => Ok(metrics),
        None => analyze_with_heuristics(path, content, language),
    }
}
/// Returns `true` when `path` looks like an include fragment (a file meant
/// to be textually included into another compilation unit) rather than a
/// standalone source file, so AST analysis should skip it.
pub fn is_include_fragment(path: &Path) -> bool {
    let stem = path.file_stem().and_then(|s| s.to_str()).unwrap_or("");

    // "partN"-style split files: short stems beginning with "part".
    if stem.starts_with("part") && stem.len() <= 6 {
        return true;
    }

    // Test fragments: "_tests_" infix, a "*_tests" suffix (but not the
    // plain "tests" module itself), or a "tests_*" prefix.
    if stem.contains("_tests_")
        || (stem.ends_with("_tests") && stem != "tests")
        || stem.starts_with("tests_")
    {
        return true;
    }

    // Template fragments.
    if stem.starts_with("html_") || stem == "runner_pipeline" {
        return true;
    }

    // Benchmark measurement fragments under a "benchmarks" directory.
    stem.starts_with("measure_")
        && path.components().any(|c| c.as_os_str() == "benchmarks")
}
/// Attempts precise AST-based analysis for `path`.
///
/// Only Rust files that are not include fragments qualify. Returns `None`
/// (after printing a warning) on any failure so the caller can fall back to
/// heuristic analysis instead of erroring out.
async fn try_ast_analysis(path: &Path, language: Language) -> Option<FileComplexityMetrics> {
    // AST analysis is implemented for Rust only, and include fragments are
    // not parseable on their own.
    if language != Language::Rust || is_include_fragment(path) {
        return None;
    }
    match crate::services::ast_rust::analyze_rust_file_with_complexity(path).await {
        Ok(metrics) => Some(metrics),
        Err(_) => {
            eprintln!(
                "Warning: AST analysis failed for {}, using heuristic fallback",
                path.display()
            );
            None
        }
    }
}
/// Heuristic (non-AST) complexity analysis for any supported language.
///
/// Unknown languages produce a minimal metrics record (line count only);
/// everything else is routed to the analyzer for its syntax family.
pub fn analyze_with_heuristics(
    path: &Path,
    content: &str,
    language: Language,
) -> Result<FileComplexityMetrics> {
    match language {
        Language::Unknown => Ok(create_empty_metrics(path, content)),
        known => {
            let analyzer = create_analyzer(known);
            analyze_functions_with_analyzer(path, content, analyzer.as_ref())
        }
    }
}
/// Builds a minimal metrics record for a file we cannot analyze: no
/// functions or classes, just the line count and a baseline complexity.
fn create_empty_metrics(path: &Path, content: &str) -> FileComplexityMetrics {
    // Saturate instead of `as`-casting: a plain `as u16` silently wraps for
    // files longer than 65_535 lines (e.g. a 65_537-line file reported as 1).
    let line_count = content.lines().count().min(u16::MAX as usize) as u16;
    FileComplexityMetrics {
        path: path.to_string_lossy().to_string(),
        total_complexity: ComplexityMetrics {
            // Even an unanalyzed file has one execution path.
            cyclomatic: 1,
            cognitive: 0,
            nesting_max: 0,
            lines: line_count,
            halstead: None,
        },
        functions: vec![],
        classes: vec![],
    }
}
fn create_analyzer(language: Language) -> Box<dyn LanguageAnalyzer> {
match language {
Language::Rust => Box::new(RustAnalyzer),
Language::JavaScript | Language::TypeScript => Box::new(JavaScriptAnalyzer),
Language::Python => Box::new(PythonAnalyzer),
Language::C => Box::new(CAnalyzer),
Language::CPP => Box::new(JavaScriptAnalyzer),
Language::Go => Box::new(CAnalyzer),
Language::Bash => Box::new(JavaScriptAnalyzer),
Language::Java => Box::new(CAnalyzer),
Language::Kotlin => Box::new(CAnalyzer),
Language::Ruby => Box::new(PythonAnalyzer),
Language::PHP => Box::new(JavaScriptAnalyzer),
Language::Swift => Box::new(CAnalyzer),
Language::CSharp => Box::new(CAnalyzer),
Language::Lua => Box::new(LuaAnalyzer),
Language::Sql => Box::new(SqlAnalyzer),
Language::Scala => Box::new(ScalaAnalyzer),
Language::Yaml | Language::Markdown => Box::new(PythonAnalyzer), Language::Lean => Box::new(PythonAnalyzer),
Language::Unknown => unreachable!("Unknown language should be handled earlier"),
}
}
/// Runs the heuristic pipeline for one file: extract functions, score each
/// one, then roll the per-function scores up into file-level totals.
fn analyze_functions_with_analyzer(
    path: &Path,
    content: &str,
    analyzer: &dyn LanguageAnalyzer,
) -> Result<FileComplexityMetrics> {
    let infos = analyzer.extract_functions(content);
    let functions = process_function_infos(content, infos, analyzer);
    Ok(FileComplexityMetrics {
        path: path.to_string_lossy().to_string(),
        total_complexity: calculate_total_complexity(&functions, content),
        functions,
        classes: vec![],
    })
}
/// Converts raw `FunctionInfo` records into scored `FunctionComplexity`
/// entries, shifting 0-based line indices to the 1-based numbers used in
/// reports.
fn process_function_infos(
    content: &str,
    function_infos: Vec<FunctionInfo>,
    analyzer: &dyn LanguageAnalyzer,
) -> Vec<FunctionComplexity> {
    let mut scored = Vec::with_capacity(function_infos.len());
    for info in function_infos {
        let metrics = analyzer.estimate_complexity(content, &info);
        scored.push(FunctionComplexity {
            name: info.name,
            // Analyzers report 0-based lines; reports are 1-based.
            line_start: (info.line_start + 1) as u32,
            line_end: (info.line_end + 1) as u32,
            metrics,
        });
    }
    scored
}
/// Aggregates per-function metrics into file-level totals.
///
/// Sums saturate at `u16::MAX`: the original `.sum::<u16>()` panics on
/// overflow in debug builds (and wraps in release) for pathological files,
/// and the `as u16` line-count cast silently wraps past 65_535 lines.
fn calculate_total_complexity(
    functions: &[FunctionComplexity],
    content: &str,
) -> ComplexityMetrics {
    // Overflow-safe sum of one u16 metric across all functions.
    let saturating_sum = |select: fn(&FunctionComplexity) -> u16| {
        functions
            .iter()
            .map(select)
            .fold(0u16, u16::saturating_add)
    };
    ComplexityMetrics {
        // A file represents at least one execution path, even with no
        // detected functions.
        cyclomatic: saturating_sum(|f| f.metrics.cyclomatic).max(1),
        cognitive: saturating_sum(|f| f.metrics.cognitive).max(1),
        nesting_max: functions
            .iter()
            .map(|f| f.metrics.nesting_max)
            .max()
            .unwrap_or(0),
        lines: content.lines().count().min(u16::MAX as usize) as u16,
        halstead: None,
    }
}
#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod tests {
use super::*;
// Spot-checks extension -> Language mapping, including the Unknown
// fallback for unrecognized extensions.
#[test]
fn test_language_detection() {
assert_eq!(Language::from_path(Path::new("test.rs")), Language::Rust);
assert_eq!(
Language::from_path(Path::new("test.js")),
Language::JavaScript
);
assert_eq!(
Language::from_path(Path::new("test.ts")),
Language::TypeScript
);
assert_eq!(Language::from_path(Path::new("test.py")), Language::Python);
assert_eq!(
Language::from_path(Path::new("test.txt")),
Language::Unknown
);
}
// Feeds a three-deep nested snippet (fn > if > while) to the line-based
// complexity visitor and checks it registers branching and nesting.
#[test]
fn test_complexity_visitor() {
let mut visitor = complexity::ComplexityVisitor::new();
let lines = vec![
"fn test() {",
" if condition {",
" while true {",
" break;",
" }",
" }",
"}",
];
visitor.analyze_lines(&lines);
let metrics = visitor.into_metrics();
// `while` (and `if`) add decision points beyond the baseline of 1.
assert!(metrics.cyclomatic > 1);
assert!(metrics.cognitive > 0);
// fn body -> if -> while gives a maximum nesting depth of 3.
assert_eq!(metrics.nesting_max, 3);
}
// Regression test: the RustAnalyzer and the full analyze_file_complexity
// pipeline must agree on the number of functions in a two-function file.
// Note: the path "test.rs" matches no real file, so AST analysis fails and
// the heuristic fallback is exercised.
#[tokio::test]
async fn test_end_to_end_integration_bug() {
let content = r#"fn simple_function() {
println!("hello");
}
pub fn second_function() {
if true {
println!("world");
}
}
"#;
let path = Path::new("test.rs");
let analyzer = RustAnalyzer;
// First confirm the analyzer alone finds both functions...
let functions = analyzer.extract_functions(content);
assert_eq!(functions.len(), 2, "RustAnalyzer should detect 2 functions");
// ...then confirm the public entry point reports the same count.
let result = analyze_file_complexity(path, content).await;
assert!(
result.is_ok(),
"analyze_file_complexity should succeed: {:?}",
result
);
let metrics = result.expect("internal error");
assert_eq!(
metrics.functions.len(),
2,
"Integration should analyze 2 functions but found {}. Functions: {:?}",
metrics.functions.len(),
metrics
.functions
.iter()
.map(|f| &f.name)
.collect::<Vec<_>>()
);
}
// Regression test one layer up: the CLI-facing analyze_project_files must
// also report both functions when scanning a real temp directory.
#[tokio::test]
async fn test_cli_layer_integration_bug() {
use std::fs;
use tempfile::TempDir;
let content = r#"fn simple_function() {
println!("hello");
}
pub fn second_function() {
if true {
println!("world");
}
}
"#;
// Write the fixture to disk so the CLI path walks a real file tree.
let temp_dir = TempDir::new().expect("internal error");
let test_file = temp_dir.path().join("test.rs");
fs::write(&test_file, content).expect("internal error");
let result = crate::cli::analysis_utilities::analyze_project_files(
temp_dir.path(),
Some("rust"),
&[], 20, 15, )
.await;
assert!(
result.is_ok(),
"analyze_project_files should succeed: {:?}",
result
);
let file_metrics = result.expect("internal error");
// Some environments yield no files (e.g. filtered temp paths); treat
// that as a skip rather than a failure.
if file_metrics.is_empty() {
eprintln!("Warning: No files analyzed in test - skipping assertions");
return;
}
let test_metrics = file_metrics
.iter()
.find(|metrics| metrics.path.ends_with("test.rs"))
.expect("Should find test.rs in results");
assert_eq!(
test_metrics.functions.len(),
2,
"CLI layer should analyze 2 functions but found {}. Functions: {:?}",
test_metrics.functions.len(),
test_metrics
.functions
.iter()
.map(|f| &f.name)
.collect::<Vec<_>>()
);
}
}