pub(crate) mod analyzer;
mod collector;
mod normalize;
mod report;
mod scoring;
use std::error::Error;
use std::path::Path;
use crate::dups;
use crate::walk;
use analyzer::{FileScore, ProjectScore, compute_project_score, score_to_grade};
use collector::{FileMetrics, analyze_single_file};
use report::{print_json, print_report};
use scoring::{build_dimensions, build_empty_dimensions, score_file};
#[cfg(test)]
pub(crate) use scoring::{
FILE_WEIGHTS, MISSING_DIM_SCORE, W_CYCOM, W_DUP, W_HAL, W_INDENT, W_MI, W_SIZE, weighted_mean,
};
/// Entry point for the score command: computes the project score for `path`
/// and prints it either as JSON or as a human-readable report.
///
/// `bottom` bounds how many of the worst-scoring files are surfaced;
/// `min_lines` is the duplicate-detection threshold passed through to scoring.
pub fn run(
    path: &Path,
    json: bool,
    include_tests: bool,
    bottom: usize,
    min_lines: usize,
) -> Result<(), Box<dyn Error>> {
    // Only label the output with a target when the path isn't the current
    // directory (and is valid UTF-8).
    let target = path
        .to_str()
        .and_then(|s| (s != ".").then(|| s.to_string()));
    let score = compute_score(path, include_tests, bottom, min_lines)?;
    if json {
        print_json(&score, target.as_deref())?;
        return Ok(());
    }
    print_report(&score, bottom, target.as_deref());
    Ok(())
}
/// Walks `path`, analyzes each source file, and aggregates the results into a
/// `ProjectScore`: an overall grade, per-dimension scores, and the `bottom`
/// worst-scoring files. `min_lines` is the minimum run length for duplicate
/// detection; test files are skipped unless `include_tests` is set.
fn compute_score(
    path: &Path,
    include_tests: bool,
    bottom: usize,
    min_lines: usize,
) -> Result<ProjectScore, Box<dyn Error>> {
    let exclude_tests = !include_tests;

    // Single pass over the tree: gather per-file metrics, the normalized
    // file representations used by duplicate detection, and the normalized
    // line total that duplication is measured against. The two vectors are
    // pushed in lockstep, so they are empty (or not) together.
    let mut metrics: Vec<FileMetrics> = Vec::new();
    let mut normalized: Vec<dups::detector::NormalizedFile> = Vec::new();
    let mut normalized_lines: usize = 0;
    for (file_path, spec) in walk::source_files(path, exclude_tests) {
        let Some(analyzed) = analyze_single_file(&file_path, spec, exclude_tests) else {
            continue;
        };
        normalized_lines += analyzed.normalized_count;
        normalized.push(analyzed.dup_file);
        metrics.push(analyzed.metrics);
    }

    // Nothing analyzable: return a zeroed report instead of scoring.
    if metrics.is_empty() {
        return Ok(ProjectScore {
            score: 0.0,
            grade: score_to_grade(0.0),
            files_analyzed: 0,
            total_loc: 0,
            dimensions: build_empty_dimensions(),
            needs_attention: vec![],
        });
    }

    // Duplication percentage, measured over normalized lines.
    let duplicated: usize = dups::detector::detect_duplicates(&normalized, min_lines, true)
        .iter()
        .map(|g| g.duplicated_lines())
        .sum();
    let dup_percent = if normalized_lines == 0 {
        0.0
    } else {
        duplicated as f64 / normalized_lines as f64 * 100.0
    };

    let total_loc: usize = metrics.iter().map(|f| f.code_lines).sum();
    let dimensions = build_dimensions(&metrics, total_loc, dup_percent);
    let project_score = compute_project_score(&dimensions);

    // Keep only the `bottom` lowest-scoring files, worst first.
    let mut worst: Vec<FileScore> = metrics.iter().map(score_file).collect();
    worst.sort_by(|a, b| a.score.total_cmp(&b.score));
    worst.truncate(bottom);

    Ok(ProjectScore {
        score: project_score,
        grade: score_to_grade(project_score),
        files_analyzed: metrics.len(),
        total_loc,
        dimensions,
        needs_attention: worst,
    })
}
#[cfg(test)]
#[path = "mod_test.rs"]
mod tests;