use super::ScoringModel;
use super::analyzer::{DimensionScore, FileScore, score_to_grade};
use super::collector::FileMetrics;
use super::normalize::{
normalize_cognitive, normalize_complexity, normalize_duplication, normalize_file_size,
normalize_halstead, normalize_indent, normalize_mi,
};
// Dimension weights for the cognitive scoring model, used both for the
// project-level dimension list and for per-file scoring. Together these
// five weights sum to 1.0.
pub const W_COGCOM: f64 = 0.30;
pub const W_DUP: f64 = 0.20;
pub const W_INDENT: f64 = 0.15;
pub const W_HAL: f64 = 0.20;
pub const W_SIZE: f64 = 0.15;
// Per-file weight table for the cognitive model: (weight, short label).
// Duplication is deliberately absent — it is only measured project-wide —
// so per-file scores are renormalized by the sum of these four (0.80).
pub const FILE_WEIGHTS: [(f64, &str); 4] = [
    (W_COGCOM, "Cogcom"),
    (W_INDENT, "Indent"),
    (W_HAL, "Halstead"),
    (W_SIZE, "Size"),
];
// Dimension weights for the legacy scoring model; the six project-level
// weights (including W_DUP_LEGACY) sum to 1.0.
pub const W_MI: f64 = 0.30;
pub const W_CYCOM: f64 = 0.20;
pub const W_DUP_LEGACY: f64 = 0.15;
pub const W_INDENT_LEGACY: f64 = 0.15;
pub const W_HAL_LEGACY: f64 = 0.15;
pub const W_SIZE_LEGACY: f64 = 0.05;
// Per-file weight table for the legacy model. Duplication is again
// excluded, so per-file scores are renormalized by the sum of these
// five (0.85).
pub const FILE_WEIGHTS_LEGACY: [(f64, &str); 5] = [
    (W_MI, "MI"),
    (W_CYCOM, "Cycom"),
    (W_INDENT_LEGACY, "Indent"),
    (W_HAL_LEGACY, "Halstead"),
    (W_SIZE_LEGACY, "Size"),
];
// Neutral fallback score used by `score_dim` when a per-file metric is
// unavailable; it never triggers an issue entry.
pub const MISSING_DIM_SCORE: f64 = 50.0;
pub fn build_dimensions(
file_metrics: &[FileMetrics],
total_loc: usize,
dup_percent: f64,
model: &ScoringModel,
) -> Vec<DimensionScore> {
match model {
ScoringModel::Cognitive => build_cognitive_dimensions(file_metrics, total_loc, dup_percent),
ScoringModel::Legacy => build_legacy_dimensions(file_metrics, total_loc, dup_percent),
}
}
/// Builds the dimension list for a project with no measurable files.
///
/// Passes an empty file slice, zero total LOC, and a 0.0 duplication
/// percentage, so every LOC-weighted dimension scores 0.0 and only the
/// duplication dimension reflects its normalizer's output for 0%.
pub fn build_empty_dimensions(model: &ScoringModel) -> Vec<DimensionScore> {
    build_dimensions(&[], 0, 0.0, model)
}
pub fn score_file(f: &FileMetrics, model: &ScoringModel) -> FileScore {
match model {
ScoringModel::Cognitive => score_file_cognitive(f),
ScoringModel::Legacy => score_file_legacy(f),
}
}
/// Builds the five project-level dimensions of the cognitive model.
///
/// Per-file metrics are averaged across the project, weighted by LOC;
/// duplication comes directly from the project-wide percentage.
fn build_cognitive_dimensions(
    file_metrics: &[FileMetrics],
    total_loc: usize,
    dup_percent: f64,
) -> Vec<DimensionScore> {
    let cognitive = weighted_mean(file_metrics, total_loc, |f| {
        f.max_cognitive.map(normalize_cognitive)
    });
    let indentation = weighted_mean(file_metrics, total_loc, |f| {
        f.indent_stddev.map(normalize_indent)
    });
    let halstead = weighted_mean(file_metrics, total_loc, |f| {
        f.halstead_effort
            .map(|e| normalize_halstead(e, f.code_lines))
    });
    let size = weighted_mean(file_metrics, total_loc, |f| {
        Some(normalize_file_size(f.code_lines))
    });
    // Duplication is measured for the whole project, not per file.
    let duplication = normalize_duplication(dup_percent);
    vec![
        ("Cognitive Complexity", W_COGCOM, cognitive),
        ("Duplication", W_DUP, duplication),
        ("Indentation Complexity", W_INDENT, indentation),
        ("Halstead Effort", W_HAL, halstead),
        ("File Size", W_SIZE, size),
    ]
    .into_iter()
    .map(|(name, weight, score)| DimensionScore {
        name,
        weight,
        score,
        grade: score_to_grade(score),
    })
    .collect()
}
/// Scores one file under the cognitive model.
///
/// Each dimension is normalized to the 0-100 scale; metrics the collector
/// could not produce fall back to `MISSING_DIM_SCORE` inside `score_dim`.
/// Low-scoring dimensions are recorded as human-readable issue strings.
fn score_file_cognitive(f: &FileMetrics) -> FileScore {
    let mut issues = Vec::new();

    let cognitive = score_dim(
        f.max_cognitive,
        normalize_cognitive,
        |v| format!("Cognitive: {v}"),
        &mut issues,
    );
    let indentation = score_dim(
        f.indent_stddev,
        normalize_indent,
        |v| format!("Indent: {v:.1}"),
        &mut issues,
    );
    let halstead = score_dim(
        f.halstead_effort,
        |e| normalize_halstead(e, f.code_lines),
        |v| format!("Effort: {v:.0}"),
        &mut issues,
    );
    // File size is always available, so it bypasses `score_dim` and uses
    // its own issue threshold expressed directly in lines of code.
    let size = normalize_file_size(f.code_lines);
    if f.code_lines > 1000 {
        issues.push(format!("Size: {} LOC", f.code_lines));
    }

    // Renormalize by the per-file weight total (duplication is excluded
    // from per-file scoring) so the result stays on the 0-100 scale.
    let weight_total: f64 = FILE_WEIGHTS.iter().map(|(w, _)| w).sum();
    let score = (cognitive * W_COGCOM
        + indentation * W_INDENT
        + halstead * W_HAL
        + size * W_SIZE)
        / weight_total;

    FileScore {
        path: f.path.clone(),
        score,
        grade: score_to_grade(score),
        loc: f.code_lines,
        issues,
    }
}
/// Builds the six project-level dimensions of the legacy model.
///
/// Like the cognitive builder, per-file metrics are LOC-weighted averages
/// and duplication is taken from the project-wide percentage.
fn build_legacy_dimensions(
    file_metrics: &[FileMetrics],
    total_loc: usize,
    dup_percent: f64,
) -> Vec<DimensionScore> {
    let maintainability =
        weighted_mean(file_metrics, total_loc, |f| f.mi_score.map(normalize_mi));
    let cyclomatic = weighted_mean(file_metrics, total_loc, |f| {
        f.max_complexity.map(normalize_complexity)
    });
    let indentation = weighted_mean(file_metrics, total_loc, |f| {
        f.indent_stddev.map(normalize_indent)
    });
    let halstead = weighted_mean(file_metrics, total_loc, |f| {
        f.halstead_effort
            .map(|e| normalize_halstead(e, f.code_lines))
    });
    let size = weighted_mean(file_metrics, total_loc, |f| {
        Some(normalize_file_size(f.code_lines))
    });
    let duplication = normalize_duplication(dup_percent);
    vec![
        ("Maintainability Index", W_MI, maintainability),
        ("Cyclomatic Complexity", W_CYCOM, cyclomatic),
        ("Duplication", W_DUP_LEGACY, duplication),
        ("Indentation Complexity", W_INDENT_LEGACY, indentation),
        ("Halstead Effort", W_HAL_LEGACY, halstead),
        ("File Size", W_SIZE_LEGACY, size),
    ]
    .into_iter()
    .map(|(name, weight, score)| DimensionScore {
        name,
        weight,
        score,
        grade: score_to_grade(score),
    })
    .collect()
}
/// Scores one file under the legacy model.
///
/// Mirrors `score_file_cognitive` but over the legacy dimensions (MI and
/// cyclomatic complexity instead of cognitive complexity).
fn score_file_legacy(f: &FileMetrics) -> FileScore {
    let mut issues = Vec::new();

    let maintainability = score_dim(
        f.mi_score,
        normalize_mi,
        |v| format!("MI: {v:.1}"),
        &mut issues,
    );
    let cyclomatic = score_dim(
        f.max_complexity,
        normalize_complexity,
        |v| format!("Complexity: {v}"),
        &mut issues,
    );
    let indentation = score_dim(
        f.indent_stddev,
        normalize_indent,
        |v| format!("Indent: {v:.1}"),
        &mut issues,
    );
    let halstead = score_dim(
        f.halstead_effort,
        |e| normalize_halstead(e, f.code_lines),
        |v| format!("Effort: {v:.0}"),
        &mut issues,
    );
    // Size is always present; flag very large files directly by LOC.
    let size = normalize_file_size(f.code_lines);
    if f.code_lines > 1000 {
        issues.push(format!("Size: {} LOC", f.code_lines));
    }

    // Renormalize by the legacy per-file weight total (duplication is
    // excluded from per-file scoring).
    let weight_total: f64 = FILE_WEIGHTS_LEGACY.iter().map(|(w, _)| w).sum();
    let score = (maintainability * W_MI
        + cyclomatic * W_CYCOM
        + indentation * W_INDENT_LEGACY
        + halstead * W_HAL_LEGACY
        + size * W_SIZE_LEGACY)
        / weight_total;

    FileScore {
        path: f.path.clone(),
        score,
        grade: score_to_grade(score),
        loc: f.code_lines,
        issues,
    }
}
/// Normalizes an optional metric into a 0-100 score, recording an issue
/// label when the score drops below 60.
///
/// A missing metric yields the neutral `MISSING_DIM_SCORE` and never
/// produces an issue entry.
fn score_dim<T: Copy>(
    value: Option<T>,
    normalize: impl Fn(T) -> f64,
    label: impl Fn(T) -> String,
    issues: &mut Vec<String>,
) -> f64 {
    value.map_or(MISSING_DIM_SCORE, |v| {
        let score = normalize(v);
        if score < 60.0 {
            issues.push(label(v));
        }
        score
    })
}
/// LOC-weighted mean of per-file scores on the 0-100 scale.
///
/// Files whose `score_fn` yields `None` are skipped entirely; every other
/// file contributes in proportion to its code lines (floored at 1 so
/// zero-LOC files still count). Returns 0.0 when the project has no lines
/// at all or when no file produced a score.
pub fn weighted_mean(
    files: &[FileMetrics],
    total_loc: usize,
    score_fn: impl Fn(&FileMetrics) -> Option<f64>,
) -> f64 {
    if total_loc == 0 {
        return 0.0;
    }
    let (sum, weight) = files
        .iter()
        .fold((0.0_f64, 0_usize), |(sum, weight), f| match score_fn(f) {
            Some(score) => {
                let loc = f.code_lines.max(1);
                (sum + score * loc as f64, weight + loc)
            }
            None => (sum, weight),
        });
    if weight == 0 {
        0.0
    } else {
        sum / weight as f64
    }
}
// Unit tests live in a sibling file (`scoring_test.rs`) rather than an
// inline module body; compiled only under `cfg(test)`.
#[cfg(test)]
#[path = "scoring_test.rs"]
mod tests;