use std::path::{Path, PathBuf};
const SECS_PER_DAY: u64 = 86_400;
use crate::health_types::{
FileHealthScore, HOTSPOT_SCORE_THRESHOLD, HealthScore, HealthScorePenalties, HealthTrend,
HotspotEntry, RiskProfile, SNAPSHOT_SCHEMA_VERSION, TrendCount, TrendDirection, TrendMetric,
TrendPoint, VitalSigns, VitalSignsCounts, VitalSignsSnapshot, letter_grade,
};
/// Borrowed inputs for computing project vital signs.
///
/// Optional fields correspond to analyses that may not have run; when
/// `None`, the derived metrics come back as `None` rather than zero.
pub struct VitalSignsInput<'a> {
    /// Per-module extraction results (complexity, line offsets, etc.).
    pub modules: &'a [fallow_core::extract::ModuleInfo],
    /// Per-file health scores, when file scoring ran.
    pub file_scores: Option<&'a [FileHealthScore]>,
    /// Churn/complexity hotspot entries, when hotspot analysis ran.
    pub hotspots: Option<&'a [HotspotEntry]>,
    /// Total number of files discovered in the project.
    pub total_files: usize,
    /// Aggregate dead-code and dependency counts, when that analysis ran.
    pub analysis_counts: Option<AnalysisCounts>,
}
/// Raw counts produced by the dead-code and dependency analyses.
pub struct AnalysisCounts {
    /// Total exported symbols across the project.
    pub total_exports: usize,
    /// Files with no inbound references.
    pub dead_files: usize,
    /// Exported symbols never imported anywhere.
    pub dead_exports: usize,
    /// Declared dependencies never used in code.
    pub unused_deps: usize,
    /// Circular dependency cycles detected.
    pub circular_deps: usize,
    /// Total declared dependencies.
    pub total_deps: usize,
}
/// Computes project-wide vital signs from extraction results plus any
/// optional analysis outputs (file scores, hotspots, dead-code counts).
///
/// Percentages are rounded to one decimal place. Metrics whose inputs
/// were not supplied come back as `None` rather than zero so callers can
/// distinguish "not analyzed" from "analyzed, found nothing".
#[expect(
    clippy::cast_possible_truncation,
    reason = "percentile indices, dep counts, hotspot counts, and LOC per file are bounded by project size"
)]
pub fn compute_vital_signs(input: &VitalSignsInput<'_>) -> VitalSigns {
    // Flatten every function's cyclomatic complexity across all modules;
    // sorted ascending so percentiles can be read by index.
    let mut all_cyclomatic: Vec<u16> = input
        .modules
        .iter()
        .flat_map(|m| m.complexity.iter().map(|c| c.cyclomatic))
        .collect();
    all_cyclomatic.sort_unstable();
    let avg_cyclomatic = if all_cyclomatic.is_empty() {
        0.0
    } else {
        let sum: u64 = all_cyclomatic.iter().map(|&c| u64::from(c)).sum();
        // Mean, rounded to one decimal place.
        (sum as f64 / all_cyclomatic.len() as f64 * 10.0).round() / 10.0
    };
    let p90_cyclomatic = if all_cyclomatic.is_empty() {
        0
    } else {
        // Nearest-rank percentile: ceil(n * 0.9) is a 1-based rank, then
        // clamped into a valid zero-based index (>= 0 since n >= 1).
        let idx = (all_cyclomatic.len() as f64 * 0.9).ceil() as usize;
        let idx = idx.min(all_cyclomatic.len()) - 1;
        u32::from(all_cyclomatic[idx])
    };
    // Dead-code and dependency figures exist only when that analysis ran;
    // guard the divisions against empty projects.
    let (dead_file_pct, dead_export_pct, unused_dep_count, circular_dep_count) =
        if let Some(ref counts) = input.analysis_counts {
            let dfp = if input.total_files > 0 {
                Some((counts.dead_files as f64 / input.total_files as f64 * 1000.0).round() / 10.0)
            } else {
                Some(0.0)
            };
            let dep = if counts.total_exports > 0 {
                Some(
                    (counts.dead_exports as f64 / counts.total_exports as f64 * 1000.0).round()
                        / 10.0,
                )
            } else {
                Some(0.0)
            };
            (
                dfp,
                dep,
                Some(counts.unused_deps as u32),
                Some(counts.circular_deps as u32),
            )
        } else {
            (None, None, None, None)
        };
    // Mean maintainability index across scored files, when available.
    let maintainability_avg = input.file_scores.and_then(|scores| {
        if scores.is_empty() {
            return None;
        }
        let sum: f64 = scores.iter().map(|s| s.maintainability_index).sum();
        Some((sum / scores.len() as f64 * 10.0).round() / 10.0)
    });
    // Only entries at or above the hotspot score threshold count.
    let hotspot_count = input.hotspots.map(|entries| {
        entries
            .iter()
            .filter(|e| e.score >= HOTSPOT_SCORE_THRESHOLD)
            .count() as u32
    });
    // Line-offset table length serves as the per-module line count.
    let total_loc: u64 = input
        .modules
        .iter()
        .map(|m| m.line_offsets.len() as u64)
        .sum();
    let counts = input.analysis_counts.as_ref().map(|ac| VitalSignsCounts {
        total_files: input.total_files,
        total_exports: ac.total_exports,
        dead_files: ac.dead_files,
        dead_exports: ac.dead_exports,
        duplicated_lines: None,
        total_lines: Some(total_loc as usize),
        files_scored: input.file_scores.map(<[_]>::len),
        total_deps: ac.total_deps,
    });
    // Risk profiles are meaningful only when at least one function was
    // analyzed; `all_cyclomatic` doubles as the "any functions" flag.
    let unit_size_profile = if all_cyclomatic.is_empty() {
        None
    } else {
        let all_line_counts: Vec<u32> = input
            .modules
            .iter()
            .flat_map(|m| m.complexity.iter().map(|c| c.line_count))
            .collect();
        Some(compute_size_risk_profile(&all_line_counts))
    };
    let unit_interfacing_profile = if all_cyclomatic.is_empty() {
        None
    } else {
        let all_param_counts: Vec<u8> = input
            .modules
            .iter()
            .flat_map(|m| m.complexity.iter().map(|c| c.param_count))
            .collect();
        Some(compute_interfacing_risk_profile(&all_param_counts))
    };
    let (p95_fan_in, coupling_high_pct) = if let Some(scores) = input.file_scores {
        compute_coupling_concentration(scores)
    } else {
        (None, None)
    };
    VitalSigns {
        dead_file_pct,
        dead_export_pct,
        avg_cyclomatic,
        p90_cyclomatic,
        // Duplication is not computed here; always None in this path.
        duplication_pct: None,
        hotspot_count,
        maintainability_avg,
        unused_dep_count,
        circular_dep_count,
        counts,
        unit_size_profile,
        unit_interfacing_profile,
        p95_fan_in,
        coupling_high_pct,
        total_loc,
    }
}
/// Buckets function body lengths into size-risk tiers and reports each
/// tier's share of all functions as a percentage (one decimal place).
///
/// Tiers: low <= 15 lines, medium 16-30, high 31-60, very high > 60.
/// An empty input yields an all-zero profile.
fn compute_size_risk_profile(line_counts: &[u32]) -> RiskProfile {
    if line_counts.is_empty() {
        return RiskProfile {
            low_risk: 0.0,
            medium_risk: 0.0,
            high_risk: 0.0,
            very_high_risk: 0.0,
        };
    }
    // Single pass: tally each function into exactly one tier.
    let (mut low, mut medium, mut high, mut very_high) = (0u32, 0u32, 0u32, 0u32);
    for &lc in line_counts {
        match lc {
            0..=15 => low += 1,
            16..=30 => medium += 1,
            31..=60 => high += 1,
            _ => very_high += 1,
        }
    }
    let total = line_counts.len() as f64;
    // Bucket count -> share of total, rounded to one decimal place.
    let pct = |count: u32| (f64::from(count) / total * 1000.0).round() / 10.0;
    RiskProfile {
        low_risk: pct(low),
        medium_risk: pct(medium),
        high_risk: pct(high),
        very_high_risk: pct(very_high),
    }
}
/// Buckets function parameter counts into interfacing-risk tiers and
/// reports each tier's share as a percentage (one decimal place).
///
/// Tiers: low <= 2 params, medium 3-4, high 5-6, very high >= 7.
/// An empty input yields an all-zero profile.
fn compute_interfacing_risk_profile(param_counts: &[u8]) -> RiskProfile {
    if param_counts.is_empty() {
        return RiskProfile {
            low_risk: 0.0,
            medium_risk: 0.0,
            high_risk: 0.0,
            very_high_risk: 0.0,
        };
    }
    // Single pass: tally each function into exactly one tier.
    let (mut low, mut medium, mut high, mut very_high) = (0u32, 0u32, 0u32, 0u32);
    for &pc in param_counts {
        match pc {
            0..=2 => low += 1,
            3..=4 => medium += 1,
            5..=6 => high += 1,
            _ => very_high += 1,
        }
    }
    let total = param_counts.len() as f64;
    // Bucket count -> share of total, rounded to one decimal place.
    let pct = |count: u32| (f64::from(count) / total * 1000.0).round() / 10.0;
    RiskProfile {
        low_risk: pct(low),
        medium_risk: pct(medium),
        high_risk: pct(high),
        very_high_risk: pct(very_high),
    }
}
/// Computes the 95th-percentile fan-in across scored files and the share
/// of files whose fan-in strictly exceeds `max(p95, 10)`.
///
/// Returns `(None, None)` when there are no scored files.
#[expect(
    clippy::cast_possible_truncation,
    reason = "fan-in values are bounded by project size"
)]
fn compute_coupling_concentration(scores: &[FileHealthScore]) -> (Option<u32>, Option<f64>) {
    if scores.is_empty() {
        return (None, None);
    }
    let mut fan_ins: Vec<usize> = scores.iter().map(|s| s.fan_in).collect();
    fan_ins.sort_unstable();
    // Nearest-rank p95: ceil(n * 0.95) as a 1-based rank, clamped to a
    // valid zero-based index.
    let rank = (fan_ins.len() as f64 * 0.95).ceil() as usize;
    let p95_value = fan_ins[rank.min(fan_ins.len()) - 1];
    // The 10-file floor keeps tiny projects from flagging everything.
    let cutoff = p95_value.max(10);
    let over = fan_ins.iter().filter(|&&fi| fi > cutoff).count();
    let share = (over as f64 / fan_ins.len() as f64 * 1000.0).round() / 10.0;
    (Some(p95_value as u32), Some(share))
}
/// Derives a 0-100 health score by subtracting capped, individually
/// rounded penalties from a perfect 100, then maps it to a letter grade.
///
/// Penalties for optional metrics stay `None` (not zero) when the
/// underlying analysis did not run, so they neither help nor hurt, and
/// the per-penalty breakdown is returned for reporting.
pub fn compute_health_score(vs: &VitalSigns, total_files: usize) -> HealthScore {
    // Round to one decimal place; applied to every penalty individually.
    let round1 = |v: f64| -> f64 { (v * 10.0).round() / 10.0 };
    let mut score = 100.0_f64;
    // Dead files: 0.2 points per percent, capped at 15.
    let dead_files_penalty = vs.dead_file_pct.map(|dfp| round1((dfp * 0.2).min(15.0)));
    if let Some(p) = dead_files_penalty {
        score -= p;
    }
    // Dead exports: same weighting and cap as dead files.
    let dead_exports_penalty = vs.dead_export_pct.map(|dep| round1((dep * 0.2).min(15.0)));
    if let Some(p) = dead_exports_penalty {
        score -= p;
    }
    // Average complexity: 5 points per unit above a 1.5 baseline, cap 20.
    let complexity_penalty = round1(((vs.avg_cyclomatic - 1.5).max(0.0) * 5.0).min(20.0));
    score -= complexity_penalty;
    // Tail complexity: 1 point per p90 unit above 10, cap 10.
    let p90_penalty = round1((f64::from(vs.p90_cyclomatic) - 10.0).clamp(0.0, 10.0));
    score -= p90_penalty;
    // Maintainability: 0.5 points per index point below 70, cap 15.
    let maintainability_penalty = vs
        .maintainability_avg
        .map(|mi| round1(((70.0 - mi).max(0.0) * 0.5).min(15.0)));
    if let Some(p) = maintainability_penalty {
        score -= p;
    }
    // Hotspots: normalized by project size so large repos are not
    // punished for absolute counts; cap 10. Zero files -> zero penalty.
    let hotspot_penalty = vs.hotspot_count.map(|hc| {
        if total_files > 0 {
            round1((f64::from(hc) / total_files as f64 * 200.0).min(10.0))
        } else {
            0.0
        }
    });
    if let Some(p) = hotspot_penalty {
        score -= p;
    }
    // One point per unused dependency, cap 10.
    let unused_deps_penalty = vs
        .unused_dep_count
        .map(|ud| round1(f64::from(ud).min(10.0)));
    if let Some(p) = unused_deps_penalty {
        score -= p;
    }
    // One point per circular dependency cycle, cap 10.
    let circular_deps_penalty = vs
        .circular_dep_count
        .map(|cd| round1(f64::from(cd).min(10.0)));
    if let Some(p) = circular_deps_penalty {
        score -= p;
    }
    // Oversized functions: 0.5 points per percent above a 5% allowance,
    // cap 10.
    let unit_size_penalty = vs
        .unit_size_profile
        .as_ref()
        .map(|profile| round1(((profile.very_high_risk - 5.0).max(0.0) * 0.5).min(10.0)));
    if let Some(p) = unit_size_penalty {
        score -= p;
    }
    // Coupling: 0.25 points per p95 fan-in unit above 30, cap 5.
    let coupling_penalty = vs
        .p95_fan_in
        .map(|p95| round1(((f64::from(p95) - 30.0).max(0.0) * 0.25).min(5.0)));
    if let Some(p) = coupling_penalty {
        score -= p;
    }
    // Duplication: 1 point per percent above a 5% allowance, cap 10.
    let duplication_penalty = vs
        .duplication_pct
        .map(|dp| round1(((dp - 5.0).max(0.0) * 1.0).min(10.0)));
    if let Some(p) = duplication_penalty {
        score -= p;
    }
    // Final tidy-up: round once more, clamp into range, and grade.
    let score = (score * 10.0).round() / 10.0;
    let score = score.clamp(0.0, 100.0);
    let grade = letter_grade(score);
    HealthScore {
        score,
        grade,
        penalties: HealthScorePenalties {
            dead_files: dead_files_penalty,
            dead_exports: dead_exports_penalty,
            complexity: complexity_penalty,
            p90_complexity: p90_penalty,
            maintainability: maintainability_penalty,
            hotspots: hotspot_penalty,
            unused_deps: unused_deps_penalty,
            circular_deps: circular_deps_penalty,
            unit_size: unit_size_penalty,
            coupling: coupling_penalty,
            duplication: duplication_penalty,
        },
    }
}
/// Assembles raw `VitalSignsCounts` from the input, defaulting the
/// analysis-derived counts to zero when no `AnalysisCounts` is present.
pub fn build_counts(input: &VitalSignsInput<'_>) -> VitalSignsCounts {
    // Option<&AnalysisCounts> is Copy, so it can be queried per field.
    let ac = input.analysis_counts.as_ref();
    // Sum line-offset table lengths to approximate total lines of code.
    let mut total_lines = 0usize;
    for module in input.modules {
        total_lines += module.line_offsets.len();
    }
    VitalSignsCounts {
        total_files: input.total_files,
        total_exports: ac.map_or(0, |c| c.total_exports),
        dead_files: ac.map_or(0, |c| c.dead_files),
        dead_exports: ac.map_or(0, |c| c.dead_exports),
        duplicated_lines: None,
        total_lines: Some(total_lines),
        files_scored: input.file_scores.map(<[_]>::len),
        total_deps: ac.map_or(0, |c| c.total_deps),
    }
}
/// Returns the short HEAD commit hash for the repository at `root`, or
/// `None` when git is unavailable or the command fails.
fn git_sha(root: &Path) -> Option<String> {
    let output = std::process::Command::new("git")
        .args(["rev-parse", "--short", "HEAD"])
        .current_dir(root)
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    Some(String::from_utf8_lossy(&output.stdout).trim().to_string())
}
/// Returns the current branch name for the repository at `root`, or
/// `None` when git fails or HEAD is detached.
fn git_branch(root: &Path) -> Option<String> {
    let output = std::process::Command::new("git")
        .args(["rev-parse", "--abbrev-ref", "HEAD"])
        .current_dir(root)
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    let name = String::from_utf8_lossy(&output.stdout).trim().to_string();
    // A detached HEAD reports the literal string "HEAD"; treat it as no
    // branch rather than returning a misleading name.
    (name != "HEAD").then_some(name)
}
/// Assembles a persistable snapshot from computed vital signs, attaching
/// schema/version metadata, the current timestamp, and best-effort git
/// context (both git lookups degrade to `None` outside a repository).
pub fn build_snapshot(
    vital_signs: VitalSigns,
    counts: VitalSignsCounts,
    root: &Path,
    shallow_clone: bool,
    health_score: Option<&HealthScore>,
    coverage_model: Option<crate::health_types::CoverageModel>,
) -> VitalSignsSnapshot {
    // Generated once so every field derived from "now" agrees.
    let now = chrono_timestamp();
    VitalSignsSnapshot {
        snapshot_schema_version: SNAPSHOT_SCHEMA_VERSION,
        version: env!("CARGO_PKG_VERSION").to_string(),
        timestamp: now,
        git_sha: git_sha(root),
        git_branch: git_branch(root),
        shallow_clone,
        vital_signs,
        counts,
        score: health_score.map(|s| s.score),
        grade: health_score.map(|s| s.grade.to_string()),
        coverage_model,
    }
}
/// Formats the current system time as an ISO-8601 UTC timestamp
/// (e.g. `2026-03-25T14:02:07Z`) using only the standard library.
fn chrono_timestamp() -> String {
    use std::time::SystemTime;
    // A clock reading earlier than 1970 clamps to the epoch.
    let secs = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap_or_default()
        .as_secs();
    let (year, month, day) = days_to_ymd(secs / SECS_PER_DAY);
    let remainder = secs % SECS_PER_DAY;
    format!(
        "{year:04}-{month:02}-{day:02}T{:02}:{:02}:{:02}Z",
        remainder / 3600,
        (remainder % 3600) / 60,
        remainder % 60
    )
}
/// Converts a count of days since the Unix epoch (1970-01-01) to a
/// `(year, month, day)` triple in the proleptic Gregorian calendar.
///
/// Uses Howard Hinnant's `civil_from_days` algorithm: the epoch is
/// shifted so 400-year eras start on March 1st, which pushes the leap
/// day to the end of the internal year.
const fn days_to_ymd(days: u64) -> (u64, u64, u64) {
    // Shift the epoch from 1970-01-01 to 0000-03-01 (719,468 days back).
    let shifted = days + 719_468;
    // 400-year eras of exactly 146,097 days each.
    let era = shifted / 146_097;
    let day_of_era = shifted - era * 146_097;
    // Year within the era, correcting for the 4/100/400 leap pattern.
    let year_of_era =
        (day_of_era - day_of_era / 1460 + day_of_era / 36524 - day_of_era / 146_096) / 365;
    let mut year = year_of_era + era * 400;
    let day_of_year = day_of_era - (365 * year_of_era + year_of_era / 4 - year_of_era / 100);
    // Month counted from March: 0 = March, ..., 11 = February.
    let month_from_march = (5 * day_of_year + 2) / 153;
    let day = day_of_year - (153 * month_from_march + 2) / 5 + 1;
    let month = if month_from_march < 10 {
        month_from_march + 3
    } else {
        month_from_march - 9
    };
    // January and February belong to the next civil year.
    if month <= 2 {
        year += 1;
    }
    (year, month, day)
}
/// Serializes `snapshot` to pretty-printed JSON and writes it to disk.
///
/// When `explicit_path` is given it is used verbatim; otherwise the file
/// goes to `<root>/.fallow/snapshots/<timestamp>.json`, with colons in
/// the timestamp replaced by dashes (colons are invalid in Windows file
/// names). Parent directories are created as needed.
///
/// # Errors
/// Returns a descriptive message when directory creation, serialization,
/// or the final write fails.
pub fn save_snapshot(
    snapshot: &VitalSignsSnapshot,
    root: &Path,
    explicit_path: Option<&Path>,
) -> Result<PathBuf, String> {
    let path = explicit_path.map_or_else(
        || {
            let dir = root.join(".fallow").join("snapshots");
            // BUG FIX: the timestamp-derived name was computed but never
            // used — a hard-coded placeholder filename meant every
            // snapshot overwrote the same file, breaking history.
            let filename = snapshot.timestamp.replace(':', "-");
            dir.join(format!("{filename}.json"))
        },
        Path::to_path_buf,
    );
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("failed to create snapshot directory: {e}"))?;
    }
    let json =
        serde_json::to_string_pretty(snapshot).map_err(|e| format!("failed to serialize: {e}"))?;
    std::fs::write(&path, json).map_err(|e| format!("failed to write snapshot: {e}"))?;
    Ok(path)
}
/// Loads every parseable snapshot JSON from `.fallow/snapshots` under
/// `root`, sorted ascending by timestamp.
///
/// Non-JSON files are ignored; unreadable or corrupt files are skipped
/// with a warning on stderr so one bad file cannot hide the rest.
pub fn load_snapshots(root: &Path) -> Vec<VitalSignsSnapshot> {
    let dir = root.join(".fallow").join("snapshots");
    let Ok(entries) = std::fs::read_dir(&dir) else {
        return Vec::new();
    };
    let mut snapshots: Vec<VitalSignsSnapshot> = entries
        .flatten()
        .filter_map(|entry| {
            let path = entry.path();
            if !path.extension().is_some_and(|ext| ext == "json") {
                return None;
            }
            let content = match std::fs::read_to_string(&path) {
                Ok(content) => content,
                Err(e) => {
                    eprintln!("warning: could not read snapshot {}: {e}", path.display());
                    return None;
                }
            };
            match serde_json::from_str::<VitalSignsSnapshot>(&content) {
                Ok(snap) => Some(snap),
                Err(e) => {
                    eprintln!("warning: skipping corrupt snapshot {}: {e}", path.display());
                    None
                }
            }
        })
        .collect();
    snapshots.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));
    snapshots
}
/// Minimum absolute delta for a metric to count as moving; anything
/// smaller is reported as stable.
const TREND_TOLERANCE: f64 = 0.5;
/// Compares current vital signs against the most recent snapshot and
/// returns per-metric deltas plus an overall direction.
///
/// Returns `None` when `snapshots` is empty. Metrics available on only
/// one side of the comparison (e.g. an analysis that ran in just one of
/// the two runs) are skipped rather than compared against zero.
#[expect(
    clippy::too_many_lines,
    reason = "trend computation compares many metric dimensions"
)]
pub fn compute_trend(
    current_vs: &VitalSigns,
    current_counts: &VitalSignsCounts,
    current_score: Option<f64>,
    snapshots: &[VitalSignsSnapshot],
) -> Option<HealthTrend> {
    // `load_snapshots` sorts ascending by timestamp, so the last entry
    // is the most recent baseline.
    let prev = snapshots.last()?;
    let compared_to = TrendPoint {
        timestamp: prev.timestamp.clone(),
        git_sha: prev.git_sha.clone(),
        score: prev.score,
        grade: prev.grade.clone(),
        coverage_model: prev.coverage_model.clone(),
        snapshot_schema_version: Some(prev.snapshot_schema_version),
    };
    let mut metrics = Vec::new();
    if let (Some(prev_score), Some(cur_score)) = (prev.score, current_score) {
        metrics.push(make_metric(
            "score",
            "Health Score",
            prev_score,
            cur_score,
            "",
            true,
            None,
            None,
        ));
    }
    if let (Some(prev_val), Some(cur_val)) =
        (prev.vital_signs.dead_file_pct, current_vs.dead_file_pct)
    {
        metrics.push(make_metric(
            "dead_file_pct",
            "Dead Files",
            prev_val,
            cur_val,
            "%",
            false,
            Some(TrendCount {
                value: prev.counts.dead_files,
                total: prev.counts.total_files,
            }),
            Some(TrendCount {
                value: current_counts.dead_files,
                total: current_counts.total_files,
            }),
        ));
    }
    if let (Some(prev_val), Some(cur_val)) =
        (prev.vital_signs.dead_export_pct, current_vs.dead_export_pct)
    {
        metrics.push(make_metric(
            "dead_export_pct",
            "Dead Exports",
            prev_val,
            cur_val,
            "%",
            false,
            Some(TrendCount {
                value: prev.counts.dead_exports,
                total: prev.counts.total_exports,
            }),
            Some(TrendCount {
                value: current_counts.dead_exports,
                total: current_counts.total_exports,
            }),
        ));
    }
    // Average cyclomatic complexity is always present on both sides.
    metrics.push(make_metric(
        "avg_cyclomatic",
        "Avg Cyclomatic",
        prev.vital_signs.avg_cyclomatic,
        current_vs.avg_cyclomatic,
        "",
        false,
        None,
        None,
    ));
    if let (Some(prev_val), Some(cur_val)) = (
        prev.vital_signs.maintainability_avg,
        current_vs.maintainability_avg,
    ) {
        metrics.push(make_metric(
            "maintainability_avg",
            "Maintainability",
            prev_val,
            cur_val,
            "",
            true,
            None,
            None,
        ));
    }
    if let (Some(prev_val), Some(cur_val)) = (
        prev.vital_signs.unused_dep_count,
        current_vs.unused_dep_count,
    ) {
        metrics.push(make_metric(
            "unused_dep_count",
            "Unused Deps",
            f64::from(prev_val),
            f64::from(cur_val),
            "",
            false,
            None,
            None,
        ));
    }
    if let (Some(prev_val), Some(cur_val)) = (
        prev.vital_signs.circular_dep_count,
        current_vs.circular_dep_count,
    ) {
        metrics.push(make_metric(
            "circular_dep_count",
            "Circular Deps",
            f64::from(prev_val),
            f64::from(cur_val),
            "",
            false,
            None,
            None,
        ));
    }
    if let (Some(prev_val), Some(cur_val)) =
        (prev.vital_signs.hotspot_count, current_vs.hotspot_count)
    {
        metrics.push(make_metric(
            "hotspot_count",
            "Hotspots",
            f64::from(prev_val),
            f64::from(cur_val),
            "",
            false,
            None,
            None,
        ));
    }
    // BUG FIX: the `&` of `&current_vs` had been HTML-entity-mangled
    // into an invalid identifier character, which did not compile.
    if let (Some(prev_profile), Some(cur_profile)) = (
        prev.vital_signs.unit_size_profile.as_ref(),
        current_vs.unit_size_profile.as_ref(),
    ) {
        metrics.push(make_metric(
            "unit_size_very_high_pct",
            "Oversized Fns",
            prev_profile.very_high_risk,
            cur_profile.very_high_risk,
            "%",
            false,
            None,
            None,
        ));
    }
    if let (Some(prev_val), Some(cur_val)) = (prev.vital_signs.p95_fan_in, current_vs.p95_fan_in) {
        metrics.push(make_metric(
            "p95_fan_in",
            "P95 Fan-in",
            f64::from(prev_val),
            f64::from(cur_val),
            "",
            false,
            None,
            None,
        ));
    }
    if let (Some(prev_val), Some(cur_val)) =
        (prev.vital_signs.duplication_pct, current_vs.duplication_pct)
    {
        metrics.push(make_metric(
            "duplication_pct",
            "Duplication",
            prev_val,
            cur_val,
            "%",
            false,
            prev.counts
                .duplicated_lines
                .zip(prev.counts.total_lines)
                .map(|(d, t)| TrendCount { value: d, total: t }),
            current_counts
                .duplicated_lines
                .zip(current_counts.total_lines)
                .map(|(d, t)| TrendCount { value: d, total: t }),
        ));
    }
    // Majority vote across metric directions decides the overall trend.
    let (improving, declining) =
        metrics
            .iter()
            .fold((0usize, 0usize), |(imp, dec), m| match m.direction {
                TrendDirection::Improving => (imp + 1, dec),
                TrendDirection::Declining => (imp, dec + 1),
                TrendDirection::Stable => (imp, dec),
            });
    let overall_direction = match improving.cmp(&declining) {
        std::cmp::Ordering::Greater => TrendDirection::Improving,
        std::cmp::Ordering::Less => TrendDirection::Declining,
        std::cmp::Ordering::Equal => TrendDirection::Stable,
    };
    Some(HealthTrend {
        compared_to,
        metrics,
        snapshots_loaded: snapshots.len(),
        overall_direction,
    })
}
/// Builds a single trend metric, deriving the one-decimal delta and its
/// direction from the metric's polarity and the shared tolerance.
#[expect(
    clippy::too_many_arguments,
    reason = "metric builder needs all parameters"
)]
fn make_metric(
    name: &'static str,
    label: &'static str,
    previous: f64,
    current: f64,
    unit: &'static str,
    higher_is_better: bool,
    previous_count: Option<TrendCount>,
    current_count: Option<TrendCount>,
) -> TrendMetric {
    let delta = (current - previous).round_to(1);
    // Within tolerance counts as stable; otherwise an increase improves
    // the metric exactly when higher values are better.
    let direction = if delta.abs() < TREND_TOLERANCE {
        TrendDirection::Stable
    } else {
        let moved_up = delta > 0.0;
        if moved_up == higher_is_better {
            TrendDirection::Improving
        } else {
            TrendDirection::Declining
        }
    };
    TrendMetric {
        name,
        label,
        previous,
        current,
        delta,
        direction,
        unit,
        previous_count,
        current_count,
    }
}
/// Decimal-place rounding for floating-point values.
trait RoundTo {
    /// Rounds `self` to `decimals` decimal places (round half away from
    /// zero, matching `f64::round`).
    fn round_to(self, decimals: u32) -> Self;
}

impl RoundTo for f64 {
    fn round_to(self, decimals: u32) -> Self {
        // Scale up, round to the nearest integer, then scale back down.
        let scale = Self::powi(10.0, decimals as i32);
        (self * scale).round() / scale
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a minimal `ModuleInfo` whose only function has the given
    /// cyclomatic complexity (10 lines long, zero parameters).
    fn make_module(id: u32, cyclomatic: u16) -> fallow_core::extract::ModuleInfo {
        fallow_core::extract::ModuleInfo {
            file_id: fallow_core::discover::FileId(id),
            exports: Vec::new(),
            imports: Vec::new(),
            re_exports: Vec::new(),
            dynamic_imports: Vec::new(),
            dynamic_import_patterns: Vec::new(),
            require_calls: Vec::new(),
            member_accesses: Vec::new(),
            whole_object_uses: Vec::new(),
            has_cjs_exports: false,
            content_hash: 0,
            suppressions: Vec::new(),
            unused_import_bindings: Vec::new(),
            line_offsets: Vec::new(),
            flag_uses: Vec::new(),
            class_heritage: Vec::new(),
            complexity: vec![fallow_types::extract::FunctionComplexity {
                name: format!("fn_{id}"),
                line: id + 1,
                col: 0,
                cyclomatic,
                cognitive: 0,
                line_count: 10,
                param_count: 0,
            }],
        }
    }

    /// Ten modules with cyclomatic complexities 2, 4, ..., 20.
    #[expect(
        clippy::cast_possible_truncation,
        reason = "test values are trivially small"
    )]
    fn make_modules() -> Vec<fallow_core::extract::ModuleInfo> {
        (0..10)
            .map(|i| make_module(i, (i as u16 + 1) * 2))
            .collect()
    }

    #[test]
    fn compute_cyclomatic_stats() {
        let modules = make_modules();
        let input = VitalSignsInput {
            modules: &modules,
            file_scores: None,
            hotspots: None,
            total_files: 10,
            analysis_counts: None,
        };
        let vs = compute_vital_signs(&input);
        // Mean of 2,4,...,20 is 11; nearest-rank p90 of ten sorted values
        // is the 9th (index 8), i.e. 18.
        assert!((vs.avg_cyclomatic - 11.0).abs() < f64::EPSILON);
        assert_eq!(vs.p90_cyclomatic, 18);
    }

    #[test]
    fn compute_with_analysis_counts() {
        let modules = make_modules();
        let input = VitalSignsInput {
            modules: &modules,
            file_scores: None,
            hotspots: None,
            total_files: 100,
            analysis_counts: Some(AnalysisCounts {
                total_exports: 500,
                dead_files: 5,
                dead_exports: 50,
                unused_deps: 3,
                circular_deps: 2,
                total_deps: 40,
            }),
        };
        let vs = compute_vital_signs(&input);
        // 5/100 files and 50/500 exports dead.
        assert_eq!(vs.dead_file_pct, Some(5.0));
        assert_eq!(vs.dead_export_pct, Some(10.0));
        assert_eq!(vs.unused_dep_count, Some(3));
        assert_eq!(vs.circular_dep_count, Some(2));
    }

    #[test]
    fn compute_hotspot_count_with_threshold() {
        // Scores 80 and 50 clear the threshold; 30 does not.
        let hotspots = vec![
            HotspotEntry {
                path: PathBuf::from("a.ts"),
                score: 80.0,
                commits: 10,
                weighted_commits: 8.0,
                lines_added: 100,
                lines_deleted: 50,
                complexity_density: 0.5,
                fan_in: 5,
                trend: fallow_core::churn::ChurnTrend::Stable,
                ownership: None,
                is_test_path: false,
            },
            HotspotEntry {
                path: PathBuf::from("b.ts"),
                score: 30.0,
                commits: 5,
                weighted_commits: 3.0,
                lines_added: 40,
                lines_deleted: 20,
                complexity_density: 0.2,
                fan_in: 2,
                trend: fallow_core::churn::ChurnTrend::Cooling,
                ownership: None,
                is_test_path: false,
            },
            HotspotEntry {
                path: PathBuf::from("c.ts"),
                score: 50.0,
                commits: 8,
                weighted_commits: 6.0,
                lines_added: 80,
                lines_deleted: 30,
                complexity_density: 0.4,
                fan_in: 3,
                trend: fallow_core::churn::ChurnTrend::Accelerating,
                ownership: None,
                is_test_path: false,
            },
        ];
        let modules = Vec::new();
        let input = VitalSignsInput {
            modules: &modules,
            file_scores: None,
            hotspots: Some(&hotspots),
            total_files: 10,
            analysis_counts: None,
        };
        let vs = compute_vital_signs(&input);
        assert_eq!(vs.hotspot_count, Some(2));
    }

    #[test]
    fn compute_without_hotspots_gives_none() {
        let modules = Vec::new();
        let input = VitalSignsInput {
            modules: &modules,
            file_scores: None,
            hotspots: None,
            total_files: 0,
            analysis_counts: None,
        };
        let vs = compute_vital_signs(&input);
        assert!(vs.hotspot_count.is_none());
    }

    #[test]
    fn snapshot_save_and_load() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();
        let vs = VitalSigns {
            dead_file_pct: Some(3.2),
            dead_export_pct: Some(8.1),
            avg_cyclomatic: 4.7,
            p90_cyclomatic: 12,
            hotspot_count: Some(5),
            maintainability_avg: Some(72.4),
            unused_dep_count: Some(4),
            circular_dep_count: Some(2),
            ..Default::default()
        };
        let counts = VitalSignsCounts {
            total_files: 1200,
            total_exports: 5400,
            dead_files: 38,
            dead_exports: 437,
            files_scored: Some(1150),
            total_deps: 42,
            ..Default::default()
        };
        let health_score = compute_health_score(&vs, 1200);
        let snapshot = build_snapshot(vs, counts, root, false, Some(&health_score), None);
        let saved_path = save_snapshot(&snapshot, root, None).unwrap();
        assert!(saved_path.exists());
        // Default path lives under the project-local snapshot directory.
        assert!(saved_path.starts_with(root.join(".fallow/snapshots")));
        let content = std::fs::read_to_string(&saved_path).unwrap();
        let loaded: VitalSignsSnapshot = serde_json::from_str(&content).unwrap();
        assert_eq!(loaded.snapshot_schema_version, SNAPSHOT_SCHEMA_VERSION);
        assert!((loaded.vital_signs.avg_cyclomatic - 4.7).abs() < f64::EPSILON);
        assert_eq!(loaded.counts.total_files, 1200);
        assert!(loaded.score.is_some());
        assert!(loaded.grade.is_some());
    }

    #[test]
    fn snapshot_save_explicit_path() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();
        let explicit = root.join("my-snapshot.json");
        let vs = VitalSigns {
            avg_cyclomatic: 1.0,
            p90_cyclomatic: 2,
            ..Default::default()
        };
        let counts = VitalSignsCounts::default();
        let snapshot = build_snapshot(vs, counts, root, false, None, None);
        let saved = save_snapshot(&snapshot, root, Some(&explicit)).unwrap();
        assert_eq!(saved, explicit);
        assert!(explicit.exists());
    }

    #[test]
    fn snapshot_save_creates_nested_dirs() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();
        let nested = root.join("a/b/c/snapshot.json");
        let vs = VitalSigns {
            avg_cyclomatic: 1.0,
            p90_cyclomatic: 2,
            ..Default::default()
        };
        let counts = VitalSignsCounts::default();
        let snapshot = build_snapshot(vs, counts, root, false, None, None);
        let saved = save_snapshot(&snapshot, root, Some(&nested)).unwrap();
        assert_eq!(saved, nested);
        assert!(nested.exists());
    }

    #[test]
    fn days_to_ymd_epoch() {
        assert_eq!(days_to_ymd(0), (1970, 1, 1));
    }

    #[test]
    fn days_to_ymd_known_date() {
        assert_eq!(days_to_ymd(20_537), (2026, 3, 25));
    }

    #[test]
    fn health_score_perfect() {
        // All metrics sit at or below their penalty thresholds.
        let vs = VitalSigns {
            dead_file_pct: Some(0.0),
            dead_export_pct: Some(0.0),
            avg_cyclomatic: 1.0,
            p90_cyclomatic: 2,
            hotspot_count: Some(0),
            maintainability_avg: Some(90.0),
            unused_dep_count: Some(0),
            circular_dep_count: Some(0),
            ..Default::default()
        };
        let score = compute_health_score(&vs, 100);
        assert!((score.score - 100.0).abs() < f64::EPSILON);
        assert_eq!(score.grade, "A");
    }

    #[test]
    fn health_score_no_optional_metrics() {
        let vs = VitalSigns {
            avg_cyclomatic: 1.0,
            p90_cyclomatic: 2,
            ..Default::default()
        };
        let score = compute_health_score(&vs, 0);
        assert!((score.score - 100.0).abs() < f64::EPSILON);
        assert_eq!(score.grade, "A");
        // Missing analyses produce None penalties, not zeros.
        assert!(score.penalties.dead_files.is_none());
        assert!(score.penalties.unused_deps.is_none());
        assert!(score.penalties.duplication.is_none());
    }

    #[test]
    fn health_score_dead_code_penalty() {
        // 50% * 0.2 = 10 and 30% * 0.2 = 6 -> 100 - 16 = 84.
        let vs = VitalSigns {
            dead_file_pct: Some(50.0),
            dead_export_pct: Some(30.0),
            avg_cyclomatic: 1.0,
            p90_cyclomatic: 2,
            ..Default::default()
        };
        let score = compute_health_score(&vs, 100);
        assert!((score.score - 84.0).abs() < 0.1);
        assert_eq!(score.grade, "B");
    }

    #[test]
    fn health_score_complexity_penalty() {
        // avg: (5.5 - 1.5) * 5 = 20 (cap); p90: 15 - 10 = 5 -> 75.
        let vs = VitalSigns {
            avg_cyclomatic: 5.5,
            p90_cyclomatic: 15,
            ..Default::default()
        };
        let score = compute_health_score(&vs, 100);
        assert!((score.score - 75.0).abs() < 0.1);
        assert_eq!(score.grade, "B");
    }

    #[test]
    fn health_score_clamped_at_zero() {
        // Every penalty maxed; the sum exceeds 100 and must clamp to 0.
        let vs = VitalSigns {
            dead_file_pct: Some(100.0),
            dead_export_pct: Some(100.0),
            avg_cyclomatic: 10.0,
            p90_cyclomatic: 30,
            hotspot_count: Some(50),
            maintainability_avg: Some(20.0),
            unused_dep_count: Some(100),
            circular_dep_count: Some(50),
            ..Default::default()
        };
        let score = compute_health_score(&vs, 100);
        assert!((score.score).abs() < f64::EPSILON);
        assert_eq!(score.grade, "F");
    }

    #[test]
    fn health_score_hotspot_normalized_by_files() {
        // The same hotspot count should hurt a small project more.
        let vs = VitalSigns {
            avg_cyclomatic: 1.0,
            p90_cyclomatic: 2,
            hotspot_count: Some(5),
            ..Default::default()
        };
        let score_100 = compute_health_score(&vs, 100);
        let score_1000 = compute_health_score(&vs, 1000);
        assert!(score_1000.score > score_100.score);
    }

    #[test]
    fn health_score_duplication_penalty() {
        // 10% duplication: (10 - 5) * 1 = 5 points.
        let vs = VitalSigns {
            dead_file_pct: None,
            dead_export_pct: None,
            avg_cyclomatic: 1.0,
            p90_cyclomatic: 2,
            duplication_pct: Some(10.0),
            hotspot_count: None,
            maintainability_avg: None,
            unused_dep_count: None,
            circular_dep_count: None,
            counts: None,
            unit_size_profile: None,
            unit_interfacing_profile: None,
            p95_fan_in: None,
            coupling_high_pct: None,
            total_loc: 0,
        };
        let score = compute_health_score(&vs, 100);
        assert_eq!(score.penalties.duplication, Some(5.0));
        // Below the 5% allowance the penalty is zero (but present).
        let vs_low = VitalSigns {
            duplication_pct: Some(4.0),
            ..vs.clone()
        };
        let score_low = compute_health_score(&vs_low, 100);
        assert_eq!(score_low.penalties.duplication, Some(0.0));
        // Far above the allowance the penalty caps at 10.
        let vs_high = VitalSigns {
            duplication_pct: Some(20.0),
            ..vs
        };
        let score_high = compute_health_score(&vs_high, 100);
        assert_eq!(score_high.penalties.duplication, Some(10.0));
    }

    #[test]
    fn load_snapshots_empty_dir() {
        let dir = tempfile::tempdir().unwrap();
        let snaps = load_snapshots(dir.path());
        assert!(snaps.is_empty());
    }

    #[test]
    fn load_snapshots_returns_sorted() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();
        let snap_dir = root.join(".fallow/snapshots");
        std::fs::create_dir_all(&snap_dir).unwrap();
        let older = make_test_snapshot("2026-01-01T00:00:00Z", Some(72.0));
        let newer = make_test_snapshot("2026-03-01T00:00:00Z", Some(78.0));
        // Written newest-first to prove sorting is by timestamp, not by
        // directory order.
        std::fs::write(
            snap_dir.join("2026-03-01T00-00-00Z.json"),
            serde_json::to_string(&newer).unwrap(),
        )
        .unwrap();
        std::fs::write(
            snap_dir.join("2026-01-01T00-00-00Z.json"),
            serde_json::to_string(&older).unwrap(),
        )
        .unwrap();
        let loaded = load_snapshots(root);
        assert_eq!(loaded.len(), 2);
        assert_eq!(loaded[0].timestamp, "2026-01-01T00:00:00Z");
        assert_eq!(loaded[1].timestamp, "2026-03-01T00:00:00Z");
    }

    #[test]
    fn load_snapshots_skips_corrupt_files() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();
        let snap_dir = root.join(".fallow/snapshots");
        std::fs::create_dir_all(&snap_dir).unwrap();
        std::fs::write(snap_dir.join("corrupt.json"), "not valid json").unwrap();
        let good = make_test_snapshot("2026-02-01T00:00:00Z", Some(80.0));
        std::fs::write(
            snap_dir.join("good.json"),
            serde_json::to_string(&good).unwrap(),
        )
        .unwrap();
        let loaded = load_snapshots(root);
        assert_eq!(loaded.len(), 1);
        assert_eq!(loaded[0].timestamp, "2026-02-01T00:00:00Z");
    }

    #[test]
    fn load_snapshots_ignores_non_json() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();
        let snap_dir = root.join(".fallow/snapshots");
        std::fs::create_dir_all(&snap_dir).unwrap();
        std::fs::write(snap_dir.join("readme.txt"), "not a snapshot").unwrap();
        let loaded = load_snapshots(root);
        assert!(loaded.is_empty());
    }

    #[test]
    fn compute_trend_no_snapshots() {
        let vs = make_test_vital_signs();
        let counts = make_test_counts();
        assert!(compute_trend(&vs, &counts, Some(78.0), &[]).is_none());
    }

    #[test]
    fn compute_trend_improving() {
        let prev = make_test_snapshot("2026-01-01T00:00:00Z", Some(72.0));
        // Every metric here is equal to or better than the baseline.
        let vs = VitalSigns {
            dead_file_pct: Some(2.8),
            dead_export_pct: Some(7.5),
            avg_cyclomatic: 4.1,
            p90_cyclomatic: 12,
            hotspot_count: Some(3),
            maintainability_avg: Some(75.0),
            unused_dep_count: Some(3),
            circular_dep_count: Some(1),
            ..Default::default()
        };
        let counts = VitalSignsCounts {
            total_files: 100,
            total_exports: 500,
            dead_files: 3,
            dead_exports: 38,
            files_scored: Some(95),
            total_deps: 40,
            ..Default::default()
        };
        let trend = compute_trend(&vs, &counts, Some(78.0), &[prev]).unwrap();
        assert_eq!(trend.compared_to.timestamp, "2026-01-01T00:00:00Z");
        assert_eq!(trend.snapshots_loaded, 1);
        assert_eq!(trend.overall_direction, TrendDirection::Improving);
        let score_metric = trend.metrics.iter().find(|m| m.name == "score").unwrap();
        assert_eq!(score_metric.direction, TrendDirection::Improving);
        assert!((score_metric.delta - 6.0).abs() < f64::EPSILON);
    }

    #[test]
    fn compute_trend_stable_within_tolerance() {
        let prev = make_test_snapshot("2026-01-01T00:00:00Z", Some(78.0));
        let vs = make_test_vital_signs();
        let counts = make_test_counts();
        // A 0.3-point move is below the 0.5 tolerance.
        let trend = compute_trend(&vs, &counts, Some(78.3), &[prev]).unwrap();
        let score_metric = trend.metrics.iter().find(|m| m.name == "score").unwrap();
        assert_eq!(score_metric.direction, TrendDirection::Stable);
    }

    #[test]
    fn compute_trend_uses_most_recent_snapshot() {
        let older = make_test_snapshot("2026-01-01T00:00:00Z", Some(60.0));
        let newer = make_test_snapshot("2026-03-01T00:00:00Z", Some(72.0));
        let vs = make_test_vital_signs();
        let counts = make_test_counts();
        let trend = compute_trend(&vs, &counts, Some(78.0), &[older, newer]).unwrap();
        assert_eq!(trend.compared_to.score, Some(72.0));
        assert_eq!(trend.snapshots_loaded, 2);
    }

    #[test]
    fn compute_trend_includes_raw_counts() {
        let prev = make_test_snapshot("2026-01-01T00:00:00Z", Some(72.0));
        let vs = make_test_vital_signs();
        let counts = make_test_counts();
        let trend = compute_trend(&vs, &counts, Some(78.0), &[prev]).unwrap();
        let dead_files = trend
            .metrics
            .iter()
            .find(|m| m.name == "dead_file_pct")
            .unwrap();
        assert!(dead_files.previous_count.is_some());
        assert!(dead_files.current_count.is_some());
    }

    /// Baseline vital signs shared by the trend tests.
    fn make_test_vital_signs() -> VitalSigns {
        VitalSigns {
            dead_file_pct: Some(3.2),
            dead_export_pct: Some(8.1),
            avg_cyclomatic: 4.2,
            p90_cyclomatic: 12,
            hotspot_count: Some(5),
            maintainability_avg: Some(72.4),
            unused_dep_count: Some(4),
            circular_dep_count: Some(2),
            ..Default::default()
        }
    }

    /// Baseline counts shared by the trend tests.
    fn make_test_counts() -> VitalSignsCounts {
        VitalSignsCounts {
            total_files: 100,
            total_exports: 500,
            dead_files: 3,
            dead_exports: 40,
            files_scored: Some(95),
            total_deps: 42,
            ..Default::default()
        }
    }

    /// Builds a fully-populated snapshot with the given timestamp/score.
    fn make_test_snapshot(timestamp: &str, score: Option<f64>) -> VitalSignsSnapshot {
        VitalSignsSnapshot {
            snapshot_schema_version: SNAPSHOT_SCHEMA_VERSION,
            version: "2.5.5".into(),
            timestamp: timestamp.into(),
            git_sha: Some("abc1234".into()),
            git_branch: Some("main".into()),
            shallow_clone: false,
            vital_signs: VitalSigns {
                dead_file_pct: Some(3.2),
                dead_export_pct: Some(8.1),
                avg_cyclomatic: 4.7,
                p90_cyclomatic: 12,
                hotspot_count: Some(5),
                maintainability_avg: Some(72.4),
                unused_dep_count: Some(4),
                circular_dep_count: Some(2),
                ..Default::default()
            },
            counts: VitalSignsCounts {
                total_files: 100,
                total_exports: 500,
                dead_files: 3,
                dead_exports: 40,
                files_scored: Some(95),
                total_deps: 42,
                ..Default::default()
            },
            score,
            grade: score.map(|s| letter_grade(s).to_string()),
            coverage_model: None,
        }
    }
}