use chrono::DateTime;
use std::collections::HashMap;
use crate::event::GitEvent;
use super::{
BusFactorAnalysis, BusFactorEntry, BusFactorRisk, CommitQualityAnalysis, ContributorInfo,
FileHeatmap, TechDebtAnalysis, TechDebtEntry, TechDebtLevel,
};
// --- Analysis tuning constants ---------------------------------------------

// Used to saturate file "age" at one year when scoring tech debt.
const DAYS_PER_YEAR: f64 = 365.0;
// How many top contributors to keep per bus-factor entry.
const TOP_CONTRIBUTORS: usize = 5;
// Bus factor = number of top authors whose cumulative commit share must
// reach this percentage.
const BUS_FACTOR_CUMULATIVE_THRESHOLD: f64 = 50.0;
// Tech-debt blend weights (churn + complexity + age sum to 1.0).
const TECH_DEBT_CHURN_WEIGHT: f64 = 0.5;
const TECH_DEBT_COMPLEXITY_WEIGHT: f64 = 0.4;
const TECH_DEBT_AGE_WEIGHT: f64 = 0.1;
// Tech-debt level cutoffs on the blended [0, 1] score.
const TECH_DEBT_HIGH: f64 = 0.6;
const TECH_DEBT_MEDIUM: f64 = 0.3;
// A commit with more than this many changes counts as "large" in the
// fallback tech-debt heuristic.
const LARGE_COMMIT_CHANGES: usize = 50;
// Overall-health component weights (sum to 1.0).
const WEIGHT_QUALITY: f64 = 0.20;
const WEIGHT_TEST: f64 = 0.15;
const WEIGHT_BUS_FACTOR: f64 = 0.20;
const WEIGHT_TECH_DEBT: f64 = 0.20;
const WEIGHT_CHURN: f64 = 0.15;
const WEIGHT_CADENCE: f64 = 0.10;
// Test-health score: blend of "test:"-prefixed messages and commits that
// touch test files.
const TEST_MSG_WEIGHT: f64 = 0.3;
const TEST_FILE_WEIGHT: f64 = 0.7;
// Below this test score an informational alert is raised.
const LOW_TEST_THRESHOLD: f64 = 0.1;
// How many recent commits to inspect when describing low test coverage.
const RECENT_COMMITS_WINDOW: usize = 30;
// Bus-factor score thresholds for Critical / Warning alerts.
const BUS_FACTOR_CRITICAL: f64 = 0.3;
const BUS_FACTOR_WARNING: f64 = 0.5;
// Alert detail mentions the top author when they own more than this percent.
const SINGLE_AUTHOR_CONCENTRATION: u32 = 70;
// A file is "high churn" when changed more than this multiple of the mean.
const HIGH_CHURN_MULTIPLIER: f64 = 2.0;
const CHURN_WARNING_THRESHOLD: f64 = 0.5;
// Cadence is only scored when the history spans at least this many days.
const MIN_PERIOD_FOR_CADENCE: u64 = 14;
// Coefficient-of-variation thresholds for weekly commit counts.
const CV_STABLE: f64 = 0.5;
const CV_UNSTABLE: f64 = 2.0;
// Linear interpolation parameters between the stable/unstable CV bounds.
const CV_RANGE: f64 = 1.5; const CV_WEIGHT: f64 = 0.8;
const CADENCE_SCORE_UNSTABLE: f64 = 0.2;
// Confidence-level thresholds for the final report.
const HIGH_CONF_COMMITS: usize = 100;
const HIGH_CONF_AUTHORS: usize = 3;
const HIGH_CONF_DAYS: u64 = 30;
const MEDIUM_CONF_COMMITS: usize = 30;
const MEDIUM_CONF_DAYS: u64 = 7;
/// Severity of a [`HealthAlert`].
///
/// Variant order matters: the derived `Ord` makes
/// `Info < Warning < Critical`, which alert sorting relies on to put the
/// most severe alerts first.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum AlertSeverity {
    Info,
    Warning,
    Critical,
}

impl AlertSeverity {
    /// Theme color name used when rendering this severity.
    pub fn color(&self) -> &'static str {
        match self {
            AlertSeverity::Info => "blue",
            AlertSeverity::Warning => "yellow",
            AlertSeverity::Critical => "red",
        }
    }

    /// Icon glyph shown next to the alert.
    ///
    /// NOTE(review): the Warning glyph was mojibake ("âš " — the UTF-8
    /// bytes of U+26A0 decoded as Windows-1252); restored to "⚠".
    pub fn icon(&self) -> &'static str {
        match self {
            AlertSeverity::Info => "ℹ",
            AlertSeverity::Warning => "⚠",
            AlertSeverity::Critical => "🔴",
        }
    }
}
/// Machine-readable category of a [`HealthAlert`], so callers can filter or
/// special-case alerts without parsing the message text.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum HealthAlertKind {
    LowCommitQuality,
    LowTestCoverage,
    HighBusFactorRisk,
    ModerateBusFactorRisk,
    HighTechDebt,
    HighCodeChurn,
    // Catch-all for alerts that do not fit a specific category.
    Other,
}
/// A single actionable finding surfaced by the health analysis.
#[derive(Debug, Clone)]
pub struct HealthAlert {
    pub kind: HealthAlertKind,
    pub severity: AlertSeverity,
    pub message: String,
    // Optional longer explanation shown under the headline message.
    pub details: Option<String>,
}

impl HealthAlert {
    /// Creates an alert with no extra details.
    pub fn new(kind: HealthAlertKind, severity: AlertSeverity, message: impl Into<String>) -> Self {
        Self {
            kind,
            severity,
            message: message.into(),
            details: None,
        }
    }

    /// Creates an alert carrying an explanatory details string.
    pub fn with_details(
        kind: HealthAlertKind,
        severity: AlertSeverity,
        message: impl Into<String>,
        details: impl Into<String>,
    ) -> Self {
        // Build on `new` so the two constructors cannot drift apart.
        let mut alert = Self::new(kind, severity, message);
        alert.details = Some(details.into());
        alert
    }
}
/// Heuristically decides whether `path` looks like a test file, based on
/// common naming conventions (Rust/Go suffixes, JS/TS `.test`/`.spec`
/// infixes, Python `test_*.py`, and well-known test directories).
/// Matching is case-insensitive.
pub fn is_test_file(path: &str) -> bool {
    const TEST_SUFFIXES: [&str; 10] = [
        "_test.rs",
        "_test.go",
        ".test.ts",
        ".test.tsx",
        ".test.js",
        ".test.jsx",
        ".spec.ts",
        ".spec.tsx",
        ".spec.js",
        ".spec.jsx",
    ];
    const TEST_DIRS: [&str; 3] = ["tests", "__tests__", "spec"];

    let lower = path.to_lowercase();

    // Language-specific file-name suffixes.
    if TEST_SUFFIXES.iter().any(|suffix| lower.ends_with(suffix)) {
        return true;
    }

    // Python convention: the file itself is named `test_*.py`.
    let file_name = lower.rsplit('/').next().unwrap_or("");
    if file_name.starts_with("test_") && file_name.ends_with(".py") {
        return true;
    }

    // Well-known test directories, either nested or at the repository root.
    TEST_DIRS.iter().any(|dir| {
        lower.contains(&format!("/{}/", dir)) || lower.starts_with(&format!("{}/", dir))
    })
}
/// How trustworthy the health analysis is, based on sample size
/// (commit count, author count, and time span of the history).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ConfidenceLevel {
    High,
    Medium,
    Low,
}
impl ConfidenceLevel {
    /// Display label for the confidence level.
    pub fn as_str(&self) -> &'static str {
        match self {
            ConfidenceLevel::High => "High",
            ConfidenceLevel::Medium => "Medium",
            ConfidenceLevel::Low => "Low",
        }
    }
}
/// Confidence level plus a human-readable justification
/// (e.g. "120 commits, 4 authors, 90 days").
#[derive(Debug, Clone)]
pub struct HealthConfidence {
    pub level: ConfidenceLevel,
    pub reason: String,
}
/// One weighted sub-score of the overall health score.
#[derive(Debug, Clone, Default)]
pub struct HealthScoreComponent {
    // Normalized score in [0, 1]; higher is always better.
    pub score: f64,
    // Contribution weight; 0.0 means the component is skipped entirely.
    pub weight: f64,
    pub description: String,
}
/// Aggregate health report produced by [`calculate_project_health`].
#[derive(Debug, Clone)]
pub struct ProjectHealth {
    // Weighted average of the component scores, scaled to 0-100.
    pub overall_score: u8,
    pub quality: HealthScoreComponent,
    pub test_health: HealthScoreComponent,
    pub bus_factor_risk: HealthScoreComponent,
    pub tech_debt: HealthScoreComponent,
    pub code_churn: HealthScoreComponent,
    pub commit_cadence: HealthScoreComponent,
    // Sorted most-severe-first by the caller that builds this struct.
    pub alerts: Vec<HealthAlert>,
    pub total_commits: usize,
    pub total_authors: usize,
    pub analysis_period_days: u64,
    pub confidence: HealthConfidence,
}
impl Default for ProjectHealth {
fn default() -> Self {
Self {
overall_score: 50,
quality: HealthScoreComponent {
score: 0.5,
weight: WEIGHT_QUALITY,
description: "Commit quality average".to_string(),
},
test_health: HealthScoreComponent {
score: 0.5,
weight: WEIGHT_TEST,
description: "Test commit ratio".to_string(),
},
bus_factor_risk: HealthScoreComponent {
score: 0.5,
weight: WEIGHT_BUS_FACTOR,
description: "Knowledge concentration (lower is better)".to_string(),
},
tech_debt: HealthScoreComponent {
score: 0.5,
weight: WEIGHT_TECH_DEBT,
description: "Technical debt (lower is better)".to_string(),
},
code_churn: HealthScoreComponent {
score: 0.5,
weight: WEIGHT_CHURN,
description: "Code churn rate".to_string(),
},
commit_cadence: HealthScoreComponent {
score: 0.5,
weight: WEIGHT_CADENCE,
description: "Commit cadence stability".to_string(),
},
alerts: Vec::new(),
total_commits: 0,
total_authors: 0,
analysis_period_days: 0,
confidence: HealthConfidence {
level: ConfidenceLevel::Low,
reason: "No data".to_string(),
},
}
}
}
impl ProjectHealth {
    /// Human-readable label for the overall score bracket.
    pub fn level(&self) -> &'static str {
        match self.overall_score {
            90..=100 => "Excellent",
            75..=89 => "Good",
            60..=74 => "Fair",
            45..=59 => "Needs Work",
            30..=44 => "Poor",
            _ => "Critical",
        }
    }

    /// Theme color name for the overall score bracket.
    pub fn score_color(&self) -> &'static str {
        match self.overall_score {
            90..=100 => "green",
            75..=89 => "teal",
            60..=74 => "sapphire",
            45..=59 => "yellow",
            30..=44 => "peach",
            _ => "red",
        }
    }

    /// Renders a ten-cell progress bar, one filled cell per 10 points.
    ///
    /// NOTE(review): the bar glyphs were mojibake ("â–ˆ"/"â–‘" — UTF-8
    /// "█"/"░" decoded as Windows-1252); restored so the output matches
    /// the expectations in `test_project_health_score_bar`.
    pub fn score_bar(&self) -> String {
        let filled = (self.overall_score.min(100) / 10) as usize;
        let empty = 10usize.saturating_sub(filled);
        format!("{}{}", "█".repeat(filled), "░".repeat(empty))
    }
}
/// Analyzes knowledge concentration ("bus factor") per directory.
///
/// For every commit, each touched file contributes a count to its top-level
/// directory and (when nested deeper) its two-level directory. The bus
/// factor of a path is the minimum number of top contributors whose
/// cumulative share reaches `BUS_FACTOR_CUMULATIVE_THRESHOLD` percent of
/// that path's commits. Paths with fewer than `min_commits` commits are
/// skipped. `get_files` maps a commit's short hash to its changed files
/// (returning `None` skips that commit).
pub fn calculate_bus_factor(
    events: &[&GitEvent],
    get_files: impl Fn(&str) -> Option<Vec<String>>,
    min_commits: usize,
) -> BusFactorAnalysis {
    // directory path -> (author name -> commit count)
    let mut dir_author_counts: HashMap<String, HashMap<String, usize>> = HashMap::new();
    for event in events {
        if let Some(files) = get_files(&event.short_hash) {
            for file in &files {
                let parts: Vec<&str> = file.split('/').collect();
                // Top-level directory (only when the file is inside one).
                if parts.len() > 1 {
                    let top_dir = parts[0].to_string();
                    let counts = dir_author_counts.entry(top_dir).or_default();
                    *counts.entry(event.author.clone()).or_insert(0) += 1;
                }
                // Two-level directory for finer-grained hot spots. Note a
                // deep file counts toward both its one- and two-level dirs.
                if parts.len() > 2 {
                    let two_level_dir = format!("{}/{}", parts[0], parts[1]);
                    let counts = dir_author_counts.entry(two_level_dir).or_default();
                    *counts.entry(event.author.clone()).or_insert(0) += 1;
                }
            }
        }
    }
    let mut entries = Vec::new();
    let mut high_risk_count = 0;
    let mut medium_risk_count = 0;
    for (path, author_counts) in &dir_author_counts {
        let total_commits: usize = author_counts.values().sum();
        if total_commits < min_commits {
            continue;
        }
        let mut contributors: Vec<ContributorInfo> = author_counts
            .iter()
            .map(|(name, &count)| ContributorInfo {
                name: name.clone(),
                commit_count: count,
                contribution_percent: (count as f64 / total_commits as f64) * 100.0,
            })
            .collect();
        // Most active contributor first.
        contributors.sort_by(|a, b| b.commit_count.cmp(&a.commit_count));
        // Walk down the sorted list until the cumulative share covers the
        // threshold; the number of authors consumed is the bus factor.
        let mut cumulative = 0.0;
        let mut bus_factor = 0;
        for contributor in &contributors {
            cumulative += contributor.contribution_percent;
            bus_factor += 1;
            if cumulative >= BUS_FACTOR_CUMULATIVE_THRESHOLD {
                break;
            }
        }
        // Bus factor 1 = one person owns half the path (High risk);
        // 2 = Medium; anything wider is Low.
        let risk_level = match bus_factor {
            1 => {
                high_risk_count += 1;
                BusFactorRisk::High
            }
            2 => {
                medium_risk_count += 1;
                BusFactorRisk::Medium
            }
            _ => BusFactorRisk::Low,
        };
        entries.push(BusFactorEntry {
            path: path.clone(),
            bus_factor,
            // Only surface the most active contributors.
            contributors: contributors.into_iter().take(TOP_CONTRIBUTORS).collect(),
            total_commits,
            risk_level,
            is_directory: true,
        });
    }
    // Order: High risk first, then Medium, then Low; within the same risk
    // level, busier paths first.
    entries.sort_by(|a, b| match (&a.risk_level, &b.risk_level) {
        (BusFactorRisk::High, BusFactorRisk::High)
        | (BusFactorRisk::Medium, BusFactorRisk::Medium)
        | (BusFactorRisk::Low, BusFactorRisk::Low) => b.total_commits.cmp(&a.total_commits),
        (BusFactorRisk::High, _) => std::cmp::Ordering::Less,
        (_, BusFactorRisk::High) => std::cmp::Ordering::Greater,
        (BusFactorRisk::Medium, BusFactorRisk::Low) => std::cmp::Ordering::Less,
        (BusFactorRisk::Low, BusFactorRisk::Medium) => std::cmp::Ordering::Greater,
    });
    BusFactorAnalysis {
        total_paths_analyzed: entries.len(),
        entries,
        high_risk_count,
        medium_risk_count,
    }
}
/// Scores per-file technical debt from churn (how often a file changes),
/// change volume (a complexity proxy), and how recently it was touched.
/// Files with fewer than `min_commits` recorded changes are skipped.
pub fn calculate_tech_debt(
    events: &[&GitEvent],
    get_files: impl Fn(&str) -> Option<Vec<String>>,
    min_commits: usize,
) -> TechDebtAnalysis {
    use chrono::Local;
    // file path -> (change_count, total_changes, last_modified)
    let mut file_stats: HashMap<String, (usize, usize, DateTime<Local>)> = HashMap::new();
    for event in events {
        if let Some(files) = get_files(&event.short_hash) {
            // Spread the commit's size evenly over its files. Integer
            // division: commits smaller than their file count add 0 each.
            // NOTE(review): fields are named files_added/files_deleted but
            // are used here as a change-volume proxy — confirm GitEvent
            // field semantics (the tests pass insertions/deletions).
            let changes_per_file = (event.files_added + event.files_deleted) / files.len().max(1);
            for file in files {
                let entry = file_stats.entry(file).or_insert((0, 0, event.timestamp));
                entry.0 += 1;
                entry.1 += changes_per_file;
                // Track the most recent modification regardless of event order.
                if event.timestamp > entry.2 {
                    entry.2 = event.timestamp;
                }
            }
        }
    }
    // Normalizers so churn/complexity land in [0, 1] relative to the
    // repo-wide maxima (1 avoids division by zero on empty input).
    let max_changes = file_stats.values().map(|(c, _, _)| *c).max().unwrap_or(1);
    let max_lines = file_stats.values().map(|(_, l, _)| *l).max().unwrap_or(1);
    let now = Local::now();
    let mut entries = Vec::new();
    let mut total_score = 0.0;
    let mut high_debt_count = 0;
    for (path, (change_count, total_changes, last_modified)) in &file_stats {
        if *change_count < min_commits {
            continue;
        }
        let churn_score = *change_count as f64 / max_changes as f64;
        let complexity_score = *total_changes as f64 / max_lines as f64;
        let days_since_change = (now - *last_modified).num_days().max(0) as f64;
        // Age saturates at one year.
        let age_score = (days_since_change / DAYS_PER_YEAR).min(1.0);
        // Weighted blend; age only contributes for files that also churn.
        let score = churn_score * TECH_DEBT_CHURN_WEIGHT
            + complexity_score * TECH_DEBT_COMPLEXITY_WEIGHT
            + (churn_score * age_score) * TECH_DEBT_AGE_WEIGHT;
        let debt_level = if score >= TECH_DEBT_HIGH {
            high_debt_count += 1;
            TechDebtLevel::High
        } else if score >= TECH_DEBT_MEDIUM {
            TechDebtLevel::Medium
        } else {
            TechDebtLevel::Low
        };
        entries.push(TechDebtEntry {
            path: path.clone(),
            score,
            churn_score,
            complexity_score,
            age_score,
            change_count: *change_count,
            total_changes: *total_changes,
            debt_level,
        });
        total_score += score;
    }
    // Highest debt first; NaN-safe fallback keeps the sort total.
    entries.sort_by(|a, b| {
        b.score
            .partial_cmp(&a.score)
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    let total_files_analyzed = entries.len();
    let avg_score = if total_files_analyzed > 0 {
        total_score / total_files_analyzed as f64
    } else {
        0.0
    };
    TechDebtAnalysis {
        entries,
        avg_score,
        high_debt_count,
        total_files_analyzed,
    }
}
/// Computes an overall project health report (0-100) from commit events.
///
/// Each dimension (quality, tests, bus factor, tech debt, churn, cadence)
/// is scored in [0, 1], weighted, and averaged. When a precomputed analysis
/// (`quality_analysis`, `bus_factor`, `tech_debt_analysis`) is provided it
/// is used directly; otherwise a cheaper heuristic over `events` is used.
/// `files_fn` maps a commit's short hash to its changed files.
///
/// Assumes `events` is ordered newest-first (the period calculation takes
/// `first() - last()`, though `unsigned_abs` makes it order-tolerant).
pub fn calculate_project_health<F>(
    events: &[&GitEvent],
    files_fn: F,
    quality_analysis: Option<&CommitQualityAnalysis>,
    bus_factor: Option<&BusFactorAnalysis>,
    tech_debt_analysis: Option<&TechDebtAnalysis>,
    heatmap: &FileHeatmap,
) -> ProjectHealth
where
    F: Fn(&str) -> Option<Vec<String>>,
{
    // No history: return the neutral 50-point default report.
    if events.is_empty() {
        return ProjectHealth::default();
    }
    let total_commits = events.len();
    let mut alerts = Vec::new();
    let authors: std::collections::HashSet<&str> =
        events.iter().map(|e| e.author.as_str()).collect();
    let total_authors = authors.len();
    // Days between newest and oldest commit; 0 when fewer than two commits.
    let analysis_period_days = if events.len() >= 2 {
        let newest = events.first().map(|e| e.timestamp);
        let oldest = events.last().map(|e| e.timestamp);
        if let (Some(n), Some(o)) = (newest, oldest) {
            let duration = n.signed_duration_since(o);
            duration.num_days().unsigned_abs()
        } else {
            0
        }
    } else {
        0
    };
    // --- Component 1: commit quality ------------------------------------
    // Prefer the provided analysis; otherwise score by the fraction of
    // Conventional Commit prefixes in the messages.
    let quality_score = if let Some(qa) = quality_analysis {
        qa.avg_score
    } else {
        let conventional_count = events
            .iter()
            .filter(|e| {
                let msg = e.message.to_lowercase();
                msg.starts_with("feat:")
                    || msg.starts_with("fix:")
                    || msg.starts_with("docs:")
                    || msg.starts_with("style:")
                    || msg.starts_with("refactor:")
                    || msg.starts_with("test:")
                    || msg.starts_with("chore:")
                    || msg.starts_with("perf:")
            })
            .count();
        conventional_count as f64 / total_commits as f64
    };
    let conventional_pct = (quality_score * 100.0).round() as u32;
    let quality = HealthScoreComponent {
        score: quality_score,
        weight: WEIGHT_QUALITY,
        // NOTE(review): when `quality_analysis` is supplied, `avg_score`
        // is not literally a conventional-commit percentage, so this
        // description may overstate — verify against CommitQualityAnalysis.
        description: format!(
            "Commit quality: {:.0}% conventional commits",
            quality_score * 100.0
        ),
    };
    // NOTE(review): the 0.3 threshold reuses BUS_FACTOR_CRITICAL for a
    // quality check — likely deliberate value reuse, but a dedicated
    // constant would be clearer. Behavior kept as-is.
    if quality_score < BUS_FACTOR_CRITICAL {
        alerts.push(HealthAlert::with_details(
            HealthAlertKind::LowCommitQuality,
            AlertSeverity::Warning,
            "Low commit quality",
            format!(
                "Conventional Commit format is {}% of all commits",
                conventional_pct
            ),
        ));
    }
    // --- Component 2: test health ----------------------------------------
    // Blend of "test:"-prefixed messages (30%) and commits touching test
    // files (70%), capped at 1.0.
    let test_msg_count = events
        .iter()
        .filter(|e| e.message.to_lowercase().starts_with("test:"))
        .count();
    let test_file_commit_count = events
        .iter()
        .filter(|e| {
            if let Some(files) = files_fn(&e.short_hash) {
                files.iter().any(|f| is_test_file(f))
            } else {
                false
            }
        })
        .count();
    let test_score = ((test_msg_count as f64 * TEST_MSG_WEIGHT
        + test_file_commit_count as f64 * TEST_FILE_WEIGHT)
        / total_commits as f64)
        .min(1.0);
    let test_health = HealthScoreComponent {
        score: test_score,
        weight: WEIGHT_TEST,
        description: format!(
            "Test coverage: {:.0}% test-related commits",
            test_score * 100.0
        ),
    };
    if test_score < LOW_TEST_THRESHOLD {
        // Describe the shortfall over the most recent commits only.
        let recent_count = events.len().min(RECENT_COMMITS_WINDOW);
        let recent_test_files = events
            .iter()
            .take(recent_count)
            .filter(|e| {
                if let Some(files) = files_fn(&e.short_hash) {
                    files.iter().any(|f| is_test_file(f))
                } else {
                    false
                }
            })
            .count();
        alerts.push(HealthAlert::with_details(
            HealthAlertKind::LowTestCoverage,
            AlertSeverity::Info,
            "Low test coverage in commits",
            format!(
                "In the last {} commits, only {} include test file changes",
                recent_count, recent_test_files
            ),
        ));
    }
    // --- Component 3: bus factor / knowledge distribution ----------------
    let bus_factor_score;
    let mut top_author_info = String::new();
    if let Some(bf) = bus_factor {
        // From the precomputed analysis: score = share of paths that are
        // NOT high risk.
        let low_risk_count = bf
            .total_paths_analyzed
            .saturating_sub(bf.high_risk_count + bf.medium_risk_count);
        let risk_ratio = bf.high_risk_count as f64
            / (bf.high_risk_count + bf.medium_risk_count + low_risk_count).max(1) as f64;
        bus_factor_score = 1.0 - risk_ratio;
    } else {
        // Fallback: normalized Shannon entropy of the per-author commit
        // distribution (1.0 = perfectly even, 0.0 = single author).
        let mut author_commits: HashMap<&str, usize> = HashMap::new();
        for e in events.iter() {
            *author_commits.entry(e.author.as_str()).or_insert(0) += 1;
        }
        let total = total_commits as f64;
        let entropy: f64 = author_commits
            .values()
            .map(|&count| {
                let p = count as f64 / total;
                if p > 0.0 {
                    -p * p.ln()
                } else {
                    0.0
                }
            })
            .sum();
        let max_entropy = (total_authors as f64).ln();
        bus_factor_score = if max_entropy > 0.0 {
            entropy / max_entropy
        } else {
            0.0
        };
        // Remember a dominant author for alert details.
        if let Some((&top_author, &top_count)) = author_commits.iter().max_by_key(|(_, &c)| c) {
            let pct = (top_count as f64 / total * 100.0).round() as u32;
            if pct > SINGLE_AUTHOR_CONCENTRATION {
                top_author_info = format!(
                    "{}% of commits are by a single author ({})",
                    pct, top_author
                );
            }
        }
    }
    // Solo projects: weight 0 removes bus factor from the overall score.
    let bus_factor_risk = if total_authors <= 1 {
        HealthScoreComponent {
            score: 0.0,
            weight: 0.0,
            description: "Skipped: solo development".to_string(),
        }
    } else {
        HealthScoreComponent {
            score: bus_factor_score,
            weight: WEIGHT_BUS_FACTOR,
            description: format!(
                "Knowledge distribution: {:.0}% (higher is better)",
                bus_factor_score * 100.0
            ),
        }
    };
    if total_authors > 1 {
        if bus_factor_score < BUS_FACTOR_CRITICAL {
            let details = if top_author_info.is_empty() {
                "Knowledge is concentrated in few contributors. Consider knowledge sharing."
                    .to_string()
            } else {
                top_author_info.clone()
            };
            alerts.push(HealthAlert::with_details(
                HealthAlertKind::HighBusFactorRisk,
                AlertSeverity::Critical,
                "High bus factor risk",
                details,
            ));
        } else if bus_factor_score < BUS_FACTOR_WARNING {
            let details = if top_author_info.is_empty() {
                "Consider improving knowledge distribution across team members.".to_string()
            } else {
                top_author_info
            };
            alerts.push(HealthAlert::with_details(
                HealthAlertKind::ModerateBusFactorRisk,
                AlertSeverity::Warning,
                "Moderate bus factor risk",
                details,
            ));
        }
    }
    // --- Component 4: tech debt (inverted: higher = cleaner) --------------
    let tech_debt_score = if let Some(td) = tech_debt_analysis {
        1.0 - td.avg_score.min(1.0)
    } else {
        // Fallback heuristic: fraction of "large" commits.
        let large_commits = events
            .iter()
            .filter(|e| e.files_added + e.files_deleted > LARGE_COMMIT_CHANGES)
            .count();
        let large_ratio = large_commits as f64 / total_commits as f64;
        1.0 - large_ratio.min(1.0)
    };
    let tech_debt = HealthScoreComponent {
        score: tech_debt_score,
        weight: WEIGHT_TECH_DEBT,
        description: format!(
            "Technical debt: {:.0}% clean (higher is better)",
            tech_debt_score * 100.0
        ),
    };
    if tech_debt_score < TECH_DEBT_MEDIUM {
        alerts.push(HealthAlert::with_details(
            HealthAlertKind::HighTechDebt,
            AlertSeverity::Warning,
            "High technical debt indicated",
            "Many large commits suggest accumulated technical debt.",
        ));
    }
    // --- Component 5: code churn ------------------------------------------
    // Score = 1 - fraction of files changed more than twice the mean.
    let churn_score = if heatmap.total_files > 0 {
        let avg_changes = heatmap.files.iter().map(|f| f.change_count).sum::<usize>() as f64
            / heatmap.total_files as f64;
        let high_churn_count = heatmap
            .files
            .iter()
            .filter(|f| f.change_count as f64 > avg_changes * HIGH_CHURN_MULTIPLIER)
            .count();
        let churn_ratio = high_churn_count as f64 / heatmap.total_files as f64;
        (1.0 - churn_ratio).max(0.0)
    } else {
        // No heatmap data: neutral score.
        0.5
    };
    let code_churn = HealthScoreComponent {
        score: churn_score,
        weight: WEIGHT_CHURN,
        description: format!(
            "Code churn: {:.0}% stable (higher is better)",
            churn_score * 100.0
        ),
    };
    if churn_score < CHURN_WARNING_THRESHOLD {
        // Name the hottest file — assumes heatmap.files is sorted by
        // change_count descending; TODO confirm upstream ordering.
        if let Some(top_file) = heatmap.files.first() {
            alerts.push(HealthAlert::with_details(
                HealthAlertKind::HighCodeChurn,
                AlertSeverity::Warning,
                "High code churn detected",
                format!(
                    "{} has been changed {} times in the analysis period",
                    top_file.path, top_file.change_count
                ),
            ));
        }
    }
    // --- Component 6: commit cadence --------------------------------------
    // Bucket commits into weeks since the oldest commit and score by the
    // coefficient of variation of weekly counts (lower CV = steadier).
    let cadence_score = if analysis_period_days >= MIN_PERIOD_FOR_CADENCE {
        let mut weekly_counts: HashMap<i64, usize> = HashMap::new();
        if let Some(oldest_ts) = events.last().map(|e| e.timestamp) {
            for e in events.iter() {
                let days_since = e
                    .timestamp
                    .signed_duration_since(oldest_ts)
                    .num_days()
                    .unsigned_abs();
                let week = (days_since / 7) as i64;
                *weekly_counts.entry(week).or_insert(0) += 1;
            }
        }
        if weekly_counts.len() >= 2 {
            let values: Vec<f64> = weekly_counts.values().map(|&v| v as f64).collect();
            let mean = values.iter().sum::<f64>() / values.len() as f64;
            if mean > 0.0 {
                let variance =
                    values.iter().map(|v| (v - mean).powi(2)).sum::<f64>() / values.len() as f64;
                let std_dev = variance.sqrt();
                let cv = std_dev / mean;
                if cv < CV_STABLE {
                    1.0
                } else if cv > CV_UNSTABLE {
                    CADENCE_SCORE_UNSTABLE
                } else {
                    // Linear interpolation between the stable/unstable bounds.
                    1.0 - (cv - CV_STABLE) / CV_RANGE * CV_WEIGHT
                }
            } else {
                0.5
            }
        } else {
            0.5
        }
    } else {
        // Period too short to judge cadence: neutral score.
        0.5
    };
    let commit_cadence = HealthScoreComponent {
        score: cadence_score,
        weight: WEIGHT_CADENCE,
        description: format!(
            "Commit cadence: {:.0}% stable (higher is better)",
            cadence_score * 100.0
        ),
    };
    // --- Weighted aggregate ------------------------------------------------
    // Normalizing by total_weight keeps the score fair when a component is
    // skipped (weight 0, e.g. solo-development bus factor).
    let total_weight = quality.weight
        + test_health.weight
        + bus_factor_risk.weight
        + tech_debt.weight
        + code_churn.weight
        + commit_cadence.weight;
    let raw = quality.score * quality.weight
        + test_health.score * test_health.weight
        + bus_factor_risk.score * bus_factor_risk.weight
        + tech_debt.score * tech_debt.weight
        + code_churn.score * code_churn.weight
        + commit_cadence.score * commit_cadence.weight;
    let overall = if total_weight > 0.0 {
        raw / total_weight
    } else {
        0.0
    };
    let overall_score = (overall * 100.0).round().min(100.0) as u8;
    // --- Confidence --------------------------------------------------------
    let confidence = if total_commits >= HIGH_CONF_COMMITS
        && total_authors >= HIGH_CONF_AUTHORS
        && analysis_period_days >= HIGH_CONF_DAYS
    {
        HealthConfidence {
            level: ConfidenceLevel::High,
            reason: format!(
                "{} commits, {} authors, {} days",
                total_commits, total_authors, analysis_period_days
            ),
        }
    } else if total_commits >= MEDIUM_CONF_COMMITS && analysis_period_days >= MEDIUM_CONF_DAYS {
        HealthConfidence {
            level: ConfidenceLevel::Medium,
            reason: format!(
                "{} commits over {} days",
                total_commits, analysis_period_days
            ),
        }
    } else {
        HealthConfidence {
            level: ConfidenceLevel::Low,
            reason: format!(
                "Only {} commits, {} authors, {} days — results may not be representative",
                total_commits, total_authors, analysis_period_days
            ),
        }
    };
    // Most severe first (relies on AlertSeverity's derived Ord).
    alerts.sort_by(|a, b| b.severity.cmp(&a.severity));
    ProjectHealth {
        overall_score,
        quality,
        test_health,
        bus_factor_risk,
        tech_debt,
        code_churn,
        commit_cadence,
        alerts,
        total_commits,
        total_authors,
        analysis_period_days,
        confidence,
    }
}
#[cfg(test)]
#[allow(clippy::useless_vec)]
mod tests {
    use super::*;
    use crate::stats::AggregationLevel;
    use chrono::Local;

    /// Minimal commit event with a fixed hash and message.
    fn create_test_event(author: &str, insertions: usize, deletions: usize) -> GitEvent {
        GitEvent::commit(
            "abc1234".to_string(),
            "test commit".to_string(),
            author.to_string(),
            Local::now(),
            insertions,
            deletions,
        )
    }

    /// Commit event with caller-chosen hash/message (for quality checks).
    fn create_test_event_for_quality(
        hash: &str,
        message: &str,
        insertions: usize,
        deletions: usize,
    ) -> GitEvent {
        GitEvent::commit(
            hash.to_string(),
            message.to_string(),
            "author".to_string(),
            Local::now(),
            insertions,
            deletions,
        )
    }

    #[test]
    fn test_project_health_default() {
        let health = ProjectHealth::default();
        assert_eq!(health.overall_score, 50);
        assert_eq!(health.level(), "Needs Work");
    }

    #[test]
    fn test_project_health_level() {
        let health = ProjectHealth {
            overall_score: 95,
            ..Default::default()
        };
        assert_eq!(health.level(), "Excellent");
        let health = ProjectHealth {
            overall_score: 80,
            ..Default::default()
        };
        assert_eq!(health.level(), "Good");
        let health = ProjectHealth {
            overall_score: 65,
            ..Default::default()
        };
        assert_eq!(health.level(), "Fair");
        let health = ProjectHealth {
            overall_score: 50,
            ..Default::default()
        };
        assert_eq!(health.level(), "Needs Work");
        let health = ProjectHealth {
            overall_score: 35,
            ..Default::default()
        };
        assert_eq!(health.level(), "Poor");
        let health = ProjectHealth {
            overall_score: 20,
            ..Default::default()
        };
        assert_eq!(health.level(), "Critical");
    }

    #[test]
    fn test_project_health_score_bar() {
        let health = ProjectHealth {
            overall_score: 100,
            ..Default::default()
        };
        assert_eq!(health.score_bar(), "██████████");
        let health = ProjectHealth {
            overall_score: 50,
            ..Default::default()
        };
        assert_eq!(health.score_bar(), "█████░░░░░");
        let health = ProjectHealth {
            overall_score: 0,
            ..Default::default()
        };
        // NOTE(review): this literal was mojibake ("â–‘" × 10, i.e. UTF-8
        // "░" decoded as Windows-1252); restored so it is consistent with
        // the filled/partial-bar assertions above.
        assert_eq!(health.score_bar(), "░░░░░░░░░░");
    }

    #[test]
    fn test_alert_severity_ordering() {
        assert!(AlertSeverity::Critical > AlertSeverity::Warning);
        assert!(AlertSeverity::Warning > AlertSeverity::Info);
    }

    #[test]
    fn test_calculate_project_health_empty() {
        let events: Vec<&GitEvent> = vec![];
        let empty_heatmap = FileHeatmap {
            files: vec![],
            total_files: 0,
            aggregation_level: AggregationLevel::Files,
        };
        let health = calculate_project_health(&events, |_| None, None, None, None, &empty_heatmap);
        // Empty history must fall back to the neutral default report.
        assert_eq!(health.overall_score, 50);
        assert_eq!(health.total_commits, 0);
    }

    #[test]
    fn test_calculate_project_health_with_events() {
        let events = vec![
            create_test_event_for_quality("hash1", "feat: add new feature", 50, 10),
            create_test_event_for_quality("hash2", "fix: bug fix", 20, 5),
            create_test_event_for_quality("hash3", "test: add tests", 30, 0),
        ];
        let refs: Vec<&GitEvent> = events.iter().collect();
        let empty_heatmap = FileHeatmap {
            files: vec![],
            total_files: 0,
            aggregation_level: AggregationLevel::Files,
        };
        let health = calculate_project_health(&refs, |_| None, None, None, None, &empty_heatmap);
        assert_eq!(health.total_commits, 3);
        assert!(health.overall_score > 0);
        // All three messages are Conventional Commits.
        assert!(health.quality.score > 0.5);
    }

    #[test]
    fn test_calculate_project_health_alerts() {
        // Solo development: bus factor must be skipped, not alerted.
        let mut events = Vec::new();
        for i in 0..10 {
            let mut event = create_test_event("single_author", 10, 5);
            event.short_hash = format!("hash{}", i);
            events.push(event);
        }
        let refs: Vec<&GitEvent> = events.iter().collect();
        let empty_heatmap = FileHeatmap {
            files: vec![],
            total_files: 0,
            aggregation_level: AggregationLevel::Files,
        };
        let health = calculate_project_health(&refs, |_| None, None, None, None, &empty_heatmap);
        let has_bus_factor_alert = health
            .alerts
            .iter()
            .any(|a| a.message.contains("bus factor"));
        assert!(!has_bus_factor_alert);
        assert_eq!(health.bus_factor_risk.weight, 0.0);
        // Heavily skewed two-author history: bus factor alert expected.
        let mut multi_events = Vec::new();
        for i in 0..10 {
            let mut event = create_test_event("author_a", 10, 5);
            event.short_hash = format!("hash_a{}", i);
            multi_events.push(event);
        }
        let mut event_b = create_test_event("author_b", 1, 0);
        event_b.short_hash = "hash_b0".to_string();
        multi_events.push(event_b);
        let multi_refs: Vec<&GitEvent> = multi_events.iter().collect();
        let health2 =
            calculate_project_health(&multi_refs, |_| None, None, None, None, &empty_heatmap);
        let has_bus_factor_alert2 = health2
            .alerts
            .iter()
            .any(|a| a.message.contains("bus factor"));
        assert!(has_bus_factor_alert2);
        assert_eq!(health2.bus_factor_risk.weight, 0.20);
    }

    #[test]
    fn test_is_test_file() {
        assert!(is_test_file("src/foo_test.rs"));
        assert!(is_test_file("src/foo_test.go"));
        assert!(is_test_file("src/foo.test.ts"));
        assert!(is_test_file("src/foo.test.js"));
        assert!(is_test_file("src/foo.spec.ts"));
        assert!(is_test_file("src/foo.spec.js"));
        assert!(is_test_file("tests/test_main.py"));
        assert!(is_test_file("__tests__/foo.js"));
        assert!(is_test_file("spec/helper.rb"));
        // Names that merely contain "test" must not match.
        assert!(!is_test_file("src/contest.rs"));
        assert!(!is_test_file("src/latest.ts"));
        assert!(!is_test_file("src/main.rs"));
        assert!(!is_test_file("protest.js"));
    }

    #[test]
    fn test_confidence_levels() {
        // 100 commits, 3 authors, 99-day span: all High-confidence
        // thresholds are met.
        let events: Vec<GitEvent> = (0..100)
            .map(|i| {
                let mut e = create_test_event(
                    if i % 3 == 0 {
                        "Alice"
                    } else if i % 3 == 1 {
                        "Bob"
                    } else {
                        "Charlie"
                    },
                    10,
                    5,
                );
                e.short_hash = format!("hash{}", i);
                e.timestamp = Local::now() - chrono::Duration::days(i as i64);
                e
            })
            .collect();
        let refs: Vec<&GitEvent> = events.iter().collect();
        let empty_heatmap = FileHeatmap {
            files: vec![],
            total_files: 0,
            aggregation_level: AggregationLevel::Files,
        };
        let health = calculate_project_health(&refs, |_| None, None, None, None, &empty_heatmap);
        assert_eq!(health.confidence.level, ConfidenceLevel::High);
    }
}