use std::fs::File;
use std::io::Write;
use std::path::Path;
use anyhow::Result;
use chrono::Local;
use serde::Serialize;
use crate::stats::{
ActivityTimeline, AlertSeverity, ChangeCouplingAnalysis, CodeOwnership, CommitImpactAnalysis,
CommitQualityAnalysis, FileHeatmap, ProjectHealth, RepoStats,
};
/// Serializable view of a single author's aggregate contribution statistics.
#[derive(Debug, Serialize)]
struct AuthorStatsJson {
    name: String,
    commit_count: usize,
    insertions: usize,
    deletions: usize,
    /// Author's most recent commit, formatted "%Y-%m-%dT%H:%M:%S%z".
    last_commit: String,
    /// Author's share of the repository's total commits, as a percentage.
    commit_percentage: f64,
}
/// Top-level JSON payload for whole-repository statistics.
#[derive(Debug, Serialize)]
struct RepoStatsJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    total_commits: usize,
    total_insertions: usize,
    total_deletions: usize,
    author_count: usize,
    authors: Vec<AuthorStatsJson>,
}
/// Write per-author repository statistics to a CSV file at `path`.
///
/// Columns: Author, Commits, Insertions, Deletions, LastCommit
/// ("%Y-%m-%d %H:%M:%S"), CommitPercentage (one decimal, trailing `%`).
pub fn export_stats_csv(stats: &RepoStats, path: &Path) -> Result<()> {
    let mut writer = csv::Writer::from_path(path)?;
    writer.write_record([
        "Author",
        "Commits",
        "Insertions",
        "Deletions",
        "LastCommit",
        "CommitPercentage",
    ])?;
    for author in &stats.authors {
        let row = [
            author.name.clone(),
            author.commit_count.to_string(),
            author.insertions.to_string(),
            author.deletions.to_string(),
            author.last_commit.format("%Y-%m-%d %H:%M:%S").to_string(),
            format!("{:.1}%", author.commit_percentage(stats.total_commits)),
        ];
        writer.write_record(&row)?;
    }
    writer.flush()?;
    Ok(())
}
pub fn export_stats_json(stats: &RepoStats, path: &Path) -> Result<()> {
let json_stats = RepoStatsJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_commits: stats.total_commits,
total_insertions: stats.total_insertions,
total_deletions: stats.total_deletions,
author_count: stats.author_count(),
authors: stats
.authors
.iter()
.map(|a| AuthorStatsJson {
name: a.name.clone(),
commit_count: a.commit_count,
insertions: a.insertions,
deletions: a.deletions,
last_commit: a.last_commit.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
commit_percentage: a.commit_percentage(stats.total_commits),
})
.collect(),
};
let json = serde_json::to_string_pretty(&json_stats)?;
let mut file = File::create(path)?;
file.write_all(json.as_bytes())?;
Ok(())
}
/// One file's row in the change-frequency heatmap export.
#[derive(Debug, Serialize)]
struct FileHeatmapEntryJson {
    path: String,
    change_count: usize,
    /// Normalized heat value produced by `FileHeatmapEntry::heat_level()`.
    heat_level: f64,
}
/// Top-level JSON payload for the file heatmap export.
#[derive(Debug, Serialize)]
struct FileHeatmapJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    total_files: usize,
    files: Vec<FileHeatmapEntryJson>,
}
/// Write the file-change heatmap to CSV: path, change count, and heat level
/// rendered with two decimals.
pub fn export_heatmap_csv(heatmap: &FileHeatmap, path: &Path) -> Result<()> {
    let mut writer = csv::Writer::from_path(path)?;
    writer.write_record(["Path", "ChangeCount", "HeatLevel"])?;
    for entry in &heatmap.files {
        let row = [
            entry.path.clone(),
            entry.change_count.to_string(),
            format!("{:.2}", entry.heat_level()),
        ];
        writer.write_record(&row)?;
    }
    writer.flush()?;
    Ok(())
}
pub fn export_heatmap_json(heatmap: &FileHeatmap, path: &Path) -> Result<()> {
let json_heatmap = FileHeatmapJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_files: heatmap.total_files,
files: heatmap
.files
.iter()
.map(|f| FileHeatmapEntryJson {
path: f.path.clone(),
change_count: f.change_count,
heat_level: f.heat_level(),
})
.collect(),
};
let json = serde_json::to_string_pretty(&json_heatmap)?;
let mut file = File::create(path)?;
file.write_all(json.as_bytes())?;
Ok(())
}
/// One path's ownership record in the code-ownership export.
#[derive(Debug, Serialize)]
struct CodeOwnershipEntryJson {
    path: String,
    /// True for directories; serialized alongside files in one flat list.
    is_directory: bool,
    primary_author: String,
    primary_commits: usize,
    total_commits: usize,
    /// Primary author's share of commits to this path, as a percentage.
    ownership_percentage: f64,
}
/// Top-level JSON payload for the code-ownership export.
#[derive(Debug, Serialize)]
struct CodeOwnershipJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    total_files: usize,
    entries: Vec<CodeOwnershipEntryJson>,
}
/// Write code-ownership entries to CSV. The Type column distinguishes
/// directories ("dir") from files ("file") in the flat entry list.
pub fn export_ownership_csv(ownership: &CodeOwnership, path: &Path) -> Result<()> {
    let mut writer = csv::Writer::from_path(path)?;
    writer.write_record([
        "Path",
        "Type",
        "PrimaryAuthor",
        "PrimaryCommits",
        "TotalCommits",
        "OwnershipPercentage",
    ])?;
    for entry in &ownership.entries {
        let kind = if entry.is_directory { "dir" } else { "file" };
        let row = [
            entry.path.clone(),
            kind.to_string(),
            entry.primary_author.clone(),
            entry.primary_commits.to_string(),
            entry.total_commits.to_string(),
            format!("{:.1}%", entry.ownership_percentage()),
        ];
        writer.write_record(&row)?;
    }
    writer.flush()?;
    Ok(())
}
pub fn export_ownership_json(ownership: &CodeOwnership, path: &Path) -> Result<()> {
let json_ownership = CodeOwnershipJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_files: ownership.total_files,
entries: ownership
.entries
.iter()
.map(|e| CodeOwnershipEntryJson {
path: e.path.clone(),
is_directory: e.is_directory,
primary_author: e.primary_author.clone(),
primary_commits: e.primary_commits,
total_commits: e.total_commits,
ownership_percentage: e.ownership_percentage(),
})
.collect(),
};
let json = serde_json::to_string_pretty(&json_ownership)?;
let mut file = File::create(path)?;
file.write_all(json.as_bytes())?;
Ok(())
}
/// One (weekday, hour) cell of the activity timeline; only non-empty cells
/// are exported.
#[derive(Debug, Serialize)]
struct TimelineCellJson {
    /// Weekday name as produced by `ActivityTimeline::day_name`.
    day: String,
    /// Hour of day, 0-23.
    hour: usize,
    commits: usize,
    heat_level: f64,
}
/// Top-level JSON payload for the commit-activity timeline export.
#[derive(Debug, Serialize)]
struct ActivityTimelineJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    total_commits: usize,
    /// Weekday with the most commits.
    peak_day: String,
    /// Hour (0-23) with the most commits.
    peak_hour: usize,
    peak_count: usize,
    /// Sparse list: cells with zero commits are omitted.
    cells: Vec<TimelineCellJson>,
}
/// Write the 7x24 activity grid to CSV, skipping empty cells. Columns:
/// Day, Hour, Commits, HeatLevel (two decimals).
pub fn export_timeline_csv(timeline: &ActivityTimeline, path: &Path) -> Result<()> {
    let mut writer = csv::Writer::from_path(path)?;
    writer.write_record(["Day", "Hour", "Commits", "HeatLevel"])?;
    for day in 0..7 {
        for hour in 0..24 {
            let commits = timeline.grid[day][hour];
            // Empty cells are omitted to keep the CSV sparse.
            if commits == 0 {
                continue;
            }
            let row = [
                ActivityTimeline::day_name(day).to_string(),
                hour.to_string(),
                commits.to_string(),
                format!("{:.2}", timeline.heat_level(day, hour)),
            ];
            writer.write_record(&row)?;
        }
    }
    writer.flush()?;
    Ok(())
}
/// Serialize the activity timeline (non-empty cells only) as pretty-printed
/// JSON and write it to `path`, overwriting any existing file.
pub fn export_timeline_json(timeline: &ActivityTimeline, path: &Path) -> Result<()> {
    let mut cells = Vec::new();
    for day in 0..7 {
        for hour in 0..24 {
            let commits = timeline.grid[day][hour];
            // Only cells with activity are exported.
            if commits == 0 {
                continue;
            }
            cells.push(TimelineCellJson {
                day: ActivityTimeline::day_name(day).to_string(),
                hour,
                commits,
                heat_level: timeline.heat_level(day, hour),
            });
        }
    }
    let payload = ActivityTimelineJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_commits: timeline.total_commits,
        peak_day: ActivityTimeline::day_name(timeline.peak_day).to_string(),
        peak_hour: timeline.peak_hour,
        peak_count: timeline.peak_count,
        cells,
    };
    let json = serde_json::to_string_pretty(&payload)?;
    std::fs::write(path, json)?;
    Ok(())
}
/// One commit's row in the impact-score export, including the composite
/// score and its three sub-scores.
#[derive(Debug, Serialize)]
struct CommitImpactScoreJson {
    commit_hash: String,
    commit_message: String,
    author: String,
    /// Commit timestamp, formatted "%Y-%m-%dT%H:%M:%S%z".
    date: String,
    files_changed: usize,
    insertions: usize,
    deletions: usize,
    /// Composite impact score.
    score: f64,
    file_score: f64,
    change_score: f64,
    heat_score: f64,
}
/// Top-level JSON payload for the commit-impact analysis export.
#[derive(Debug, Serialize)]
struct CommitImpactAnalysisJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    total_commits: usize,
    avg_score: f64,
    max_score: f64,
    high_impact_count: usize,
    commits: Vec<CommitImpactScoreJson>,
}
/// Write per-commit impact scores to CSV. Scores are rendered with three
/// decimals; dates with "%Y-%m-%d %H:%M:%S".
pub fn export_impact_csv(analysis: &CommitImpactAnalysis, path: &Path) -> Result<()> {
    let mut writer = csv::Writer::from_path(path)?;
    writer.write_record([
        "Hash",
        "Author",
        "Message",
        "FilesChanged",
        "Insertions",
        "Deletions",
        "Score",
        "FileScore",
        "ChangeScore",
        "HeatScore",
        "Date",
    ])?;
    for commit in &analysis.commits {
        let row = [
            commit.commit_hash.clone(),
            commit.author.clone(),
            commit.commit_message.clone(),
            commit.files_changed.to_string(),
            commit.insertions.to_string(),
            commit.deletions.to_string(),
            format!("{:.3}", commit.score),
            format!("{:.3}", commit.file_score),
            format!("{:.3}", commit.change_score),
            format!("{:.3}", commit.heat_score),
            commit.date.format("%Y-%m-%d %H:%M:%S").to_string(),
        ];
        writer.write_record(&row)?;
    }
    writer.flush()?;
    Ok(())
}
pub fn export_impact_json(analysis: &CommitImpactAnalysis, path: &Path) -> Result<()> {
let json_analysis = CommitImpactAnalysisJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_commits: analysis.total_commits,
avg_score: analysis.avg_score,
max_score: analysis.max_score,
high_impact_count: analysis.high_impact_count,
commits: analysis
.commits
.iter()
.map(|c| CommitImpactScoreJson {
commit_hash: c.commit_hash.clone(),
commit_message: c.commit_message.clone(),
author: c.author.clone(),
date: c.date.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
files_changed: c.files_changed,
insertions: c.insertions,
deletions: c.deletions,
score: c.score,
file_score: c.file_score,
change_score: c.change_score,
heat_score: c.heat_score,
})
.collect(),
};
let json = serde_json::to_string_pretty(&json_analysis)?;
let mut file = File::create(path)?;
file.write_all(json.as_bytes())?;
Ok(())
}
/// One directed file-pair coupling record: how often `coupled_file` changed
/// in the same commits as `file`.
#[derive(Debug, Serialize)]
struct FileCouplingJson {
    file: String,
    coupled_file: String,
    co_change_count: usize,
    file_change_count: usize,
    /// Coupling strength as a 0.0-1.0 fraction (CSV export multiplies by 100).
    coupling_percent: f64,
}
/// Top-level JSON payload for the change-coupling analysis export.
#[derive(Debug, Serialize)]
struct ChangeCouplingAnalysisJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    /// Count of exported coupling pairs (length of `couplings`).
    total_couplings: usize,
    high_coupling_count: usize,
    total_files_analyzed: usize,
    couplings: Vec<FileCouplingJson>,
}
/// Write file-coupling pairs to CSV. `coupling_percent` is stored as a
/// fraction, so it is scaled by 100 and rendered with one decimal and `%`.
pub fn export_coupling_csv(analysis: &ChangeCouplingAnalysis, path: &Path) -> Result<()> {
    let mut writer = csv::Writer::from_path(path)?;
    writer.write_record([
        "File",
        "CoupledFile",
        "CoChangeCount",
        "FileChangeCount",
        "CouplingPercent",
    ])?;
    for coupling in &analysis.couplings {
        let row = [
            coupling.file.clone(),
            coupling.coupled_file.clone(),
            coupling.co_change_count.to_string(),
            coupling.file_change_count.to_string(),
            format!("{:.1}%", coupling.coupling_percent * 100.0),
        ];
        writer.write_record(&row)?;
    }
    writer.flush()?;
    Ok(())
}
pub fn export_coupling_json(analysis: &ChangeCouplingAnalysis, path: &Path) -> Result<()> {
let json_analysis = ChangeCouplingAnalysisJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_couplings: analysis.couplings.len(),
high_coupling_count: analysis.high_coupling_count,
total_files_analyzed: analysis.total_files_analyzed,
couplings: analysis
.couplings
.iter()
.map(|c| FileCouplingJson {
file: c.file.clone(),
coupled_file: c.coupled_file.clone(),
co_change_count: c.co_change_count,
file_change_count: c.file_change_count,
coupling_percent: c.coupling_percent,
})
.collect(),
};
let json = serde_json::to_string_pretty(&json_analysis)?;
let mut file = File::create(path)?;
file.write_all(json.as_bytes())?;
Ok(())
}
/// One commit's row in the quality-score export, including the composite
/// score, its four sub-scores, and a human-readable quality level.
#[derive(Debug, Serialize)]
struct CommitQualityScoreJson {
    commit_hash: String,
    commit_message: String,
    author: String,
    /// Commit timestamp, formatted "%Y-%m-%dT%H:%M:%S%z".
    date: String,
    files_changed: usize,
    insertions: usize,
    deletions: usize,
    /// Composite quality score.
    score: f64,
    message_score: f64,
    size_score: f64,
    test_score: f64,
    atomicity_score: f64,
    /// Label produced by `quality_level()`.
    quality_level: String,
}
/// Top-level JSON payload for the commit-quality analysis export.
#[derive(Debug, Serialize)]
struct CommitQualityAnalysisJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    total_commits: usize,
    avg_score: f64,
    high_quality_count: usize,
    low_quality_count: usize,
    commits: Vec<CommitQualityScoreJson>,
}
/// Write per-commit quality scores to CSV. Scores use three decimals; the
/// date column uses "%Y-%m-%d %H:%M:%S".
pub fn export_quality_csv(analysis: &CommitQualityAnalysis, path: &Path) -> Result<()> {
    let mut writer = csv::Writer::from_path(path)?;
    writer.write_record([
        "Hash",
        "Author",
        "Message",
        "FilesChanged",
        "Insertions",
        "Deletions",
        "Score",
        "MessageScore",
        "SizeScore",
        "TestScore",
        "AtomicityScore",
        "QualityLevel",
        "Date",
    ])?;
    for commit in &analysis.commits {
        let row = [
            commit.commit_hash.clone(),
            commit.author.clone(),
            commit.commit_message.clone(),
            commit.files_changed.to_string(),
            commit.insertions.to_string(),
            commit.deletions.to_string(),
            format!("{:.3}", commit.score),
            format!("{:.3}", commit.message_score),
            format!("{:.3}", commit.size_score),
            format!("{:.3}", commit.test_score),
            format!("{:.3}", commit.atomicity_score),
            commit.quality_level().to_string(),
            commit.date.format("%Y-%m-%d %H:%M:%S").to_string(),
        ];
        writer.write_record(&row)?;
    }
    writer.flush()?;
    Ok(())
}
pub fn export_quality_json(analysis: &CommitQualityAnalysis, path: &Path) -> Result<()> {
let json_analysis = CommitQualityAnalysisJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_commits: analysis.total_commits,
avg_score: analysis.avg_score,
high_quality_count: analysis.high_quality_count,
low_quality_count: analysis.low_quality_count,
commits: analysis
.commits
.iter()
.map(|c| CommitQualityScoreJson {
commit_hash: c.commit_hash.clone(),
commit_message: c.commit_message.clone(),
author: c.author.clone(),
date: c.date.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
files_changed: c.files_changed,
insertions: c.insertions,
deletions: c.deletions,
score: c.score,
message_score: c.message_score,
size_score: c.size_score,
test_score: c.test_score,
atomicity_score: c.atomicity_score,
quality_level: c.quality_level().to_string(),
})
.collect(),
};
let json = serde_json::to_string_pretty(&json_analysis)?;
let mut file = File::create(path)?;
file.write_all(json.as_bytes())?;
Ok(())
}
pub fn quality_to_json(analysis: &CommitQualityAnalysis) -> Result<String> {
let json_analysis = CommitQualityAnalysisJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_commits: analysis.total_commits,
avg_score: analysis.avg_score,
high_quality_count: analysis.high_quality_count,
low_quality_count: analysis.low_quality_count,
commits: analysis
.commits
.iter()
.map(|c| CommitQualityScoreJson {
commit_hash: c.commit_hash.clone(),
commit_message: c.commit_message.clone(),
author: c.author.clone(),
date: c.date.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
files_changed: c.files_changed,
insertions: c.insertions,
deletions: c.deletions,
score: c.score,
message_score: c.message_score,
size_score: c.size_score,
test_score: c.test_score,
atomicity_score: c.atomicity_score,
quality_level: c.quality_level().to_string(),
})
.collect(),
};
Ok(serde_json::to_string_pretty(&json_analysis)?)
}
use crate::event::GitEvent;
/// One commit's row in the plain commit-log export.
#[derive(Debug, Serialize)]
struct CommitLogEntryJson {
    /// Abbreviated commit hash (from `GitEvent::short_hash`).
    hash: String,
    message: String,
    author: String,
    /// Commit timestamp, formatted "%Y-%m-%dT%H:%M:%S%z".
    date: String,
    files_added: usize,
    files_deleted: usize,
}
/// Top-level JSON payload for the commit-log export.
#[derive(Debug, Serialize)]
struct CommitLogJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    total_commits: usize,
    commits: Vec<CommitLogEntryJson>,
}
pub fn stats_to_json(stats: &RepoStats) -> Result<String> {
let json_stats = RepoStatsJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_commits: stats.total_commits,
total_insertions: stats.total_insertions,
total_deletions: stats.total_deletions,
author_count: stats.author_count(),
authors: stats
.authors
.iter()
.map(|a| AuthorStatsJson {
name: a.name.clone(),
commit_count: a.commit_count,
insertions: a.insertions,
deletions: a.deletions,
last_commit: a.last_commit.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
commit_percentage: a.commit_percentage(stats.total_commits),
})
.collect(),
};
Ok(serde_json::to_string_pretty(&json_stats)?)
}
pub fn heatmap_to_json(heatmap: &FileHeatmap) -> Result<String> {
let json_heatmap = FileHeatmapJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_files: heatmap.total_files,
files: heatmap
.files
.iter()
.map(|f| FileHeatmapEntryJson {
path: f.path.clone(),
change_count: f.change_count,
heat_level: f.heat_level(),
})
.collect(),
};
Ok(serde_json::to_string_pretty(&json_heatmap)?)
}
pub fn impact_to_json(analysis: &CommitImpactAnalysis) -> Result<String> {
let json_analysis = CommitImpactAnalysisJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_commits: analysis.total_commits,
avg_score: analysis.avg_score,
max_score: analysis.max_score,
high_impact_count: analysis.high_impact_count,
commits: analysis
.commits
.iter()
.map(|c| CommitImpactScoreJson {
commit_hash: c.commit_hash.clone(),
commit_message: c.commit_message.clone(),
author: c.author.clone(),
date: c.date.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
files_changed: c.files_changed,
insertions: c.insertions,
deletions: c.deletions,
score: c.score,
file_score: c.file_score,
change_score: c.change_score,
heat_score: c.heat_score,
})
.collect(),
};
Ok(serde_json::to_string_pretty(&json_analysis)?)
}
pub fn coupling_to_json(analysis: &ChangeCouplingAnalysis) -> Result<String> {
let json_analysis = ChangeCouplingAnalysisJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_couplings: analysis.couplings.len(),
high_coupling_count: analysis.high_coupling_count,
total_files_analyzed: analysis.total_files_analyzed,
couplings: analysis
.couplings
.iter()
.map(|c| FileCouplingJson {
file: c.file.clone(),
coupled_file: c.coupled_file.clone(),
co_change_count: c.co_change_count,
file_change_count: c.file_change_count,
coupling_percent: c.coupling_percent,
})
.collect(),
};
Ok(serde_json::to_string_pretty(&json_analysis)?)
}
pub fn log_to_json(events: &[GitEvent]) -> Result<String> {
let json_log = CommitLogJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_commits: events.len(),
commits: events
.iter()
.map(|e| CommitLogEntryJson {
hash: e.short_hash.clone(),
message: e.message.clone(),
author: e.author.clone(),
date: e.timestamp.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
files_added: e.files_added,
files_deleted: e.files_deleted,
})
.collect(),
};
Ok(serde_json::to_string_pretty(&json_log)?)
}
use crate::stats::{BusFactorAnalysis, TechDebtAnalysis};
/// One path's bus-factor record, with its ranked contributor list.
#[derive(Debug, Serialize)]
struct BusFactorEntryJson {
    path: String,
    bus_factor: usize,
    /// Label produced by `risk_level.display_name()`.
    risk_level: String,
    total_commits: usize,
    contributors: Vec<ContributorInfoJson>,
}
/// One contributor's share of commits to a bus-factor entry's path.
#[derive(Debug, Serialize)]
struct ContributorInfoJson {
    name: String,
    commit_count: usize,
    contribution_percent: f64,
}
/// Top-level JSON payload for the bus-factor analysis export.
#[derive(Debug, Serialize)]
struct BusFactorAnalysisJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    total_paths_analyzed: usize,
    high_risk_count: usize,
    medium_risk_count: usize,
    entries: Vec<BusFactorEntryJson>,
}
pub fn bus_factor_to_json(analysis: &BusFactorAnalysis) -> Result<String> {
let json_analysis = BusFactorAnalysisJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_paths_analyzed: analysis.total_paths_analyzed,
high_risk_count: analysis.high_risk_count,
medium_risk_count: analysis.medium_risk_count,
entries: analysis
.entries
.iter()
.map(|e| BusFactorEntryJson {
path: e.path.clone(),
bus_factor: e.bus_factor,
risk_level: e.risk_level.display_name().to_string(),
total_commits: e.total_commits,
contributors: e
.contributors
.iter()
.map(|c| ContributorInfoJson {
name: c.name.clone(),
commit_count: c.commit_count,
contribution_percent: c.contribution_percent,
})
.collect(),
})
.collect(),
};
Ok(serde_json::to_string_pretty(&json_analysis)?)
}
/// One file's technical-debt record: composite score plus its churn,
/// complexity, and age components.
#[derive(Debug, Serialize)]
struct TechDebtEntryJson {
    path: String,
    score: f64,
    churn_score: f64,
    complexity_score: f64,
    age_score: f64,
    /// Label produced by `debt_level.display_name()`.
    debt_level: String,
    change_count: usize,
    total_changes: usize,
}
/// Top-level JSON payload for the technical-debt analysis export.
#[derive(Debug, Serialize)]
struct TechDebtAnalysisJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    total_files_analyzed: usize,
    avg_score: f64,
    high_debt_count: usize,
    entries: Vec<TechDebtEntryJson>,
}
pub fn tech_debt_to_json(analysis: &TechDebtAnalysis) -> Result<String> {
let json_analysis = TechDebtAnalysisJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_files_analyzed: analysis.total_files_analyzed,
avg_score: analysis.avg_score,
high_debt_count: analysis.high_debt_count,
entries: analysis
.entries
.iter()
.map(|e| TechDebtEntryJson {
path: e.path.clone(),
score: e.score,
churn_score: e.churn_score,
complexity_score: e.complexity_score,
age_score: e.age_score,
debt_level: e.debt_level.display_name().to_string(),
change_count: e.change_count,
total_changes: e.total_changes,
})
.collect(),
};
Ok(serde_json::to_string_pretty(&json_analysis)?)
}
/// One path's ownership record for the string-returning ownership export.
// NOTE(review): field-for-field duplicate of `CodeOwnershipEntryJson` above;
// the two could be consolidated in a follow-up.
#[derive(Debug, Serialize)]
struct OwnershipEntryJson {
    path: String,
    is_directory: bool,
    primary_author: String,
    primary_commits: usize,
    total_commits: usize,
    ownership_percentage: f64,
}
/// Top-level JSON payload for the string-returning ownership export.
// NOTE(review): duplicate of `CodeOwnershipJson` above; candidate for
// consolidation in a follow-up.
#[derive(Debug, Serialize)]
struct OwnershipAnalysisJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    total_files: usize,
    entries: Vec<OwnershipEntryJson>,
}
pub fn ownership_to_json(ownership: &CodeOwnership) -> Result<String> {
let json_analysis = OwnershipAnalysisJson {
generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
total_files: ownership.total_files,
entries: ownership
.entries
.iter()
.map(|e| OwnershipEntryJson {
path: e.path.clone(),
is_directory: e.is_directory,
primary_author: e.primary_author.clone(),
primary_commits: e.primary_commits,
total_commits: e.total_commits,
ownership_percentage: e.ownership_percentage(),
})
.collect(),
};
Ok(serde_json::to_string_pretty(&json_analysis)?)
}
/// Render the activity timeline as a pretty-printed JSON string
/// (same document shape as `export_timeline_json`, without touching disk).
pub fn timeline_to_json(timeline: &ActivityTimeline) -> Result<String> {
    let mut cells = Vec::new();
    for day in 0..7 {
        for hour in 0..24 {
            let commits = timeline.grid[day][hour];
            // Only cells with activity are exported.
            if commits == 0 {
                continue;
            }
            cells.push(TimelineCellJson {
                day: ActivityTimeline::day_name(day).to_string(),
                hour,
                commits,
                heat_level: timeline.heat_level(day, hour),
            });
        }
    }
    let payload = ActivityTimelineJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_commits: timeline.total_commits,
        peak_day: ActivityTimeline::day_name(timeline.peak_day).to_string(),
        peak_hour: timeline.peak_hour,
        peak_count: timeline.peak_count,
        cells,
    };
    Ok(serde_json::to_string_pretty(&payload)?)
}
/// One health alert in the dashboard export.
#[derive(Debug, Serialize)]
struct HealthAlertJson {
    /// Lowercase tag: "info", "warning", or "critical".
    severity: String,
    message: String,
    /// Optional extra detail text; omitted alerts carry `None`.
    details: Option<String>,
}
/// One weighted component of the overall health score.
#[derive(Debug, Serialize)]
struct HealthScoreComponentJson {
    /// Raw component score, 0.0-1.0.
    score: f64,
    /// `score` scaled to 0-100 and rounded.
    score_percent: u8,
    /// Component weight in the overall score.
    weight: f64,
    description: String,
}
/// Top-level JSON payload for the project health dashboard export.
#[derive(Debug, Serialize)]
struct ProjectHealthJson {
    /// Export timestamp, formatted "%Y-%m-%dT%H:%M:%S%z" (local time).
    generated_at: String,
    /// Overall health score, 0-100.
    overall_score: u8,
    /// Human-readable health level from `ProjectHealth::level()`.
    level: String,
    quality: HealthScoreComponentJson,
    test_health: HealthScoreComponentJson,
    bus_factor_risk: HealthScoreComponentJson,
    tech_debt: HealthScoreComponentJson,
    code_churn: HealthScoreComponentJson,
    commit_cadence: HealthScoreComponentJson,
    alerts: Vec<HealthAlertJson>,
    total_commits: usize,
    total_authors: usize,
    analysis_period_days: u64,
    /// Confidence label from `confidence.level.as_str()`.
    confidence: String,
}
/// Serialize the project health dashboard to pretty-printed JSON.
///
/// Each score component is exported both as its raw 0.0-1.0 value and as a
/// rounded 0-100 integer percentage; alert severities become lowercase tags.
pub fn health_to_json(health: &ProjectHealth) -> Result<String> {
    // All six components serialize identically, so build them through a
    // single helper instead of six hand-expanded copies of the same fields.
    let component = |score: f64, weight: f64, description: &str| HealthScoreComponentJson {
        score,
        // Float-to-int `as` casts saturate, so a score outside 0.0-1.0
        // cannot wrap the u8 percentage.
        score_percent: (score * 100.0).round() as u8,
        weight,
        description: description.to_string(),
    };
    let json_health = ProjectHealthJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        overall_score: health.overall_score,
        level: health.level().to_string(),
        quality: component(
            health.quality.score,
            health.quality.weight,
            &health.quality.description,
        ),
        test_health: component(
            health.test_health.score,
            health.test_health.weight,
            &health.test_health.description,
        ),
        bus_factor_risk: component(
            health.bus_factor_risk.score,
            health.bus_factor_risk.weight,
            &health.bus_factor_risk.description,
        ),
        tech_debt: component(
            health.tech_debt.score,
            health.tech_debt.weight,
            &health.tech_debt.description,
        ),
        code_churn: component(
            health.code_churn.score,
            health.code_churn.weight,
            &health.code_churn.description,
        ),
        commit_cadence: component(
            health.commit_cadence.score,
            health.commit_cadence.weight,
            &health.commit_cadence.description,
        ),
        alerts: health
            .alerts
            .iter()
            .map(|a| HealthAlertJson {
                severity: match a.severity {
                    AlertSeverity::Info => "info",
                    AlertSeverity::Warning => "warning",
                    AlertSeverity::Critical => "critical",
                }
                .to_string(),
                message: a.message.clone(),
                details: a.details.clone(),
            })
            .collect(),
        total_commits: health.total_commits,
        total_authors: health.total_authors,
        analysis_period_days: health.analysis_period_days,
        confidence: health.confidence.level.as_str().to_string(),
    };
    Ok(serde_json::to_string_pretty(&json_health)?)
}
/// Render the project health dashboard as a Markdown report.
///
/// Sections, in order: title, overall score with a textual score bar, a
/// summary list, a score-breakdown table, an optional alerts list, and a
/// generation-timestamp footer.
pub fn health_to_markdown(health: &ProjectHealth) -> String {
    let mut md = String::new();
    md.push_str("# Project Health Dashboard\n\n");
    md.push_str(&format!(
        "## Overall Score: {} / 100 ({})\n\n",
        health.overall_score,
        health.level()
    ));
    // ASCII/emoji score bar, wrapped in backticks so Markdown keeps spacing.
    md.push_str(&format!("`{}`\n\n", health.score_bar()));
    md.push_str("### Summary\n\n");
    md.push_str(&format!(
        "- **Commits analyzed**: {}\n",
        health.total_commits
    ));
    md.push_str(&format!("- **Contributors**: {}\n", health.total_authors));
    md.push_str(&format!(
        "- **Period**: {} days\n",
        health.analysis_period_days
    ));
    md.push_str(&format!(
        "- **Confidence**: {}\n\n",
        health.confidence.level.as_str()
    ));
    // Component scores and weights are rendered as whole percentages.
    md.push_str("### Score Breakdown\n\n");
    md.push_str("| Component | Score | Weight |\n");
    md.push_str("|-----------|-------|--------|\n");
    md.push_str(&format!(
        "| Quality | {:.0}% | {:.0}% |\n",
        health.quality.score * 100.0,
        health.quality.weight * 100.0
    ));
    md.push_str(&format!(
        "| Test Health | {:.0}% | {:.0}% |\n",
        health.test_health.score * 100.0,
        health.test_health.weight * 100.0
    ));
    // The bus-factor row only appears for multi-author repositories.
    if health.total_authors > 1 {
        md.push_str(&format!(
            "| Bus Factor Risk | {:.0}% | {:.0}% |\n",
            health.bus_factor_risk.score * 100.0,
            health.bus_factor_risk.weight * 100.0
        ));
    }
    md.push_str(&format!(
        "| Technical Debt | {:.0}% | {:.0}% |\n",
        health.tech_debt.score * 100.0,
        health.tech_debt.weight * 100.0
    ));
    md.push_str(&format!(
        "| Code Churn | {:.0}% | {:.0}% |\n",
        health.code_churn.score * 100.0,
        health.code_churn.weight * 100.0
    ));
    md.push_str(&format!(
        "| Commit Cadence | {:.0}% | {:.0}% |\n\n",
        health.commit_cadence.score * 100.0,
        health.commit_cadence.weight * 100.0
    ));
    // Alerts section is omitted entirely when there are no alerts.
    if !health.alerts.is_empty() {
        md.push_str("### Alerts\n\n");
        for alert in &health.alerts {
            let icon = alert.severity.icon();
            md.push_str(&format!("- {} **{}**", icon, alert.message));
            // Optional detail text is rendered as an indented sub-bullet.
            if let Some(details) = &alert.details {
                md.push_str(&format!("\n - {}", details));
            }
            md.push('\n');
        }
        md.push('\n');
    }
    md.push_str(&format!(
        "*Generated at {}*\n",
        Local::now().format("%Y-%m-%d %H:%M:%S")
    ));
    md
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::stats::{AuthorStats, FileHeatmapEntry};
    use chrono::Local;
    use std::fs;
    use tempfile::tempdir;

    /// Two-author fixture: Alice (50 commits) and Bob (30 commits),
    /// 80 commits total.
    fn create_test_stats() -> RepoStats {
        RepoStats {
            authors: vec![
                AuthorStats {
                    name: "Alice".to_string(),
                    commit_count: 50,
                    insertions: 1000,
                    deletions: 200,
                    last_commit: Local::now(),
                },
                AuthorStats {
                    name: "Bob".to_string(),
                    commit_count: 30,
                    insertions: 500,
                    deletions: 100,
                    last_commit: Local::now(),
                },
            ],
            total_commits: 80,
            total_insertions: 1500,
            total_deletions: 300,
        }
    }

    #[test]
    fn test_export_stats_csv() {
        let dir = tempdir().unwrap();
        let path = dir.path().join("stats.csv");
        let stats = create_test_stats();
        export_stats_csv(&stats, &path).unwrap();
        let content = fs::read_to_string(&path).unwrap();
        // Header row plus one data row per author.
        assert!(content.contains("Author,Commits,Insertions,Deletions"));
        assert!(content.contains("Alice,50,1000,200"));
        assert!(content.contains("Bob,30,500,100"));
    }

    #[test]
    fn test_export_stats_json() {
        let dir = tempdir().unwrap();
        let path = dir.path().join("stats.json");
        let stats = create_test_stats();
        export_stats_json(&stats, &path).unwrap();
        let content = fs::read_to_string(&path).unwrap();
        // Spot-check pretty-printed fields rather than parsing the JSON.
        assert!(content.contains("\"total_commits\": 80"));
        assert!(content.contains("\"name\": \"Alice\""));
        assert!(content.contains("\"commit_count\": 50"));
    }

    /// Two-file fixture where main.rs has the maximum change count, so its
    /// heat level is 1.00 and lib.rs's is 0.50.
    fn create_test_heatmap() -> FileHeatmap {
        use crate::stats::AggregationLevel;
        FileHeatmap {
            files: vec![
                FileHeatmapEntry {
                    path: "src/main.rs".to_string(),
                    change_count: 10,
                    max_changes: 10,
                },
                FileHeatmapEntry {
                    path: "src/lib.rs".to_string(),
                    change_count: 5,
                    max_changes: 10,
                },
            ],
            total_files: 2,
            aggregation_level: AggregationLevel::Files,
        }
    }

    #[test]
    fn test_export_heatmap_csv() {
        let dir = tempdir().unwrap();
        let path = dir.path().join("heatmap.csv");
        let heatmap = create_test_heatmap();
        export_heatmap_csv(&heatmap, &path).unwrap();
        let content = fs::read_to_string(&path).unwrap();
        assert!(content.contains("Path,ChangeCount,HeatLevel"));
        // Heat levels are formatted with two decimals.
        assert!(content.contains("src/main.rs,10,1.00"));
        assert!(content.contains("src/lib.rs,5,0.50"));
    }

    #[test]
    fn test_export_heatmap_json() {
        let dir = tempdir().unwrap();
        let path = dir.path().join("heatmap.json");
        let heatmap = create_test_heatmap();
        export_heatmap_json(&heatmap, &path).unwrap();
        let content = fs::read_to_string(&path).unwrap();
        assert!(content.contains("\"total_files\": 2"));
        assert!(content.contains("\"path\": \"src/main.rs\""));
    }
}