//! Export of repository analyses to CSV, JSON, and Markdown.

use std::fs::File;
use std::io::Write;
use std::path::Path;

use anyhow::Result;
use chrono::Local;
use serde::Serialize;

use crate::event::GitEvent;
use crate::stats::{
    ActivityTimeline, AlertSeverity, BusFactorAnalysis, ChangeCouplingAnalysis, CodeOwnership,
    CommitImpactAnalysis, CommitQualityAnalysis, FileHeatmap, ProjectHealth, RepoStats,
    TechDebtAnalysis,
};
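
/// Output formats supported by the export functions in this module.
///
/// A minimal sketch of how a caller might use it to build an output file
/// name (the `base` variable and the surrounding code are illustrative,
/// not part of this module):
///
/// ```ignore
/// let format = ExportFormat::Csv;
/// let base = "repo-stats";
/// let filename = format!("{}.{}", base, format.extension()); // "repo-stats.csv"
/// println!("Exporting {} data to {}", format.name(), filename);
/// ```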
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExportFormat {
    Csv,
    Json,
}

impl ExportFormat {
    pub fn extension(&self) -> &'static str {
        match self {
            ExportFormat::Csv => "csv",
            ExportFormat::Json => "json",
        }
    }

    pub fn name(&self) -> &'static str {
        match self {
            ExportFormat::Csv => "CSV",
            ExportFormat::Json => "JSON",
        }
    }
}

#[derive(Debug, Serialize)]
struct AuthorStatsJson {
    name: String,
    commit_count: usize,
    insertions: usize,
    deletions: usize,
    last_commit: String,
    commit_percentage: f64,
}

#[derive(Debug, Serialize)]
struct RepoStatsJson {
    generated_at: String,
    total_commits: usize,
    total_insertions: usize,
    total_deletions: usize,
    author_count: usize,
    authors: Vec<AuthorStatsJson>,
}
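
/// Writes per-author commit statistics to a CSV file at `path`, with an
/// `Author,Commits,Insertions,Deletions,LastCommit,CommitPercentage` header row.
///
/// A minimal usage sketch; `stats` would come from the caller's own
/// collection step (`collect_stats` here is a hypothetical placeholder):
///
/// ```ignore
/// let stats: RepoStats = collect_stats(&repo)?;
/// export_stats_csv(&stats, Path::new("repo-stats.csv"))?;
/// ```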
pub fn export_stats_csv(stats: &RepoStats, path: &Path) -> Result<()> {
    let mut wtr = csv::Writer::from_path(path)?;

    wtr.write_record([
        "Author",
        "Commits",
        "Insertions",
        "Deletions",
        "LastCommit",
        "CommitPercentage",
    ])?;

    for author in &stats.authors {
        wtr.write_record([
            &author.name,
            &author.commit_count.to_string(),
            &author.insertions.to_string(),
            &author.deletions.to_string(),
            &author.last_commit.format("%Y-%m-%d %H:%M:%S").to_string(),
            &format!("{:.1}%", author.commit_percentage(stats.total_commits)),
        ])?;
    }

    wtr.flush()?;
    Ok(())
}

/// Writes repository statistics (totals plus per-author entries) as pretty-printed JSON.
pub fn export_stats_json(stats: &RepoStats, path: &Path) -> Result<()> {
    let json_stats = RepoStatsJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_commits: stats.total_commits,
        total_insertions: stats.total_insertions,
        total_deletions: stats.total_deletions,
        author_count: stats.author_count(),
        authors: stats
            .authors
            .iter()
            .map(|a| AuthorStatsJson {
                name: a.name.clone(),
                commit_count: a.commit_count,
                insertions: a.insertions,
                deletions: a.deletions,
                last_commit: a.last_commit.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
                commit_percentage: a.commit_percentage(stats.total_commits),
            })
            .collect(),
    };

    let json = serde_json::to_string_pretty(&json_stats)?;
    let mut file = File::create(path)?;
    file.write_all(json.as_bytes())?;
    Ok(())
}

#[derive(Debug, Serialize)]
struct FileHeatmapEntryJson {
    path: String,
    change_count: usize,
    heat_level: f64,
}

#[derive(Debug, Serialize)]
struct FileHeatmapJson {
    generated_at: String,
    total_files: usize,
    files: Vec<FileHeatmapEntryJson>,
}

/// Writes the file-change heatmap to CSV with `Path,ChangeCount,HeatLevel` columns.
pub fn export_heatmap_csv(heatmap: &FileHeatmap, path: &Path) -> Result<()> {
    let mut wtr = csv::Writer::from_path(path)?;

    wtr.write_record(["Path", "ChangeCount", "HeatLevel"])?;

    for entry in &heatmap.files {
        wtr.write_record([
            &entry.path,
            &entry.change_count.to_string(),
            &format!("{:.2}", entry.heat_level()),
        ])?;
    }

    wtr.flush()?;
    Ok(())
}

/// Writes the file-change heatmap as pretty-printed JSON.
pub fn export_heatmap_json(heatmap: &FileHeatmap, path: &Path) -> Result<()> {
    let json_heatmap = FileHeatmapJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_files: heatmap.total_files,
        files: heatmap
            .files
            .iter()
            .map(|f| FileHeatmapEntryJson {
                path: f.path.clone(),
                change_count: f.change_count,
                heat_level: f.heat_level(),
            })
            .collect(),
    };

    let json = serde_json::to_string_pretty(&json_heatmap)?;
    let mut file = File::create(path)?;
    file.write_all(json.as_bytes())?;
    Ok(())
}

#[derive(Debug, Serialize)]
struct CodeOwnershipEntryJson {
    path: String,
    is_directory: bool,
    primary_author: String,
    primary_commits: usize,
    total_commits: usize,
    ownership_percentage: f64,
}

#[derive(Debug, Serialize)]
struct CodeOwnershipJson {
    generated_at: String,
    total_files: usize,
    entries: Vec<CodeOwnershipEntryJson>,
}

/// Writes code-ownership entries to CSV, one row per file or directory.
pub fn export_ownership_csv(ownership: &CodeOwnership, path: &Path) -> Result<()> {
    let mut wtr = csv::Writer::from_path(path)?;

    wtr.write_record([
        "Path",
        "Type",
        "PrimaryAuthor",
        "PrimaryCommits",
        "TotalCommits",
        "OwnershipPercentage",
    ])?;

    for entry in &ownership.entries {
        let entry_type = if entry.is_directory {
            "dir".to_string()
        } else {
            "file".to_string()
        };
        wtr.write_record([
            &entry.path,
            &entry_type,
            &entry.primary_author,
            &entry.primary_commits.to_string(),
            &entry.total_commits.to_string(),
            &format!("{:.1}%", entry.ownership_percentage()),
        ])?;
    }

    wtr.flush()?;
    Ok(())
}

/// Writes code-ownership entries as pretty-printed JSON.
pub fn export_ownership_json(ownership: &CodeOwnership, path: &Path) -> Result<()> {
    let json_ownership = CodeOwnershipJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_files: ownership.total_files,
        entries: ownership
            .entries
            .iter()
            .map(|e| CodeOwnershipEntryJson {
                path: e.path.clone(),
                is_directory: e.is_directory,
                primary_author: e.primary_author.clone(),
                primary_commits: e.primary_commits,
                total_commits: e.total_commits,
                ownership_percentage: e.ownership_percentage(),
            })
            .collect(),
    };

    let json = serde_json::to_string_pretty(&json_ownership)?;
    let mut file = File::create(path)?;
    file.write_all(json.as_bytes())?;
    Ok(())
}

#[derive(Debug, Serialize)]
struct TimelineCellJson {
    day: String,
    hour: usize,
    commits: usize,
    heat_level: f64,
}

#[derive(Debug, Serialize)]
struct ActivityTimelineJson {
    generated_at: String,
    total_commits: usize,
    peak_day: String,
    peak_hour: usize,
    peak_count: usize,
    cells: Vec<TimelineCellJson>,
}
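
/// Writes the day-by-hour activity grid to CSV, skipping cells with no commits.
///
/// The output looks roughly like the sketch below; the values are
/// illustrative and the day labels come from `ActivityTimeline::day_name`:
///
/// ```text
/// Day,Hour,Commits,HeatLevel
/// Mon,9,4,0.50
/// Mon,14,8,1.00
/// ```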
pub fn export_timeline_csv(timeline: &ActivityTimeline, path: &Path) -> Result<()> {
    let mut wtr = csv::Writer::from_path(path)?;

    wtr.write_record(["Day", "Hour", "Commits", "HeatLevel"])?;

    for day in 0..7 {
        for hour in 0..24 {
            let commits = timeline.grid[day][hour];
            if commits > 0 {
                wtr.write_record([
                    ActivityTimeline::day_name(day),
                    &hour.to_string(),
                    &commits.to_string(),
                    &format!("{:.2}", timeline.heat_level(day, hour)),
                ])?;
            }
        }
    }

    wtr.flush()?;
    Ok(())
}

/// Writes the activity timeline (non-empty cells plus peak values) as pretty-printed JSON.
pub fn export_timeline_json(timeline: &ActivityTimeline, path: &Path) -> Result<()> {
    let mut cells = Vec::new();
    for day in 0..7 {
        for hour in 0..24 {
            let commits = timeline.grid[day][hour];
            if commits > 0 {
                cells.push(TimelineCellJson {
                    day: ActivityTimeline::day_name(day).to_string(),
                    hour,
                    commits,
                    heat_level: timeline.heat_level(day, hour),
                });
            }
        }
    }

    let json_timeline = ActivityTimelineJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_commits: timeline.total_commits,
        peak_day: ActivityTimeline::day_name(timeline.peak_day).to_string(),
        peak_hour: timeline.peak_hour,
        peak_count: timeline.peak_count,
        cells,
    };

    let json = serde_json::to_string_pretty(&json_timeline)?;
    let mut file = File::create(path)?;
    file.write_all(json.as_bytes())?;
    Ok(())
}

#[derive(Debug, Serialize)]
struct CommitImpactScoreJson {
    commit_hash: String,
    commit_message: String,
    author: String,
    date: String,
    files_changed: usize,
    insertions: usize,
    deletions: usize,
    score: f64,
    file_score: f64,
    change_score: f64,
    heat_score: f64,
}

#[derive(Debug, Serialize)]
struct CommitImpactAnalysisJson {
    generated_at: String,
    total_commits: usize,
    avg_score: f64,
    max_score: f64,
    high_impact_count: usize,
    commits: Vec<CommitImpactScoreJson>,
}

/// Writes per-commit impact scores to CSV, one row per analyzed commit.
pub fn export_impact_csv(analysis: &CommitImpactAnalysis, path: &Path) -> Result<()> {
    let mut wtr = csv::Writer::from_path(path)?;

    wtr.write_record([
        "Hash",
        "Author",
        "Message",
        "FilesChanged",
        "Insertions",
        "Deletions",
        "Score",
        "FileScore",
        "ChangeScore",
        "HeatScore",
        "Date",
    ])?;

    for commit in &analysis.commits {
        wtr.write_record([
            &commit.commit_hash,
            &commit.author,
            &commit.commit_message,
            &commit.files_changed.to_string(),
            &commit.insertions.to_string(),
            &commit.deletions.to_string(),
            &format!("{:.3}", commit.score),
            &format!("{:.3}", commit.file_score),
            &format!("{:.3}", commit.change_score),
            &format!("{:.3}", commit.heat_score),
            &commit.date.format("%Y-%m-%d %H:%M:%S").to_string(),
        ])?;
    }

    wtr.flush()?;
    Ok(())
}

/// Writes the commit impact analysis as pretty-printed JSON.
pub fn export_impact_json(analysis: &CommitImpactAnalysis, path: &Path) -> Result<()> {
    let json_analysis = CommitImpactAnalysisJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_commits: analysis.total_commits,
        avg_score: analysis.avg_score,
        max_score: analysis.max_score,
        high_impact_count: analysis.high_impact_count,
        commits: analysis
            .commits
            .iter()
            .map(|c| CommitImpactScoreJson {
                commit_hash: c.commit_hash.clone(),
                commit_message: c.commit_message.clone(),
                author: c.author.clone(),
                date: c.date.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
                files_changed: c.files_changed,
                insertions: c.insertions,
                deletions: c.deletions,
                score: c.score,
                file_score: c.file_score,
                change_score: c.change_score,
                heat_score: c.heat_score,
            })
            .collect(),
    };

    let json = serde_json::to_string_pretty(&json_analysis)?;
    let mut file = File::create(path)?;
    file.write_all(json.as_bytes())?;
    Ok(())
}

#[derive(Debug, Serialize)]
struct FileCouplingJson {
    file: String,
    coupled_file: String,
    co_change_count: usize,
    file_change_count: usize,
    coupling_percent: f64,
}

#[derive(Debug, Serialize)]
struct ChangeCouplingAnalysisJson {
    generated_at: String,
    total_couplings: usize,
    high_coupling_count: usize,
    total_files_analyzed: usize,
    couplings: Vec<FileCouplingJson>,
}

/// Writes change-coupling pairs to CSV; the `CouplingPercent` column is
/// rendered as a percentage (`coupling_percent * 100`).
pub fn export_coupling_csv(analysis: &ChangeCouplingAnalysis, path: &Path) -> Result<()> {
    let mut wtr = csv::Writer::from_path(path)?;

    wtr.write_record([
        "File",
        "CoupledFile",
        "CoChangeCount",
        "FileChangeCount",
        "CouplingPercent",
    ])?;

    for coupling in &analysis.couplings {
        wtr.write_record([
            &coupling.file,
            &coupling.coupled_file,
            &coupling.co_change_count.to_string(),
            &coupling.file_change_count.to_string(),
            &format!("{:.1}%", coupling.coupling_percent * 100.0),
        ])?;
    }

    wtr.flush()?;
    Ok(())
}

/// Writes the change-coupling analysis as pretty-printed JSON.
pub fn export_coupling_json(analysis: &ChangeCouplingAnalysis, path: &Path) -> Result<()> {
    let json_analysis = ChangeCouplingAnalysisJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_couplings: analysis.couplings.len(),
        high_coupling_count: analysis.high_coupling_count,
        total_files_analyzed: analysis.total_files_analyzed,
        couplings: analysis
            .couplings
            .iter()
            .map(|c| FileCouplingJson {
                file: c.file.clone(),
                coupled_file: c.coupled_file.clone(),
                co_change_count: c.co_change_count,
                file_change_count: c.file_change_count,
                coupling_percent: c.coupling_percent,
            })
            .collect(),
    };

    let json = serde_json::to_string_pretty(&json_analysis)?;
    let mut file = File::create(path)?;
    file.write_all(json.as_bytes())?;
    Ok(())
}

#[derive(Debug, Serialize)]
struct CommitQualityScoreJson {
    commit_hash: String,
    commit_message: String,
    author: String,
    date: String,
    files_changed: usize,
    insertions: usize,
    deletions: usize,
    score: f64,
    message_score: f64,
    size_score: f64,
    test_score: f64,
    atomicity_score: f64,
    quality_level: String,
}

#[derive(Debug, Serialize)]
struct CommitQualityAnalysisJson {
    generated_at: String,
    total_commits: usize,
    avg_score: f64,
    high_quality_count: usize,
    low_quality_count: usize,
    commits: Vec<CommitQualityScoreJson>,
}

/// Writes per-commit quality scores to CSV, one row per analyzed commit.
pub fn export_quality_csv(analysis: &CommitQualityAnalysis, path: &Path) -> Result<()> {
    let mut wtr = csv::Writer::from_path(path)?;

    wtr.write_record([
        "Hash",
        "Author",
        "Message",
        "FilesChanged",
        "Insertions",
        "Deletions",
        "Score",
        "MessageScore",
        "SizeScore",
        "TestScore",
        "AtomicityScore",
        "QualityLevel",
        "Date",
    ])?;

    for commit in &analysis.commits {
        wtr.write_record([
            &commit.commit_hash,
            &commit.author,
            &commit.commit_message,
            &commit.files_changed.to_string(),
            &commit.insertions.to_string(),
            &commit.deletions.to_string(),
            &format!("{:.3}", commit.score),
            &format!("{:.3}", commit.message_score),
            &format!("{:.3}", commit.size_score),
            &format!("{:.3}", commit.test_score),
            &format!("{:.3}", commit.atomicity_score),
            commit.quality_level(),
            &commit.date.format("%Y-%m-%d %H:%M:%S").to_string(),
        ])?;
    }

    wtr.flush()?;
    Ok(())
}

/// Writes the commit quality analysis as pretty-printed JSON.
pub fn export_quality_json(analysis: &CommitQualityAnalysis, path: &Path) -> Result<()> {
    let json_analysis = CommitQualityAnalysisJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_commits: analysis.total_commits,
        avg_score: analysis.avg_score,
        high_quality_count: analysis.high_quality_count,
        low_quality_count: analysis.low_quality_count,
        commits: analysis
            .commits
            .iter()
            .map(|c| CommitQualityScoreJson {
                commit_hash: c.commit_hash.clone(),
                commit_message: c.commit_message.clone(),
                author: c.author.clone(),
                date: c.date.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
                files_changed: c.files_changed,
                insertions: c.insertions,
                deletions: c.deletions,
                score: c.score,
                message_score: c.message_score,
                size_score: c.size_score,
                test_score: c.test_score,
                atomicity_score: c.atomicity_score,
                quality_level: c.quality_level().to_string(),
            })
            .collect(),
    };

    let json = serde_json::to_string_pretty(&json_analysis)?;
    let mut file = File::create(path)?;
    file.write_all(json.as_bytes())?;
    Ok(())
}

/// In-memory JSON counterpart of [`export_quality_json`].
pub fn quality_to_json(analysis: &CommitQualityAnalysis) -> Result<String> {
    let json_analysis = CommitQualityAnalysisJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_commits: analysis.total_commits,
        avg_score: analysis.avg_score,
        high_quality_count: analysis.high_quality_count,
        low_quality_count: analysis.low_quality_count,
        commits: analysis
            .commits
            .iter()
            .map(|c| CommitQualityScoreJson {
                commit_hash: c.commit_hash.clone(),
                commit_message: c.commit_message.clone(),
                author: c.author.clone(),
                date: c.date.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
                files_changed: c.files_changed,
                insertions: c.insertions,
                deletions: c.deletions,
                score: c.score,
                message_score: c.message_score,
                size_score: c.size_score,
                test_score: c.test_score,
                atomicity_score: c.atomicity_score,
                quality_level: c.quality_level().to_string(),
            })
            .collect(),
    };

    Ok(serde_json::to_string_pretty(&json_analysis)?)
}

#[derive(Debug, Serialize)]
struct CommitLogEntryJson {
    hash: String,
    message: String,
    author: String,
    date: String,
    files_added: usize,
    files_deleted: usize,
}

#[derive(Debug, Serialize)]
struct CommitLogJson {
    generated_at: String,
    total_commits: usize,
    commits: Vec<CommitLogEntryJson>,
}
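
/// Builds the repository statistics JSON in memory and returns it as a
/// `String`; the document has the same shape as the one written by
/// [`export_stats_json`].
///
/// A minimal sketch (where the string ends up is left to the caller):
///
/// ```ignore
/// let json = stats_to_json(&stats)?;
/// println!("{json}");
/// ```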
pub fn stats_to_json(stats: &RepoStats) -> Result<String> {
    let json_stats = RepoStatsJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_commits: stats.total_commits,
        total_insertions: stats.total_insertions,
        total_deletions: stats.total_deletions,
        author_count: stats.author_count(),
        authors: stats
            .authors
            .iter()
            .map(|a| AuthorStatsJson {
                name: a.name.clone(),
                commit_count: a.commit_count,
                insertions: a.insertions,
                deletions: a.deletions,
                last_commit: a.last_commit.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
                commit_percentage: a.commit_percentage(stats.total_commits),
            })
            .collect(),
    };

    Ok(serde_json::to_string_pretty(&json_stats)?)
}

/// In-memory JSON counterpart of [`export_heatmap_json`].
pub fn heatmap_to_json(heatmap: &FileHeatmap) -> Result<String> {
    let json_heatmap = FileHeatmapJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_files: heatmap.total_files,
        files: heatmap
            .files
            .iter()
            .map(|f| FileHeatmapEntryJson {
                path: f.path.clone(),
                change_count: f.change_count,
                heat_level: f.heat_level(),
            })
            .collect(),
    };

    Ok(serde_json::to_string_pretty(&json_heatmap)?)
}

/// In-memory JSON counterpart of [`export_impact_json`].
pub fn impact_to_json(analysis: &CommitImpactAnalysis) -> Result<String> {
    let json_analysis = CommitImpactAnalysisJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_commits: analysis.total_commits,
        avg_score: analysis.avg_score,
        max_score: analysis.max_score,
        high_impact_count: analysis.high_impact_count,
        commits: analysis
            .commits
            .iter()
            .map(|c| CommitImpactScoreJson {
                commit_hash: c.commit_hash.clone(),
                commit_message: c.commit_message.clone(),
                author: c.author.clone(),
                date: c.date.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
                files_changed: c.files_changed,
                insertions: c.insertions,
                deletions: c.deletions,
                score: c.score,
                file_score: c.file_score,
                change_score: c.change_score,
                heat_score: c.heat_score,
            })
            .collect(),
    };

    Ok(serde_json::to_string_pretty(&json_analysis)?)
}

/// In-memory JSON counterpart of [`export_coupling_json`].
pub fn coupling_to_json(analysis: &ChangeCouplingAnalysis) -> Result<String> {
    let json_analysis = ChangeCouplingAnalysisJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_couplings: analysis.couplings.len(),
        high_coupling_count: analysis.high_coupling_count,
        total_files_analyzed: analysis.total_files_analyzed,
        couplings: analysis
            .couplings
            .iter()
            .map(|c| FileCouplingJson {
                file: c.file.clone(),
                coupled_file: c.coupled_file.clone(),
                co_change_count: c.co_change_count,
                file_change_count: c.file_change_count,
                coupling_percent: c.coupling_percent,
            })
            .collect(),
    };

    Ok(serde_json::to_string_pretty(&json_analysis)?)
}
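
/// Serializes a slice of commit events as a pretty-printed JSON commit log.
///
/// A minimal sketch; how the `GitEvent`s are collected is up to the caller
/// (`load_events` below is a hypothetical placeholder):
///
/// ```ignore
/// let events: Vec<GitEvent> = load_events(&repo)?;
/// let json = log_to_json(&events)?;
/// std::fs::write("commit-log.json", json)?;
/// ```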
pub fn log_to_json(events: &[GitEvent]) -> Result<String> {
    let json_log = CommitLogJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_commits: events.len(),
        commits: events
            .iter()
            .map(|e| CommitLogEntryJson {
                hash: e.short_hash.clone(),
                message: e.message.clone(),
                author: e.author.clone(),
                date: e.timestamp.format("%Y-%m-%dT%H:%M:%S%z").to_string(),
                files_added: e.files_added,
                files_deleted: e.files_deleted,
            })
            .collect(),
    };

    Ok(serde_json::to_string_pretty(&json_log)?)
}

#[derive(Debug, Serialize)]
struct BusFactorEntryJson {
    path: String,
    bus_factor: usize,
    risk_level: String,
    total_commits: usize,
    contributors: Vec<ContributorInfoJson>,
}

#[derive(Debug, Serialize)]
struct ContributorInfoJson {
    name: String,
    commit_count: usize,
    contribution_percent: f64,
}

#[derive(Debug, Serialize)]
struct BusFactorAnalysisJson {
    generated_at: String,
    total_paths_analyzed: usize,
    high_risk_count: usize,
    medium_risk_count: usize,
    entries: Vec<BusFactorEntryJson>,
}

/// Serializes the bus factor analysis, including per-path contributor
/// breakdowns, as pretty-printed JSON.
pub fn bus_factor_to_json(analysis: &BusFactorAnalysis) -> Result<String> {
    let json_analysis = BusFactorAnalysisJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_paths_analyzed: analysis.total_paths_analyzed,
        high_risk_count: analysis.high_risk_count,
        medium_risk_count: analysis.medium_risk_count,
        entries: analysis
            .entries
            .iter()
            .map(|e| BusFactorEntryJson {
                path: e.path.clone(),
                bus_factor: e.bus_factor,
                risk_level: e.risk_level.display_name().to_string(),
                total_commits: e.total_commits,
                contributors: e
                    .contributors
                    .iter()
                    .map(|c| ContributorInfoJson {
                        name: c.name.clone(),
                        commit_count: c.commit_count,
                        contribution_percent: c.contribution_percent,
                    })
                    .collect(),
            })
            .collect(),
    };

    Ok(serde_json::to_string_pretty(&json_analysis)?)
}

#[derive(Debug, Serialize)]
struct TechDebtEntryJson {
    path: String,
    score: f64,
    churn_score: f64,
    complexity_score: f64,
    age_score: f64,
    debt_level: String,
    change_count: usize,
    total_changes: usize,
}

#[derive(Debug, Serialize)]
struct TechDebtAnalysisJson {
    generated_at: String,
    total_files_analyzed: usize,
    avg_score: f64,
    high_debt_count: usize,
    entries: Vec<TechDebtEntryJson>,
}

/// Serializes the technical debt analysis as pretty-printed JSON.
pub fn tech_debt_to_json(analysis: &TechDebtAnalysis) -> Result<String> {
    let json_analysis = TechDebtAnalysisJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_files_analyzed: analysis.total_files_analyzed,
        avg_score: analysis.avg_score,
        high_debt_count: analysis.high_debt_count,
        entries: analysis
            .entries
            .iter()
            .map(|e| TechDebtEntryJson {
                path: e.path.clone(),
                score: e.score,
                churn_score: e.churn_score,
                complexity_score: e.complexity_score,
                age_score: e.age_score,
                debt_level: e.debt_level.display_name().to_string(),
                change_count: e.change_count,
                total_changes: e.total_changes,
            })
            .collect(),
    };

    Ok(serde_json::to_string_pretty(&json_analysis)?)
}

#[derive(Debug, Serialize)]
struct OwnershipEntryJson {
    path: String,
    is_directory: bool,
    primary_author: String,
    primary_commits: usize,
    total_commits: usize,
    ownership_percentage: f64,
}

#[derive(Debug, Serialize)]
struct OwnershipAnalysisJson {
    generated_at: String,
    total_files: usize,
    entries: Vec<OwnershipEntryJson>,
}

/// Serializes code-ownership entries as a pretty-printed JSON string.
pub fn ownership_to_json(ownership: &CodeOwnership) -> Result<String> {
    let json_analysis = OwnershipAnalysisJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_files: ownership.total_files,
        entries: ownership
            .entries
            .iter()
            .map(|e| OwnershipEntryJson {
                path: e.path.clone(),
                is_directory: e.is_directory,
                primary_author: e.primary_author.clone(),
                primary_commits: e.primary_commits,
                total_commits: e.total_commits,
                ownership_percentage: e.ownership_percentage(),
            })
            .collect(),
    };

    Ok(serde_json::to_string_pretty(&json_analysis)?)
}

/// In-memory JSON counterpart of [`export_timeline_json`].
pub fn timeline_to_json(timeline: &ActivityTimeline) -> Result<String> {
    let mut cells = Vec::new();
    for day in 0..7 {
        for hour in 0..24 {
            let commits = timeline.grid[day][hour];
            if commits > 0 {
                cells.push(TimelineCellJson {
                    day: ActivityTimeline::day_name(day).to_string(),
                    hour,
                    commits,
                    heat_level: timeline.heat_level(day, hour),
                });
            }
        }
    }

    let json_timeline = ActivityTimelineJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        total_commits: timeline.total_commits,
        peak_day: ActivityTimeline::day_name(timeline.peak_day).to_string(),
        peak_hour: timeline.peak_hour,
        peak_count: timeline.peak_count,
        cells,
    };

    Ok(serde_json::to_string_pretty(&json_timeline)?)
}

#[derive(Debug, Serialize)]
struct HealthAlertJson {
    severity: String,
    message: String,
    details: Option<String>,
}

#[derive(Debug, Serialize)]
struct HealthScoreComponentJson {
    score: f64,
    score_percent: u8,
    weight: f64,
    description: String,
}

#[derive(Debug, Serialize)]
struct ProjectHealthJson {
    generated_at: String,
    overall_score: u8,
    level: String,
    quality: HealthScoreComponentJson,
    test_health: HealthScoreComponentJson,
    bus_factor_risk: HealthScoreComponentJson,
    tech_debt: HealthScoreComponentJson,
    alerts: Vec<HealthAlertJson>,
    total_commits: usize,
    total_authors: usize,
    analysis_period_days: u64,
}

/// Serializes the project health dashboard (overall score, component scores,
/// and alerts) as pretty-printed JSON.
pub fn health_to_json(health: &ProjectHealth) -> Result<String> {
    let json_health = ProjectHealthJson {
        generated_at: Local::now().format("%Y-%m-%dT%H:%M:%S%z").to_string(),
        overall_score: health.overall_score,
        level: health.level().to_string(),
        quality: HealthScoreComponentJson {
            score: health.quality.score,
            score_percent: (health.quality.score * 100.0).round() as u8,
            weight: health.quality.weight,
            description: health.quality.description.clone(),
        },
        test_health: HealthScoreComponentJson {
            score: health.test_health.score,
            score_percent: (health.test_health.score * 100.0).round() as u8,
            weight: health.test_health.weight,
            description: health.test_health.description.clone(),
        },
        bus_factor_risk: HealthScoreComponentJson {
            score: health.bus_factor_risk.score,
            score_percent: (health.bus_factor_risk.score * 100.0).round() as u8,
            weight: health.bus_factor_risk.weight,
            description: health.bus_factor_risk.description.clone(),
        },
        tech_debt: HealthScoreComponentJson {
            score: health.tech_debt.score,
            score_percent: (health.tech_debt.score * 100.0).round() as u8,
            weight: health.tech_debt.weight,
            description: health.tech_debt.description.clone(),
        },
        alerts: health
            .alerts
            .iter()
            .map(|a| HealthAlertJson {
                severity: match a.severity {
                    AlertSeverity::Info => "info".to_string(),
                    AlertSeverity::Warning => "warning".to_string(),
                    AlertSeverity::Critical => "critical".to_string(),
                },
                message: a.message.clone(),
                details: a.details.clone(),
            })
            .collect(),
        total_commits: health.total_commits,
        total_authors: health.total_authors,
        analysis_period_days: health.analysis_period_days,
    };

    Ok(serde_json::to_string_pretty(&json_health)?)
}
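
/// Renders the project health dashboard as a small Markdown report with an
/// overall score, a summary list, a score-breakdown table, and any alerts.
///
/// A minimal sketch of writing the report to disk (the output path is
/// illustrative):
///
/// ```ignore
/// let report = health_to_markdown(&health);
/// std::fs::write("HEALTH.md", report)?;
/// ```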
pub fn health_to_markdown(health: &ProjectHealth) -> String {
    let mut md = String::new();

    md.push_str("# Project Health Dashboard\n\n");

    md.push_str(&format!(
        "## Overall Score: {} / 100 ({})\n\n",
        health.overall_score,
        health.level()
    ));
    md.push_str(&format!("`{}`\n\n", health.score_bar()));

    md.push_str("### Summary\n\n");
    md.push_str(&format!(
        "- **Commits analyzed**: {}\n",
        health.total_commits
    ));
    md.push_str(&format!("- **Contributors**: {}\n", health.total_authors));
    md.push_str(&format!(
        "- **Period**: {} days\n\n",
        health.analysis_period_days
    ));

    md.push_str("### Score Breakdown\n\n");
    md.push_str("| Component | Score | Weight |\n");
    md.push_str("|-----------|-------|--------|\n");
    md.push_str(&format!(
        "| Quality | {:.0}% | {:.0}% |\n",
        health.quality.score * 100.0,
        health.quality.weight * 100.0
    ));
    md.push_str(&format!(
        "| Test Health | {:.0}% | {:.0}% |\n",
        health.test_health.score * 100.0,
        health.test_health.weight * 100.0
    ));
    md.push_str(&format!(
        "| Bus Factor Risk | {:.0}% | {:.0}% |\n",
        health.bus_factor_risk.score * 100.0,
        health.bus_factor_risk.weight * 100.0
    ));
    md.push_str(&format!(
        "| Technical Debt | {:.0}% | {:.0}% |\n\n",
        health.tech_debt.score * 100.0,
        health.tech_debt.weight * 100.0
    ));

    if !health.alerts.is_empty() {
        md.push_str("### Alerts\n\n");
        for alert in &health.alerts {
            let icon = alert.severity.icon();
            md.push_str(&format!("- {} **{}**", icon, alert.message));
            if let Some(details) = &alert.details {
                // Two-space indent so the detail renders as a nested list item.
                md.push_str(&format!("\n  - {}", details));
            }
            md.push('\n');
        }
        md.push('\n');
    }

    md.push_str(&format!(
        "*Generated at {}*\n",
        Local::now().format("%Y-%m-%d %H:%M:%S")
    ));

    md
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::stats::{AuthorStats, FileHeatmapEntry};
    use chrono::Local;
    use std::fs;
    use tempfile::tempdir;

    fn create_test_stats() -> RepoStats {
        RepoStats {
            authors: vec![
                AuthorStats {
                    name: "Alice".to_string(),
                    commit_count: 50,
                    insertions: 1000,
                    deletions: 200,
                    last_commit: Local::now(),
                },
                AuthorStats {
                    name: "Bob".to_string(),
                    commit_count: 30,
                    insertions: 500,
                    deletions: 100,
                    last_commit: Local::now(),
                },
            ],
            total_commits: 80,
            total_insertions: 1500,
            total_deletions: 300,
        }
    }

    #[test]
    fn test_export_stats_csv() {
        let dir = tempdir().unwrap();
        let path = dir.path().join("stats.csv");
        let stats = create_test_stats();

        export_stats_csv(&stats, &path).unwrap();

        let content = fs::read_to_string(&path).unwrap();
        assert!(content.contains("Author,Commits,Insertions,Deletions"));
        assert!(content.contains("Alice,50,1000,200"));
        assert!(content.contains("Bob,30,500,100"));
    }

    #[test]
    fn test_export_stats_json() {
        let dir = tempdir().unwrap();
        let path = dir.path().join("stats.json");
        let stats = create_test_stats();

        export_stats_json(&stats, &path).unwrap();

        let content = fs::read_to_string(&path).unwrap();
        assert!(content.contains("\"total_commits\": 80"));
        assert!(content.contains("\"name\": \"Alice\""));
        assert!(content.contains("\"commit_count\": 50"));
    }
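
    // `stats_to_json` builds the same document shape as `export_stats_json`, but
    // returns it as an in-memory string; check that the key fields are present.
    #[test]
    fn test_stats_to_json_contains_totals() {
        let stats = create_test_stats();

        let json = stats_to_json(&stats).unwrap();

        assert!(json.contains("\"total_commits\": 80"));
        assert!(json.contains("\"name\": \"Bob\""));
        assert!(json.contains("\"commit_count\": 30"));
    }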

    #[test]
    fn test_export_format_extension() {
        assert_eq!(ExportFormat::Csv.extension(), "csv");
        assert_eq!(ExportFormat::Json.extension(), "json");
    }

    #[test]
    fn test_export_format_name() {
        assert_eq!(ExportFormat::Csv.name(), "CSV");
        assert_eq!(ExportFormat::Json.name(), "JSON");
    }

    fn create_test_heatmap() -> FileHeatmap {
        use crate::stats::AggregationLevel;
        FileHeatmap {
            files: vec![
                FileHeatmapEntry {
                    path: "src/main.rs".to_string(),
                    change_count: 10,
                    max_changes: 10,
                },
                FileHeatmapEntry {
                    path: "src/lib.rs".to_string(),
                    change_count: 5,
                    max_changes: 10,
                },
            ],
            total_files: 2,
            aggregation_level: AggregationLevel::Files,
        }
    }

    #[test]
    fn test_export_heatmap_csv() {
        let dir = tempdir().unwrap();
        let path = dir.path().join("heatmap.csv");
        let heatmap = create_test_heatmap();

        export_heatmap_csv(&heatmap, &path).unwrap();

        let content = fs::read_to_string(&path).unwrap();
        assert!(content.contains("Path,ChangeCount,HeatLevel"));
        assert!(content.contains("src/main.rs,10,1.00"));
        assert!(content.contains("src/lib.rs,5,0.50"));
    }

    #[test]
    fn test_export_heatmap_json() {
        let dir = tempdir().unwrap();
        let path = dir.path().join("heatmap.json");
        let heatmap = create_test_heatmap();

        export_heatmap_json(&heatmap, &path).unwrap();

        let content = fs::read_to_string(&path).unwrap();
        assert!(content.contains("\"total_files\": 2"));
        assert!(content.contains("\"path\": \"src/main.rs\""));
    }
}