use chrono::{DateTime, Local};
use std::collections::HashMap;
use crate::event::GitEvent;
use super::AggregationLevel;
/// Per-author contribution statistics accumulated from commit events.
#[derive(Debug, Clone)]
pub struct AuthorStats {
    /// Author name as recorded in commit metadata.
    pub name: String,
    /// Number of commits attributed to this author.
    pub commit_count: usize,
    /// Total insertions across this author's commits.
    pub insertions: usize,
    /// Total deletions across this author's commits.
    pub deletions: usize,
    /// Timestamp of this author's most recent commit.
    pub last_commit: DateTime<Local>,
}

impl AuthorStats {
    /// This author's share of `total` commits, expressed as a percentage.
    /// Returns 0.0 when `total` is zero to avoid dividing by zero.
    pub fn commit_percentage(&self, total: usize) -> f64 {
        match total {
            0 => 0.0,
            t => (self.commit_count as f64 / t as f64) * 100.0,
        }
    }
}
/// Aggregate repository statistics over all processed commit events.
#[derive(Debug, Clone, Default)]
pub struct RepoStats {
    /// Per-author breakdowns (sorted by descending commit count when
    /// produced by `calculate_stats`).
    pub authors: Vec<AuthorStats>,
    pub total_commits: usize,
    pub total_insertions: usize,
    pub total_deletions: usize,
}

impl RepoStats {
    /// Number of distinct authors seen.
    pub fn author_count(&self) -> usize {
        self.authors.len()
    }
}
/// One row of the change-frequency heatmap: a file (or, after
/// aggregation, a directory) with its commit-touch count.
#[derive(Debug, Clone)]
pub struct FileHeatmapEntry {
    pub path: String,
    /// How many of the analyzed commits touched this path.
    pub change_count: usize,
    /// Largest `change_count` in the heatmap; used to normalize heat.
    pub max_changes: usize,
}

impl FileHeatmapEntry {
    /// Heat normalized into [0.0, 1.0]; 0.0 when the heatmap is empty.
    pub fn heat_level(&self) -> f64 {
        match self.max_changes {
            0 => 0.0,
            max => self.change_count as f64 / max as f64,
        }
    }

    /// Text bar for terminal rendering, bucketed by heat level.
    pub fn heat_bar(&self) -> &'static str {
        match self.heat_level() {
            l if l >= 0.8 => "█████",
            l if l >= 0.6 => "████ ",
            l if l >= 0.4 => "███ ",
            l if l >= 0.2 => "██ ",
            _ => "█ ",
        }
    }
}
/// Change-frequency heatmap over files, optionally rolled up by directory.
#[derive(Debug, Clone, Default)]
pub struct FileHeatmap {
    /// Rows sorted by descending change count.
    pub files: Vec<FileHeatmapEntry>,
    /// Number of distinct files before any directory aggregation.
    pub total_files: usize,
    pub aggregation_level: AggregationLevel,
}

impl FileHeatmap {
    /// Number of rows currently held (files, or directories after rollup).
    pub fn file_count(&self) -> usize {
        self.files.len()
    }

    /// Returns a copy of this heatmap rolled up to `level`.
    ///
    /// `total_files` always reflects the original file count, not the
    /// number of aggregated rows.
    pub fn with_aggregation(&self, level: AggregationLevel) -> FileHeatmap {
        if level == AggregationLevel::Files {
            return FileHeatmap {
                files: self.files.clone(),
                total_files: self.total_files,
                aggregation_level: level,
            };
        }

        // Sum per-file change counts into their directory buckets.
        let mut bucket_counts: HashMap<String, usize> = HashMap::new();
        for entry in &self.files {
            *bucket_counts
                .entry(extract_directory(&entry.path, level))
                .or_insert(0) += entry.change_count;
        }

        let max_changes = bucket_counts.values().copied().max().unwrap_or(0);
        let mut rows: Vec<FileHeatmapEntry> = bucket_counts
            .into_iter()
            .map(|(path, change_count)| FileHeatmapEntry {
                path,
                change_count,
                max_changes,
            })
            .collect();
        rows.sort_by(|a, b| b.change_count.cmp(&a.change_count));

        FileHeatmap {
            files: rows,
            total_files: self.total_files,
            aggregation_level: level,
        }
    }
}
/// Maps a file path onto its aggregation bucket for `level`.
///
/// - `Files`: the path itself, unchanged.
/// - `Shallow`: the first two directory components ("a/b/c.rs" -> "a/b/"),
///   or a single one when the file sits only one level deep ("a/b.rs" -> "a/").
/// - `Deep`: only the top-level directory ("a/b/c.rs" -> "a/").
///
/// Paths with no directory component are returned unchanged.
fn extract_directory(path: &str, level: AggregationLevel) -> String {
    // How many leading components each level keeps (Files keeps the whole path).
    let depth = match level {
        AggregationLevel::Files => return path.to_string(),
        AggregationLevel::Shallow => 2,
        AggregationLevel::Deep => 1,
    };
    let parts: Vec<&str> = path.split('/').collect();
    // Never consume the final component: that is the file name itself.
    let keep = depth.min(parts.len().saturating_sub(1));
    if keep == 0 {
        path.to_string()
    } else {
        let mut dir = parts[..keep].join("/");
        dir.push('/');
        dir
    }
}
/// Builds a file heatmap by counting, per file, how many of the given
/// commits touched it.
///
/// `get_files` resolves a commit's short hash to its list of changed
/// files; commits it cannot resolve (returns `None`) contribute nothing.
/// The result is sorted by descending change count and reported at
/// `AggregationLevel::Files`.
pub fn calculate_file_heatmap(
    events: &[&GitEvent],
    get_files: impl Fn(&str) -> Option<Vec<String>>,
) -> FileHeatmap {
    let mut counts: HashMap<String, usize> = HashMap::new();
    for event in events {
        for file in get_files(&event.short_hash).unwrap_or_default() {
            *counts.entry(file).or_insert(0) += 1;
        }
    }

    let max_changes = counts.values().copied().max().unwrap_or(0);
    let mut rows: Vec<FileHeatmapEntry> = counts
        .into_iter()
        .map(|(path, change_count)| FileHeatmapEntry {
            path,
            change_count,
            max_changes,
        })
        .collect();
    rows.sort_by(|a, b| b.change_count.cmp(&a.change_count));

    FileHeatmap {
        total_files: rows.len(),
        files: rows,
        aggregation_level: AggregationLevel::Files,
    }
}
/// Commit activity bucketed into a 7x24 grid (weekday x hour of day).
#[derive(Debug, Clone, Default)]
pub struct ActivityTimeline {
    /// `grid[day][hour]` = commit count; day 0 = Monday.
    pub grid: [[usize; 24]; 7],
    pub total_commits: usize,
    /// Day index of the busiest cell.
    pub peak_day: usize,
    /// Hour of the busiest cell.
    pub peak_hour: usize,
    /// Commit count of the busiest cell.
    pub peak_count: usize,
    /// Largest cell value; used to normalize heat levels.
    pub max_count: usize,
}

impl ActivityTimeline {
    /// Three-letter English name for a Monday-based day index;
    /// "???" for out-of-range indices.
    pub fn day_name(day: usize) -> &'static str {
        const NAMES: [&str; 7] = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"];
        NAMES.get(day).copied().unwrap_or("???")
    }

    /// Heat of one cell normalized into [0.0, 1.0]; 0.0 for an empty timeline.
    pub fn heat_level(&self, day: usize, hour: usize) -> f64 {
        match self.max_count {
            0 => 0.0,
            max => self.grid[day][hour] as f64 / max as f64,
        }
    }

    /// Two-column glyph for a heat level, densest bucket first.
    pub fn heat_char(level: f64) -> &'static str {
        match level {
            l if l >= 0.8 => "██",
            l if l >= 0.6 => "▓▓",
            l if l >= 0.4 => "▒▒",
            l if l >= 0.2 => "░░",
            l if l > 0.0 => "··",
            _ => "  ",
        }
    }

    /// Human-readable description of the busiest one-hour slot.
    pub fn peak_summary(&self) -> String {
        if self.peak_count == 0 {
            return "No activity".to_string();
        }
        format!(
            "{} {:02}:00-{:02}:00 ({} commits)",
            Self::day_name(self.peak_day),
            self.peak_hour,
            (self.peak_hour + 1) % 24,
            self.peak_count
        )
    }
}
/// Buckets the given commits into a weekday x hour grid and records the
/// busiest cell (first cell in day-major order wins ties).
pub fn calculate_activity_timeline(events: &[&GitEvent]) -> ActivityTimeline {
    use chrono::{Datelike, Timelike};

    let mut timeline = ActivityTimeline {
        total_commits: events.len(),
        ..Default::default()
    };

    for event in events {
        // num_days_from_monday() yields 0..=6 and hour() yields 0..=23,
        // so the grid indexing cannot go out of bounds.
        let day = event.timestamp.weekday().num_days_from_monday() as usize;
        let hour = event.timestamp.hour() as usize;
        timeline.grid[day][hour] += 1;
    }

    // Scan for the busiest cell using locals, then publish the result.
    let (mut best, mut best_day, mut best_hour) = (0usize, 0usize, 0usize);
    for (day, row) in timeline.grid.iter().enumerate() {
        for (hour, &count) in row.iter().enumerate() {
            if count > best {
                best = count;
                best_day = day;
                best_hour = hour;
            }
        }
    }
    timeline.peak_day = best_day;
    timeline.peak_hour = best_hour;
    timeline.peak_count = best;
    timeline.max_count = best;
    timeline
}
/// Ownership summary for one path: who committed to it the most.
#[derive(Debug, Clone)]
pub struct CodeOwnershipEntry {
    pub path: String,
    /// Author with the most commits touching this path.
    pub primary_author: String,
    /// Commit count of the primary author on this path.
    pub primary_commits: usize,
    /// Total commits (all authors) touching this path.
    pub total_commits: usize,
    /// Nesting depth: number of '/' separators in `path`.
    pub depth: usize,
    pub is_directory: bool,
}

impl CodeOwnershipEntry {
    /// Primary author's share of all commits on this path, in percent.
    /// Returns 0.0 when there are no commits.
    pub fn ownership_percentage(&self) -> f64 {
        match self.total_commits {
            0 => 0.0,
            total => (self.primary_commits as f64 / total as f64) * 100.0,
        }
    }
}
/// Full ownership report: one entry per file plus one per ancestor directory.
#[derive(Debug, Clone, Default)]
pub struct CodeOwnership {
    /// Rows for both files and directories, sorted by path.
    pub entries: Vec<CodeOwnershipEntry>,
    /// Count of distinct files only (directories excluded).
    pub total_files: usize,
}

impl CodeOwnership {
    /// Total number of report rows (files and directories combined).
    pub fn entry_count(&self) -> usize {
        self.entries.len()
    }
}
/// Computes per-file and per-directory code ownership from commit events.
///
/// For every file touched by the given commits (resolved via `get_files`,
/// which maps a short hash to the commit's changed files), counts commits
/// per author, then rolls those counts up into every ancestor directory.
/// Each path becomes one entry naming its primary (most-active) author.
/// Entries are sorted by path; directory paths carry no trailing slash.
pub fn calculate_ownership(
    events: &[&GitEvent],
    get_files: impl Fn(&str) -> Option<Vec<String>>,
) -> CodeOwnership {
    // Picks the dominant author from a per-author commit-count map.
    // Ties are broken by the lexicographically smallest author name so the
    // result is deterministic despite HashMap's randomized iteration order
    // (the previous `max_by_key` left tie winners nondeterministic).
    fn primary_of(counts: &HashMap<String, usize>) -> (String, usize) {
        counts
            .iter()
            .max_by(|a, b| a.1.cmp(b.1).then_with(|| b.0.cmp(a.0)))
            .map(|(author, count)| (author.clone(), *count))
            .unwrap_or_default()
    }

    // file path -> (author -> commit count)
    let mut file_author_counts: HashMap<String, HashMap<String, usize>> = HashMap::new();
    for event in events {
        if let Some(files) = get_files(&event.short_hash) {
            for file in files {
                let author_counts = file_author_counts.entry(file).or_default();
                *author_counts.entry(event.author.clone()).or_insert(0) += 1;
            }
        }
    }

    // Roll file counts up into every ancestor directory
    // ("a/b/c.rs" contributes to both "a" and "a/b").
    let mut dir_author_counts: HashMap<String, HashMap<String, usize>> = HashMap::new();
    for (file_path, author_counts) in &file_author_counts {
        let parts: Vec<&str> = file_path.split('/').collect();
        for i in 1..parts.len() {
            let dir_counts = dir_author_counts.entry(parts[..i].join("/")).or_default();
            for (author, count) in author_counts {
                *dir_counts.entry(author.clone()).or_insert(0) += count;
            }
        }
    }

    let total_files = file_author_counts.len();
    let mut entries = Vec::with_capacity(dir_author_counts.len() + total_files);

    // Directories first, then files: the final stable sort keeps a directory
    // entry ahead of a same-named file entry, matching the original order.
    // The intermediate per-group key sorts the original performed were
    // redundant — the final sort below fully determines the ordering.
    for (is_directory, counts_by_path) in
        [(true, &dir_author_counts), (false, &file_author_counts)]
    {
        for (path, author_counts) in counts_by_path {
            let (primary_author, primary_commits) = primary_of(author_counts);
            entries.push(CodeOwnershipEntry {
                path: path.clone(),
                primary_author,
                primary_commits,
                total_commits: author_counts.values().sum(),
                depth: path.matches('/').count(),
                is_directory,
            });
        }
    }

    entries.sort_by(|a, b| a.path.cmp(&b.path));

    CodeOwnership {
        entries,
        total_files,
    }
}
/// Aggregates commit events into repository-wide and per-author statistics.
///
/// Authors are keyed by name. Each author's insertion/deletion totals are
/// accumulated from the event's `files_added`/`files_deleted` counters, and
/// `last_commit` tracks their most recent timestamp. The returned author
/// list is sorted by descending commit count.
pub fn calculate_stats(events: &[&GitEvent]) -> RepoStats {
    let mut author_map: HashMap<String, AuthorStats> = HashMap::new();
    let mut total_insertions = 0usize;
    let mut total_deletions = 0usize;

    for event in events {
        total_insertions += event.files_added;
        total_deletions += event.files_deleted;

        // or_insert_with keeps the initial-stats construction lazy: the
        // previous or_insert(...) built the struct (and cloned the name)
        // on every event, even when the author already had an entry.
        let entry = author_map
            .entry(event.author.clone())
            .or_insert_with(|| AuthorStats {
                name: event.author.clone(),
                commit_count: 0,
                insertions: 0,
                deletions: 0,
                last_commit: event.timestamp,
            });
        entry.commit_count += 1;
        entry.insertions += event.files_added;
        entry.deletions += event.files_deleted;
        if event.timestamp > entry.last_commit {
            entry.last_commit = event.timestamp;
        }
    }

    let mut authors: Vec<AuthorStats> = author_map.into_values().collect();
    authors.sort_by(|a, b| b.commit_count.cmp(&a.commit_count));

    RepoStats {
        authors,
        total_commits: events.len(),
        total_insertions,
        total_deletions,
    }
}
#[cfg(test)]
#[allow(clippy::useless_vec)]
mod tests {
use super::*;
use chrono::Local;
fn create_test_event(author: &str, insertions: usize, deletions: usize) -> GitEvent {
GitEvent::commit(
"abc1234".to_string(),
"test commit".to_string(),
author.to_string(),
Local::now(),
insertions,
deletions,
)
}
fn create_test_event_with_hash(hash: &str) -> GitEvent {
GitEvent::commit(
hash.to_string(),
"test commit".to_string(),
"author".to_string(),
Local::now(),
10,
5,
)
}
#[test]
fn test_calculate_stats_empty() {
let stats = calculate_stats(&[]);
assert_eq!(stats.total_commits, 0);
assert_eq!(stats.authors.len(), 0);
}
#[test]
fn test_calculate_stats_single_author() {
let events = vec![
create_test_event("Alice", 10, 5),
create_test_event("Alice", 20, 10),
];
let refs: Vec<&GitEvent> = events.iter().collect();
let stats = calculate_stats(&refs);
assert_eq!(stats.total_commits, 2);
assert_eq!(stats.authors.len(), 1);
assert_eq!(stats.authors[0].name, "Alice");
assert_eq!(stats.authors[0].commit_count, 2);
assert_eq!(stats.authors[0].insertions, 30);
assert_eq!(stats.authors[0].deletions, 15);
}
#[test]
fn test_calculate_stats_multiple_authors() {
let events = vec![
create_test_event("Alice", 10, 5),
create_test_event("Bob", 5, 2),
create_test_event("Alice", 20, 10),
create_test_event("Bob", 15, 8),
create_test_event("Bob", 10, 5),
];
let refs: Vec<&GitEvent> = events.iter().collect();
let stats = calculate_stats(&refs);
assert_eq!(stats.total_commits, 5);
assert_eq!(stats.authors.len(), 2);
assert_eq!(stats.authors[0].name, "Bob");
assert_eq!(stats.authors[0].commit_count, 3);
assert_eq!(stats.authors[1].name, "Alice");
assert_eq!(stats.authors[1].commit_count, 2);
}
#[test]
fn test_calculate_stats_totals() {
let events = vec![
create_test_event("Alice", 10, 5),
create_test_event("Bob", 20, 10),
];
let refs: Vec<&GitEvent> = events.iter().collect();
let stats = calculate_stats(&refs);
assert_eq!(stats.total_insertions, 30);
assert_eq!(stats.total_deletions, 15);
}
#[test]
fn test_author_stats_commit_percentage() {
let author = AuthorStats {
name: "Alice".to_string(),
commit_count: 25,
insertions: 0,
deletions: 0,
last_commit: Local::now(),
};
assert!((author.commit_percentage(100) - 25.0).abs() < 0.01);
assert!((author.commit_percentage(50) - 50.0).abs() < 0.01);
}
#[test]
fn test_author_stats_commit_percentage_zero() {
let author = AuthorStats {
name: "Alice".to_string(),
commit_count: 10,
insertions: 0,
deletions: 0,
last_commit: Local::now(),
};
assert_eq!(author.commit_percentage(0), 0.0);
}
#[test]
fn test_repo_stats_author_count() {
let events = vec![
create_test_event("Alice", 10, 5),
create_test_event("Bob", 5, 2),
create_test_event("Charlie", 15, 8),
];
let refs: Vec<&GitEvent> = events.iter().collect();
let stats = calculate_stats(&refs);
assert_eq!(stats.author_count(), 3);
}
#[test]
fn test_calculate_file_heatmap_empty() {
let events: Vec<&GitEvent> = vec![];
let heatmap = calculate_file_heatmap(&events, |_| None);
assert_eq!(heatmap.file_count(), 0);
}
#[test]
fn test_calculate_file_heatmap_single_file() {
let events = vec![
create_test_event_with_hash("abc1234"),
create_test_event_with_hash("def5678"),
];
let refs: Vec<&GitEvent> = events.iter().collect();
let heatmap = calculate_file_heatmap(&refs, |_| Some(vec!["src/main.rs".to_string()]));
assert_eq!(heatmap.file_count(), 1);
assert_eq!(heatmap.files[0].path, "src/main.rs");
assert_eq!(heatmap.files[0].change_count, 2);
}
#[test]
fn test_calculate_file_heatmap_multiple_files() {
let events = vec![
create_test_event_with_hash("abc1234"),
create_test_event_with_hash("def5678"),
create_test_event_with_hash("ghi9012"),
];
let refs: Vec<&GitEvent> = events.iter().collect();
let heatmap = calculate_file_heatmap(&refs, |hash| match hash {
"abc1234" => Some(vec!["src/a.rs".to_string(), "src/b.rs".to_string()]),
"def5678" => Some(vec!["src/a.rs".to_string()]),
"ghi9012" => Some(vec!["src/a.rs".to_string(), "src/c.rs".to_string()]),
_ => None,
});
assert_eq!(heatmap.file_count(), 3);
assert_eq!(heatmap.files[0].path, "src/a.rs");
assert_eq!(heatmap.files[0].change_count, 3);
}
#[test]
fn test_file_heatmap_entry_heat_level() {
let entry = FileHeatmapEntry {
path: "test.rs".to_string(),
change_count: 5,
max_changes: 10,
};
assert!((entry.heat_level() - 0.5).abs() < 0.01);
}
#[test]
fn test_file_heatmap_entry_heat_bar() {
let entry_high = FileHeatmapEntry {
path: "test.rs".to_string(),
change_count: 10,
max_changes: 10,
};
assert_eq!(entry_high.heat_bar(), "█████");
let entry_low = FileHeatmapEntry {
path: "test.rs".to_string(),
change_count: 1,
max_changes: 10,
};
assert_eq!(entry_low.heat_bar(), "█ ");
}
#[test]
fn test_aggregation_level_next() {
assert_eq!(AggregationLevel::Files.next(), AggregationLevel::Shallow);
assert_eq!(AggregationLevel::Shallow.next(), AggregationLevel::Deep);
assert_eq!(AggregationLevel::Deep.next(), AggregationLevel::Files);
}
#[test]
fn test_aggregation_level_prev() {
assert_eq!(AggregationLevel::Files.prev(), AggregationLevel::Deep);
assert_eq!(AggregationLevel::Shallow.prev(), AggregationLevel::Files);
assert_eq!(AggregationLevel::Deep.prev(), AggregationLevel::Shallow);
}
#[test]
fn test_aggregation_level_display_name() {
assert_eq!(AggregationLevel::Files.display_name(), "Files");
assert!(AggregationLevel::Shallow
.display_name()
.contains("2 levels"));
assert!(AggregationLevel::Deep.display_name().contains("top level"));
}
#[test]
fn test_heatmap_with_aggregation_shallow() {
let heatmap = FileHeatmap {
files: vec![
FileHeatmapEntry {
path: "src/auth/login.rs".to_string(),
change_count: 10,
max_changes: 10,
},
FileHeatmapEntry {
path: "src/auth/token.rs".to_string(),
change_count: 5,
max_changes: 10,
},
FileHeatmapEntry {
path: "src/api/user.rs".to_string(),
change_count: 3,
max_changes: 10,
},
],
total_files: 3,
aggregation_level: AggregationLevel::Files,
};
let aggregated = heatmap.with_aggregation(AggregationLevel::Shallow);
assert_eq!(aggregated.aggregation_level, AggregationLevel::Shallow);
assert_eq!(aggregated.files.len(), 2);
assert_eq!(aggregated.files[0].path, "src/auth/");
assert_eq!(aggregated.files[0].change_count, 15);
}
#[test]
fn test_heatmap_with_aggregation_deep() {
let heatmap = FileHeatmap {
files: vec![
FileHeatmapEntry {
path: "src/auth/login.rs".to_string(),
change_count: 10,
max_changes: 10,
},
FileHeatmapEntry {
path: "src/api/user.rs".to_string(),
change_count: 5,
max_changes: 10,
},
FileHeatmapEntry {
path: "tests/test.rs".to_string(),
change_count: 3,
max_changes: 10,
},
],
total_files: 3,
aggregation_level: AggregationLevel::Files,
};
let aggregated = heatmap.with_aggregation(AggregationLevel::Deep);
assert_eq!(aggregated.aggregation_level, AggregationLevel::Deep);
assert_eq!(aggregated.files.len(), 2);
assert_eq!(aggregated.files[0].path, "src/");
assert_eq!(aggregated.files[0].change_count, 15);
}
}