use crate::{GitXError, Result};
use console::style;
use std::collections::HashMap;
use std::process::Command;

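/// Entry point for the technical-debt report: runs each heuristic in turn and
/// returns the combined, styled output as a single string.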
pub fn run() -> Result<String> {
    let mut output = Vec::new();

    output.push(format!("{} Technical Debt Analysis\n", style("🔍").bold()));

    output.push(analyze_large_commits()?);

    output.push(analyze_file_hotspots()?);

    output.push(analyze_long_lived_branches()?);

    output.push(analyze_code_churn()?);

    output.push(analyze_binary_files()?);

    output.push(format!("\n{}", style("Analysis complete!").bold()));

    Ok(output.join("\n"))
}

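/// Flags commits from the last six months that touched more than 20 files,
/// based on `git log --numstat` output.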
fn analyze_large_commits() -> Result<String> {
    let mut output = Vec::new();
    output.push(format!(
        "{} Large Commits (>20 files changed)",
        style("📊").bold()
    ));

    let git_output = Command::new("git")
        .args([
            "log",
            "--all",
            "--pretty=format:%h|%s|%an|%ad",
            "--date=short",
            "--numstat",
            "--since=6 months ago",
        ])
        .output()?;

    if !git_output.status.success() {
        return Err(GitXError::GitCommand(
            "Failed to analyze commit history".to_string(),
        ));
    }

    let stdout = String::from_utf8_lossy(&git_output.stdout);
    let large_commits = parse_large_commits(&stdout);

    if large_commits.is_empty() {
        output.push(format!(" {} No large commits found", style("✓").green()));
    } else {
        for (i, commit) in large_commits.iter().take(5).enumerate() {
            output.push(format!(
                " {}. {} files | {} | {}",
                i + 1,
                style(commit.files_changed).cyan().bold(),
                style(&commit.hash[0..7]).dim(),
                style(&commit.message).bold()
            ));
        }
        if large_commits.len() > 5 {
            output.push(format!(
                " {} ({} more commits...)",
                style("...").dim(),
                large_commits.len() - 5
            ));
        }
    }

    Ok(output.join("\n"))
}

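/// Counts how often each tracked file was modified in the last six months and
/// ranks the most frequently touched ones.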
fn analyze_file_hotspots() -> Result<String> {
    let mut output = Vec::new();
    output.push(format!(
        "{} File Hotspots (frequently modified)",
        style("🔥").bold()
    ));

    let git_output = Command::new("git")
        .args([
            "log",
            "--all",
            "--pretty=format:",
            "--name-only",
            "--since=6 months ago",
        ])
        .output()?;

    if !git_output.status.success() {
        return Err(GitXError::GitCommand(
            "Failed to analyze file modifications".to_string(),
        ));
    }

    let stdout = String::from_utf8_lossy(&git_output.stdout);
    let hotspots = analyze_file_modification_frequency(&stdout);

    if hotspots.is_empty() {
        output.push(format!(
            " {} No file modification data found",
            style("✓").green()
        ));
    } else {
        for (i, (file, count)) in hotspots.iter().take(5).enumerate() {
            let risk_level = if *count > 50 {
                style("HIGH").red().bold()
            } else if *count > 20 {
                style("MED").yellow().bold()
            } else {
                style("LOW").green().bold()
            };

            output.push(format!(
                " {}. {} changes | {} | {}",
                i + 1,
                style(count).cyan().bold(),
                risk_level,
                style(file).bold()
            ));
        }
    }

    Ok(output.join("\n"))
}

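/// Lists local branches (excluding main/master/develop) whose last commit is
/// older than 30 days, using `git for-each-ref`.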
fn analyze_long_lived_branches() -> Result<String> {
    let mut output = Vec::new();
    output.push(format!(
        "{} Long-lived Branches (>30 days)",
        style("🌿").bold()
    ));

    let git_output = Command::new("git")
        .args([
            "for-each-ref",
            "--format=%(refname:short)|%(committerdate:relative)|%(authorname)",
            "refs/heads/",
        ])
        .output()?;

    if !git_output.status.success() {
        return Err(GitXError::GitCommand(
            "Failed to analyze branches".to_string(),
        ));
    }

    let stdout = String::from_utf8_lossy(&git_output.stdout);
    let long_lived = parse_long_lived_branches(&stdout);

    if long_lived.is_empty() {
        output.push(format!(
            " {} No long-lived branches found",
            style("✓").green()
        ));
    } else {
        for branch in long_lived.iter().take(5) {
            let age_style = if branch.days_old > 90 {
                style(&branch.age).red().bold()
            } else {
                style(&branch.age).yellow().bold()
            };

            output.push(format!(
                " • {} | {} | {}",
                style(&branch.name).bold(),
                age_style,
                style(&branch.author).dim()
            ));
        }
    }

    Ok(output.join("\n"))
}

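/// Aggregates `git log --numstat` data from the last three months and reports
/// the files with the highest combined additions and deletions.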
fn analyze_code_churn() -> Result<String> {
    let mut output = Vec::new();
    output.push(format!(
        "{} Code Churn (high add/delete ratio)",
        style("🔄").bold()
    ));

    let git_output = Command::new("git")
        .args([
            "log",
            "--all",
            "--pretty=format:",
            "--numstat",
            "--since=3 months ago",
        ])
        .output()?;

    if !git_output.status.success() {
        return Err(GitXError::GitCommand(
            "Failed to analyze code churn".to_string(),
        ));
    }

    let stdout = String::from_utf8_lossy(&git_output.stdout);
    let churn_files = analyze_churn_patterns(&stdout);

    if churn_files.is_empty() {
        output.push(format!(
            " {} No high-churn files found",
            style("✓").green()
        ));
    } else {
        for (i, file) in churn_files.iter().take(5).enumerate() {
            // Additions relative to deletions (the "add/delete ratio" named in
            // the section header); the +1 avoids division by zero. The previous
            // formula divided total changes by itself and never exceeded 1.
            let churn_ratio = file.additions as f64 / (file.deletions as f64 + 1.0);
            let churn_style = if churn_ratio > 3.0 {
                style("HIGH").red().bold()
            } else if churn_ratio > 1.5 {
                style("MED").yellow().bold()
            } else {
                style("LOW").green().bold()
            };

            output.push(format!(
                " {}. +{} -{} | {} | {}",
                i + 1,
                style(file.additions).green(),
                style(file.deletions).red(),
                churn_style,
                style(&file.path).bold()
            ));
        }
    }

    Ok(output.join("\n"))
}

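/// Scans `git ls-files` output for extensions that usually indicate binary
/// content and reports how many such files are tracked in the repository.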
fn analyze_binary_files() -> Result<String> {
    let mut output = Vec::new();
    output.push(format!("{} Binary Files in Repository", style("📦").bold()));

    let git_output = Command::new("git").args(["ls-files"]).output()?;

    if !git_output.status.success() {
        return Err(GitXError::GitCommand(
            "Failed to list repository files".to_string(),
        ));
    }

    let stdout = String::from_utf8_lossy(&git_output.stdout);
    let binary_files = identify_binary_files(&stdout);

    if binary_files.is_empty() {
        output.push(format!(
            " {} No binary files detected",
            style("✓").green()
        ));
    } else {
        output.push(format!(
            " {} {} binary files found",
            style("!").yellow(),
            binary_files.len()
        ));

        for file in binary_files.iter().take(3) {
            output.push(format!(" • {}", style(file).dim()));
        }

        if binary_files.len() > 3 {
            output.push(format!(
                " {} ({} more files...)",
                style("...").dim(),
                binary_files.len() - 3
            ));
        }
    }

    Ok(output.join("\n"))
}

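/// Summary of a single commit as parsed from `git log --numstat` output.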
#[derive(Clone)]
struct LargeCommit {
    hash: String,
    message: String,
    files_changed: usize,
}

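/// A local branch with its relative age, last author, and estimated age in days.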
#[derive(Clone)]
struct BranchInfo {
    name: String,
    age: String,
    author: String,
    days_old: u32,
}

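/// Accumulated additions and deletions for a single file over the analysis window.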
#[derive(Clone)]
struct ChurnFile {
    path: String,
    additions: u32,
    deletions: u32,
    total_changes: u32,
}

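/// Parses `git log --pretty=format:%h|%s|%an|%ad --numstat` output: lines
/// containing `|` start a new commit, numstat lines increment its file count,
/// and only commits touching more than 20 files are kept (largest first).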
fn parse_large_commits(output: &str) -> Vec<LargeCommit> {
    let mut commits = Vec::new();
    let mut current_commit: Option<LargeCommit> = None;
    let mut file_count = 0;

    for line in output.lines() {
        if line.contains('|') && !line.starts_with(char::is_numeric) {
            if let Some(mut commit) = current_commit.take() {
                commit.files_changed = file_count;
                if file_count > 20 {
                    commits.push(commit);
                }
            }

            let parts: Vec<&str> = line.splitn(4, '|').collect();
            if parts.len() >= 2 {
                current_commit = Some(LargeCommit {
                    hash: parts[0].to_string(),
                    message: parts[1].chars().take(60).collect(),
                    files_changed: 0,
                });
                file_count = 0;
            }
        } else if line.trim().is_empty() {
            continue;
        } else if line.chars().next().is_some_and(|c| c.is_ascii_digit()) {
            file_count += 1;
        }
    }

    if let Some(mut commit) = current_commit {
        commit.files_changed = file_count;
        if file_count > 20 {
            commits.push(commit);
        }
    }

    commits.sort_by(|a, b| b.files_changed.cmp(&a.files_changed));
    commits
}

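/// Tallies how many times each path appears in `git log --name-only` output,
/// skipping paths that start with a dot, and returns those modified more than
/// five times, most frequent first.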
fn analyze_file_modification_frequency(output: &str) -> Vec<(String, usize)> {
    let mut file_counts: HashMap<String, usize> = HashMap::new();

    for line in output.lines() {
        let file = line.trim();
        if !file.is_empty() && !file.starts_with('.') {
            *file_counts.entry(file.to_string()).or_insert(0) += 1;
        }
    }

    let mut sorted: Vec<(String, usize)> = file_counts.into_iter().collect();
    sorted.sort_by(|a, b| b.1.cmp(&a.1));
    sorted.into_iter().filter(|(_, count)| *count > 5).collect()
}

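/// Parses `git for-each-ref` lines of the form `name|relative date|author`,
/// skips main/master/develop, and keeps branches older than 30 days, oldest
/// first.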
fn parse_long_lived_branches(output: &str) -> Vec<BranchInfo> {
    let mut branches = Vec::new();

    for line in output.lines() {
        let parts: Vec<&str> = line.splitn(3, '|').collect();
        if parts.len() >= 3 {
            let name = parts[0].trim();
            let age = parts[1].trim();
            let author = parts[2].trim();

            if name == "main" || name == "master" || name == "develop" {
                continue;
            }

            let days_old = estimate_days_from_relative_date(age);
            if days_old > 30 {
                branches.push(BranchInfo {
                    name: name.to_string(),
                    age: age.to_string(),
                    author: author.to_string(),
                    days_old,
                });
            }
        }
    }

    branches.sort_by(|a, b| b.days_old.cmp(&a.days_old));
    branches
}

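/// Sums additions and deletions per file from `--numstat` lines and returns
/// files with more than 100 total changed lines, highest total first.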
fn analyze_churn_patterns(output: &str) -> Vec<ChurnFile> {
    let mut file_stats: HashMap<String, (u32, u32)> = HashMap::new();

    for line in output.lines() {
        let parts: Vec<&str> = line.split_whitespace().collect();
        if parts.len() == 3 {
            if let (Ok(additions), Ok(deletions)) =
                (parts[0].parse::<u32>(), parts[1].parse::<u32>())
            {
                let file = parts[2];
                let entry = file_stats.entry(file.to_string()).or_insert((0, 0));
                entry.0 += additions;
                entry.1 += deletions;
            }
        }
    }

    let mut churn_files: Vec<ChurnFile> = file_stats
        .into_iter()
        .filter(|(_, (adds, dels))| *adds + *dels > 100)
        .map(|(path, (additions, deletions))| ChurnFile {
            path,
            additions,
            deletions,
            total_changes: additions + deletions,
        })
        .collect();

    churn_files.sort_by(|a, b| b.total_changes.cmp(&a.total_changes));
    churn_files
}

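/// Filters a newline-separated file list down to paths whose extension appears
/// in a fixed list of (mostly) binary formats.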
fn identify_binary_files(output: &str) -> Vec<String> {
    let binary_extensions = [
        ".jpg", ".jpeg", ".png", ".gif", ".bmp", ".ico", ".svg", ".mp4", ".avi", ".mov", ".wmv",
        ".flv", ".webm", ".mp3", ".wav", ".flac", ".aac", ".ogg", ".zip", ".tar", ".gz", ".7z",
        ".rar", ".exe", ".dll", ".so", ".dylib", ".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt",
        ".pptx", ".bin", ".dat", ".db", ".sqlite", ".sqlite3",
    ];

    output
        .lines()
        .filter(|line| {
            let file = line.trim();
            binary_extensions.iter().any(|ext| file.ends_with(ext))
        })
        .map(|line| line.trim().to_string())
        .collect()
}

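/// Roughly converts git's relative dates ("3 months ago", "2 weeks ago") into
/// a day count; years map to a flat 365 and anything under a day maps to 0.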
fn estimate_days_from_relative_date(relative: &str) -> u32 {
    if relative.contains("year") {
        365
    } else if relative.contains("month") {
        if let Some(num_str) = relative.split_whitespace().next() {
            if let Ok(months) = num_str.parse::<u32>() {
                months * 30
            } else {
                90
            }
        } else {
            90
        }
    } else if relative.contains("week") {
        if let Some(num_str) = relative.split_whitespace().next() {
            if let Ok(weeks) = num_str.parse::<u32>() {
                weeks * 7
            } else {
                14
            }
        } else {
            14
        }
    } else if relative.contains("day") {
        if let Some(num_str) = relative.split_whitespace().next() {
            num_str.parse::<u32>().unwrap_or(7)
        } else {
            7
        }
    } else {
        0
    }
}

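// Unit tests exercise the pure parsing helpers and data structures; none of
// them shell out to git.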
#[cfg(test)]
mod tests {
    use super::*;
    use crate::GitXError;

    #[test]
    fn test_parse_large_commits() {
        let sample_output = r#"abc123|Add new feature|John Doe|2025-01-15
5 10 src/main.rs
3 2 src/lib.rs
15 0 README.md

def456|Refactor code|Jane Smith|2025-01-14
1 1 src/utils.rs
2 1 src/config.rs"#;

        let commits = parse_large_commits(sample_output);
        assert!(
            commits.is_empty(),
            "Should not include commits with few files"
        );
    }

    #[test]
    fn test_analyze_file_modification_frequency() {
        let sample_output = r#"src/main.rs
src/lib.rs
src/main.rs
src/main.rs
README.md
src/main.rs
src/main.rs
src/main.rs"#;

        let frequencies = analyze_file_modification_frequency(sample_output);
        assert!(!frequencies.is_empty());
        assert_eq!(frequencies[0].0, "src/main.rs");
        assert_eq!(frequencies[0].1, 6);
    }

    #[test]
    fn test_parse_long_lived_branches() {
        let sample_output = r#"main|2 hours ago|John Doe
feature/old-branch|3 months ago|Jane Smith
hotfix/urgent|2 days ago|Bob Wilson
feature/ancient|1 year ago|Alice Johnson"#;

        let branches = parse_long_lived_branches(sample_output);
        assert_eq!(branches.len(), 2);
        assert_eq!(branches[0].name, "feature/ancient");
        assert!(branches[0].days_old > 300);
    }

    #[test]
    fn test_analyze_churn_patterns() {
        let sample_output = r#"50 30 src/main.rs
100 80 src/heavy_churn.rs
5 2 src/stable.rs
200 150 src/refactored.rs"#;

        let churn = analyze_churn_patterns(sample_output);
        assert!(!churn.is_empty());
        assert_eq!(churn[0].path, "src/refactored.rs");
        assert_eq!(churn[0].total_changes, 350);
    }

    #[test]
    fn test_identify_binary_files() {
        let sample_output = r#"src/main.rs
assets/logo.png
README.md
docs/manual.pdf
src/lib.rs
media/video.mp4"#;

        let binaries = identify_binary_files(sample_output);
        assert_eq!(binaries.len(), 3);
        assert!(binaries.contains(&"assets/logo.png".to_string()));
        assert!(binaries.contains(&"docs/manual.pdf".to_string()));
        assert!(binaries.contains(&"media/video.mp4".to_string()));
    }

    #[test]
    fn test_estimate_days_from_relative_date() {
        assert_eq!(estimate_days_from_relative_date("3 months ago"), 90);
        assert_eq!(estimate_days_from_relative_date("2 weeks ago"), 14);
        assert_eq!(estimate_days_from_relative_date("5 days ago"), 5);
        assert_eq!(estimate_days_from_relative_date("1 year ago"), 365);
        assert_eq!(estimate_days_from_relative_date("2 hours ago"), 0);
    }

    #[test]
    fn test_run_no_git_repo() {
        // Exercise a pure helper directly instead of `run()`, which would
        // shell out to git and depend on the environment.
        let sample_churn = analyze_churn_patterns("100 50 test.rs");
        assert!(!sample_churn.is_empty());
        assert_eq!(sample_churn[0].path, "test.rs");
        assert_eq!(sample_churn[0].total_changes, 150);
    }

    #[test]
    fn test_gitx_error_integration() {
        let io_error = std::io::Error::new(std::io::ErrorKind::NotFound, "git not found");
        let gitx_error: GitXError = io_error.into();
        match gitx_error {
            GitXError::Io(_) => {}
            _ => panic!("Should convert to Io error"),
        }

        let git_error = GitXError::GitCommand("test error".to_string());
        assert_eq!(git_error.to_string(), "Git command failed: test error");
    }

    #[test]
    fn test_branch_info_struct() {
        let branch = BranchInfo {
            name: "feature/test".to_string(),
            age: "2 months ago".to_string(),
            author: "Test Author".to_string(),
            days_old: 60,
        };

        assert_eq!(branch.name, "feature/test");
        assert_eq!(branch.days_old, 60);
    }

    #[test]
    fn test_churn_file_struct() {
        let file = ChurnFile {
            path: "src/test.rs".to_string(),
            additions: 100,
            deletions: 50,
            total_changes: 150,
        };

        assert_eq!(file.path, "src/test.rs");
        assert_eq!(file.total_changes, 150);
    }

    #[test]
    fn test_large_commit_struct() {
        let commit = LargeCommit {
            hash: "abc123".to_string(),
            message: "Large refactor".to_string(),
            files_changed: 25,
        };

        assert_eq!(commit.hash, "abc123");
        assert_eq!(commit.files_changed, 25);
    }
}