use clap::Parser;
use std::fs;
use std::path::PathBuf;
use tracing_subscriber::EnvFilter;
use walkdir::WalkDir;
use garbage_code_hunter::{
analyzer::{CodeAnalyzer, CodeIssue},
config::{AppConfig, AppMode},
educational::EducationalAdvisor,
hall_of_shame::HallOfShame,
llm::{LlmConfig, LlmRoastProvider, LocalRoastProvider, RoastProvider},
reporter::Reporter,
};
// Command-line interface, parsed with clap's derive API.
//
// NOTE(review): plain `//` comments are used on purpose throughout this
// struct — `///` doc comments on clap-derived items are harvested by clap
// into `--help`/`long_about` text, which would change runtime output.
#[derive(Parser)]
#[command(name = "garbage-code-hunter")]
#[command(about = "A humorous Rust code quality detector that roasts your garbage code 🔥")]
#[command(version)]
struct Args {
    // File or directory to analyze; defaults to the current directory.
    #[arg(default_value = ".")]
    path: PathBuf,

    // Roast-intensity flags (forwarded to the Reporter).
    #[arg(long)]
    harsh: bool,
    #[arg(long)]
    savage: bool,

    // Include extra detail in the report.
    #[arg(short, long)]
    verbose: bool,

    // Show at most `top` worst files and `issues` issues per file.
    #[arg(short = 't', long, default_value = "5")]
    top: usize,
    #[arg(short = 'i', long, default_value = "5")]
    issues: usize,

    // Print only the summary section.
    #[arg(short, long)]
    summary: bool,

    // Emit the report as Markdown instead of plain text.
    #[arg(short, long)]
    markdown: bool,

    // Output language tag, e.g. "en-US".
    #[arg(short, long, default_value = "en-US")]
    lang: String,

    // Glob-style path patterns to skip (`*` and `?` wildcards; see
    // calculate_metrics for the translation to regexes).
    #[arg(short, long)]
    exclude: Vec<String>,

    // Optional report sections.
    #[arg(long)]
    educational: bool,
    #[arg(long)]
    hall_of_shame: bool,
    #[arg(long)]
    suggestions: bool,

    // Output format; main() special-cases "json", everything else is text.
    #[arg(short = 'f', long, default_value = "text")]
    format: String,

    // LLM-backed roasting options; these override the config file via
    // AppConfig::merge_cli.
    #[arg(long)]
    llm: bool,
    #[arg(long, default_value = "ollama")]
    llm_provider: String,
    #[arg(long)]
    llm_endpoint: Option<String>,
    #[arg(long)]
    llm_model: Option<String>,
    #[arg(long)]
    llm_api_key: Option<String>,
    // LLM request timeout in seconds.
    #[arg(long, default_value = "30")]
    llm_timeout: u64,

    // Optional path to a configuration file.
    #[arg(long)]
    config: Option<PathBuf>,
}
/// Entry point: parse CLI arguments, load and merge configuration, analyze
/// the target path for code issues, then emit the report — either as JSON
/// or through the `Reporter`, optionally followed by educational advice,
/// a hall-of-shame ranking, and generic improvement suggestions.
fn main() {
    // Install the tracing subscriber; log level comes from the environment
    // (RUST_LOG), falling back to `warn` when unset or invalid.
    tracing_subscriber::fmt()
        .with_env_filter(
            EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("warn")),
        )
        .init();

    let args = Args::parse();

    // Load the optional config file; on failure, warn and continue in
    // local (non-LLM) mode rather than aborting.
    let mut app_config = AppConfig::from_file(args.config.as_deref()).unwrap_or_else(|e| {
        eprintln!("Warning: Failed to load config: {e}");
        AppConfig {
            mode: AppMode::Local,
        }
    });
    // CLI-provided LLM options take precedence over the config file.
    app_config.merge_cli(
        args.llm,
        &args.llm_provider,
        args.llm_endpoint.as_deref(),
        args.llm_model.as_deref(),
        args.llm_api_key.as_deref(),
        Some(args.llm_timeout),
    );

    // Run the analysis and gather file/line metrics for the report header.
    let analyzer = CodeAnalyzer::new(&args.exclude, &args.lang);
    let issues = analyzer.analyze_path(&args.path);
    let (file_count, total_lines) = calculate_metrics(&args.path, &args.exclude);

    // Only build the advisor when the section was requested.
    let educational_advisor = if args.educational {
        Some(EducationalAdvisor::new(&args.lang))
    } else {
        None
    };

    // Hall-of-shame stats feed both --hall-of-shame and --suggestions.
    let mut hall_of_shame = if args.hall_of_shame || args.suggestions {
        Some(HallOfShame::new())
    } else {
        None
    };
    if let Some(ref mut shame) = hall_of_shame {
        // Feed per-file issue groups (with their line counts) into the tracker.
        let issues_by_file = group_issues_by_file(&issues);
        for (file_path, file_issues) in issues_by_file {
            let file_lines = count_file_lines(&file_path);
            shame.add_file_analysis(file_path, &file_issues, file_lines);
        }
    }

    // Pick the roast backend: local canned roasts, or an LLM provider built
    // from the merged configuration.
    let roast_provider: Box<dyn RoastProvider> = match &app_config.mode {
        AppMode::Local => Box::new(LocalRoastProvider),
        AppMode::Llm(llm_cfg) => {
            let config = LlmConfig::from_args(
                &llm_cfg.provider,
                Some(&llm_cfg.endpoint),
                Some(&llm_cfg.model),
                llm_cfg.api_key.as_deref(),
                llm_cfg.timeout_secs,
            );
            Box::new(LlmRoastProvider::new(config))
        }
    };

    let reporter = Reporter::new(
        args.harsh,
        args.savage,
        args.verbose,
        args.top,
        args.issues,
        args.summary,
        args.markdown,
        &args.lang,
        roast_provider,
    );

    // JSON output bypasses the reporter and all extra sections entirely.
    if args.format == "json" {
        output_json(&issues);
        return;
    }

    if args.educational || args.hall_of_shame || args.suggestions {
        // `issues` is cloned because the educational section below still
        // borrows it after the reporter consumes its copy.
        reporter.report_with_metrics(issues.clone(), file_count, total_lines);
        if args.educational {
            if let Some(advisor) = educational_advisor.as_ref() {
                println!("\n🎓 Educational Advice:");
                println!("{}", "─".repeat(50));
                // Print whatever advice the advisor has for each issue's rule.
                for issue in &issues {
                    if let Some(advice) = advisor.get_advice(&issue.rule_name) {
                        println!("\n📚 {}: {}", issue.rule_name, advice.why_bad);
                        println!("💡 How to fix: {}", advice.how_to_fix);
                        if let Some(tip) = &advice.best_practice_tip {
                            println!("✨ Tip: {}", tip);
                        }
                    }
                }
            }
        }
        if args.hall_of_shame {
            if let Some(hall) = hall_of_shame.as_ref() {
                let stats = hall.generate_shame_report();
                println!("\n🏆 Hall of Shame:");
                println!("{}", "─".repeat(50));
                println!("📊 Total files analyzed: {}", stats.total_files_analyzed);
                println!("🗑️ Total issues found: {}", stats.total_issues);
                println!(
                    "📈 Garbage density: {:.2} issues per 1000 lines",
                    stats.garbage_density
                );
                // Top five offenders by shame score.
                println!("\n🔥 Worst Files:");
                for (i, entry) in stats.hall_of_shame.iter().take(5).enumerate() {
                    println!(
                        "  {}. {} - {} issues (score: {:.1})",
                        i + 1,
                        entry
                            .file_path
                            .file_name()
                            .unwrap_or_default()
                            .to_string_lossy(),
                        entry.total_issues,
                        entry.shame_score
                    );
                }
            }
        }
        if args.suggestions {
            // Static, one-size-fits-all suggestions (not derived from `issues`).
            println!("\n🎯 Improvement Suggestions:");
            println!("- Focus on renaming meaningless variables");
            println!("- Reduce function complexity and nesting");
            println!("- Replace unwrap() with proper error handling");
        }
    } else {
        // Plain report path: no clone needed, `issues` can be moved.
        reporter.report_with_metrics(issues, file_count, total_lines);
    }
}
fn calculate_metrics(path: &PathBuf, exclude_patterns: &[String]) -> (usize, usize) {
let mut file_count = 0;
let mut total_lines = 0;
let exclude_regexes: Vec<regex::Regex> = exclude_patterns
.iter()
.filter_map(|pattern| {
let regex_pattern = pattern
.replace(".", r"\.")
.replace("*", ".*")
.replace("?", ".");
regex::Regex::new(®ex_pattern).ok()
})
.collect();
let should_exclude = |path: &std::path::Path| -> bool {
let path_str = path.to_string_lossy();
exclude_regexes
.iter()
.any(|pattern| pattern.is_match(&path_str))
};
if path.is_file() {
if !should_exclude(path) {
if let Some(ext) = path.extension() {
if ext == "rs" {
file_count = 1;
if let Ok(content) = fs::read_to_string(path) {
total_lines = content.lines().count();
}
}
}
}
} else if path.is_dir() {
for entry in WalkDir::new(path)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| !should_exclude(e.path()))
.filter(|e| e.path().extension().is_some_and(|ext| ext == "rs"))
{
file_count += 1;
if let Ok(content) = fs::read_to_string(entry.path()) {
total_lines += content.lines().count();
}
}
}
(file_count, total_lines)
}
/// Bucket the flat issue list into per-file groups, keyed by each issue's
/// `file_path`. Issues are cloned so the caller keeps ownership of `issues`.
fn group_issues_by_file(
    issues: &[CodeIssue],
) -> std::collections::HashMap<std::path::PathBuf, Vec<CodeIssue>> {
    issues
        .iter()
        .fold(std::collections::HashMap::new(), |mut by_file, issue| {
            by_file
                .entry(issue.file_path.clone())
                .or_default()
                .push(issue.clone());
            by_file
        })
}
/// Number of lines in the file at `file_path`, or 0 when it cannot be read.
fn count_file_lines(file_path: &std::path::Path) -> usize {
    match std::fs::read_to_string(file_path) {
        Ok(content) => content.lines().count(),
        Err(_) => 0,
    }
}
fn output_json(issues: &[CodeIssue]) {
use serde_json;
let json_issues: Vec<serde_json::Value> = issues
.iter()
.map(|issue| {
serde_json::json!({
"file_path": issue.file_path.to_string_lossy(),
"line": issue.line,
"column": issue.column,
"rule_name": issue.rule_name,
"message": issue.message,
"severity": format!("{:?}", issue.severity)
})
})
.collect();
if let Ok(json_output) = serde_json::to_string_pretty(&json_issues) {
println!("{}", json_output);
} else {
eprintln!("Error: Failed to serialize issues to JSON");
std::process::exit(1);
}
}