use std::collections::HashSet;
use std::fs;
use std::io::{self, Write};
use std::path::PathBuf;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use colored::Colorize;
use rayon::prelude::*;
use crate::ai::{
get_custom_provider, AiProvider, AiProviderConfig, AiProviderKind, AiSuggester, FixSuggestion,
SuggestionOptions, SuggestionResult,
};
use crate::utils::types::{LintIssue, RunResult, Severity};
use super::menu::{print_code_context, print_diff};
use super::nolint::{add_nolint_comment, describe_nolint_action, NolintResult};
// Per-file issue payload: (file path, [(line, message, lint code)], issue count).
type FileIssueData = (PathBuf, Vec<(usize, String, String)>, usize);
// One batch of files handed to a single CLI invocation.
type FileBatch<'a> = Vec<&'a FileIssueData>;
// Borrowed view of one file's issues, as the batch-fix provider API expects it.
type BatchFileInput<'a> = (&'a std::path::Path, &'a [(usize, String, String)]);
/// Configuration for an AI-assisted fix run.
#[derive(Debug, Clone)]
pub struct AiFixConfig {
    /// Which AI backend to use (API- or CLI-based).
    pub provider: AiProviderKind,
    /// Model override; `None` keeps the provider's default model.
    pub model: Option<String>,
    /// Maximum number of alternative suggestions requested per issue.
    pub max_suggestions: usize,
    /// Apply the first suggestion automatically instead of prompting.
    pub accept_all: bool,
    /// Emit extra diagnostic output.
    pub verbose: bool,
    /// Worker count for parallel suggestion collection / batch CLI fixing.
    pub parallel_jobs: usize,
}
impl Default for AiFixConfig {
fn default() -> Self {
Self {
provider: AiProviderKind::Claude,
model: None,
max_suggestions: 3,
accept_all: false,
verbose: false,
parallel_jobs: 4,
}
}
}
impl AiFixConfig {
pub fn with_provider(provider: &str) -> Self {
let provider_kind: AiProviderKind = provider.parse().unwrap_or_default();
Self {
provider: provider_kind,
..Default::default()
}
}
pub fn with_model(mut self, model: Option<String>) -> Self {
self.model = model;
self
}
pub fn with_accept_all(mut self, accept_all: bool) -> Self {
self.accept_all = accept_all;
self
}
pub fn with_verbose(mut self, verbose: bool) -> Self {
self.verbose = verbose;
self
}
pub fn with_parallel(mut self, jobs: usize) -> Self {
self.parallel_jobs = jobs;
self
}
}
/// Aggregate outcome of a fix run.
#[derive(Debug, Default)]
pub struct AiFixResult {
    /// Issues for which a suggestion was produced.
    pub suggested: usize,
    /// Issues whose fix was written to disk.
    pub applied: usize,
    /// Issues skipped, discarded, or left unhandled.
    pub skipped: usize,
    /// Issues that failed (I/O or provider errors).
    pub errors: usize,
    /// True when the user quit the interactive review before the end.
    pub quit_early: bool,
    /// Set of files actually modified on disk.
    pub modified_files: HashSet<PathBuf>,
}
pub fn create_suggester(config: &AiFixConfig) -> Result<AiSuggester, String> {
let mut provider_config = build_provider_config(&config.provider);
if let Some(ref model) = config.model {
provider_config.model = model.clone();
}
provider_config.api_key = resolve_api_key(&config.provider);
resolve_endpoint(&config.provider, &mut provider_config);
let provider = AiProvider::new(provider_config);
let suggester = AiSuggester::with_provider(provider);
if !suggester.is_available() {
let hint = get_provider_hint(&config.provider)?;
return Err(format!(
"AI provider {} is not available. {}",
suggester.provider_name(),
hint
));
}
Ok(suggester)
}
/// Maps a provider kind to its base configuration.
///
/// Custom providers start from the default config with only the kind set;
/// their remaining details are resolved elsewhere (see `get_custom_provider`).
fn build_provider_config(kind: &AiProviderKind) -> AiProviderConfig {
    match kind {
        AiProviderKind::Claude => AiProviderConfig::claude(),
        AiProviderKind::ClaudeCli => AiProviderConfig::claude_cli(),
        AiProviderKind::CodeBuddy => AiProviderConfig::codebuddy(),
        AiProviderKind::CodeBuddyCli => AiProviderConfig::codebuddy_cli(),
        AiProviderKind::OpenAi => AiProviderConfig::openai(),
        AiProviderKind::CodexCli => AiProviderConfig::codex_cli(),
        AiProviderKind::Gemini => AiProviderConfig::gemini(),
        AiProviderKind::GeminiCli => AiProviderConfig::gemini_cli(),
        AiProviderKind::Local => AiProviderConfig::local(),
        AiProviderKind::Custom(name) => AiProviderConfig {
            kind: AiProviderKind::Custom(name.clone()),
            ..AiProviderConfig::default()
        },
        AiProviderKind::Mock => AiProviderConfig::mock(),
    }
}
/// Reads the provider's API key from the environment.
///
/// Claude prefers `ANTHROPIC_AUTH_TOKEN` over `ANTHROPIC_API_KEY`; Gemini
/// prefers `GEMINI_API_KEY` over `GOOGLE_API_KEY`. CLI-based, local, custom,
/// and mock providers take no key here and return `None`.
fn resolve_api_key(kind: &AiProviderKind) -> Option<String> {
    match kind {
        AiProviderKind::Claude => std::env::var("ANTHROPIC_AUTH_TOKEN")
            .or_else(|_| std::env::var("ANTHROPIC_API_KEY"))
            .ok(),
        AiProviderKind::CodeBuddy => std::env::var("CODEBUDDY_API_KEY").ok(),
        AiProviderKind::OpenAi | AiProviderKind::CodexCli => std::env::var("OPENAI_API_KEY").ok(),
        AiProviderKind::Gemini => std::env::var("GEMINI_API_KEY")
            .or_else(|_| std::env::var("GOOGLE_API_KEY"))
            .ok(),
        _ => None,
    }
}
/// Applies an endpoint override from the environment, for the providers that
/// support a base-URL variable; all other kinds are left untouched.
fn resolve_endpoint(kind: &AiProviderKind, config: &mut AiProviderConfig) {
    let var_name = match kind {
        AiProviderKind::Claude => "ANTHROPIC_BASE_URL",
        AiProviderKind::CodeBuddy => "CODEBUDDY_BASE_URL",
        _ => return,
    };
    if let Ok(base_url) = std::env::var(var_name) {
        config.endpoint = Some(base_url);
    }
}
/// Returns a human-readable setup hint for an unavailable provider.
///
/// `Custom` providers have no generic hint, so a complete error message is
/// returned as `Err` instead; callers propagate it with `?`.
fn get_provider_hint(kind: &AiProviderKind) -> Result<&str, String> {
    match kind {
        AiProviderKind::Claude => {
            Ok("Set ANTHROPIC_AUTH_TOKEN or ANTHROPIC_API_KEY environment variable")
        }
        AiProviderKind::ClaudeCli => Ok("Install Claude CLI (claude command must be available)"),
        AiProviderKind::CodeBuddy => Ok("Set CODEBUDDY_API_KEY environment variable"),
        AiProviderKind::CodeBuddyCli => {
            Ok("Install CodeBuddy CLI (codebuddy command must be available)")
        }
        AiProviderKind::OpenAi => Ok("Set OPENAI_API_KEY environment variable"),
        AiProviderKind::CodexCli => Ok("Install Codex CLI (npm install -g @openai/codex)"),
        AiProviderKind::Gemini => {
            Ok("Set GEMINI_API_KEY or GOOGLE_API_KEY environment variable")
        }
        AiProviderKind::GeminiCli => {
            Ok("Install Gemini CLI (npm install -g @google/gemini-cli)")
        }
        AiProviderKind::Local => Ok("Set LINTHIS_AI_ENDPOINT environment variable"),
        AiProviderKind::Custom(name) => Err(format!(
            "Custom AI provider '{}' is not available. Check your config and ensure required tools/keys are set.",
            name
        )),
        AiProviderKind::Mock => Ok("Mock provider should always be available"),
    }
}
/// True when the provider edits files via an external CLI rather than an API.
/// Custom providers defer to the registry's `is_cli` flag (false if absent).
fn is_cli_provider(kind: &AiProviderKind) -> bool {
    if let AiProviderKind::Custom(_) = kind {
        return get_custom_provider().map(|cp| cp.is_cli).unwrap_or(false);
    }
    matches!(
        kind,
        AiProviderKind::ClaudeCli
            | AiProviderKind::CodeBuddyCli
            | AiProviderKind::CodexCli
            | AiProviderKind::GeminiCli
    )
}
/// Groups issues by their source file, preserving each file's issue order.
fn group_issues_by_file(
    issues: &[LintIssue],
) -> std::collections::HashMap<PathBuf, Vec<&LintIssue>> {
    issues
        .iter()
        .fold(std::collections::HashMap::new(), |mut groups, issue| {
            groups
                .entry(issue.file_path.clone())
                .or_default()
                .push(issue);
            groups
        })
}
/// CLI fix mode: hands whole files to an external AI CLI for direct editing.
///
/// Files are processed one at a time with an interactive confirm step, except
/// when `accept_all` is set together with `parallel_jobs > 1`, in which case
/// the work is delegated to the parallel batch path.
pub fn run_cli_file_fix(issues: &[LintIssue], config: &AiFixConfig) -> AiFixResult {
    let mut fix_result = AiFixResult::default();
    let file_groups = group_issues_by_file(issues);
    let total_files = file_groups.len();
    println!();
    println!("{}", "─".repeat(60).dimmed());
    println!(
        " {} Direct file editing mode ({} files{})",
        "CLI Fix:".cyan().bold(),
        total_files,
        if config.accept_all && config.parallel_jobs > 1 {
            format!(", {} parallel", config.parallel_jobs)
        } else {
            String::new()
        }
    );
    println!("{}", "─".repeat(60).dimmed());
    println!();
    // A non-CLI provider has no file-editing config; nothing to do.
    let provider_config = build_cli_provider_config(&config.provider);
    let provider_config = match provider_config {
        Some(c) => c,
        None => return fix_result,
    };
    let file_list: Vec<_> = file_groups.into_iter().collect();
    // Auto-apply mode has no prompts, so batches can run in parallel.
    if config.accept_all && config.parallel_jobs > 1 {
        return run_cli_file_fix_parallel(&file_list, &provider_config, config, total_files);
    }
    let provider = AiProvider::new(provider_config);
    let cli_name = get_cli_name(&config.provider);
    for (file_idx, (file_path, file_issues)) in file_list.iter().enumerate() {
        println!(
            " [{}/{}] Processing: {}",
            file_idx + 1,
            total_files,
            file_path.display()
        );
        // Snapshot the file so it can be restored if the CLI fails or the
        // user rejects the changes.
        let original_content = match fs::read_to_string(file_path) {
            Ok(c) => c,
            Err(e) => {
                eprintln!(" {} Failed to read file: {}", "✗".red(), e);
                fix_result.errors += file_issues.len();
                continue;
            }
        };
        // Flatten issues to (line, message, code) tuples for the provider.
        let issues_data: Vec<(usize, String, String)> = file_issues
            .iter()
            .map(|i| {
                (
                    i.line,
                    i.message.clone(),
                    i.code.clone().unwrap_or_default(),
                )
            })
            .collect();
        println!(" {} issues to fix", issues_data.len());
        let diff_result = run_with_spinner(&cli_name, || {
            provider.fix_file_with_cli(file_path, &issues_data)
        });
        process_cli_diff_result(
            diff_result,
            file_path,
            &original_content,
            file_issues.len(),
            config.accept_all,
            &mut fix_result,
        );
        println!();
    }
    print_cli_fix_summary(&fix_result, total_files);
    fix_result
}
/// Builds a provider config for CLI-based providers; returns `None` for
/// API-based kinds, which cannot edit files directly.
fn build_cli_provider_config(kind: &AiProviderKind) -> Option<AiProviderConfig> {
    match kind {
        AiProviderKind::ClaudeCli => Some(AiProviderConfig::claude_cli()),
        AiProviderKind::CodeBuddyCli => Some(AiProviderConfig::codebuddy_cli()),
        AiProviderKind::CodexCli => Some(AiProviderConfig::codex_cli()),
        AiProviderKind::GeminiCli => Some(AiProviderConfig::gemini_cli()),
        AiProviderKind::Custom(name) => Some(AiProviderConfig {
            kind: AiProviderKind::Custom(name.clone()),
            ..AiProviderConfig::default()
        }),
        _ => None,
    }
}
/// Display name of the CLI tool for the given provider, as an owned string.
fn get_cli_name(kind: &AiProviderKind) -> String {
    if let AiProviderKind::Custom(name) = kind {
        return name.clone();
    }
    let label = match kind {
        AiProviderKind::ClaudeCli => "Claude",
        AiProviderKind::CodeBuddyCli => "CodeBuddy",
        AiProviderKind::CodexCli => "Codex",
        AiProviderKind::GeminiCli => "Gemini",
        _ => "CLI",
    };
    label.to_string()
}
/// Runs `f` while a background thread animates a spinner with elapsed time.
///
/// The spinner draws two lines (spinner + blank) and rewrites them in place
/// every ~100ms. Once `f` returns, the spinner is stopped and its lines are
/// erased — but only if it actually got a chance to draw.
fn run_with_spinner<F, T>(cli_name: &str, f: F) -> T
where
    F: FnOnce() -> T,
{
    let spinner_running = Arc::new(std::sync::atomic::AtomicBool::new(true));
    let spinner_running_clone = Arc::clone(&spinner_running);
    let cli_name_owned = cli_name.to_string();
    // The thread returns `true` when it printed at least one frame, so the
    // caller knows whether the spinner lines need erasing.
    let spinner_handle = std::thread::spawn(move || -> bool {
        let spinner_chars = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];
        let start_time = std::time::Instant::now();
        let mut idx = 0;
        let mut first_print = true;
        while spinner_running_clone.load(std::sync::atomic::Ordering::Relaxed) {
            let elapsed = start_time.elapsed();
            let secs = elapsed.as_secs();
            let time_str = if secs >= 60 {
                format!("{}m {}s", secs / 60, secs % 60)
            } else {
                format!("{}s", secs)
            };
            if first_print {
                // First frame: emit the two lines that later frames rewrite.
                println!(
                    " {} Running {} CLI... ({})",
                    spinner_chars[idx].to_string().cyan(),
                    cli_name_owned,
                    time_str.dimmed()
                );
                println!();
                first_print = false;
            } else {
                // Later frames: move the cursor up two lines and redraw.
                print!(
                    "\x1B[2A\r {} Running {} CLI... ({})\x1B[K\n\n",
                    spinner_chars[idx].to_string().cyan(),
                    cli_name_owned,
                    time_str.dimmed()
                );
            }
            io::stdout().flush().ok();
            idx = (idx + 1) % spinner_chars.len();
            std::thread::sleep(std::time::Duration::from_millis(100));
        }
        !first_print
    });
    let result = f();
    spinner_running.store(false, std::sync::atomic::Ordering::Relaxed);
    let printed = spinner_handle.join().unwrap_or(false);
    // Fix: only erase the spinner lines when they were actually drawn.
    // Previously these cursor-movement escapes ran unconditionally, which
    // could clobber two lines of unrelated output whenever `f` finished
    // before the spinner's first frame.
    if printed {
        print!("\x1B[2A\x1B[K\n\x1B[K\x1B[A");
        io::stdout().flush().ok();
    }
    result
}
/// Renders a unified diff with conventional colors: additions green, removals
/// red, hunk headers cyan, context dimmed. `+++`/`---` file headers count as
/// context, not changes.
fn print_colored_diff(diff: &str) {
    println!();
    println!(" {}", "Changes:".bold());
    for line in diff.lines() {
        let rendered = if line.starts_with("@@") {
            line.cyan()
        } else if line.starts_with('+') && !line.starts_with("+++") {
            line.green()
        } else if line.starts_with('-') && !line.starts_with("---") {
            line.red()
        } else {
            line.dimmed()
        };
        println!(" {}", rendered);
    }
    println!();
}
/// Handles the outcome of one CLI file-fix invocation: shows the diff, then
/// applies, prompts, or rolls back.
///
/// On CLI error the original content is written back, since the CLI may have
/// left the file partially modified.
fn process_cli_diff_result(
    diff_result: Result<String, String>,
    file_path: &PathBuf,
    original_content: &str,
    issue_count: usize,
    accept_all: bool,
    fix_result: &mut AiFixResult,
) {
    match diff_result {
        Ok(diff) => {
            // Empty diff means the CLI decided no edit was needed.
            if diff.is_empty() {
                println!(" {} No changes made", "⚠".yellow());
                fix_result.skipped += issue_count;
                return;
            }
            print_colored_diff(&diff);
            if accept_all {
                println!(" {} Changes applied", "✓".green());
                fix_result.applied += issue_count;
                fix_result.modified_files.insert(file_path.clone());
            } else {
                confirm_or_restore(file_path, original_content, issue_count, fix_result);
            }
        }
        Err(e) => {
            eprintln!(" {} CLI error: {}", "✗".red(), e);
            // Best-effort rollback; a failed restore is deliberately ignored.
            let _ = fs::write(file_path, original_content);
            fix_result.errors += issue_count;
        }
    }
}
/// Prompts the user to keep or revert changes the CLI already wrote to disk.
///
/// "n"/"no" and "r"/"restore" both write the original content back (only the
/// message differs); any other input — including Enter — keeps the changes.
fn confirm_or_restore(
    file_path: &PathBuf,
    original_content: &str,
    issue_count: usize,
    fix_result: &mut AiFixResult,
) {
    print!(" Apply changes? [Y/n/r(estore)]: ");
    io::stdout().flush().ok();
    let input = read_line().trim().to_lowercase();
    match input.as_str() {
        "n" | "no" => {
            let _ = fs::write(file_path, original_content);
            println!(" {} Changes discarded", "⚠".yellow());
            fix_result.skipped += issue_count;
        }
        "r" | "restore" => {
            let _ = fs::write(file_path, original_content);
            println!(" {} File restored", "↺".cyan());
            fix_result.skipped += issue_count;
        }
        _ => {
            // Default (Enter or anything else): keep what the CLI wrote.
            println!(" {} Changes applied", "✓".green());
            fix_result.applied += issue_count;
            fix_result.modified_files.insert(file_path.clone());
        }
    }
}
/// Outcome of one parallel CLI batch.
#[derive(Debug)]
struct BatchResult {
    /// Per-file diffs returned by the CLI; files without an entry were unchanged.
    diffs: std::collections::HashMap<PathBuf, String>,
    /// The batch's files with their issue counts, in input order.
    files: Vec<(PathBuf, usize)>,
    /// Batch-wide CLI error, if the whole invocation failed.
    error: Option<String>,
}
const FILES_PER_BATCH: usize = 8;
/// Borrowed variant of [`get_cli_name`]: display name of the provider's CLI.
// NOTE(review): this duplicates the mapping in `get_cli_name`; consider having
// one delegate to the other.
fn get_cli_display_name(provider: &AiProviderKind) -> &str {
    match provider {
        AiProviderKind::ClaudeCli => "Claude",
        AiProviderKind::CodeBuddyCli => "CodeBuddy",
        AiProviderKind::CodexCli => "Codex",
        AiProviderKind::GeminiCli => "Gemini",
        AiProviderKind::Custom(name) => name.as_str(),
        _ => "CLI",
    }
}
/// Converts grouped issues into owned per-file payloads:
/// (path, [(line, message, code)], issue count).
fn prepare_file_data(file_list: &[(PathBuf, Vec<&LintIssue>)]) -> Vec<FileIssueData> {
    let mut out = Vec::with_capacity(file_list.len());
    for (path, issues) in file_list {
        let issues_data: Vec<(usize, String, String)> = issues
            .iter()
            .map(|issue| {
                (
                    issue.line,
                    issue.message.clone(),
                    issue.code.clone().unwrap_or_default(),
                )
            })
            .collect();
        out.push((path.clone(), issues_data, issues.len()));
    }
    out
}
/// Prints a one-line summary of the parallel batch plan.
fn print_batch_plan(
    total_issues: usize,
    total_files: usize,
    total_batches: usize,
    actual_files_per_batch: usize,
    actual_parallel: usize,
) {
    let plural = if total_batches == 1 { "" } else { "es" };
    println!(
        " {} {} issues in {} files, {} batch{} (up to {} files/batch, {} parallel)",
        "→".cyan(),
        total_issues,
        total_files,
        total_batches,
        plural,
        actual_files_per_batch,
        actual_parallel
    );
    println!();
}
/// Spawns a thread that redraws a single-line spinner with batch progress and
/// elapsed time every ~100ms, exiting once `progress` reaches `total_batches`.
///
/// The caller must join the returned handle after the work completes.
fn spawn_progress_spinner(
    progress: Arc<AtomicUsize>,
    total_batches: usize,
    cli_name: String,
) -> std::thread::JoinHandle<()> {
    std::thread::spawn(move || {
        let spinner_chars = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];
        let start_time = std::time::Instant::now();
        let mut idx = 0;
        loop {
            let current = progress.load(Ordering::Relaxed);
            let elapsed = start_time.elapsed();
            let secs = elapsed.as_secs();
            let time_str = if secs >= 60 {
                format!("{}m {}s", secs / 60, secs % 60)
            } else {
                format!("{}s", secs)
            };
            // \r + \x1B[K: rewrite the current line in place.
            print!(
                "\r {} [batch {}/{}] Running {} CLI... ({})\x1B[K",
                spinner_chars[idx].to_string().cyan(),
                current,
                total_batches,
                cli_name,
                time_str.dimmed()
            );
            io::stdout().flush().ok();
            // Draw the final frame before exiting so the last count is shown.
            if current >= total_batches {
                break;
            }
            idx = (idx + 1) % spinner_chars.len();
            std::thread::sleep(std::time::Duration::from_millis(100));
        }
    })
}
/// Runs one CLI invocation for a batch of files and packages the outcome.
/// Each batch builds its own provider since batches run on separate workers.
fn process_single_batch(
    batch: &FileBatch<'_>,
    provider_config: &AiProviderConfig,
    working_dir: &std::path::Path,
) -> BatchResult {
    let provider = AiProvider::new(provider_config.clone());
    let files: Vec<(PathBuf, usize)> = batch
        .iter()
        .map(|(path, _issues, count)| (path.clone(), *count))
        .collect();
    let inputs: Vec<BatchFileInput<'_>> = batch
        .iter()
        .map(|(path, issues, _count)| (path.as_path(), issues.as_slice()))
        .collect();
    let (diffs, error) = match provider.fix_files_batch_with_cli(&inputs, working_dir) {
        Ok(diffs) => (diffs, None),
        Err(e) => (std::collections::HashMap::new(), Some(e)),
    };
    BatchResult {
        diffs,
        files,
        error,
    }
}
fn run_cli_file_fix_parallel(
file_list: &[(PathBuf, Vec<&LintIssue>)],
provider_config: &AiProviderConfig,
config: &AiFixConfig,
total_files: usize,
) -> AiFixResult {
use std::sync::Mutex;
let mut fix_result = AiFixResult::default();
let cli_name = get_cli_display_name(&config.provider);
let file_data = prepare_file_data(file_list);
let batches: Vec<FileBatch<'_>> = file_data
.iter()
.collect::<Vec<_>>()
.chunks(FILES_PER_BATCH)
.map(|chunk| chunk.to_vec())
.collect();
let total_batches = batches.len();
let total_issues: usize = file_data.iter().map(|(_, _, count)| count).sum();
let actual_parallel = config.parallel_jobs.min(total_batches);
let actual_files_per_batch = if total_batches > 0 {
total_files.div_ceil(total_batches)
} else {
total_files
};
print_batch_plan(
total_issues,
total_files,
total_batches,
actual_files_per_batch,
actual_parallel,
);
let progress = Arc::new(AtomicUsize::new(0));
let progress_handle =
spawn_progress_spinner(Arc::clone(&progress), total_batches, cli_name.to_string());
let pool = rayon::ThreadPoolBuilder::new()
.num_threads(config.parallel_jobs)
.build()
.unwrap_or_else(|_| rayon::ThreadPoolBuilder::new().build().unwrap());
let working_dir = crate::utils::get_project_root();
let results_mutex = Arc::new(Mutex::new(Vec::new()));
pool.install(|| {
batches.par_iter().for_each(|batch| {
let result = process_single_batch(batch, provider_config, &working_dir);
progress.fetch_add(1, Ordering::Relaxed);
results_mutex.lock().unwrap().push(result);
});
});
let _ = progress_handle.join();
println!(); println!();
let results = Arc::try_unwrap(results_mutex)
.expect("All parallel tasks completed")
.into_inner()
.unwrap();
collect_batch_results(&results, total_files, &mut fix_result);
print_cli_fix_summary(&fix_result, total_files);
fix_result
}
/// Folds per-batch outcomes into the run totals, printing one entry per file.
///
/// A batch-level error counts every file in that batch as failed; otherwise a
/// file with a diff is applied (the CLI already wrote it to disk) and a file
/// without one is skipped.
fn collect_batch_results(
    results: &[BatchResult],
    total_files: usize,
    fix_result: &mut AiFixResult,
) {
    // Running file counter across all batches, for "[i/N]" numbering.
    let mut file_idx = 0;
    for batch_result in results {
        if let Some(ref error) = batch_result.error {
            for (file_path, issue_count) in &batch_result.files {
                file_idx += 1;
                println!(" [{}/{}] {}", file_idx, total_files, file_path.display());
                eprintln!(" {} CLI error: {}", "✗".red(), error);
                fix_result.errors += issue_count;
            }
        } else {
            for (file_path, issue_count) in &batch_result.files {
                file_idx += 1;
                println!(" [{}/{}] {}", file_idx, total_files, file_path.display());
                if let Some(diff) = batch_result.diffs.get(file_path) {
                    print_colored_diff(diff);
                    println!(" {} Changes applied", "✓".green());
                    fix_result.applied += issue_count;
                    fix_result.modified_files.insert(file_path.clone());
                } else {
                    println!(" {} No changes made", "⚠".yellow());
                    fix_result.skipped += issue_count;
                }
                println!();
            }
        }
    }
}
/// Prints the CLI-fix run summary, plus a build-verification reminder whenever
/// anything was actually applied.
fn print_cli_fix_summary(fix_result: &AiFixResult, total_files: usize) {
    println!("{}", "═".repeat(60).dimmed());
    println!(" {}", "CLI Fix Summary".bold());
    println!("{}", "─".repeat(60).dimmed());
    println!(" Files processed: {}", total_files.to_string().cyan());
    println!(
        " Issues applied: {}",
        fix_result.applied.to_string().green()
    );
    println!(
        " Issues skipped: {}",
        fix_result.skipped.to_string().yellow()
    );
    println!(" Errors: {}", fix_result.errors.to_string().red());
    println!("{}", "═".repeat(60).dimmed());
    // AI edits can change signatures; remind the user to rebuild and check.
    if fix_result.applied > 0 {
        println!();
        println!("{}", " ⚠ Important for C/C++ projects:".yellow().bold());
        println!(
            " {}",
            "If function signatures were changed, verify that:".dimmed()
        );
        println!(
            " {}",
            "- All declarations and definitions are updated".dimmed()
        );
        println!(
            " {}",
            "- All call sites use correct argument types".dimmed()
        );
        println!(" {}", "- The code still compiles successfully".dimmed());
        println!();
        println!(
            " {}",
            "Recommended: Run your build command to verify:".cyan()
        );
        println!(
            " {}",
            " make # or cmake --build build, etc.".dimmed()
        );
    }
    println!();
}
/// Reads the issue's source file and asks the suggester for fix candidates.
///
/// A file-read failure is reported as a `SuggestionResult::failure` rather
/// than a panic, so batch processing can continue past unreadable files.
pub fn get_suggestion_for_issue(
    suggester: &AiSuggester,
    issue: &LintIssue,
    config: &AiFixConfig,
) -> SuggestionResult {
    let source = match fs::read_to_string(&issue.file_path) {
        Ok(s) => s,
        Err(e) => {
            return SuggestionResult::failure(
                issue.code.as_deref().unwrap_or("UNKNOWN"),
                &issue.file_path.to_string_lossy(),
                issue.line,
                &issue.message,
                &format!("Failed to read file: {}", e),
            );
        }
    };
    let options = SuggestionOptions {
        max_suggestions: config.max_suggestions,
        include_explanation: true,
        include_confidence: true,
        // NOTE(review): assumes `false` here forces a fresh suggestion even if
        // the issue already carries one — confirm against SuggestionOptions.
        skip_with_suggestion: false,
        ..Default::default()
    };
    suggester.suggest_fix(issue, &source, &options)
}
/// Displays the suggestions for one issue and resolves the user's choice.
///
/// Returns `(applied, quit)`: whether a suggestion was applied to disk, and
/// whether the user asked to quit the fix session.
pub fn show_ai_suggestions(
    issue: &LintIssue,
    result: &SuggestionResult,
    config: &AiFixConfig,
) -> (bool, bool) {
    println!();
    if let Some(ref error) = result.error {
        println!(" {} {}", "AI Error:".red(), error);
        return (false, false);
    }
    if result.suggestions.is_empty() {
        println!(
            " {}",
            "No AI suggestions available for this issue.".yellow()
        );
        return (false, false);
    }
    println!(
        " {} {} suggestion{}",
        "AI Generated".green().bold(),
        result.suggestions.len(),
        if result.suggestions.len() == 1 {
            ""
        } else {
            "s"
        }
    );
    println!();
    // Show each candidate with its diff preview, explanation, and confidence.
    for (idx, suggestion) in result.suggestions.iter().enumerate() {
        println!(
            " {} {}",
            format!("[{}]", idx + 1).cyan().bold(),
            "Suggestion:".bold()
        );
        print_suggestion_preview(issue, suggestion);
        if let Some(ref explanation) = suggestion.explanation {
            println!(" {} {}", "Explanation:".dimmed(), explanation);
        }
        if let Some(confidence) = suggestion.confidence {
            // Traffic-light coloring: >=80% green, >=50% yellow, else red.
            let confidence_str = format!("{:.0}%", confidence * 100.0);
            let colored = if confidence >= 0.8 {
                confidence_str.green()
            } else if confidence >= 0.5 {
                confidence_str.yellow()
            } else {
                confidence_str.red()
            };
            println!(" {} {}", "Confidence:".dimmed(), colored);
        }
        println!();
    }
    // Auto-apply mode takes the first suggestion without prompting.
    if config.accept_all {
        if let Some(suggestion) = result.suggestions.first() {
            println!(" {} Applying first suggestion...", "→".cyan());
            return (try_apply_suggestion(issue, suggestion), false);
        }
    }
    prompt_suggestion_choice(issue, &result.suggestions)
}
/// Applies a suggestion and, on success, prints a diff of what changed.
///
/// The pre-apply file content is snapshotted here so the diff can show the
/// replaced lines (note: `apply_suggestion` re-reads the file itself).
fn try_apply_suggestion(issue: &LintIssue, suggestion: &FixSuggestion) -> bool {
    let original_content = fs::read_to_string(&issue.file_path).ok();
    let original_lines: Vec<&str> = original_content
        .as_ref()
        .map(|c| c.lines().collect())
        .unwrap_or_default();
    let start_line = issue.line;
    // The replaced span is at least the issue line, possibly multi-line.
    let end_line = suggestion.end_line.max(issue.line);
    if apply_suggestion(issue, suggestion) {
        println!(" {} Applied successfully!", "✓".green());
        println!();
        print_suggestion_diff(&original_lines, suggestion, start_line, end_line);
        true
    } else {
        println!(" {} Failed to apply.", "✗".red());
        false
    }
}
/// Prompts the user to pick a suggestion, skip, or quit.
///
/// Returns `(applied, quit)`. Pressing Enter defaults to suggestion #1; any
/// unparseable or out-of-range input skips the issue.
fn prompt_suggestion_choice(issue: &LintIssue, suggestions: &[FixSuggestion]) -> (bool, bool) {
    for i in 1..=suggestions.len() {
        if i == 1 {
            println!(
                " [{}] Apply suggestion #{} {}",
                i.to_string().cyan(),
                i,
                "(default, press Enter)".dimmed()
            );
        } else {
            println!(" [{}] Apply suggestion #{}", i.to_string().cyan(), i);
        }
    }
    println!(" [{}] Skip this issue", "s".cyan());
    println!(" [{}] Quit AI fix mode", "q".cyan());
    println!();
    print!(" > ");
    io::stdout().flush().ok();
    let input = read_line().trim().to_lowercase();
    // Empty input selects the first suggestion.
    let input = if input.is_empty() { "1" } else { &input };
    match input {
        "s" | "skip" => (false, false),
        "q" | "quit" => (false, true),
        _ => {
            if let Ok(num) = input.parse::<usize>() {
                if num >= 1 && num <= suggestions.len() {
                    let applied = try_apply_suggestion(issue, &suggestions[num - 1]);
                    if !applied {
                        println!(" {} Failed to apply suggestion.", "✗".red());
                    }
                    return (applied, false);
                }
            }
            println!(" {} Invalid choice, skipping.", "Invalid:".yellow());
            (false, false)
        }
    }
}
/// Sanity-checks a suggestion before it is written to disk.
///
/// Two heuristics guard against wildly off-target AI output: a small target
/// span must not balloon into a much larger block, and the suggestion must not
/// introduce a function/class definition where the original line had none.
fn validate_suggestion(
    issue: &LintIssue,
    suggestion: &FixSuggestion,
    original_lines: &[&str],
) -> bool {
    let suggestion_lines: Vec<&str> = suggestion.code.lines().collect();
    let lines_to_replace = suggestion.end_line.saturating_sub(issue.line) + 1;
    if lines_to_replace <= 3 && suggestion_lines.len() > lines_to_replace * 4 {
        eprintln!(
            " {} Suggestion rejected: replacing {} lines with {} lines is too different",
            "⚠".yellow(),
            lines_to_replace,
            suggestion_lines.len()
        );
        return false;
    }
    if let Some(original) = original_lines.get(issue.line.saturating_sub(1)) {
        let original = original.trim();
        let first_new = suggestion_lines.first().map_or("", |s| s.trim());
        let def_patterns = ["def ", "class ", "fn ", "func ", "function "];
        let starts_def = |line: &str| def_patterns.iter().any(|p| line.starts_with(p));
        if starts_def(first_new) && !starts_def(original) {
            eprintln!(
                " {} Suggestion rejected: introduces function/class definition where original had none",
                "⚠".yellow()
            );
            return false;
        }
    }
    true
}
/// Writes a suggestion into the issue's file, replacing the target line span.
///
/// Returns `false` on unreadable file, out-of-range line, failed validation,
/// empty suggestion, or write failure. A trailing newline present in the
/// original file is preserved.
pub fn apply_suggestion(issue: &LintIssue, suggestion: &FixSuggestion) -> bool {
    let Ok(content) = fs::read_to_string(&issue.file_path) else {
        return false;
    };
    let lines: Vec<&str> = content.lines().collect();
    let line_idx = issue.line.saturating_sub(1);
    if line_idx >= lines.len() || !validate_suggestion(issue, suggestion, &lines) {
        return false;
    }
    let replacement: Vec<&str> = suggestion.code.lines().collect();
    if replacement.is_empty() {
        return false;
    }
    let mut new_lines: Vec<String> = lines.iter().map(|s| s.to_string()).collect();
    // Span to replace: from the issue line through the suggestion's end line,
    // clamped to the end of the file.
    let span = (suggestion.end_line.max(issue.line) - issue.line + 1)
        .min(new_lines.len() - line_idx);
    new_lines.splice(
        line_idx..line_idx + span,
        replacement.iter().map(|s| s.to_string()),
    );
    let mut new_content = new_lines.join("\n");
    if content.ends_with('\n') && !new_content.ends_with('\n') {
        new_content.push('\n');
    }
    fs::write(&issue.file_path, new_content).is_ok()
}
/// Prints a before/after diff for an applied suggestion with line numbers.
///
/// `start_line`/`end_line` are 1-based, so index `i - 1` addresses the line in
/// `original_lines`; the trailing context at index `end_line` is the first
/// unchanged line after the replaced span, numbered by its post-edit position.
fn print_suggestion_diff(
    original_lines: &[&str],
    suggestion: &FixSuggestion,
    start_line: usize,
    end_line: usize,
) {
    println!(" {}", "Changes:".bold());
    let suggestion_lines: Vec<&str> = suggestion.code.lines().collect();
    // One line of leading context, when the span doesn't start at line 1.
    if start_line > 1 {
        if let Some(context_line) = original_lines.get(start_line - 2) {
            println!(
                " {} {}",
                format!(" {:>4} |", start_line - 1).dimmed(),
                context_line.dimmed()
            );
        }
    }
    // Removed lines (original content of the replaced span).
    for i in start_line..=end_line {
        if let Some(old_line) = original_lines.get(i - 1) {
            println!(" {} {}", format!("-{:>4} |", i).red(), old_line.red());
        }
    }
    // Added lines, numbered from the start of the span.
    for (i, new_line) in suggestion_lines.iter().enumerate() {
        println!(
            " {} {}",
            format!("+{:>4} |", start_line + i).green(),
            new_line.green()
        );
    }
    // One line of trailing context, renumbered for the edited file.
    let new_end_line = start_line + suggestion_lines.len();
    if let Some(context_line) = original_lines.get(end_line) {
        println!(
            " {} {}",
            format!(" {:>4} |", new_end_line).dimmed(),
            context_line.dimmed()
        );
    }
    println!();
}
/// Shows a suggestion as a diff preview before it is applied: context lines
/// from the issue, the to-be-removed span, and the proposed replacement.
///
/// For multi-line spans the current file content is re-read; a single-line
/// span falls back to the issue's captured `code_line`.
fn print_suggestion_preview(issue: &LintIssue, suggestion: &FixSuggestion) {
    let start_line = issue.line;
    let end_line = suggestion.end_line.max(issue.line);
    let suggestion_lines: Vec<&str> = suggestion.code.lines().collect();
    for (line_num, content) in &issue.context_before {
        println!(
            " {} {}",
            format!(" {:>4} |", line_num).dimmed(),
            content.dimmed()
        );
    }
    if end_line > start_line {
        // Multi-line target: read the file to show every line in the span.
        if let Ok(content) = fs::read_to_string(&issue.file_path) {
            let lines: Vec<&str> = content.lines().collect();
            for i in start_line..=end_line {
                if let Some(old_line) = lines.get(i - 1) {
                    println!(" {} {}", format!("-{:>4} |", i).red(), old_line.red());
                }
            }
        }
    } else {
        // Single-line target: the issue already carries the line's content.
        if let Some(ref code_line) = issue.code_line {
            println!(
                " {} {}",
                format!("-{:>4} |", start_line).red(),
                code_line.red()
            );
        }
    }
    for (i, new_line) in suggestion_lines.iter().enumerate() {
        println!(
            " {} {}",
            format!("+{:>4} |", start_line + i).green(),
            new_line.green()
        );
    }
    for (line_num, content) in &issue.context_after {
        println!(
            " {} {}",
            format!(" {:>4} |", line_num).dimmed(),
            content.dimmed()
        );
    }
}
/// A suggestion result paired with the index of the issue it belongs to,
/// so parallel collection can be re-ordered back to input order.
struct CachedSuggestion {
    // Index into the original `issues` slice.
    issue_idx: usize,
    // The provider's (possibly failed) suggestion result.
    result: SuggestionResult,
}
/// Collects suggestions one issue at a time, updating a single status line.
fn collect_suggestions_sequential(
    issues: &[LintIssue],
    suggester: &AiSuggester,
    config: &AiFixConfig,
    total: usize,
) -> Vec<CachedSuggestion> {
    let mut cached_suggestions: Vec<CachedSuggestion> = Vec::new();
    for (idx, issue) in issues.iter().enumerate() {
        // \r rewrites the status line; trailing spaces pad over a longer
        // previous path.
        print!(
            "\r [{}/{}] Analyzing: {}:{}{}",
            idx + 1,
            total,
            issue.file_path.display(),
            issue.line,
            " ".repeat(20)
        );
        io::stdout().flush().ok();
        let suggestion_result = get_suggestion_for_issue(suggester, issue, config);
        cached_suggestions.push(CachedSuggestion {
            issue_idx: idx,
            result: suggestion_result,
        });
    }
    // Blank out the status line before the caller prints the summary.
    print!("\r{}\r", " ".repeat(80));
    io::stdout().flush().ok();
    cached_suggestions
}
/// Collects suggestions on a rayon pool sized to `config.parallel_jobs`,
/// with a background thread printing a progress counter.
///
/// Results are sorted back to issue order before returning.
fn collect_suggestions_parallel(
    issues: &[LintIssue],
    suggester: &AiSuggester,
    config: &AiFixConfig,
    total: usize,
) -> Vec<CachedSuggestion> {
    // Fall back to rayon's default pool if the sized pool cannot be built.
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(config.parallel_jobs)
        .build()
        .unwrap_or_else(|_| rayon::ThreadPoolBuilder::new().build().unwrap());
    let progress = Arc::new(AtomicUsize::new(0));
    let progress_clone = Arc::clone(&progress);
    let total_clone = total;
    // Progress printer: only redraws when the counter changes, exits once all
    // issues are done.
    let progress_handle = std::thread::spawn(move || {
        let mut last_printed = usize::MAX;
        loop {
            let current = progress_clone.load(Ordering::Relaxed);
            if current != last_printed {
                print!(
                    "\r [{}/{}] Analyzing in parallel...{}",
                    current,
                    total_clone,
                    " ".repeat(30)
                );
                io::stdout().flush().ok();
                last_printed = current;
            }
            if current >= total_clone {
                break;
            }
            std::thread::sleep(std::time::Duration::from_millis(100));
        }
    });
    let results: Vec<CachedSuggestion> = pool.install(|| {
        issues
            .par_iter()
            .enumerate()
            .map(|(idx, issue)| {
                let suggestion_result = get_suggestion_for_issue(suggester, issue, config);
                progress.fetch_add(1, Ordering::Relaxed);
                CachedSuggestion {
                    issue_idx: idx,
                    result: suggestion_result,
                }
            })
            .collect()
    });
    let _ = progress_handle.join();
    print!("\r{}\r", " ".repeat(80));
    io::stdout().flush().ok();
    // Defensive re-sort: rayon's indexed collect should already preserve input
    // order, but the review phase depends on it, so make it explicit.
    let mut sorted_results = results;
    sorted_results.sort_by_key(|c| c.issue_idx);
    sorted_results
}
/// Entry point for the AI fix workflow.
///
/// CLI providers take the direct file-editing path; API providers run a
/// two-phase flow: Phase 1 collects suggestions for every issue (optionally in
/// parallel), Phase 2 either auto-applies them all or starts an interactive
/// review.
pub fn run_ai_fix_all(result: &RunResult, config: &AiFixConfig) -> AiFixResult {
    let issues = &result.issues;
    if issues.is_empty() {
        println!("{}", "No issues to fix.".green());
        return AiFixResult::default();
    }
    // CLI-based providers edit files themselves; no suggestion phase needed.
    if is_cli_provider(&config.provider) {
        return run_cli_file_fix(issues, config);
    }
    let suggester = match create_suggester(config) {
        Ok(s) => s,
        Err(e) => {
            eprintln!("{}: {}", "Error".red(), e);
            // Provider unavailable: count every issue as errored.
            return AiFixResult {
                errors: issues.len(),
                ..Default::default()
            };
        }
    };
    println!();
    println!("{}", "═".repeat(60).dimmed());
    println!(" {} - Batch Mode", "AI Fix".green().bold());
    println!("{}", "─".repeat(60).dimmed());
    println!(
        " Provider: {} ({})",
        suggester.provider_name().cyan(),
        suggester.model_name()
    );
    println!(" Issues: {}", issues.len());
    if config.accept_all {
        println!(
            " Mode: {} (will apply automatically)",
            "Auto-apply".yellow()
        );
    } else {
        println!(" Mode: Batch collect, then review");
    }
    println!("{}", "═".repeat(60).dimmed());
    println!();
    // Interactive mode confirms before spending provider calls.
    if !config.accept_all {
        print!(" Start AI analysis? [Y/n]: ");
        io::stdout().flush().ok();
        let input = read_line().trim().to_lowercase();
        if input == "n" || input == "no" {
            println!(" Cancelled.");
            return AiFixResult::default();
        }
    }
    let total = issues.len();
    println!();
    println!("{}", "─".repeat(60).dimmed());
    if config.parallel_jobs > 1 {
        println!(
            " {} Collecting AI suggestions ({} parallel)...",
            "Phase 1:".cyan().bold(),
            config.parallel_jobs
        );
    } else {
        println!(
            " {} Collecting AI suggestions...",
            "Phase 1:".cyan().bold()
        );
    }
    println!("{}", "─".repeat(60).dimmed());
    let cached_suggestions = if config.parallel_jobs > 1 {
        collect_suggestions_parallel(issues, &suggester, config, total)
    } else {
        collect_suggestions_sequential(issues, &suggester, config, total)
    };
    // "Failed" covers both provider errors and empty suggestion lists.
    let errors = cached_suggestions
        .iter()
        .filter(|c| c.result.error.is_some() || c.result.suggestions.is_empty())
        .count();
    let successful = cached_suggestions.len() - errors;
    println!(
        " {} Collected {} suggestion{} ({} failed)",
        "✓".green(),
        successful.to_string().cyan(),
        if successful == 1 { "" } else { "s" },
        errors.to_string().red()
    );
    println!();
    if config.accept_all {
        return apply_all_suggestions(issues, &cached_suggestions, config);
    }
    let fix_result =
        run_interactive_review(issues, &cached_suggestions, config, errors, successful);
    print_fix_summary(&fix_result);
    fix_result
}
/// User command during the interactive review phase.
enum ReviewAction {
    // Advance to the next issue (skips the current one if nothing applied).
    Next,
    // Go back one issue.
    Previous,
    // Jump to a specific 1-based issue number.
    GoTo(usize),
    // Suppress the current issue with a NOLINT comment.
    Ignore,
    // Apply every remaining suggestion without further prompts.
    AcceptAll,
    // End the review; remaining issues count as skipped.
    Quit,
}
/// Suppresses an issue by inserting a NOLINT comment into its file,
/// updating the run totals accordingly.
fn apply_nolint_action(issue: &LintIssue, fix_result: &mut AiFixResult) {
    match add_nolint_comment(issue) {
        NolintResult::Success(diffs) => {
            fix_result.applied += 1;
            println!("{} Added NOLINT comment", "✓".green());
            println!();
            print_diff(&diffs, &issue.file_path);
            fix_result.modified_files.insert(issue.file_path.clone());
        }
        NolintResult::AlreadyIgnored => {
            println!("{}", "Already has NOLINT comment".yellow());
            fix_result.skipped += 1;
        }
        NolintResult::Error(e) => {
            eprintln!("{}: {}", "Failed to add NOLINT".red(), e);
            fix_result.skipped += 1;
        }
    }
}
/// Prints the Phase 2 banner and navigation help for the review loop.
fn print_review_header() {
    let rule = "─".repeat(60).dimmed();
    println!("{}", rule);
    println!(
        " {} Review suggestions (no more waiting)",
        "Phase 2:".cyan().bold()
    );
    println!("{}", rule);
    println!();
    println!(" Navigation: [p]revious, [g]o to #N, [q]uit");
    println!();
}
/// Control-flow result of handling one review action.
enum ReviewStep {
    // Continue the loop at the given issue index.
    Continue(usize),
    // End the review loop.
    Break,
}
/// Bundled state for one step of the review loop.
struct ReviewStepContext<'a> {
    // Index of the current issue.
    idx: usize,
    // Whether a suggestion was applied for the current issue.
    applied: bool,
    // Total issue count (for range checks and numbering).
    total: usize,
    // All issues under review.
    issues: &'a [LintIssue],
    // Pre-collected suggestions, aligned with `issues` by index.
    cached_suggestions: &'a [CachedSuggestion],
}
/// Dispatches a user-selected `ReviewAction` to its handler and returns
/// the next step for the review loop (a new index, or a break).
fn handle_review_action(
    action: ReviewAction,
    ctx: &ReviewStepContext<'_>,
    processed: &mut [bool],
    fix_result: &mut AiFixResult,
) -> ReviewStep {
    match action {
        ReviewAction::Next => handle_next_action(ctx.idx, ctx.applied, processed, fix_result),
        ReviewAction::Previous => handle_previous_action(ctx.idx),
        ReviewAction::GoTo(target) => handle_goto_action(ctx.idx, target, ctx.total),
        ReviewAction::Ignore => {
            // Mark the issue handled, then suppress it with a NOLINT comment.
            processed[ctx.idx] = true;
            apply_nolint_action(&ctx.issues[ctx.idx], fix_result);
            ReviewStep::Continue(ctx.idx + 1)
        }
        ReviewAction::AcceptAll => {
            // Apply every not-yet-processed suggestion from here onward,
            // then end the interactive loop.
            apply_remaining_from(
                ctx.idx,
                ctx.applied,
                ctx.issues,
                ctx.cached_suggestions,
                processed,
                fix_result,
            );
            ReviewStep::Break
        }
        ReviewAction::Quit => handle_quit_action(ctx.idx, processed, fix_result),
    }
}
/// Advances to the next issue, counting the current one as skipped when
/// it was neither applied nor otherwise handled.
fn handle_next_action(
    idx: usize,
    applied: bool,
    processed: &mut [bool],
    fix_result: &mut AiFixResult,
) -> ReviewStep {
    // An untouched issue (not applied, not previously processed) is skipped.
    fix_result.skipped += usize::from(!applied && !processed[idx]);
    processed[idx] = true;
    ReviewStep::Continue(idx + 1)
}
/// Steps back one issue, or stays in place (with a notice) when the
/// review is already at the first issue.
fn handle_previous_action(idx: usize) -> ReviewStep {
    if idx == 0 {
        println!("{}", " Already at first issue".yellow());
        return ReviewStep::Continue(idx);
    }
    println!("{}", " (Going back to previous issue)".dimmed());
    ReviewStep::Continue(idx - 1)
}
/// Jumps to the 1-based issue number `target`, validated against `total`;
/// stays at `idx` and warns when the number is out of range.
fn handle_goto_action(idx: usize, target: usize, total: usize) -> ReviewStep {
    if (1..=total).contains(&target) {
        // Convert the user-facing 1-based number to a 0-based index.
        return ReviewStep::Continue(target - 1);
    }
    println!(
        " {} Issue #{} out of range (1-{})",
        "Invalid:".yellow(),
        target,
        total
    );
    ReviewStep::Continue(idx)
}
/// Handles a quit request: marks the session as ended early and counts
/// every still-unprocessed issue at or after `idx` as skipped.
fn handle_quit_action(idx: usize, processed: &[bool], fix_result: &mut AiFixResult) -> ReviewStep {
    fix_result.quit_early = true;
    // Only issues from the current position onward can still be pending,
    // so count the unhandled ones directly on that subslice instead of
    // scanning the whole slice with a manual `i >= idx` check.
    fix_result.skipped += processed[idx..].iter().filter(|&&done| !done).count();
    ReviewStep::Break
}
/// Runs the interactive phase-2 review loop over all issues, presenting
/// each cached suggestion and acting on the user's choice.
///
/// `errors` and `successful` are the phase-1 collection totals, seeded
/// into the returned `AiFixResult`.
fn run_interactive_review(
    issues: &[LintIssue],
    cached_suggestions: &[CachedSuggestion],
    config: &AiFixConfig,
    errors: usize,
    successful: usize,
) -> AiFixResult {
    let total = issues.len();
    print_review_header();
    let mut fix_result = AiFixResult {
        errors,
        suggested: successful,
        ..AiFixResult::default()
    };
    let mut idx = 0;
    // `processed[i]` records whether issue i has been resolved (applied,
    // ignored, or skipped), so revisiting via [p]/[g] doesn't double-count.
    let mut processed = vec![false; total];
    while idx < total {
        let issue = &issues[idx];
        let cached = &cached_suggestions[idx];
        print_issue_header(issue, idx, total, config.verbose);
        let (applied, action) = show_cached_suggestions(issue, &cached.result, idx, total);
        if applied {
            fix_result.applied += 1;
            fix_result.modified_files.insert(issue.file_path.clone());
            processed[idx] = true;
        }
        let ctx = ReviewStepContext {
            idx,
            applied,
            total,
            issues,
            cached_suggestions,
        };
        // The action handler decides where the loop goes next, or ends it.
        match handle_review_action(action, &ctx, &mut processed, &mut fix_result) {
            ReviewStep::Continue(new_idx) => idx = new_idx,
            ReviewStep::Break => break,
        }
    }
    fix_result
}
fn print_issue_header(issue: &LintIssue, idx: usize, total: usize, verbose: bool) {
println!();
println!("{}", "─".repeat(60).dimmed());
let current = idx + 1;
let severity_badge = match issue.severity {
Severity::Error => format!("[E{}]", current).red().bold(),
Severity::Warning => format!("[W{}]", current).yellow().bold(),
Severity::Info => format!("[I{}]", current).blue(),
};
let lang_tag = issue
.language
.map(|l| format!("[{}]", format!("{:?}", l).to_lowercase()))
.unwrap_or_default()
.dimmed();
let source_tag = issue
.source
.as_ref()
.map(|s| format!("[{}]", s))
.unwrap_or_default()
.dimmed();
let location = if let Some(col) = issue.column {
format!("{}:{}:{}", issue.file_path.display(), issue.line, col)
} else {
format!("{}:{}", issue.file_path.display(), issue.line)
};
let progress = format!("({}/{})", current, total).dimmed();
println!(
" {} {}{} {} {}",
severity_badge,
lang_tag,
source_tag,
location.white().bold(),
progress
);
print_code_context(issue);
if let Some(ref code) = issue.code {
println!(" {} ({})", issue.message, code.cyan());
} else {
println!(" {}", issue.message);
}
if verbose {
if let Some(ref suggestion) = issue.suggestion {
println!(" {} {}", "-->".green(), suggestion);
}
}
}
/// Applies every not-yet-processed suggestion from `start_idx` to the end
/// of the issue list (the "[a]ccept all" action).
///
/// Cached results that failed or produced no suggestions are counted as
/// skipped. The issue at `start_idx` is included unless it was already
/// applied (`current_applied`) or otherwise handled.
fn apply_remaining_from(
    start_idx: usize,
    current_applied: bool,
    issues: &[LintIssue],
    cached_suggestions: &[CachedSuggestion],
    processed: &mut [bool],
    fix_result: &mut AiFixResult,
) {
    println!();
    println!(
        " {} Applying all remaining suggestions...",
        "→".cyan().bold()
    );
    println!();
    for idx in start_idx..issues.len() {
        // Skip the current issue when the user already applied/resolved it,
        // and skip anything handled earlier in the session.
        let already_handled = (idx == start_idx && current_applied) || processed[idx];
        processed[idx] = true;
        if already_handled {
            continue;
        }
        let cached = &cached_suggestions[idx];
        // Unusable results (collection error or no suggestions) are counted
        // as skipped — including at `start_idx`, which previously bypassed
        // this guard and dropped such issues from the tally.
        if cached.result.error.is_some() || cached.result.suggestions.is_empty() {
            fix_result.skipped += 1;
            continue;
        }
        apply_single_suggestion(idx, issues, cached_suggestions, fix_result);
    }
}
/// Applies the first cached suggestion for issue `idx` to its file,
/// printing a diff on success and updating `fix_result` counters.
///
/// Silently does nothing when the cached result holds no suggestions.
fn apply_single_suggestion(
    idx: usize,
    issues: &[LintIssue],
    cached_suggestions: &[CachedSuggestion],
    fix_result: &mut AiFixResult,
) {
    let cached = &cached_suggestions[idx];
    let issue = &issues[idx];
    if let Some(suggestion) = cached.result.suggestions.first() {
        // Snapshot the file before the edit so the diff can show the
        // pre-edit lines afterwards.
        let original_content = fs::read_to_string(&issue.file_path).ok();
        let original_lines: Vec<&str> = original_content
            .as_ref()
            .map(|c| c.lines().collect())
            .unwrap_or_default();
        let start_line = issue.line;
        // Guard against suggestions whose end line precedes the issue line.
        let end_line = suggestion.end_line.max(issue.line);
        if apply_suggestion(issue, suggestion) {
            println!(
                " {} Applied issue #{} ({}:{})",
                "✓".green(),
                idx + 1,
                issue.file_path.display(),
                issue.line
            );
            print_suggestion_diff(&original_lines, suggestion, start_line, end_line);
            fix_result.applied += 1;
            fix_result.modified_files.insert(issue.file_path.clone());
        } else {
            println!(" {} Failed to apply issue #{}", "✗".red(), idx + 1);
            fix_result.skipped += 1;
        }
    }
}
/// Prints the final summary box: counts of collected, applied, skipped,
/// and failed suggestions, plus an early-quit marker when applicable.
fn print_fix_summary(fix_result: &AiFixResult) {
    let heavy_rule = "═".repeat(60);
    println!();
    println!("{}", heavy_rule.dimmed());
    println!(" {}", "AI Fix Summary".bold());
    println!("{}", "─".repeat(60).dimmed());
    println!(
        " Suggestions collected: {}",
        fix_result.suggested.to_string().cyan()
    );
    println!(" Applied: {}", fix_result.applied.to_string().green());
    println!(" Skipped: {}", fix_result.skipped.to_string().yellow());
    println!(" Errors: {}", fix_result.errors.to_string().red());
    if fix_result.quit_early {
        println!(" {}", "(Quit early)".dimmed());
    }
    println!("{}", heavy_rule.dimmed());
    println!();
}
/// Presents the cached AI result for `issue` and collects the user's
/// decision.
///
/// Returns `(applied, action)`: whether a suggestion was applied while
/// handling the input, and the navigation action to take next. Results
/// carrying an error or no suggestions fall back to the reduced
/// navigation-only prompt.
fn show_cached_suggestions(
    issue: &LintIssue,
    result: &SuggestionResult,
    current: usize,
    total: usize,
) -> (bool, ReviewAction) {
    println!();
    if let Some(ref error) = result.error {
        println!(" {} {}", "AI Error:".red(), error);
        return prompt_navigation(issue, current, total, false);
    }
    if result.suggestions.is_empty() {
        println!(
            " {}",
            "No AI suggestions available for this issue.".yellow()
        );
        return prompt_navigation(issue, current, total, false);
    }
    println!(
        " {} {} suggestion{}",
        "AI Generated".green().bold(),
        result.suggestions.len(),
        if result.suggestions.len() == 1 {
            ""
        } else {
            "s"
        }
    );
    println!();
    // Show each suggestion's header and a preview of its change before
    // prompting for the choice.
    for (idx, suggestion) in result.suggestions.iter().enumerate() {
        print_single_suggestion(idx, suggestion);
        print_suggestion_preview(issue, suggestion);
    }
    print_review_menu(issue, current, total, result.suggestions.len());
    handle_review_input(issue, result)
}
/// Prints one suggestion's header: its 1-based index, optional
/// explanation, and optional confidence, color-coded by level.
fn print_single_suggestion(idx: usize, suggestion: &FixSuggestion) {
    println!(
        " {} {}",
        format!("[{}]", idx + 1).cyan().bold(),
        "Suggestion:".bold()
    );
    if let Some(explanation) = suggestion.explanation.as_ref() {
        println!(" {} {}", "Explanation:".dimmed(), explanation);
    }
    if let Some(confidence) = suggestion.confidence {
        let text = format!("{:.0}%", confidence * 100.0);
        // Green for high (>= 80%), yellow for medium (>= 50%), red below.
        let colored = match confidence {
            c if c >= 0.8 => text.green(),
            c if c >= 0.5 => text.yellow(),
            _ => text.red(),
        };
        println!(" {} {}", "Confidence:".dimmed(), colored);
    }
    println!();
}
/// Prints the full action menu for an issue that has suggestions:
/// numbered apply choices, ignore/skip, navigation, accept-all, and quit,
/// then the input prompt.
fn print_review_menu(issue: &LintIssue, current: usize, total: usize, suggestion_count: usize) {
    let nolint_desc = describe_nolint_action(issue);
    println!(
        " {}",
        format!("Issue {}/{}", current + 1, total).bold().cyan()
    );
    println!();
    for i in 1..=suggestion_count {
        // The first suggestion is the default, taken on a bare Enter.
        let default_hint = if i == 1 {
            format!(" {}", "(default, press Enter)".dimmed())
        } else {
            String::new()
        };
        println!(
            " [{}] Apply suggestion #{}{}",
            i.to_string().cyan(),
            i,
            default_hint
        );
    }
    println!(" [{}] Ignore - {}", "i".cyan(), nolint_desc.dimmed());
    println!(" [{}] Skip", "s".cyan());
    // "Previous" only makes sense past the first issue.
    if current > 0 {
        println!(
            " [{}] Previous - go back to issue #{}",
            "p".cyan(),
            current
        );
    }
    println!(" [{}] Go to #N - jump to specific issue", "g".cyan());
    println!(
        " [{}] Accept all - apply all remaining suggestions",
        "a".cyan()
    );
    println!(" [{}] Quit", "q".cyan());
    println!();
    print!(" > ");
    io::stdout().flush().ok();
}
/// Reads and interprets the user's menu choice for an issue that has
/// suggestions.
///
/// An empty line defaults to "1" (apply the first suggestion). Returns
/// `(applied, action)`, where `applied` reports whether a suggestion was
/// applied while handling the input.
fn handle_review_input(issue: &LintIssue, result: &SuggestionResult) -> (bool, ReviewAction) {
    let input = read_line().trim().to_lowercase();
    // Bare Enter selects the first (default) suggestion.
    let input = if input.is_empty() { "1" } else { &input };
    match input {
        "i" | "ignore" => (false, ReviewAction::Ignore),
        "a" | "accept" | "all" => (false, ReviewAction::AcceptAll),
        "s" | "skip" => (false, ReviewAction::Next),
        "p" | "prev" | "previous" => (false, ReviewAction::Previous),
        "q" | "quit" => (false, ReviewAction::Quit),
        input if input.starts_with('g') => {
            // Accept "g N" inline, or prompt separately for the number
            // when only "g" was entered (or the inline number was bad).
            let parts: Vec<&str> = input.split_whitespace().collect();
            if parts.len() >= 2 {
                if let Ok(num) = parts[1].parse::<usize>() {
                    return (false, ReviewAction::GoTo(num));
                }
            }
            print!(" {} ", "Go to issue #:".cyan());
            io::stdout().flush().ok();
            let num_input = read_line().trim().to_string();
            if let Ok(num) = num_input.parse::<usize>() {
                (false, ReviewAction::GoTo(num))
            } else {
                println!("{}", "Invalid issue number".yellow());
                (false, ReviewAction::Next)
            }
        }
        // Anything else is treated as a suggestion number.
        _ => try_apply_numbered_suggestion(issue, result, input),
    }
}
/// Attempts to parse `input` as a 1-based suggestion number and apply the
/// corresponding suggestion to `issue`'s file.
///
/// Returns `(true, Next)` on success; `(false, Next)` when the number is
/// out of range, fails to parse, or the edit cannot be applied.
fn try_apply_numbered_suggestion(
    issue: &LintIssue,
    result: &SuggestionResult,
    input: &str,
) -> (bool, ReviewAction) {
    if let Ok(num) = input.parse::<usize>() {
        if num >= 1 && num <= result.suggestions.len() {
            let suggestion = &result.suggestions[num - 1];
            // Snapshot the file before the edit so the diff can show the
            // pre-edit lines afterwards.
            let original_content = fs::read_to_string(&issue.file_path).ok();
            let original_lines: Vec<&str> = original_content
                .as_ref()
                .map(|c| c.lines().collect())
                .unwrap_or_default();
            let start_line = issue.line;
            // Guard against suggestions whose end line precedes the issue line.
            let end_line = suggestion.end_line.max(issue.line);
            if apply_suggestion(issue, suggestion) {
                println!(" {} Applied suggestion #{}!", "✓".green(), num);
                println!();
                print_suggestion_diff(&original_lines, suggestion, start_line, end_line);
                return (true, ReviewAction::Next);
            } else {
                println!(" {} Failed to apply suggestion.", "✗".red());
                return (false, ReviewAction::Next);
            }
        }
    }
    println!(" {} Invalid choice, skipping.", "Invalid:".yellow());
    (false, ReviewAction::Next)
}
fn prompt_navigation(
issue: &LintIssue,
current: usize,
total: usize,
_applied: bool,
) -> (bool, ReviewAction) {
let nolint_desc = describe_nolint_action(issue);
println!();
println!(
" {}",
format!("Issue {}/{}", current + 1, total).bold().cyan()
);
println!();
println!(" [{}] Ignore - {}", "i".cyan(), nolint_desc.dimmed());
println!(" [{}] Skip", "s".cyan());
if current > 0 {
println!(
" [{}] Previous - go back to issue #{}",
"p".cyan(),
current
);
}
println!(" [{}] Go to #N - jump to specific issue", "g".cyan());
println!(
" [{}] Accept all - apply all remaining suggestions",
"a".cyan()
);
println!(" [{}] Quit", "q".cyan());
println!();
print!(" > ");
io::stdout().flush().ok();
let input = read_line().trim().to_lowercase();
match input.as_str() {
"i" | "ignore" => (false, ReviewAction::Ignore),
"a" | "accept" | "all" => (false, ReviewAction::AcceptAll),
"p" | "prev" | "previous" => (false, ReviewAction::Previous),
"q" | "quit" => (false, ReviewAction::Quit),
input if input.starts_with("g") => {
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() >= 2 {
if let Ok(num) = parts[1].parse::<usize>() {
return (false, ReviewAction::GoTo(num));
}
}
print!(" {} ", "Go to issue #:".cyan());
io::stdout().flush().ok();
let num_input = read_line().trim().to_string();
if let Ok(num) = num_input.parse::<usize>() {
(false, ReviewAction::GoTo(num))
} else {
(false, ReviewAction::Next)
}
}
_ => (false, ReviewAction::Next),
}
}
fn apply_all_suggestions(
issues: &[LintIssue],
cached: &[CachedSuggestion],
_config: &AiFixConfig,
) -> AiFixResult {
let mut fix_result = AiFixResult::default();
for cached_suggestion in cached {
let issue = &issues[cached_suggestion.issue_idx];
let result = &cached_suggestion.result;
if result.error.is_some() || result.suggestions.is_empty() {
fix_result.errors += 1;
continue;
}
fix_result.suggested += 1;
if let Some(suggestion) = result.suggestions.first() {
println!(
" {} Applying to {}:{}",
"→".cyan(),
issue.file_path.display(),
issue.line
);
let original_content = fs::read_to_string(&issue.file_path).ok();
let original_lines: Vec<&str> = original_content
.as_ref()
.map(|c| c.lines().collect())
.unwrap_or_default();
let start_line = issue.line;
let end_line = suggestion.end_line.max(issue.line);
if apply_suggestion(issue, suggestion) {
println!(" {} Applied!", "✓".green());
println!();
print_suggestion_diff(&original_lines, suggestion, start_line, end_line);
fix_result.applied += 1;
fix_result.modified_files.insert(issue.file_path.clone());
} else {
println!(" {} Failed to apply.", "✗".red());
fix_result.skipped += 1;
}
}
}
println!();
println!("{}", "═".repeat(60).dimmed());
println!(" {}", "AI Fix Summary".bold());
println!("{}", "─".repeat(60).dimmed());
println!(
" Suggestions collected: {}",
fix_result.suggested.to_string().cyan()
);
println!(" Applied: {}", fix_result.applied.to_string().green());
println!(" Skipped: {}", fix_result.skipped.to_string().yellow());
println!(" Errors: {}", fix_result.errors.to_string().red());
println!("{}", "═".repeat(60).dimmed());
println!();
fix_result
}
/// Runs the AI-fix flow for a single issue: builds the suggester, fetches
/// a suggestion, and walks the user through applying it.
///
/// Returns `(applied, modified_files)`; `modified_files` contains the
/// issue's file exactly when a suggestion was applied.
///
/// # Errors
/// Returns an error string when the suggester cannot be created.
pub fn run_ai_fix_single(
    issue: &LintIssue,
    config: &AiFixConfig,
) -> Result<(bool, HashSet<PathBuf>), String> {
    let suggester = create_suggester(config)?;
    if config.verbose {
        println!(
            " {} {} ({})",
            "Using:".dimmed(),
            suggester.provider_name(),
            suggester.model_name()
        );
    }
    print!(" {} ", "Getting AI suggestion...".dimmed());
    io::stdout().flush().ok();
    let result = get_suggestion_for_issue(&suggester, issue, config);
    // Erase the progress message by overwriting it with spaces.
    print!("\r{}\r", " ".repeat(40));
    io::stdout().flush().ok();
    let (applied, _quit) = show_ai_suggestions(issue, &result, config);
    let mut modified = HashSet::new();
    if applied {
        modified.insert(issue.file_path.clone());
    }
    Ok((applied, modified))
}
/// Reads one line from stdin, trailing newline included; on a read
/// failure the returned string is simply left empty.
fn read_line() -> String {
    use std::io::BufRead;
    let mut line = String::new();
    io::stdin().lock().read_line(&mut line).ok();
    line
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Defaults: Claude provider, 3 suggestions, interactive (not accept-all).
    #[test]
    fn test_ai_fix_config_default() {
        let config = AiFixConfig::default();
        assert_eq!(config.provider, AiProviderKind::Claude);
        assert_eq!(config.max_suggestions, 3);
        assert!(!config.accept_all);
    }
    /// `with_provider` parses known provider names into their kinds.
    #[test]
    fn test_ai_fix_config_with_provider() {
        let config = AiFixConfig::with_provider("openai");
        assert_eq!(config.provider, AiProviderKind::OpenAi);
        let config = AiFixConfig::with_provider("local");
        assert_eq!(config.provider, AiProviderKind::Local);
        let config = AiFixConfig::with_provider("mock");
        assert_eq!(config.provider, AiProviderKind::Mock);
    }
    /// A default result has zeroed counters and no early-quit flag.
    #[test]
    fn test_ai_fix_result_default() {
        let result = AiFixResult::default();
        assert_eq!(result.suggested, 0);
        assert_eq!(result.applied, 0);
        assert_eq!(result.skipped, 0);
        assert!(!result.quit_early);
    }
}