use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::atomic::{AtomicUsize, Ordering};
use tower_lsp::lsp_types::*;
use crate::code_actions::build_line_deletion_edit;
use crate::parser::with_parse_cache;
use crate::util::position_to_byte_offset;
use crate::virtual_members::with_active_resolved_class_cache;
use crate::{Backend, composer, config};
/// Options controlling a single invocation of the `fix` command.
#[derive(Debug)]
pub struct FixOptions {
    /// Root directory of the Composer project being fixed.
    pub workspace_root: PathBuf,
    /// Optional path restricting which discovered files are processed.
    pub path_filter: Option<PathBuf>,
    /// Requested rule names; an empty list means "all native rules".
    pub rules: Vec<String>,
    /// When true, report fixes but do not write any files.
    pub dry_run: bool,
    /// Whether to emit ANSI colour codes and progress bars.
    pub use_colour: bool,
    /// Whether PHPStan-backed rules (`phpstan.*`) may be requested.
    pub with_phpstan: bool,
}
/// One fix that was (or, in dry-run mode, would be) applied.
struct AppliedFix {
    /// Identifier of the rule that produced the fix, e.g. "unused_import".
    rule: String,
    /// 1-based line number shown to the user.
    line: u32,
    /// Human-readable description, taken from the diagnostic message.
    description: String,
}
/// Outcome of running all applicable rules against a single file.
struct FileFixResult {
    /// Path relative to the workspace root, used for display and sorting.
    display_path: String,
    /// Absolute path used when writing the fixed content back to disk.
    abs_path: PathBuf,
    /// File content after all fixes were applied.
    new_content: String,
    /// True when `new_content` differs from the original content.
    changed: bool,
    /// The individual fixes that produced `new_content`.
    fixes: Vec<AppliedFix>,
}
/// Native (non-PHPStan) rules this command knows how to apply.
const NATIVE_RULES: &[&str] = &["unused_import"];

/// Returns true when `rule` lives in the PHPStan-backed rule namespace.
fn is_phpstan_rule(rule: &str) -> bool {
    rule.starts_with("phpstan.")
}

/// Validates every requested rule name, returning one error message per
/// rule that is unknown or unavailable without `--with-phpstan`.
fn validate_rules(rules: &[String], with_phpstan: bool) -> Vec<String> {
    rules
        .iter()
        .filter_map(|rule| {
            if is_phpstan_rule(rule) {
                // phpstan.* rules are only legal when the flag is set.
                (!with_phpstan)
                    .then(|| format!("Rule '{rule}' requires --with-phpstan to be enabled"))
            } else if NATIVE_RULES.contains(&rule.as_str()) {
                None
            } else {
                Some(format!("Unknown rule: '{rule}'"))
            }
        })
        .collect()
}
/// Resolves the requested rule names to the native rules that should run.
///
/// An empty request means "run every native rule"; otherwise only the
/// native rules that were explicitly requested are kept (PHPStan rule
/// names are simply ignored here — they are handled elsewhere).
fn effective_native_rules(rules: &[String]) -> Vec<&'static str> {
    if rules.is_empty() {
        return NATIVE_RULES.to_vec();
    }
    NATIVE_RULES
        .iter()
        .copied()
        .filter(|native| rules.iter().any(|requested| requested.as_str() == *native))
        .collect()
}
/// Runs the "unused_import" rule against `content`, returning the rewritten
/// content together with a record of every import removal performed.
fn fix_unused_imports(backend: &Backend, uri: &str, content: &str) -> (String, Vec<AppliedFix>) {
    let mut diagnostics: Vec<Diagnostic> = Vec::new();
    backend.collect_unused_import_diagnostics(uri, content, &mut diagnostics);
    if diagnostics.is_empty() {
        return (content.to_string(), Vec::new());
    }
    // Lines that will disappear entirely; the edit builder needs the full
    // set so whole-line deletions do not leave blank lines behind.
    let doomed_lines: HashSet<usize> = diagnostics
        .iter()
        .map(|diag| diag.range.start.line as usize)
        .collect();
    let mut edits: Vec<TextEdit> = diagnostics
        .iter()
        .map(|diag| build_line_deletion_edit(content, &diag.range, &doomed_lines))
        .collect();
    // Apply bottom-to-top so earlier edits never shift later offsets.
    edits.sort_by(|lhs, rhs| rhs.range.start.cmp(&lhs.range.start));
    let applied: Vec<AppliedFix> = diagnostics
        .iter()
        .map(|diag| AppliedFix {
            rule: "unused_import".to_string(),
            line: diag.range.start.line + 1,
            description: diag.message.clone(),
        })
        .collect();
    (apply_text_edits(content, &edits), applied)
}
/// Applies `edits` to `content` sequentially, resolving each edit's
/// positions against the text as it stands at that point.
///
/// Callers must therefore order edits bottom-to-top when the ranges refer
/// to the original document, so earlier replacements do not invalidate the
/// offsets of later ones.
fn apply_text_edits(content: &str, edits: &[TextEdit]) -> String {
    edits.iter().fold(content.to_string(), |mut text, edit| {
        let from = position_to_byte_offset(&text, edit.range.start);
        let to = position_to_byte_offset(&text, edit.range.end);
        // Silently skip edits whose resolved offsets are inverted or
        // fall outside the current text.
        if from <= to && to <= text.len() {
            text.replace_range(from..to, &edit.new_text);
        }
        text
    })
}
/// Entry point for the `fix` CLI command.
///
/// Pipeline: validate the invocation, load config and initialise a headless
/// `Backend`, discover PHP files, parse them in parallel, apply the
/// requested native rules per file, print per-file fix tables plus a
/// summary box, and write changed files back unless `dry_run` is set.
///
/// Exit codes: 0 = success or nothing to do, 1 = invalid rules / missing
/// composer.json / write failure, 2 = dry run found fixes (not applied).
pub async fn run(options: FixOptions) -> i32 {
    let root = &options.workspace_root;
    // Only single Composer projects are supported for now.
    if !root.join("composer.json").is_file() {
        eprintln!("Error: no composer.json found in {}", root.display());
        eprintln!("The fix command currently only supports single Composer projects.");
        return 1;
    }
    // Reject unknown rules (and phpstan.* without --with-phpstan) up front.
    let rule_errors = validate_rules(&options.rules, options.with_phpstan);
    if !rule_errors.is_empty() {
        for err in &rule_errors {
            eprintln!("Error: {err}");
        }
        return 1;
    }
    let native_rules = effective_native_rules(&options.rules);
    if native_rules.is_empty() && !options.with_phpstan {
        eprintln!("No applicable rules to run.");
        return 0;
    }
    // A broken config file is downgraded to a warning plus defaults.
    let cfg = match config::load_config(root) {
        Ok(c) => c,
        Err(e) => {
            eprintln!("Warning: failed to load .phpantom.toml: {e}");
            config::Config::default()
        }
    };
    let backend = Backend::new_headless();
    *backend.workspace_root().write() = Some(root.to_path_buf());
    *backend.config.lock() = cfg.clone();
    let composer_package = composer::read_composer_package(root);
    // PHP version preference: explicit config value, then the composer.json
    // constraint, then the crate-wide default.
    let php_version = cfg
        .php
        .version
        .as_deref()
        .and_then(crate::types::PhpVersion::from_composer_constraint)
        .unwrap_or_else(|| {
            composer_package
                .as_ref()
                .and_then(composer::detect_php_version_from_package)
                .unwrap_or_default()
        });
    backend.set_php_version(php_version);
    backend
        .init_single_project(root, php_version, composer_package, None)
        .await;
    let files = crate::analyse::discover_user_files(&backend, root, options.path_filter.as_deref());
    if files.is_empty() {
        eprintln!("No PHP files found.");
        return 0;
    }
    let file_count = files.len();
    let use_colour = options.use_colour;
    let n_threads = std::thread::available_parallelism()
        .map(|n| n.get())
        .unwrap_or(4);
    if use_colour {
        // "\r\x1b[2K" returns to column 0 and clears the line before redrawing.
        eprint!("\r\x1b[2K {}", progress_bar(0, file_count, "Parsing"));
    }
    // Phase 1: read and parse every file in parallel. Workers pull indices
    // from a shared atomic counter; results are re-slotted by index after.
    let next_idx = AtomicUsize::new(0);
    let file_data: Vec<Option<(String, String, PathBuf)>> = std::thread::scope(|s| {
        let handles: Vec<_> = (0..n_threads)
            .map(|_| {
                let backend = &backend;
                let next_idx = &next_idx;
                let files = &files;
                s.spawn(move || {
                    let mut entries: Vec<(usize, String, String, PathBuf)> = Vec::new();
                    loop {
                        let i = next_idx.fetch_add(1, Ordering::Relaxed);
                        if i >= file_count {
                            break;
                        }
                        let file_path = &files[i];
                        // Unreadable files are skipped; their slot stays None.
                        let content = match std::fs::read_to_string(file_path) {
                            Ok(c) => c,
                            Err(_) => continue,
                        };
                        let uri = crate::util::path_to_uri(file_path);
                        backend.update_ast(&uri, &content);
                        entries.push((i, uri, content, file_path.clone()));
                    }
                    entries
                })
            })
            .collect();
        // Merge each worker's batch back into original file order.
        let mut indexed: Vec<Option<(String, String, PathBuf)>> =
            (0..file_count).map(|_| None).collect();
        for handle in handles {
            for (i, uri, content, path) in handle.join().unwrap_or_default() {
                indexed[i] = Some((uri, content, path));
            }
        }
        indexed
    });
    if use_colour {
        eprint!(
            "\r\x1b[2K {}\n",
            progress_bar(file_count, file_count, "Parsing")
        );
    }
    if use_colour {
        eprint!("\r\x1b[2K {}", progress_bar(0, file_count, "Fixing"));
    }
    // Phase 2: run the native rules over each parsed file in parallel,
    // using the same atomic-counter work distribution as phase 1.
    let next_idx = AtomicUsize::new(0);
    let dry_run = options.dry_run;
    let results: Vec<FileFixResult> = std::thread::scope(|s| {
        let handles: Vec<_> = (0..n_threads)
            .map(|_| {
                let backend = &backend;
                let next_idx = &next_idx;
                let file_data = &file_data;
                let files = &files;
                let native_rules = &native_rules;
                s.spawn(move || {
                    let mut results: Vec<FileFixResult> = Vec::new();
                    loop {
                        let i = next_idx.fetch_add(1, Ordering::Relaxed);
                        if i >= file_count {
                            break;
                        }
                        // Redraw the bar only every 20 files to limit writes.
                        if use_colour && i.is_multiple_of(20) {
                            eprint!("\r\x1b[2K {}", progress_bar(i + 1, file_count, "Fixing"));
                        }
                        let (uri, content, abs_path) = match &file_data[i] {
                            Some(tuple) => (&tuple.0, &tuple.1, &tuple.2),
                            None => continue,
                        };
                        // Guards installing caches for this file's processing;
                        // they stay alive for the rest of the loop body.
                        let _parse_guard = with_parse_cache(content);
                        let _cache_guard =
                            with_active_resolved_class_cache(&backend.resolved_class_cache);
                        let mut current_content = content.clone();
                        let mut all_fixes: Vec<AppliedFix> = Vec::new();
                        // Rules run sequentially, each one seeing the output
                        // of the previous rule.
                        for rule in native_rules.iter() {
                            match *rule {
                                "unused_import" => {
                                    let (new_content, fixes) =
                                        fix_unused_imports(backend, uri, &current_content);
                                    current_content = new_content;
                                    all_fixes.extend(fixes);
                                }
                                _ => {
                                }
                            }
                        }
                        let changed = current_content != *content;
                        if changed {
                            let display_path = files[i]
                                .strip_prefix(root)
                                .unwrap_or(&files[i])
                                .to_string_lossy()
                                .to_string();
                            results.push(FileFixResult {
                                display_path,
                                abs_path: abs_path.clone(),
                                new_content: current_content,
                                changed,
                                fixes: all_fixes,
                            });
                        }
                    }
                    results
                })
            })
            .collect();
        let mut merged: Vec<FileFixResult> = Vec::new();
        for handle in handles {
            merged.extend(handle.join().unwrap_or_default());
        }
        merged
    });
    if use_colour {
        eprint!(
            "\r\x1b[2K {}\n",
            progress_bar(file_count, file_count, "Fixing")
        );
    }
    // Sort by display path for deterministic, readable output.
    let mut sorted_results: Vec<FileFixResult> =
        results.into_iter().filter(|r| r.changed).collect();
    sorted_results.sort_by(|a, b| a.display_path.cmp(&b.display_path));
    if sorted_results.is_empty() {
        print_success_box(use_colour);
        return 0;
    }
    let total_fixes: usize = sorted_results.iter().map(|r| r.fixes.len()).sum();
    let files_changed = sorted_results.len();
    for result in &sorted_results {
        print_fix_table(&result.display_path, &result.fixes, use_colour);
    }
    if dry_run {
        print_dry_run_box(total_fixes, files_changed, use_colour);
        // Distinct exit code so callers can tell "would fix" from clean (0).
        return 2;
    }
    let mut write_errors = 0;
    for result in &sorted_results {
        if let Err(e) = std::fs::write(&result.abs_path, &result.new_content) {
            eprintln!("Error: failed to write {}: {e}", result.display_path);
            write_errors += 1;
        }
    }
    if write_errors > 0 {
        eprintln!("{write_errors} file(s) failed to write.");
        return 1;
    }
    print_fixed_box(total_fixes, files_changed, use_colour);
    0
}
/// Prints one file's fixes as a small two-column table:
/// a right-aligned line-number column and a free-width message column.
fn print_fix_table(path: &str, fixes: &[AppliedFix], use_colour: bool) {
    // Line-number column: widest line number, never narrower than "Line".
    let line_col_w = fixes
        .iter()
        .fold(4, |w, fix| w.max(fix.line.to_string().len()));
    // Message column: widest description, never narrower than the path.
    let msg_col_w = fixes
        .iter()
        .fold(path.len(), |w, fix| w.max(fix.description.len()));
    let sep = format!(
        " {} {}",
        "-".repeat(line_col_w + 2),
        "-".repeat(msg_col_w + 2),
    );
    println!("{sep}");
    // Header row: "Line" plus the file path, green when colour is on.
    if use_colour {
        println!(
            " \x1b[32m{:>line_col_w$}\x1b[0m \x1b[32m{path}\x1b[0m",
            "Line"
        );
    } else {
        println!(" {:>line_col_w$} {path}", "Line");
    }
    println!("{sep}");
    for fix in fixes {
        // First row: line number + description; second row: the rule name
        // (dimmed when colour is on) under a wrench icon.
        println!(" {:>line_col_w$} {}", fix.line.to_string(), fix.description);
        if use_colour {
            println!(
                " {:>line_col_w$} \x1b[2m\u{1f527} {}\x1b[0m",
                "", fix.rule
            );
        } else {
            println!(" {:>line_col_w$} \u{1f527} {}", "", fix.rule);
        }
    }
    println!("{sep}");
    println!();
}
/// Prints the "no fixable issues" banner — a green block when colour is
/// enabled, a single plain line otherwise.
fn print_success_box(use_colour: bool) {
    let text = " [OK] No fixable issues found ";
    if !use_colour {
        println!("{text}");
        return;
    }
    let pad = " ".repeat(text.len());
    println!();
    for row in [pad.as_str(), text, pad.as_str()] {
        // Black-on-green (30;42) fill rows around the message.
        println!(" \x1b[30;42m{row}\x1b[0m");
    }
    println!();
}
/// Prints the dry-run summary banner — a yellow block when colour is
/// enabled, a single plain line otherwise. Labels are pluralised.
fn print_dry_run_box(total_fixes: usize, files_changed: usize, use_colour: bool) {
    let fix_label = if total_fixes == 1 { "fix" } else { "fixes" };
    let file_label = if files_changed == 1 { "file" } else { "files" };
    let text = format!(
        " [DRY RUN] {total_fixes} {fix_label} in {files_changed} {file_label} (not applied) "
    );
    if !use_colour {
        println!("{text}");
        return;
    }
    let pad = " ".repeat(text.len());
    println!();
    for row in [pad.as_str(), text.as_str(), pad.as_str()] {
        // Black-on-yellow (30;43) fill rows around the message.
        println!(" \x1b[30;43m{row}\x1b[0m");
    }
    println!();
}
/// Prints the applied-fixes summary banner — a green block when colour is
/// enabled, a single plain line otherwise. Labels are pluralised.
fn print_fixed_box(total_fixes: usize, files_changed: usize, use_colour: bool) {
    let fix_label = if total_fixes == 1 { "fix" } else { "fixes" };
    let file_label = if files_changed == 1 { "file" } else { "files" };
    let text =
        format!(" [FIXED] Applied {total_fixes} {fix_label} across {files_changed} {file_label} ");
    if !use_colour {
        println!("{text}");
        return;
    }
    let pad = " ".repeat(text.len());
    println!();
    for row in [pad.as_str(), text.as_str(), pad.as_str()] {
        // Black-on-green (30;42) fill rows around the message.
        println!(" \x1b[30;42m{row}\x1b[0m");
    }
    println!();
}
/// Character width of the textual progress bar.
const BAR_WIDTH: usize = 28;

/// Renders a single-line progress indicator such as
/// ` 12/40 [▓▓▓▓▓▓▓▓░░░░░░░░░░░░░░░░░░░░]  30% Parsing`.
///
/// A `total` of zero is rendered as a complete (100%) bar. `done` values
/// greater than `total` are clamped: previously `filled` could exceed
/// `BAR_WIDTH`, making `BAR_WIDTH - filled` underflow and panic.
fn progress_bar(done: usize, total: usize, label: &str) -> String {
    let (pct, filled) = if total == 0 {
        (100, BAR_WIDTH)
    } else {
        // Clamp both values so an out-of-range `done` cannot overflow the
        // bar or the percentage column.
        (
            ((done * 100) / total).min(100),
            ((done * BAR_WIDTH) / total).min(BAR_WIDTH),
        )
    };
    let empty = BAR_WIDTH - filled;
    format!(
        " {done:>width$}/{total} [{bar_fill}{bar_empty}] {pct:>3}% {label}",
        // Right-align `done` to the width of `total` so the bar stays put.
        width = total.to_string().len(),
        bar_fill = "\u{2593}".repeat(filled),
        bar_empty = "\u{2591}".repeat(empty),
    )
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::util::position_to_byte_offset as lsp_position_to_byte_offset;

    // Edits supplied bottom-to-top apply cleanly without offset remapping.
    #[test]
    fn apply_text_edits_removes_lines_bottom_to_top() {
        let content = "line 0\nline 1\nline 2\nline 3\n";
        let edits = vec![
            TextEdit {
                range: Range {
                    start: Position::new(2, 0),
                    end: Position::new(3, 0),
                },
                new_text: String::new(),
            },
            TextEdit {
                range: Range {
                    start: Position::new(0, 0),
                    end: Position::new(1, 0),
                },
                new_text: String::new(),
            },
        ];
        let result = apply_text_edits(content, &edits);
        assert_eq!(result, "line 1\nline 3\n");
    }

    // No edits means the content is returned unchanged.
    #[test]
    fn apply_text_edits_empty_list_returns_unchanged() {
        let content = "unchanged\n";
        let result = apply_text_edits(content, &[]);
        assert_eq!(result, content);
    }

    // Offsets on line 0 are plain column offsets.
    #[test]
    fn position_to_byte_offset_first_line() {
        let content = "hello world\nsecond line\n";
        assert_eq!(lsp_position_to_byte_offset(content, Position::new(0, 0)), 0);
        assert_eq!(lsp_position_to_byte_offset(content, Position::new(0, 5)), 5);
    }

    // Offsets on later lines include the preceding lines plus newlines.
    #[test]
    fn position_to_byte_offset_second_line() {
        let content = "hello\nworld\n";
        assert_eq!(lsp_position_to_byte_offset(content, Position::new(1, 0)), 6);
        assert_eq!(lsp_position_to_byte_offset(content, Position::new(1, 3)), 9);
    }

    // A position beyond the end of the text clamps to the text length.
    #[test]
    fn position_to_byte_offset_past_end() {
        let content = "abc";
        assert_eq!(lsp_position_to_byte_offset(content, Position::new(5, 0)), 3);
    }

    #[test]
    fn validate_rules_accepts_known_native_rules() {
        let errors = validate_rules(&["unused_import".to_string()], false);
        assert!(errors.is_empty());
    }

    #[test]
    fn validate_rules_rejects_unknown_rules() {
        let errors = validate_rules(&["nonexistent_rule".to_string()], false);
        assert_eq!(errors.len(), 1);
        assert!(errors[0].contains("Unknown rule"));
    }

    // phpstan.* rules are only valid when --with-phpstan is set.
    #[test]
    fn validate_rules_rejects_phpstan_without_flag() {
        let errors = validate_rules(&["phpstan.return.unusedType".to_string()], false);
        assert_eq!(errors.len(), 1);
        assert!(errors[0].contains("--with-phpstan"));
    }

    #[test]
    fn validate_rules_accepts_phpstan_with_flag() {
        let errors = validate_rules(&["phpstan.return.unusedType".to_string()], true);
        assert!(errors.is_empty());
    }

    // An empty request selects every native rule.
    #[test]
    fn effective_native_rules_empty_returns_all() {
        let rules = effective_native_rules(&[]);
        assert_eq!(rules, NATIVE_RULES);
    }

    #[test]
    fn effective_native_rules_filters_to_requested() {
        let rules = effective_native_rules(&["unused_import".to_string()]);
        assert_eq!(rules, vec!["unused_import"]);
    }

    // phpstan rule names never map onto native rules.
    #[test]
    fn effective_native_rules_ignores_phpstan_rules() {
        let rules = effective_native_rules(&["phpstan.return.unusedType".to_string()]);
        assert!(rules.is_empty());
    }

    #[test]
    fn is_phpstan_rule_with_prefix() {
        assert!(is_phpstan_rule("phpstan.return.unusedType"));
        assert!(is_phpstan_rule("phpstan.anything"));
    }

    #[test]
    fn is_phpstan_rule_without_prefix() {
        assert!(!is_phpstan_rule("unused_import"));
        assert!(!is_phpstan_rule("deprecated"));
    }

    // Removing an import from the middle of the use-block must delete the
    // whole line, not just its text (no blank line left behind).
    #[test]
    fn fix_removes_middle_import_without_blank_line() {
        let backend = crate::Backend::new_test();
        let content = "\
<?php
namespace Test;
use PHPMD\\Node\\AbstractCallableNode;
use PHPMD\\Node\\MethodNode;
use PHPMD\\Rule;
use PHPMD\\Rule\\Design\\CouplingBetweenObjects;
class Foo extends AbstractCallableNode {
public function bar(MethodNode $m, CouplingBetweenObjects $c): void {}
}
";
        let uri = "file:///test.php";
        backend.update_ast(uri, content);
        let (result, fixes) = fix_unused_imports(&backend, uri, content);
        assert_eq!(fixes.len(), 1, "should fix exactly one unused import");
        assert!(
            fixes[0].description.contains("Rule"),
            "should fix the Rule import"
        );
        let expected = "\
<?php
namespace Test;
use PHPMD\\Node\\AbstractCallableNode;
use PHPMD\\Node\\MethodNode;
use PHPMD\\Rule\\Design\\CouplingBetweenObjects;
class Foo extends AbstractCallableNode {
public function bar(MethodNode $m, CouplingBetweenObjects $c): void {}
}
";
        assert_eq!(
            result, expected,
            "Removing a middle import should not leave a blank line"
        );
    }

    // Same check for the first import of the block.
    #[test]
    fn fix_removes_first_import_without_blank_line() {
        let backend = crate::Backend::new_test();
        let content = "\
<?php
namespace Test;
use PHPMD\\Node\\AbstractCallableNode;
use PHPMD\\Node\\MethodNode;
use PHPMD\\Rule;
class Foo {
public function bar(MethodNode $m, Rule $r): void {}
}
";
        let uri = "file:///test.php";
        backend.update_ast(uri, content);
        let (result, fixes) = fix_unused_imports(&backend, uri, content);
        assert_eq!(fixes.len(), 1);
        assert!(fixes[0].description.contains("AbstractCallableNode"));
        let expected = "\
<?php
namespace Test;
use PHPMD\\Node\\MethodNode;
use PHPMD\\Rule;
class Foo {
public function bar(MethodNode $m, Rule $r): void {}
}
";
        assert_eq!(
            result, expected,
            "Removing the first import should not leave a blank line"
        );
    }

    // Same check for the last import of the block.
    #[test]
    fn fix_removes_last_import_without_blank_line() {
        let backend = crate::Backend::new_test();
        let content = "\
<?php
namespace Test;
use PHPMD\\Node\\AbstractCallableNode;
use PHPMD\\Node\\MethodNode;
use PHPMD\\Rule;
class Foo {
public function bar(AbstractCallableNode $a, MethodNode $m): void {}
}
";
        let uri = "file:///test.php";
        backend.update_ast(uri, content);
        let (result, fixes) = fix_unused_imports(&backend, uri, content);
        assert_eq!(fixes.len(), 1);
        assert!(fixes[0].description.contains("Rule"));
        let expected = "\
<?php
namespace Test;
use PHPMD\\Node\\AbstractCallableNode;
use PHPMD\\Node\\MethodNode;
class Foo {
public function bar(AbstractCallableNode $a, MethodNode $m): void {}
}
";
        assert_eq!(
            result, expected,
            "Removing the last import should not leave a blank line"
        );
    }
}