use std::{
borrow::Cow,
collections::{BTreeMap, BTreeSet, HashMap, HashSet},
fs, iter, mem,
path::PathBuf,
sync::OnceLock,
};
use ra_ap_syntax::{
self, AstNode, Edition, SyntaxNode,
ast::{
self, Attr, CallExpr, HasAttrs, HasName, HasVisibility, Item, MacroCall, Module, PathExpr,
PathPat, PathType, RecordExpr, RecordPat, Use,
},
};
use regex::Regex;
use crate::style::{
fixes,
shared::{self, Edit, FileContext, TopItem, TopKind, Violation, WORKSPACE_IMPORT_ROOTS},
};
/// Result bundle produced when planning IMPORT-009 fixes.
///
/// Tuple layout (consumer is `apply_import009_rules`, outside this chunk —
/// TODO confirm field meaning there):
/// - `bool`: presumably whether any fix applies,
/// - two `Vec<(usize, usize, String)>`: (start, end, replacement)-style spans,
/// - `Vec<(&TopItem, String, Option<String>)>`: per-use-item rewrite data.
type Import009Plan<'a> = (
    bool,
    Vec<(usize, usize, String)>,
    Vec<(usize, usize, String)>,
    Vec<(&'a TopItem, String, Option<String>)>,
);
/// Syntactic context that produced an IMPORT-008 rewrite candidate.
/// (The collection sites are outside this chunk.)
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum Import008CandidateKind {
    /// Path found in type position.
    TypePath,
    /// Path found in value position (e.g. a call or receiver).
    ValueReceiver,
    /// Module path qualifying a macro invocation.
    MacroModule,
    /// Path inside a `#[derive(...)]` list.
    Derive,
}
/// One qualified-path occurrence that IMPORT-008 may shorten by adding an
/// import for its leaf symbol.
#[derive(Clone, Debug)]
struct Import008Candidate {
    /// Line of the occurrence (same 1-based convention as `TopItem::line`).
    line: usize,
    /// Start byte offset of the span to replace.
    start: usize,
    /// End byte offset of the span to replace.
    end: usize,
    kind: Import008CandidateKind,
    /// Leaf symbol that would be imported.
    symbol: String,
    /// Use path that would be added for `symbol`.
    import_path: String,
    /// Text substituted over `start..end`.
    replacement: String,
}
/// A fully qualified function path occurrence that the IMPORT-004
/// qualified-path rule may rewrite to a shorter `module::leaf` form.
#[derive(Clone, Debug)]
struct Import004QualifiedFunctionCandidate {
    /// Line of the occurrence (1-based, as elsewhere in this file).
    line: usize,
    /// Start byte offset of the path text.
    start: usize,
    /// End byte offset of the path text.
    end: usize,
    /// The path exactly as written at the call site.
    full_path: String,
    /// Path of the module containing the leaf — TODO confirm exact
    /// segmentation against the collection code.
    parent_module_path: String,
    /// Module segment used to qualify the shortened call.
    module_symbol: String,
    /// Final path segment (the function name text).
    leaf_text: String,
}
/// Symbol-keyed lookup tables over every imported symbol in the file, built
/// from the per-use-item analyses by `collect_imported_symbol_maps`.
#[derive(Default)]
struct ImportedSymbolMaps {
    /// Imported symbol -> set of use paths importing it (used for
    /// ambiguity/conflict checks, see `import006_path_conflicts_in_target_scope`).
    symbol_paths: HashMap<String, HashSet<String>>,
    /// Imported symbol -> lines of the use items importing it — TODO confirm.
    symbol_lines: HashMap<String, HashSet<usize>>,
    /// Imported symbol -> full (leaf-qualified) import paths.
    full_paths_by_symbol: HashMap<String, HashSet<String>>,
}
/// Pre-computed facts about a single top-level `use` item.
struct UseItemAnalysis<'a> {
    /// The originating top-level item.
    item: &'a TopItem,
    /// The use path text as returned by `extract_use_path`.
    path: String,
    /// Leaf symbols introduced by this item (`imported_symbols_from_use_path`).
    imported_symbols: Vec<String>,
    /// The same symbols normalized via `normalize_ident`, for set lookups.
    imported_symbol_set: HashSet<String>,
    /// Full paths imported through `self` use-tree entries.
    imported_self_full_paths: HashSet<String>,
    /// Every full path introduced by this item.
    imported_full_paths: Vec<String>,
}
/// Aggregated import analysis for one file, shared by all import rules.
struct ImportAnalysis<'a> {
    /// Runs of consecutive non-`pub` use items.
    use_runs: Vec<Vec<&'a TopItem>>,
    /// One analysis per use item with an extractable path.
    use_item_analyses: Vec<UseItemAnalysis<'a>>,
    /// Maps `use_item_lock_key(item)` to an index into `use_item_analyses`.
    use_item_analysis_by_key: HashMap<(usize, usize), usize>,
    /// Root names of modules defined locally in this file.
    local_module_roots: HashSet<String>,
    /// Names of items defined locally in this file.
    local_defined_symbols: HashSet<String>,
    /// Symbol-keyed lookup tables across all use items.
    imported_symbol_maps: ImportedSymbolMaps,
    /// Qualified paths observed in type position, keyed by leaf symbol.
    qualified_type_paths_by_symbol: HashMap<String, HashSet<String>>,
    /// Qualified paths observed in value position, keyed by leaf symbol.
    qualified_value_paths_by_symbol: HashMap<String, HashSet<String>>,
}
impl<'a> ImportAnalysis<'a> {
    /// Looks up the pre-computed analysis for `item` via its lock key,
    /// returning `None` when the item was never analyzed.
    fn use_item_analysis(&self, item: &TopItem) -> Option<&UseItemAnalysis<'a>> {
        self.use_item_analysis_by_key
            .get(&use_item_lock_key(item))
            .and_then(|&idx| self.use_item_analyses.get(idx))
    }
}
/// Candidate for recovering a symbol through an existing `use` item spanning
/// `start_line..=end_line` (collection semantics live outside this chunk).
#[derive(Clone, Debug)]
struct Import008UseRecoveryCandidate {
    /// Line where the symbol occurrence was found — TODO confirm.
    line: usize,
    start_line: usize,
    end_line: usize,
    symbol: String,
    import_path: String,
}
/// A group of related use-tree entries tracked while rewriting imports that
/// mix `self` with braced children (exact usage is outside this chunk).
#[derive(Default)]
struct MixedUseGroup {
    /// Indices of member entries into a caller-owned list.
    indices: Vec<usize>,
    /// Whether the group imports `self`.
    has_self: bool,
    /// Child entry texts under the shared head.
    children: Vec<String>,
}
/// A `use` item queued for regrouped emission; `origin`/`order` act as sort
/// keys and `block` holds the rendered text (used by the group-fix planner —
/// TODO confirm exact key semantics there).
struct UseEntry<'a> {
    item: &'a TopItem,
    origin: usize,
    order: usize,
    block: String,
}
/// One entry of a `#[derive(...)]` list, with enough position data to detect
/// and restore ordering.
struct DeriveOrderEntry {
    /// Entry text as displayed in the derive list.
    display_text: String,
    /// Sort key — TODO confirm exact meaning against the IMPORT-011 code.
    origin: usize,
    /// Index of the entry in the original derive list.
    original_index: usize,
}
/// A `#[derive(...)]` attribute whose entry order IMPORT-011 may normalize.
struct DeriveOrderCandidate {
    line: usize,
    /// Byte span of the replaced attribute text — TODO confirm boundaries.
    start: usize,
    end: usize,
    /// Whether this is an inner attribute (`#![...]`).
    is_inner: bool,
    entries: Vec<DeriveOrderEntry>,
}
/// Mutable state threaded through trait keep-alive normalization
/// (the normalization pass itself is outside this chunk).
#[derive(Default)]
struct TraitKeepAliveNormalizationState {
    affected_symbols: HashSet<String>,
    changed: bool,
    seen_trait_keys: HashSet<String>,
}
/// Scope a nested `use` item is hoisted into for IMPORT-006: a re-parsed
/// context for just the scope body, the byte offset of that body in the
/// original file (for translating edits back), and a per-scope dedup key.
struct Import006TargetScope {
    ctx: FileContext,
    offset: usize,
    key: usize,
}
/// How IMPORT-004 accesses a symbol through its module: the path to use at
/// call sites, and whether the parent module import must be kept alive.
struct Import004ModuleAccessPlan {
    access_path: String,
    keep_parent_module_import: bool,
}
/// A path occurrence in value position together with its trailing `NameRef`;
/// `is_record_context` marks record-literal/record-pattern usage.
struct ValuePathCandidate {
    path: ast::Path,
    name_ref: ast::NameRef,
    is_record_context: bool,
}
/// A path occurrence in type position; `suffix` carries trailing text
/// (presumably generic arguments — TODO confirm) kept verbatim on rewrite.
struct TypePathCandidate {
    path: ast::Path,
    name_ref: ast::NameRef,
    suffix: String,
}
/// One comma-level segment of a braced use list: either a plain name, or a
/// nested `head::{children}` group.
#[derive(Clone)]
enum BracedImportSegment {
    Simple(String),
    Nested { head: String, children: Vec<String> },
}
/// Whether a given path occurrence must stay fully qualified or must be
/// shortened to the bare symbol.
#[derive(Clone, Copy, Eq, PartialEq)]
enum PathQualificationRequirement {
    Qualified,
    Unqualified,
}
/// Entry point for all `RUST-STYLE-IMPORT-*` rules on a single file.
///
/// Runs each import rule in sequence, collecting `violations` and — when
/// `emit_edits` is set — autofix `edits`. Lines already rewritten by an
/// earlier rule are accumulated into skip sets so later group-level rules do
/// not emit conflicting edits for the same lines.
pub(crate) fn check_import_rules(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    with_import_shortening: bool,
) {
    let use_runs = collect_non_pub_use_runs(ctx);
    apply_pub_use_group_rules(ctx, violations, edits, emit_edits);
    let use_items = use_runs.iter().flat_map(|run| run.iter().copied()).collect::<Vec<_>>();
    // IMPORT-010 (`super` imports) and IMPORT-007 (glob imports) run first;
    // lines they fix are excluded from the later per-item rules.
    let import010_fixed_lines =
        apply_import010_no_super_use_rule(ctx, violations, edits, emit_edits, &use_items);
    let import007_fixed_lines =
        apply_import007_no_glob_use_rule(ctx, violations, edits, emit_edits, &use_items);
    // `error.rs` files are exempt from the remaining rules.
    // NOTE(review): rationale not visible in this chunk — confirm intent.
    if ctx.path.file_name().is_some_and(|name| name == "error.rs") {
        return;
    }
    let analysis = build_import_analysis(ctx, use_runs, use_items);
    let mut use_item_skip_lines = HashSet::new();
    use_item_skip_lines.extend(import010_fixed_lines.iter().copied());
    use_item_skip_lines.extend(import007_fixed_lines.iter().copied());
    // Per-use-item rules (IMPORT-004 family).
    let import004_fixed_lines = apply_use_item_rules(
        ctx,
        violations,
        edits,
        emit_edits,
        &analysis.use_item_analyses,
        &use_item_skip_lines,
        with_import_shortening,
    );
    push_import004_ambiguous_symbol_violations(ctx, violations, &analysis.imported_symbol_maps);
    let import004_qualified_path_fixed_lines = apply_import004_qualified_function_path_rule(
        ctx,
        violations,
        edits,
        emit_edits,
        &analysis.use_runs,
        &analysis.local_module_roots,
        &analysis.imported_symbol_maps.full_paths_by_symbol,
        true,
    );
    let import009_fixed_lines =
        apply_import009_rules(ctx, violations, edits, emit_edits, &analysis, &use_item_skip_lines);
    let import008_group_skip_lines =
        apply_import008_rules(ctx, violations, edits, emit_edits, &analysis);
    // Everything fixed so far is off-limits for the group order/spacing pass.
    let mut import_group_skip_lines = HashSet::new();
    import_group_skip_lines.extend(import010_fixed_lines);
    import_group_skip_lines.extend(import007_fixed_lines);
    import_group_skip_lines.extend(import004_fixed_lines);
    import_group_skip_lines.extend(import004_qualified_path_fixed_lines);
    import_group_skip_lines.extend(import009_fixed_lines);
    import_group_skip_lines.extend(import008_group_skip_lines);
    apply_import011_derive_order_rule(
        ctx,
        violations,
        edits,
        emit_edits,
        &analysis.local_module_roots,
        &analysis.local_defined_symbols,
        &analysis.imported_symbol_maps.full_paths_by_symbol,
    );
    // Group-level ordering/spacing fixes are planned regardless of
    // `emit_edits` (the plan also yields which lines are fixable) but only
    // pushed when edits were requested.
    let (planned_import_group_edits, fixable_import_group_lines) =
        build_import_group_fix_plans(ctx, &analysis, &import_group_skip_lines);
    if emit_edits {
        edits.extend(planned_import_group_edits);
    }
    push_import_group_order_spacing_violations(
        ctx,
        violations,
        &analysis.use_runs,
        &fixable_import_group_lines,
        &analysis.local_module_roots,
    );
    apply_import006_local_use_scope_rule(ctx, violations, edits, emit_edits);
    check_cfg_test_module_import_rules(ctx, violations, edits, emit_edits);
}
/// Resolves the symbols that a `use super::*` inside `use_item`'s module
/// would pull in and that the module actually references.
///
/// Symbols are gathered from the parent module's item list (or from the file
/// root when there is no parent module), then filtered down to those that are
/// used, not already imported, not the current module's own name, and not a
/// test module name. Returns `None` when the surrounding scope yields no
/// symbols at all.
pub(crate) fn exported_symbols_from_super_scope(use_item: &Use) -> Option<BTreeSet<String>> {
    let current_module = use_item.syntax().ancestors().find_map(Module::cast)?;
    let current_module_item_list = current_module.item_list()?;
    let current_module_name = current_module.name().map(|name| name.text().to_string());
    // Fix: the `&current_module` argument had been corrupted to `¤t_module`
    // (HTML-entity mangling of `&curren` + `t_module`).
    let already_imported =
        imported_symbols_from_current_module_use_items(&current_module, use_item);
    let used_symbols = collect_used_symbols_from_syntax(current_module_item_list.syntax());
    let mut symbols = BTreeSet::new();
    // `skip(1)` steps past the current module itself before looking for an
    // enclosing parent module.
    if let Some(parent_module) = current_module.syntax().ancestors().skip(1).find_map(Module::cast)
    {
        if let Some(item_list) = parent_module.item_list() {
            collect_scope_symbols_from_items(
                item_list.syntax().children().filter_map(Item::cast),
                &mut symbols,
            );
        }
    } else if let Some(source_file) =
        current_module.syntax().ancestors().find_map(ast::SourceFile::cast)
    {
        collect_scope_symbols_from_items(
            source_file.syntax().children().filter_map(Item::cast),
            &mut symbols,
        );
    }
    if symbols.is_empty() {
        return None;
    }
    let used = symbols
        .into_iter()
        .filter(|symbol| current_module_name.as_deref() != Some(symbol.as_str()))
        .filter(|symbol| !matches!(symbol.as_str(), "tests" | "_test"))
        .filter(|symbol| !already_imported.contains(symbol))
        .filter(|symbol| used_symbols.contains(symbol))
        .collect::<BTreeSet<_>>();
    Some(used)
}
/// Builds the shared per-file `ImportAnalysis` consumed by the individual
/// import rules: per-`use`-item breakdowns, a key-based index into them, and
/// the locally defined symbol/module tables.
fn build_import_analysis<'a>(
    ctx: &FileContext,
    use_runs: Vec<Vec<&'a TopItem>>,
    use_items: Vec<&'a TopItem>,
) -> ImportAnalysis<'a> {
    let use_item_analyses = collect_use_item_analyses(ctx, &use_items);
    // Index each analysis by its item's lock key for O(1) lookup later.
    let use_item_analysis_by_key = use_item_analyses
        .iter()
        .enumerate()
        .map(|(idx, analysis)| (use_item_lock_key(analysis.item), idx))
        .collect::<HashMap<_, _>>();
    ImportAnalysis {
        imported_symbol_maps: collect_imported_symbol_maps(&use_item_analyses),
        local_defined_symbols: collect_local_defined_symbols(ctx),
        local_module_roots: collect_local_module_roots(ctx),
        qualified_type_paths_by_symbol: collect_qualified_type_paths_by_symbol(ctx),
        qualified_value_paths_by_symbol: collect_qualified_value_paths_by_symbol(ctx),
        use_item_analysis_by_key,
        use_item_analyses,
        use_runs,
    }
}
fn collect_use_item_analyses<'a>(
ctx: &FileContext,
use_items: &[&'a TopItem],
) -> Vec<UseItemAnalysis<'a>> {
let mut analyses = Vec::with_capacity(use_items.len());
for item in use_items {
let Some(path) = extract_use_path(ctx, item) else {
continue;
};
let imported_symbols = imported_symbols_from_use_path(&path);
let imported_symbol_set = imported_symbols
.iter()
.map(|symbol| normalize_ident(symbol).to_owned())
.collect::<HashSet<_>>();
analyses.push(UseItemAnalysis {
imported_full_paths: imported_full_paths_from_use_path(&path),
imported_self_full_paths: imported_self_full_paths_from_use_path(&path),
imported_symbol_set,
imported_symbols,
item,
path,
});
}
analyses
}
/// RUST-STYLE-IMPORT-006: `use` items may only appear at file top level or
/// directly inside a module body. Any other `use` item is reported and, when
/// a safe fix plan exists, deleted locally and re-inserted at the enclosing
/// scope's top level.
fn apply_import006_local_use_scope_rule(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
) {
    // Tracks paths already scheduled for insertion per target scope so the
    // same import is not inserted twice within one pass.
    let mut pending_paths_by_scope = HashMap::<usize, HashSet<String>>::new();
    for use_item in ctx.source_file.syntax().descendants().filter_map(Use::cast) {
        if import006_is_allowed_use_item(&use_item) {
            continue;
        }
        let range = use_item.syntax().text_range();
        let line = shared::line_from_offset(&ctx.line_starts, usize::from(range.start()));
        let fix_plan =
            build_import006_fix_plan(ctx, &use_item, &mut pending_paths_by_scope, emit_edits);
        // The violation is marked fixable exactly when a plan was built.
        let fixable = fix_plan.is_some();
        shared::push_violation(
            violations,
            ctx,
            line,
            "RUST-STYLE-IMPORT-006",
            "Use items must appear only at file top level or module top level.",
            fixable,
        );
        // The insert edit is optional: when an equivalent import already
        // exists in the target scope, only the delete is emitted.
        if emit_edits && let Some((delete_edit, insert_edit)) = fix_plan {
            edits.push(delete_edit);
            if let Some(insert_edit) = insert_edit {
                edits.push(insert_edit);
            }
        }
    }
}
/// A `use` item is allowed where its parent is the source file itself, or an
/// item list that belongs directly to a `mod` block.
fn import006_is_allowed_use_item(use_item: &Use) -> bool {
    match use_item.syntax().parent() {
        None => false,
        Some(parent) => {
            if ra_ap_syntax::SourceFile::cast(parent.clone()).is_some() {
                return true;
            }
            ast::ItemList::cast(parent)
                .and_then(|item_list| item_list.syntax().parent())
                .and_then(Module::cast)
                .is_some()
        },
    }
}
/// Builds the IMPORT-006 autofix for a nested `use` item: an edit deleting
/// the local item plus — unless an equivalent import already exists in the
/// target scope — an edit inserting it at the scope's top level.
///
/// Returns `None` ("not fixable") for renamed (`as`), glob, commented, or
/// cfg-gated items, and when hoisting would conflict with an existing name in
/// the target scope.
fn build_import006_fix_plan(
    ctx: &FileContext,
    use_item: &Use,
    pending_paths_by_scope: &mut HashMap<usize, HashSet<String>>,
    // Kept for signature stability; the insert edit must be computed either
    // way to determine fixability, so the flag is no longer consulted here.
    _emit_edits: bool,
) -> Option<(Edit, Option<Edit>)> {
    let raw_item = use_item.syntax().text().to_string();
    let path = extract_use_path_from_text(&raw_item)?;
    if raw_item.contains(" as ")
        || path.contains('*')
        || path.contains(" as ")
        || import006_use_item_has_comments(ctx, use_item)
        || import006_use_item_has_cfg_attrs(use_item)
        || import006_has_cfg_ancestor(use_item)
    {
        return None;
    }
    let target_scope = import006_target_scope(ctx, use_item)?;
    let normalized_path = normalize_use_path_for_equivalence(&path);
    let known_paths = pending_paths_by_scope.entry(target_scope.key).or_default();
    let path_already_pending = known_paths.contains(&normalized_path);
    let equivalent_exists = path_already_pending
        || import006_target_scope_has_equivalent_path(&target_scope.ctx, &normalized_path);
    if !equivalent_exists && import006_path_conflicts_in_target_scope(&target_scope.ctx, &path) {
        return None;
    }
    let delete_edit = import006_delete_local_use_edit(ctx, use_item)?;
    if equivalent_exists {
        return Some((delete_edit, None));
    }
    // Fix: the original `if emit_edits { .. } else { .. }` had byte-identical
    // branches; the insert edit is now computed unconditionally.
    let insert_edit = import006_insert_scope_use_edit(&target_scope.ctx, &path)
        .and_then(|edit| import006_translate_scope_edit(ctx, target_scope.offset, edit))?;
    known_paths.insert(normalized_path);
    Some((delete_edit, Some(insert_edit)))
}
/// Detects any `cfg(` attribute directly on the `use` item (whitespace in the
/// attribute text is ignored).
fn import006_use_item_has_cfg_attrs(use_item: &Use) -> bool {
    use_item.attrs().any(|attr| {
        let compact = attr.syntax().text().to_string().replace(' ', "");
        compact.contains("cfg(")
    })
}
/// Detects a `cfg(` attribute on any non-module ancestor item of the `use`
/// item (module-level `cfg` gating is handled elsewhere).
fn import006_has_cfg_ancestor(use_item: &Use) -> bool {
    use_item
        .syntax()
        .ancestors()
        .skip(1)
        .filter_map(Item::cast)
        .any(|item| {
            if matches!(item, Item::Module(_)) {
                return false;
            }
            item.attrs().any(|attr| {
                attr.syntax().text().to_string().replace(' ', "").contains("cfg(")
            })
        })
}
/// Returns `true` when the `use` item's source lines — or the line directly
/// above it — contain comment markers; such items are excluded from the
/// IMPORT-006 autofix so comments are not silently dropped. Conservatively
/// returns `true` when the item's lines cannot be sliced out of the text.
fn import006_use_item_has_comments(ctx: &FileContext, use_item: &Use) -> bool {
    let range = use_item.syntax().text_range();
    let start = usize::from(range.start());
    let end = usize::from(range.end());
    let start_line = shared::line_from_offset(&ctx.line_starts, start);
    // `end` is exclusive; back up one byte so a range ending exactly at a
    // line start maps to the previous line.
    let end_line = if end == 0 {
        start_line
    } else {
        shared::line_from_offset(&ctx.line_starts, end.saturating_sub(1))
    };
    let line_start = shared::offset_from_line(&ctx.line_starts, start_line).unwrap_or(start);
    let line_end =
        shared::offset_from_line(&ctx.line_starts, end_line + 1).unwrap_or(ctx.text.len());
    let Some(snippet) = ctx.text.get(line_start..line_end) else {
        return true;
    };
    if snippet.contains("//") || snippet.contains("/*") || snippet.contains("*/") {
        return true;
    }
    // Also treat a comment on the line immediately above as attached.
    // Lines are 1-based; `ctx.lines` is 0-indexed, hence the double shift.
    let line_idx = start_line.saturating_sub(1);
    let Some(line) = (line_idx > 0).then(|| ctx.lines.get(line_idx - 1)).flatten() else {
        return false;
    };
    let trimmed = line.trim();
    !trimmed.is_empty() && import006_is_comment_line(trimmed)
}
/// Whether an already-trimmed line begins like a line or block comment
/// (including block-comment continuation/terminator lines).
fn import006_is_comment_line(trimmed: &str) -> bool {
    // `"*"` subsumes `"*/"`, kept for symmetry with the marker list.
    ["//", "/*", "*", "*/"].iter().any(|marker| trimmed.starts_with(marker))
}
/// Finds the scope a nested `use` item should be hoisted into: the nearest
/// enclosing module body, or the whole file when no module encloses it.
///
/// The returned scope carries a `FileContext` re-parsed from just the scope's
/// body text, plus the body's byte offset in the original file (used to
/// translate scope-local edits back) doubling as the per-scope dedup `key`.
fn import006_target_scope(ctx: &FileContext, use_item: &Use) -> Option<Import006TargetScope> {
    for item_list in use_item.syntax().ancestors().skip(1).filter_map(ast::ItemList::cast) {
        // Only item lists that belong directly to a `mod` block qualify.
        if item_list.syntax().parent().and_then(Module::cast).is_none() {
            continue;
        }
        let (body_start, body_end) = item_list_body_text_range(&item_list)?;
        let body_text = ctx.text.get(body_start..body_end)?.to_owned();
        let scope_ctx = shared::read_file_context_from_text(&ctx.path, body_text).ok()??;
        return Some(Import006TargetScope { ctx: scope_ctx, offset: body_start, key: body_start });
    }
    // No enclosing module: the target is the file itself (offset/key 0).
    Some(Import006TargetScope {
        ctx: shared::read_file_context_from_text(&ctx.path, ctx.text.clone()).ok()??,
        offset: 0,
        key: 0,
    })
}
/// Whether the target scope already contains a top-level `use` item whose
/// normalized path equals `normalized_path`.
fn import006_target_scope_has_equivalent_path(ctx: &FileContext, normalized_path: &str) -> bool {
    ctx.top_items
        .iter()
        .filter(|item| item.kind == TopKind::Use)
        .filter_map(|item| extract_use_path(ctx, item))
        .any(|path| normalize_use_path_for_equivalence(&path) == normalized_path)
}
/// Returns `true` when hoisting `path` into the target scope would clash with
/// a locally defined symbol or with an existing import of the same name from
/// a *different* path — in which case the IMPORT-006 autofix is abandoned.
/// A path with no extractable symbols is conservatively treated as a conflict.
fn import006_path_conflicts_in_target_scope(ctx: &FileContext, path: &str) -> bool {
    let imported_symbols = imported_symbols_from_use_path(path);
    if imported_symbols.is_empty() {
        return true;
    }
    // Consider both non-pub and pub use items already present in the scope.
    let mut target_use_items = collect_non_pub_use_runs(ctx)
        .into_iter()
        .flat_map(|run| run.into_iter())
        .collect::<Vec<_>>();
    target_use_items.extend(collect_pub_use_runs(ctx).into_iter().flat_map(|run| run.into_iter()));
    let target_use_item_analyses = collect_use_item_analyses(ctx, &target_use_items);
    let target_maps = collect_imported_symbol_maps(&target_use_item_analyses);
    let local_defined_symbols = collect_local_defined_symbols(ctx);
    let target_path = normalize_use_path_for_equivalence(path);
    imported_symbols.into_iter().any(|symbol| {
        let normalized_symbol = normalize_ident(&symbol);
        if local_defined_symbols.contains(normalized_symbol) {
            return true;
        }
        // Same symbol already imported from a different normalized path.
        target_maps.symbol_paths.iter().any(|(existing_symbol, paths)| {
            normalize_ident(existing_symbol) == normalized_symbol
                && paths.iter().any(|existing_path| {
                    normalize_use_path_for_equivalence(existing_path) != target_path
                })
        })
    })
}
/// Builds the edit that removes a nested `use` item: whole source lines when
/// the item is alone on them, otherwise just the item's own byte span.
fn import006_delete_local_use_edit(ctx: &FileContext, use_item: &Use) -> Option<Edit> {
    let range = use_item.syntax().text_range();
    let item_start = usize::from(range.start());
    let item_end = usize::from(range.end());
    let start_line = shared::line_from_offset(&ctx.line_starts, item_start);
    let end_line = match item_end {
        0 => start_line,
        end => shared::line_from_offset(&ctx.line_starts, end - 1),
    };
    // Prefer deleting full lines so no blank residue is left behind.
    let (start, end) =
        import006_full_line_delete_range(ctx, start_line, end_line, item_start, item_end)
            .unwrap_or((item_start, item_end));
    Some(Edit { start, end, replacement: String::new(), rule: "RUST-STYLE-IMPORT-006" })
}
/// Returns the byte range of the full source lines covering the item when
/// the item is the only non-whitespace content on them; `None` otherwise.
fn import006_full_line_delete_range(
    ctx: &FileContext,
    start_line: usize,
    end_line: usize,
    item_start: usize,
    item_end: usize,
) -> Option<(usize, usize)> {
    let line_start = shared::offset_from_line(&ctx.line_starts, start_line)?;
    let line_end =
        shared::offset_from_line(&ctx.line_starts, end_line + 1).unwrap_or(ctx.text.len());
    // Both the text before and after the item on its lines must be blank.
    let only_item_on_lines = ctx.text.get(line_start..item_start)?.trim().is_empty()
        && ctx.text.get(item_end..line_end)?.trim().is_empty();
    only_item_on_lines.then_some((line_start, line_end))
}
/// Builds the edit inserting `path` into the target scope's import section,
/// reusing the IMPORT-008 insert planner but tagging the edit as IMPORT-006.
fn import006_insert_scope_use_edit(ctx: &FileContext, path: &str) -> Option<Edit> {
    let pending_import_paths = BTreeSet::from([path.to_owned()]);
    let (edit, _touched_lines) = build_import008_insert_edit(
        ctx,
        &collect_non_pub_use_runs(ctx),
        &collect_local_module_roots(ctx),
        &pending_import_paths,
        "RUST-STYLE-IMPORT-006",
    )?;
    Some(Edit { rule: "RUST-STYLE-IMPORT-006", ..edit })
}
/// Determines the indentation string to use for a `use` item inserted at
/// `insert_pos`: the first top-level item's indent when one exists, otherwise
/// the indent of the nearest non-blank line around the insert position.
fn import006_scope_insert_indent(ctx: &FileContext, insert_pos: usize) -> Option<String> {
    // Leading run of spaces/tabs on a line.
    fn leading_ws(line: &str) -> String {
        line.chars().take_while(|ch| matches!(ch, ' ' | '\t')).collect()
    }
    if let Some(first_item) = ctx.top_items.first()
        && let Some(line_text) = ctx.lines.get(first_item.line.saturating_sub(1))
    {
        return Some(leading_ws(line_text));
    }
    // Fix: `line_from_offset` yields 1-based line numbers (see the
    // `.saturating_sub(1)` indexing convention used throughout this file),
    // but `ctx.lines` is 0-indexed — convert before indexing.
    let line_idx = shared::line_from_offset(&ctx.line_starts, insert_pos).saturating_sub(1);
    let line_count = ctx.lines.len();
    let current_line = ctx.lines.get(line_idx)?;
    if current_line.trim().is_empty() {
        return Some(leading_ws(current_line));
    }
    // Scan forward from the insert line, then backward, for a non-blank line
    // whose indentation can be copied.
    for candidate_line in
        iter::once(line_idx).chain((line_idx + 1)..line_count).chain((0..line_idx).rev())
    {
        let Some(line_text) = ctx.lines.get(candidate_line) else {
            continue;
        };
        if line_text.trim().is_empty() {
            continue;
        }
        return Some(leading_ws(line_text));
    }
    Some(String::new())
}
/// Prefixes every non-empty line of `block` with `indent`, preserving the
/// original newline placement (empty lines stay completely unindented).
fn import006_indent_insert_block(block: &str, indent: &str) -> String {
    block
        .split_inclusive('\n')
        .map(|segment| {
            let (line, newline) = match segment.strip_suffix('\n') {
                Some(body) => (body, "\n"),
                None => (segment, ""),
            };
            if line.is_empty() {
                newline.to_owned()
            } else {
                format!("{indent}{line}{newline}")
            }
        })
        .collect()
}
/// Shifts a scope-local edit by `offset` into whole-file coordinates,
/// rejecting ranges that overflow or fall outside the file text.
fn import006_translate_scope_edit(ctx: &FileContext, offset: usize, edit: Edit) -> Option<Edit> {
    let Edit { start, end, replacement, rule } = edit;
    let start = offset.checked_add(start)?;
    let end = offset.checked_add(end)?;
    (start <= end && end <= ctx.text.len()).then(|| Edit { start, end, replacement, rule })
}
/// Re-runs a subset of the import rules inside every `#[cfg(test)]` module.
///
/// Each module body is re-parsed as its own `FileContext`; the rules run on
/// that sub-context, and resulting violations/edits are translated back into
/// whole-file coordinates via the body's byte offset.
fn check_cfg_test_module_import_rules(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
) {
    for module in ctx.source_file.syntax().descendants().filter_map(Module::cast) {
        if !module_has_cfg_test_attr(&module) {
            continue;
        }
        let Some(item_list) = module.item_list() else {
            continue;
        };
        let Some((module_body_start, module_body_end)) = item_list_body_text_range(&item_list)
        else {
            continue;
        };
        let Some(module_body_text) = ctx.text.get(module_body_start..module_body_end) else {
            continue;
        };
        let Ok(Some(module_ctx)) =
            shared::read_file_context_from_text(&ctx.path, module_body_text.to_owned())
        else {
            continue;
        };
        // Detect whether group fixes would become possible after a first
        // round of edits, so those violations can be flagged fixable now.
        let has_follow_up_group_fix = cfg_test_module_has_follow_up_group_fix(&module_ctx);
        let mut module_violations = Vec::new();
        let mut module_edits = Vec::new();
        apply_pub_use_group_rules(
            &module_ctx,
            &mut module_violations,
            &mut module_edits,
            emit_edits,
        );
        let use_runs = collect_non_pub_use_runs(&module_ctx);
        let use_items = use_runs.iter().flat_map(|run| run.iter().copied()).collect::<Vec<_>>();
        let analysis = build_import_analysis(&module_ctx, use_runs, use_items);
        // Glob and `super` imports are tolerated inside test modules; their
        // lines are skipped rather than fixed.
        let use_item_skip_lines = collect_cfg_test_module_skip_lines(&analysis.use_item_analyses);
        let import004_fixed_lines = apply_use_item_rules(
            &module_ctx,
            &mut module_violations,
            &mut module_edits,
            emit_edits,
            &analysis.use_item_analyses,
            &use_item_skip_lines,
            false,
        );
        push_import004_ambiguous_symbol_violations(
            &module_ctx,
            &mut module_violations,
            &analysis.imported_symbol_maps,
        );
        let import004_qualified_path_fixed_lines = apply_import004_qualified_function_path_rule(
            &module_ctx,
            &mut module_violations,
            &mut module_edits,
            emit_edits,
            &analysis.use_runs,
            &analysis.local_module_roots,
            &analysis.imported_symbol_maps.full_paths_by_symbol,
            false,
        );
        let mut import_group_skip_lines = use_item_skip_lines;
        import_group_skip_lines.extend(import004_fixed_lines);
        import_group_skip_lines.extend(import004_qualified_path_fixed_lines);
        let (planned_import_group_edits, fixable_import_group_lines) =
            build_import_group_fix_plans(&module_ctx, &analysis, &import_group_skip_lines);
        if emit_edits {
            module_edits.extend(planned_import_group_edits);
        }
        push_import_group_order_spacing_violations(
            &module_ctx,
            &mut module_violations,
            &analysis.use_runs,
            &fixable_import_group_lines,
            &analysis.local_module_roots,
        );
        // Translate module-local line numbers / byte offsets back to the
        // enclosing file before reporting.
        violations.extend(module_violations.into_iter().filter_map(|violation| {
            translate_cfg_test_violation(
                ctx,
                &module_ctx,
                module_body_start,
                violation,
                has_follow_up_group_fix,
            )
        }));
        if !emit_edits {
            continue;
        }
        edits.extend(
            module_edits
                .into_iter()
                .filter_map(|edit| translate_cfg_test_edit(ctx, module_body_start, edit)),
        );
    }
}
/// Whether the module carries a `cfg(test)` attribute (whitespace in the
/// attribute text is ignored).
fn module_has_cfg_test_attr(module: &Module) -> bool {
    module.attrs().any(|attr| {
        let compact = attr.syntax().text().to_string().replace(' ', "");
        compact.contains("cfg(test)")
    })
}
/// Byte range of a module body with its surrounding braces stripped; `None`
/// when the list is too short to contain anything beyond `{}`.
fn item_list_body_text_range(item_list: &ast::ItemList) -> Option<(usize, usize)> {
    let range = item_list.syntax().text_range();
    let start = usize::from(range.start());
    let end = usize::from(range.end());
    (end > start + 1).then(|| (start + 1, end - 1))
}
/// Lines of use items that the cfg(test) pass must leave alone: glob imports
/// and anything rooted at `super`.
fn collect_cfg_test_module_skip_lines(use_item_analyses: &[UseItemAnalysis<'_>]) -> HashSet<usize> {
    use_item_analyses
        .iter()
        .filter(|analysis| {
            let compact_path = compact_path_for_match(&analysis.path);
            compact_path.contains('*')
                || compact_path == "super"
                || compact_path.starts_with("super::")
        })
        .map(|analysis| analysis.item.line)
        .collect()
}
/// Rebases a module-local violation onto whole-file line numbers. Group
/// order/spacing violations (IMPORT-001/002) are upgraded to fixable when a
/// follow-up group fix was detected for the module.
fn translate_cfg_test_violation(
    ctx: &FileContext,
    module_ctx: &FileContext,
    module_body_start: usize,
    violation: Violation,
    has_follow_up_group_fix: bool,
) -> Option<Violation> {
    let local_line_offset = shared::offset_from_line(&module_ctx.line_starts, violation.line)?;
    let line = shared::line_from_offset(&ctx.line_starts, module_body_start + local_line_offset);
    let is_group_rule =
        matches!(violation.rule, "RUST-STYLE-IMPORT-001" | "RUST-STYLE-IMPORT-002");
    let fixable = violation.fixable || (has_follow_up_group_fix && is_group_rule);
    Some(Violation {
        file: ctx.path.clone(),
        line,
        rule: violation.rule,
        message: violation.message,
        fixable,
    })
}
fn translate_cfg_test_edit(
ctx: &FileContext,
module_body_start: usize,
edit: Edit,
) -> Option<Edit> {
let start = module_body_start.checked_add(edit.start)?;
let end = module_body_start.checked_add(edit.end)?;
if end > ctx.text.len() || start > end {
return None;
}
Some(Edit { start, end, replacement: edit.replacement, rule: edit.rule })
}
/// Probes whether applying the per-item use fixes to a cfg(test) module body
/// would, on a second pass over the rewritten text, unlock group
/// order/spacing fixes. Used so that group violations can be reported as
/// fixable even though their fix only materializes after the first round of
/// edits. Operates entirely on in-memory copies; nothing is mutated.
fn cfg_test_module_has_follow_up_group_fix(ctx: &FileContext) -> bool {
    let use_runs = collect_non_pub_use_runs(ctx);
    if use_runs.is_empty() {
        return false;
    }
    let use_items = use_runs.iter().flat_map(|run| run.iter().copied()).collect::<Vec<_>>();
    let analysis = build_import_analysis(ctx, use_runs, use_items);
    let use_item_skip_lines = collect_cfg_test_module_skip_lines(&analysis.use_item_analyses);
    // First pass: collect the per-item edits that would be applied.
    let mut probe_violations = Vec::new();
    let mut probe_edits = Vec::new();
    apply_use_item_rules(
        ctx,
        &mut probe_violations,
        &mut probe_edits,
        true,
        &analysis.use_item_analyses,
        &use_item_skip_lines,
        false,
    );
    if probe_edits.is_empty() {
        return false;
    }
    // Apply them to a scratch copy of the module text.
    let mut rewritten = ctx.text.clone();
    let Ok(applied) = fixes::apply_edits(&mut rewritten, probe_edits) else {
        return false;
    };
    if applied == 0 {
        return false;
    }
    let Ok(Some(next_ctx)) = shared::read_file_context_from_text(&ctx.path, rewritten) else {
        return false;
    };
    // Second pass: re-analyze the rewritten text and check whether any group
    // fix is now plannable.
    let next_use_runs = collect_non_pub_use_runs(&next_ctx);
    if next_use_runs.is_empty() {
        return false;
    }
    let next_use_items =
        next_use_runs.iter().flat_map(|run| run.iter().copied()).collect::<Vec<_>>();
    let next_analysis = build_import_analysis(&next_ctx, next_use_runs, next_use_items);
    let next_use_item_skip_lines =
        collect_cfg_test_module_skip_lines(&next_analysis.use_item_analyses);
    let mut next_probe_violations = Vec::new();
    let mut next_probe_edits = Vec::new();
    let next_import004_fixed_lines = apply_use_item_rules(
        &next_ctx,
        &mut next_probe_violations,
        &mut next_probe_edits,
        false,
        &next_analysis.use_item_analyses,
        &next_use_item_skip_lines,
        false,
    );
    let mut next_import_group_skip_lines = next_use_item_skip_lines;
    next_import_group_skip_lines.extend(next_import004_fixed_lines);
    let (_planned_import_group_edits, fixable_import_group_lines) =
        build_import_group_fix_plans(&next_ctx, &next_analysis, &next_import_group_skip_lines);
    !fixable_import_group_lines.is_empty()
}
/// RUST-STYLE-IMPORT-010: `use super::...` imports are reported and, when the
/// `super` chain does not climb past the crate root, rewritten as
/// crate-absolute paths. Returns the top-level use lines that were fixed.
fn apply_import010_no_super_use_rule(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    use_items: &[&TopItem],
) -> HashSet<usize> {
    let top_use_lines = use_items.iter().map(|item| item.line).collect::<HashSet<_>>();
    let mut fixed_top_level_lines = HashSet::new();
    for use_item in ctx.source_file.syntax().descendants().filter_map(Use::cast) {
        let Some(use_tree) = use_item.use_tree() else {
            continue;
        };
        let use_path = compact_path_for_match(&use_tree.syntax().text().to_string());
        let start = usize::from(use_item.syntax().text_range().start());
        let end = usize::from(use_item.syntax().text_range().end());
        let line = shared::line_from_offset(&ctx.line_starts, start);
        let Some((super_depth, tail)) = leading_super_depth_and_tail(&use_path) else {
            continue;
        };
        let current_module_path = current_module_path_segments(ctx, &use_item);
        // Fixable only when every `super` step stays within known modules.
        let fixable = super_depth <= current_module_path.len();
        shared::push_violation(
            violations,
            ctx,
            line,
            "RUST-STYLE-IMPORT-010",
            "Do not use `super` imports; use crate-absolute imports.",
            fixable,
        );
        if !emit_edits || !fixable {
            continue;
        }
        let parent_depth = current_module_path.len() - super_depth;
        // Fix: the `&current_module_path[..]` argument had been corrupted to
        // `¤t_module_path[..]` (HTML-entity mangling of the `&curren` run).
        let replacement_path =
            crate_absolute_use_path(&current_module_path[..parent_depth], tail);
        let Some(replacement) = rewrite_use_item_with_path(
            &use_item.syntax().text().to_string(),
            &replacement_path,
        ) else {
            continue;
        };
        edits.push(Edit { start, end, replacement, rule: "RUST-STYLE-IMPORT-010" });
        if top_use_lines.contains(&line) {
            fixed_top_level_lines.insert(line);
        }
    }
    fixed_top_level_lines
}
/// RUST-STYLE-IMPORT-007: glob imports (`use foo::*`) are reported; when the
/// glob's exported symbol set can be resolved, the glob is expanded to an
/// explicit braced list. Returns the top-level use lines that were fixed.
fn apply_import007_no_glob_use_rule(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    use_items: &[&TopItem],
) -> HashSet<usize> {
    let top_use_lines = use_items.iter().map(|item| item.line).collect::<HashSet<_>>();
    let mut fixed_top_level_lines = HashSet::new();
    for use_item in ctx.source_file.syntax().descendants().filter_map(Use::cast) {
        let Some(use_tree) = use_item.use_tree() else {
            continue;
        };
        let use_path = use_tree.syntax().text().to_string();
        if !use_path.contains('*') {
            continue;
        }
        let start = usize::from(use_item.syntax().text_range().start());
        let end = usize::from(use_item.syntax().text_range().end());
        let line = shared::line_from_offset(&ctx.line_starts, start);
        // The replacement is computed even when not emitting edits: its
        // availability decides whether the violation is reported as fixable.
        let replacement =
            build_glob_use_replacement(ctx, &use_item, &use_path).and_then(|replacement_path| {
                rewrite_use_item_with_path(&use_item.syntax().text().to_string(), &replacement_path)
            });
        let fixable = replacement.is_some();
        shared::push_violation(
            violations,
            ctx,
            line,
            "RUST-STYLE-IMPORT-007",
            "Glob imports are not allowed; import explicit symbols.",
            fixable,
        );
        if !emit_edits {
            continue;
        }
        if let Some(replacement) = replacement {
            edits.push(Edit { start, end, replacement, rule: "RUST-STYLE-IMPORT-007" });
            if top_use_lines.contains(&line) {
                fixed_top_level_lines.insert(line);
            }
        }
    }
    fixed_top_level_lines
}
/// Computes the explicit-symbol use path that replaces a glob import, or
/// `None` when the glob cannot be resolved.
///
/// Three cases are handled:
/// 1. `crate::{..., prelude::*}` nested form — the `prelude::*` segment is
///    replaced in place with the used prelude symbols;
/// 2. `rayon::prelude::*` — mapped to the concrete `rayon::iter` traits the
///    file's method calls require;
/// 3. any other `prefix::*` whose exports can be resolved locally.
fn build_glob_use_replacement(ctx: &FileContext, use_item: &Use, use_path: &str) -> Option<String> {
    let compact = compact_path_for_match(use_path);
    if compact.contains("crate::{") && compact.contains("prelude::*") {
        let used_symbols = collect_used_symbols_from_syntax(ctx.source_file.syntax());
        let symbols = exported_symbols_from_crate_prelude(ctx)?
            .into_iter()
            .filter(|symbol| used_symbols.contains(symbol))
            .collect::<Vec<_>>();
        if symbols.is_empty() {
            return None;
        }
        let replacement =
            compact.replace("prelude::*", &format!("prelude::{{{}}}", symbols.join(",")));
        return Some(replacement);
    }
    let prefix = compact.strip_suffix("::*")?;
    if prefix == "rayon::prelude" {
        let traits = rayon_traits_for_usage(ctx);
        if traits.is_empty() {
            return None;
        }
        let names = traits.into_iter().map(str::to_owned).collect::<Vec<_>>();
        return Some(format_expanded_braced_use_path("rayon::iter", &names));
    }
    let symbols = exported_symbols_for_glob_prefix(ctx, use_item, prefix)?;
    if symbols.is_empty() {
        return None;
    }
    // A `pub use` glob re-exports everything, so keep all symbols; a private
    // glob only needs the symbols the file actually uses.
    let names = if use_item.visibility().is_some() {
        symbols.into_iter().collect::<Vec<_>>()
    } else {
        let used_symbols = collect_used_symbols_from_syntax(ctx.source_file.syntax());
        symbols.into_iter().filter(|symbol| used_symbols.contains(symbol)).collect::<Vec<_>>()
    };
    if names.is_empty() {
        return None;
    }
    Some(format_expanded_braced_use_path(prefix, &names))
}
/// Renders `prefix::{a, b, c}` from a path prefix and symbol names.
fn format_expanded_braced_use_path(prefix: &str, names: &[String]) -> String {
    let mut rendered = String::with_capacity(prefix.len() + 4);
    rendered.push_str(prefix);
    rendered.push_str("::{");
    rendered.push_str(&names.join(", "));
    rendered.push('}');
    rendered
}
/// Maps the rayon method calls present in the file text to the `rayon::iter`
/// traits they require; `ParallelIterator` is added whenever any apply.
/// Returned sorted and deduplicated.
fn rayon_traits_for_usage(ctx: &FileContext) -> Vec<&'static str> {
    // Each needle also covers its `()`-call spelling, since `".par_iter()"`
    // contains `".par_iter("`.
    const TRAITS_BY_NEEDLE: [(&str, &str); 3] = [
        (".par_iter(", "IntoParallelRefIterator"),
        (".par_iter_mut(", "IntoParallelRefMutIterator"),
        (".into_par_iter(", "IntoParallelIterator"),
    ];
    let mut traits = TRAITS_BY_NEEDLE
        .into_iter()
        .filter(|(needle, _)| ctx.text.contains(needle))
        .map(|(_, trait_name)| trait_name)
        .collect::<Vec<_>>();
    if !traits.is_empty() {
        traits.push("ParallelIterator");
    }
    traits.sort_unstable();
    traits.dedup();
    traits
}
/// Resolves the exported symbol set behind a glob prefix for the supported
/// shapes: `crate::prelude`, `super`, and `self::<sibling_module>`.
fn exported_symbols_for_glob_prefix(
    ctx: &FileContext,
    use_item: &Use,
    prefix: &str,
) -> Option<BTreeSet<String>> {
    match prefix {
        "crate::prelude" => exported_symbols_from_crate_prelude(ctx),
        "super" => exported_symbols_from_super_scope(use_item),
        _ => {
            // Only single-segment `self::module` prefixes are supported.
            let module_name = prefix.strip_prefix("self::")?;
            if module_name.is_empty() || module_name.contains("::") {
                return None;
            }
            exported_symbols_from_sibling_module(use_item, module_name)
        },
    }
}
/// Reads the crate's `prelude` module exports by locating the crate root
/// (`src/lib.rs` or `src/main.rs`) on disk; `None` when no root file yields
/// any prelude symbols.
fn exported_symbols_from_crate_prelude(ctx: &FileContext) -> Option<BTreeSet<String>> {
    let src_dir = find_crate_dir(&ctx.path)?.join("src");
    [src_dir.join("lib.rs"), src_dir.join("main.rs")]
        .into_iter()
        .filter(|root| root.is_file())
        .map(|root| exported_symbols_from_named_module(&root, "prelude"))
        .find(|symbols| !symbols.is_empty())
}
/// Parses `root_file` and collects the public exports of its `mod
/// <module_name>`. An inline module body is read directly; an out-of-line
/// declaration (`mod name;`) is resolved against the sibling files
/// `<name>.rs` and `<name>/mod.rs`. Returns an empty set on any read/parse
/// failure or when the module is absent.
fn exported_symbols_from_named_module(
    root_file: &std::path::Path,
    module_name: &str,
) -> BTreeSet<String> {
    let Ok(text) = fs::read_to_string(root_file) else {
        return BTreeSet::new();
    };
    let parsed = ra_ap_syntax::SourceFile::parse(&text, Edition::CURRENT).tree();
    let Some(module) = parsed
        .syntax()
        .children()
        .filter_map(Module::cast)
        .find(|item| item.name().is_some_and(|name| name.text() == module_name))
    else {
        return BTreeSet::new();
    };
    // Inline module: `mod name { ... }`.
    if let Some(item_list) = module.item_list() {
        return exported_symbols_from_module_items(
            item_list.syntax().children().filter_map(Item::cast),
        );
    }
    // Out-of-line module: `mod name;` — look for the conventional files.
    if module.semicolon_token().is_some() {
        let module_rs = root_file
            .parent()
            .map(|parent| parent.join(format!("{module_name}.rs")))
            .unwrap_or_else(|| PathBuf::from(format!("{module_name}.rs")));
        let module_mod_rs = root_file
            .parent()
            .map(|parent| parent.join(module_name).join("mod.rs"))
            .unwrap_or_else(|| PathBuf::from(module_name).join("mod.rs"));
        for candidate in [module_rs, module_mod_rs] {
            if !candidate.is_file() {
                continue;
            }
            let Ok(module_text) = fs::read_to_string(&candidate) else {
                continue;
            };
            let module_parsed =
                ra_ap_syntax::SourceFile::parse(&module_text, Edition::CURRENT).tree();
            let exported = exported_symbols_from_module_items(
                module_parsed.syntax().children().filter_map(Item::cast),
            );
            if !exported.is_empty() {
                return exported;
            }
        }
    }
    BTreeSet::new()
}
/// Resolve a `self::<module_name>::*` glob by scanning sibling items for an
/// inline module with that name and collecting its exported symbols.
fn exported_symbols_from_sibling_module(
    use_item: &Use,
    module_name: &str,
) -> Option<BTreeSet<String>> {
    let parent = use_item.syntax().parent()?;
    for sibling in parent.children().filter_map(Item::cast) {
        let Item::Module(module_item) = sibling else {
            continue;
        };
        if !module_item.name().is_some_and(|name| name.text() == module_name) {
            continue;
        }
        // Out-of-line declarations (`mod foo;`) carry no item list, so
        // resolution gives up for them.
        let exported = exported_symbols_from_module_items(
            module_item.item_list()?.syntax().children().filter_map(Item::cast),
        );
        if !exported.is_empty() {
            return Some(exported);
        }
    }
    None
}
/// Collect the publicly exported symbol names from a module's items.
///
/// Exports come from three sources: `pub use` re-exports, constants declared
/// by macro invocations (heuristic text scan), and named `pub` items.
fn exported_symbols_from_module_items(items: impl Iterator<Item = Item>) -> BTreeSet<String> {
    let mut symbols = BTreeSet::new();
    for item in items {
        match item {
            Item::Use(use_item) => {
                // Only `pub use` re-exports contribute to the export surface.
                if use_item.visibility().is_none() {
                    continue;
                }
                let Some(use_tree) = use_item.use_tree() else {
                    continue;
                };
                let use_path = use_tree.syntax().text().to_string();
                for symbol in imported_symbols_from_use_path(&use_path) {
                    symbols.insert(symbol);
                }
            },
            Item::MacroCall(macro_call) => {
                // Macro bodies are scanned textually for CONST-style names;
                // visibility cannot be checked on a macro invocation.
                let text = macro_call.syntax().text().to_string();
                for symbol in exported_symbols_from_macro_call_text(text.as_str()) {
                    symbols.insert(symbol);
                }
            },
            _ => {
                if !module_item_is_pub(&item) {
                    continue;
                }
                if let Some(name) = item_name_text(&item) {
                    symbols.insert(name);
                }
            },
        }
    }
    symbols
}
fn module_item_is_pub(item: &Item) -> bool {
match item {
Item::Module(node) => node.visibility().is_some(),
Item::Use(node) => node.visibility().is_some(),
Item::TypeAlias(node) => node.visibility().is_some(),
Item::Const(node) => node.visibility().is_some(),
Item::Static(node) => node.visibility().is_some(),
Item::Trait(node) => node.visibility().is_some(),
Item::Enum(node) => node.visibility().is_some(),
Item::Struct(node) => node.visibility().is_some(),
Item::Fn(node) => node.visibility().is_some(),
Item::Impl(node) => node.visibility().is_some(),
Item::Union(node) => node.visibility().is_some(),
Item::MacroRules(node) => node.visibility().is_some(),
_ => false,
}
}
fn exported_symbols_from_macro_call_text(text: &str) -> BTreeSet<String> {
let re = Regex::new(r"\b([A-Z][A-Z0-9_]{2,})\s*=").expect("Compile macro export symbol regex.");
let mut symbols = BTreeSet::new();
for capture in re.captures_iter(text) {
if let Some(name) = capture.get(1) {
symbols.insert(name.as_str().to_owned());
}
}
symbols
}
/// Walk upward from `path`'s directory and return the first ancestor that
/// contains a `Cargo.toml` (the crate root); `None` when no ancestor has one.
fn find_crate_dir(path: &std::path::Path) -> Option<PathBuf> {
    // `ancestors()` yields the parent itself first, then each ancestor,
    // matching the original check-then-pop loop.
    path.parent()?
        .ancestors()
        .find(|dir| dir.join("Cargo.toml").is_file())
        .map(std::path::Path::to_path_buf)
}
/// Compute the module path of the scope containing `use_item`: the file's
/// own module path followed by any inline `mod` ancestors (outermost first).
fn current_module_path_segments(ctx: &FileContext, use_item: &Use) -> Vec<String> {
    let inline_ancestors: Vec<String> = use_item
        .syntax()
        .ancestors()
        .filter_map(Module::cast)
        .filter_map(|module| module.name().map(|name| name.text().to_string()))
        .collect();
    let mut module_path = file_module_path_segments(&ctx.path);
    // `ancestors()` walks innermost-first; module paths read outermost-first.
    module_path.extend(inline_ancestors.into_iter().rev());
    module_path
}
/// Derive the crate-relative module path for a source file from its location
/// under `src/`; empty for crate roots or files outside a recognizable crate.
fn file_module_path_segments(path: &std::path::Path) -> Vec<String> {
    let Some(crate_dir) = find_crate_dir(path) else {
        return Vec::new();
    };
    let Ok(relative) = path.strip_prefix(crate_dir.join("src")) else {
        return Vec::new();
    };
    let mut segments: Vec<String> =
        relative.iter().map(|component| component.to_string_lossy().into_owned()).collect();
    let Some(file_name) = segments.pop() else {
        return Vec::new();
    };
    if file_name == "lib.rs" || file_name == "main.rs" {
        // Crate roots contribute no module segments.
        return Vec::new();
    }
    if file_name != "mod.rs" {
        // `foo.rs` adds a trailing `foo` segment; `mod.rs` is named by its
        // containing directory, which is already in `segments`.
        let stem = file_name.strip_suffix(".rs").unwrap_or(file_name.as_str());
        segments.push(stem.to_owned());
    }
    segments
}
/// Count the leading `super` segments of `path`, returning the count and the
/// remainder after them; `None` when the path does not start with `super`.
fn leading_super_depth_and_tail(path: &str) -> Option<(usize, &str)> {
    if path == "super" {
        return Some((1, ""));
    }
    let mut depth = 0_usize;
    let mut rest = path;
    while let Some(after) = rest.strip_prefix("super::") {
        depth += 1;
        rest = after;
        if rest == "super" {
            // A bare trailing `super` adds one more level with no tail.
            return Some((depth + 1, ""));
        }
        if !rest.starts_with("super") {
            break;
        }
    }
    if depth == 0 { None } else { Some((depth, rest)) }
}
/// Build a `crate::…` use path from parent-module segments plus an optional
/// tail (`tail` may itself contain `::`).
fn crate_absolute_use_path(parent_segments: &[String], tail: &str) -> String {
    let mut parts: Vec<&str> = Vec::with_capacity(parent_segments.len() + 2);
    parts.push("crate");
    parts.extend(parent_segments.iter().map(String::as_str));
    if !tail.is_empty() {
        parts.push(tail);
    }
    parts.join("::")
}
/// Gather symbols imported by every *other* `use` item in the same inline
/// module as `current_use_item` (glob markers excluded).
fn imported_symbols_from_current_module_use_items(
    current_module: &Module,
    current_use_item: &Use,
) -> HashSet<String> {
    let mut symbols = HashSet::new();
    let Some(item_list) = current_module.item_list() else {
        return symbols;
    };
    let current_range = current_use_item.syntax().text_range();
    for item in item_list.syntax().children().filter_map(Item::cast) {
        let Item::Use(use_item) = item else {
            continue;
        };
        // Skip the very use item under analysis.
        if use_item.syntax().text_range() == current_range {
            continue;
        }
        let Some(use_tree) = use_item.use_tree() else {
            continue;
        };
        let tree_text = use_tree.syntax().text().to_string();
        // `*` is the glob marker, not an importable name.
        symbols.extend(
            imported_symbols_from_use_path(&tree_text).into_iter().filter(|name| name != "*"),
        );
    }
    symbols
}
/// Accumulate the names visible in a scope into `out`: symbols pulled in by
/// `use` items plus the names of items defined directly in the scope.
fn collect_scope_symbols_from_items(items: impl Iterator<Item = Item>, out: &mut BTreeSet<String>) {
    for item in items {
        if let Item::Use(use_item) = &item {
            if let Some(use_tree) = use_item.use_tree() {
                let use_path = use_tree.syntax().text().to_string();
                out.extend(imported_symbols_from_use_path(&use_path));
            }
            continue;
        }
        // Non-use items contribute their own name, when they have one.
        if let Some(name) = item_name_text(&item) {
            out.insert(name);
        }
    }
}
fn item_name_text(item: &Item) -> Option<String> {
match item {
Item::Fn(it) => it.name().map(|n| n.text().to_string()),
Item::Struct(it) => it.name().map(|n| n.text().to_string()),
Item::Enum(it) => it.name().map(|n| n.text().to_string()),
Item::Trait(it) => it.name().map(|n| n.text().to_string()),
Item::TypeAlias(it) => it.name().map(|n| n.text().to_string()),
Item::Const(it) => it.name().map(|n| n.text().to_string()),
Item::Static(it) => it.name().map(|n| n.text().to_string()),
Item::Module(it) => it.name().map(|n| n.text().to_string()),
Item::Union(it) => it.name().map(|n| n.text().to_string()),
Item::MacroRules(it) => it.name().map(|n| n.text().to_string()),
_ => None,
}
}
/// Collect every identifier referenced under `syntax`, excluding names that
/// only appear inside `use` trees (imports declare names, they don't use them).
fn collect_used_symbols_from_syntax(syntax: &SyntaxNode) -> HashSet<String> {
    syntax
        .descendants()
        .filter_map(ast::NameRef::cast)
        .filter(|name_ref| !name_ref.syntax().ancestors().any(|node| Use::can_cast(node.kind())))
        .map(|name_ref| name_ref.text().to_string())
        .collect()
}
/// Run the per-`use`-item rules (IMPORT-002/003/004/009) over every analyzed
/// use item in the file.
///
/// Returns the set of lines whose `use` item was already rewritten, so later
/// passes do not emit overlapping edits for the same item.
fn apply_use_item_rules(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    use_item_analyses: &[UseItemAnalysis<'_>],
    skip_lines: &HashSet<usize>,
    with_import_shortening: bool,
) -> HashSet<usize> {
    let mut import004_fixed_lines = HashSet::new();
    for use_item_analysis in use_item_analyses {
        let item = use_item_analysis.item;
        if skip_lines.contains(&item.line) {
            continue;
        }
        let path = &use_item_analysis.path;
        if with_import_shortening {
            // The IMPORT-009 rules may rewrite or remove the whole item; once
            // one fires, skip the remaining rules for this item.
            if apply_import009_std_fmt_result_rule(ctx, violations, edits, emit_edits, item, path) {
                import004_fixed_lines.insert(item.line);
                continue;
            }
            if apply_import009_non_importable_root_use_rule(
                ctx, violations, edits, emit_edits, item, path,
            ) {
                import004_fixed_lines.insert(item.line);
                continue;
            }
        }
        apply_import002_normalization_rule(ctx, violations, edits, emit_edits, item, path);
        // The two alias rules below rewrite aliases themselves; the plain
        // alias violation is reported only when neither of them applied.
        let mut alias_rule_applied = false;
        if apply_import003_trait_keep_alive_rule(ctx, violations, edits, emit_edits, item, path) {
            import004_fixed_lines.insert(item.line);
            alias_rule_applied = true;
        }
        if apply_import003_non_keep_alive_alias_rule(ctx, violations, edits, emit_edits, item, path)
        {
            import004_fixed_lines.insert(item.line);
            alias_rule_applied = true;
        }
        if !alias_rule_applied {
            push_alias_violation_if_needed(ctx, violations, item, path);
        }
        if apply_import004_free_fn_macro_rule(ctx, violations, edits, emit_edits, item, path) {
            import004_fixed_lines.insert(item.line);
        }
    }
    import004_fixed_lines
}
/// IMPORT-009: importing `std::fmt::Result` shadows the prelude's generic
/// `Result<T, E>`; use `std::fmt::Result` at call sites and drop the import.
///
/// Returns `true` when the rule fired for this `use` item.
fn apply_import009_std_fmt_result_rule(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    item: &TopItem,
    path: &str,
) -> bool {
    if !imported_full_paths_from_use_path(path).iter().any(|full| full == "std::fmt::Result") {
        return false;
    }
    // Non-generic `Result` mentions get rewritten to `std::fmt::Result`;
    // generic ones are the shadowed prelude type and must stay untouched.
    let nongeneric_rewrites =
        unqualified_nongeneric_type_path_rewrites(ctx, "Result", "std::fmt::Result");
    let has_generic_result_uses = has_unqualified_generic_type_path_use(ctx, "Result");
    let Some((_qualified_symbol_path, rewritten_use_path)) = import004_fix_plan(path, "Result")
    else {
        return false;
    };
    // No call site is affected by the shadowing; leave the import alone.
    if nongeneric_rewrites.is_empty() && !has_generic_result_uses {
        return false;
    }
    shared::push_violation(
        violations,
        ctx,
        item.line,
        "RUST-STYLE-IMPORT-009",
        "Do not import `std::fmt::Result`; use `std::fmt::Result` at call sites and keep generic `Result<T, E>` unshadowed.",
        true,
    );
    if !emit_edits {
        return true;
    }
    for (start, end, replacement) in nongeneric_rewrites {
        edits.push(Edit { start, end, replacement, rule: "RUST-STYLE-IMPORT-009" });
    }
    // Rewrite the use item without the `Result` leaf (or remove it entirely).
    let Some(edit) = build_use_item_rewrite_edit(
        ctx,
        item,
        rewritten_use_path.as_deref(),
        "RUST-STYLE-IMPORT-009",
    ) else {
        return false;
    };
    edits.push(edit);
    true
}
/// IMPORT-009: reject `use` items rooted at non-importable paths (`Self` or
/// generic parameters); use sites get fully qualified paths instead.
///
/// Returns `true` when the rule fired for this `use` item.
fn apply_import009_non_importable_root_use_rule(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    item: &TopItem,
    path: &str,
) -> bool {
    // Whitespace-insensitive root extraction (e.g. `Self :: Foo`).
    let compact_path = path.replace(' ', "");
    let Some(root) = compact_path.split("::").next() else {
        return false;
    };
    if !is_non_importable_use_root(root) {
        return false;
    }
    // Qualify every type- and value-position use of every imported symbol.
    let mut rewrites = Vec::new();
    for full_path in imported_full_paths_from_use_path(path) {
        let Some(symbol) = symbol_from_full_import_path(&full_path) else {
            continue;
        };
        rewrites.extend(unqualified_type_path_rewrites(ctx, &symbol, &full_path));
        rewrites.extend(unqualified_value_path_rewrites(ctx, &symbol, &full_path));
    }
    shared::push_violation(
        violations,
        ctx,
        item.line,
        "RUST-STYLE-IMPORT-009",
        "Do not import symbols from non-importable roots (`Self` or generic parameters); use qualified paths.",
        true,
    );
    if !emit_edits {
        return true;
    }
    for (start, end, replacement) in rewrites {
        edits.push(Edit { start, end, replacement, rule: "RUST-STYLE-IMPORT-009" });
    }
    // A `None` path asks the helper to drop the import entirely.
    let Some(edit) = build_use_item_rewrite_edit(ctx, item, None, "RUST-STYLE-IMPORT-009") else {
        return false;
    };
    edits.push(edit);
    true
}
/// IMPORT-002: flag `use a::{b, b::c}` shapes and rewrite them to the
/// normalized `use a::{b::{self, c}}` form.
fn apply_import002_normalization_rule(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    item: &TopItem,
    path: &str,
) {
    let Some(normalized) = normalize_mixed_self_child_use_path(ctx, path) else {
        return;
    };
    shared::push_violation(
        violations,
        ctx,
        item.line,
        "RUST-STYLE-IMPORT-002",
        "Normalize imports like `use a::{b, b::c}` to `use a::{b::{self, c}}`.",
        true,
    );
    if !emit_edits {
        return;
    }
    let Some(edit) =
        build_use_item_rewrite_edit(ctx, item, Some(normalized.as_str()), "RUST-STYLE-IMPORT-002")
    else {
        return;
    };
    edits.push(edit);
}
fn push_alias_violation_if_needed(
ctx: &FileContext,
violations: &mut Vec<Violation>,
item: &TopItem,
path: &str,
) {
let Some(alias_caps) = Regex::new(r"\bas\s+([A-Za-z_][A-Za-z0-9_]*)\b")
.expect("Expected operation to succeed.")
.captures(path)
else {
return;
};
if alias_caps.get(1).map(|m| m.as_str()) != Some("_") {
shared::push_violation(
violations,
ctx,
item.line,
"RUST-STYLE-IMPORT-003",
"Import aliases are not allowed except `as _` keep-alive imports.",
false,
);
}
}
/// IMPORT-003: normalize trait imports to the `as _` keep-alive form (or back
/// to a plain import when the trait name is referenced directly).
///
/// Returns `true` when the use item was rewritten, or when a rewrite is being
/// deferred to the IMPORT-004 pass for this same item.
fn apply_import003_trait_keep_alive_rule(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    item: &TopItem,
    path: &str,
) -> bool {
    let Some((rewritten_use_path, affected_symbols)) =
        normalize_trait_keep_alive_use_path(ctx, path)
    else {
        return false;
    };
    // Sort for deterministic violation ordering.
    let mut symbols = affected_symbols.into_iter().collect::<Vec<_>>();
    symbols.sort();
    for symbol in symbols {
        let referenced_directly = symbol_is_referenced_outside_use(ctx, &symbol);
        let message = if referenced_directly {
            format!("Trait import `{symbol}` is referenced directly; do not use `as _`.")
        } else {
            format!(
                "Trait keep-alive import `{symbol}` should use `as _` when not referenced directly."
            )
        };
        shared::push_violation(violations, ctx, item.line, "RUST-STYLE-IMPORT-003", &message, true);
    }
    let has_import004_pressure = use_path_needs_import004_fix(ctx, path);
    if !emit_edits || rewritten_use_path == path {
        return false;
    }
    // IMPORT-004 will also rewrite this item; emitting both edits would
    // conflict, so report the line as handled and let that rule do the edit.
    if has_import004_pressure {
        return true;
    }
    if let Some((start, end)) = item_text_range(ctx, item)
        && let Some(raw) = ctx.text.get(start..end)
        && let Some(rewritten) = rewrite_use_item_with_path(raw, &rewritten_use_path)
    {
        edits.push(Edit { start, end, replacement: rewritten, rule: "RUST-STYLE-IMPORT-003" });
        return true;
    }
    false
}
/// IMPORT-003: handle named (non-`as _`) import aliases by qualifying their
/// use sites and stripping the alias from the `use` item.
///
/// Returns `true` when the use item itself was rewritten.
fn apply_import003_non_keep_alive_alias_rule(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    item: &TopItem,
    path: &str,
) -> bool {
    let mut aliases = collect_non_keep_alive_alias_bindings(path);
    // Unreferenced trait aliases belong to the keep-alive rule, not this one.
    aliases.retain(|(alias, qualified_path)| {
        let Some(symbol) = symbol_from_full_import_path(qualified_path) else {
            return true;
        };
        let is_trait_alias = looks_like_trait_import(&symbol, qualified_path);
        let referenced = symbol_is_referenced_outside_use(ctx, alias);
        !is_trait_alias || referenced
    });
    if aliases.is_empty() {
        return false;
    }
    let mut referenced_aliases = Vec::new();
    let mut rewrites = Vec::<(usize, usize, String)>::new();
    let mut planned_ranges = Vec::<(usize, usize)>::new();
    let mut fixable = true;
    for (alias, qualified_path) in &aliases {
        let referenced = symbol_is_referenced_outside_use(ctx, alias);
        if referenced {
            // Collect every kind of use-site rewrite for the alias: type
            // positions, value positions, path roots, and macro token trees.
            let mut type_rewrites = unqualified_type_path_rewrites(ctx, alias, qualified_path);
            type_rewrites.extend(alias_root_type_path_rewrites(ctx, alias, qualified_path));
            let mut value_rewrites = unqualified_value_path_rewrites(ctx, alias, qualified_path);
            value_rewrites.extend(alias_root_value_path_rewrites(ctx, alias, qualified_path));
            let root_rewrites = alias_root_path_name_ref_rewrites(ctx, alias, qualified_path);
            let macro_rewrites = alias_macro_token_tree_rewrites(ctx, alias, qualified_path);
            if type_rewrites.is_empty()
                && value_rewrites.is_empty()
                && root_rewrites.is_empty()
                && macro_rewrites.is_empty()
            {
                // Referenced, but no rewritable sites found: cannot auto-fix.
                fixable = false;
            } else {
                for (start, end, replacement) in type_rewrites
                    .into_iter()
                    .chain(value_rewrites)
                    .chain(root_rewrites)
                    .chain(macro_rewrites)
                {
                    // Drop rewrites overlapping a range already planned for
                    // another alias; overlapping edits would corrupt text.
                    if planned_ranges
                        .iter()
                        .any(|(used_start, used_end)| start < *used_end && end > *used_start)
                    {
                        continue;
                    }
                    planned_ranges.push((start, end));
                    rewrites.push((start, end, replacement));
                }
            }
            referenced_aliases.push(alias.clone());
        }
    }
    // One violation per alias; `fixable` reflects the whole item.
    let alias_names = aliases.iter().map(|(alias, _)| alias.as_str()).collect::<Vec<_>>();
    for alias_name in alias_names {
        let message = format!(
            "Import alias `{alias_name}` is not allowed; use a fully qualified path at use sites."
        );
        shared::push_violation(
            violations,
            ctx,
            item.line,
            "RUST-STYLE-IMPORT-003",
            &message,
            fixable,
        );
    }
    if !emit_edits || !fixable {
        return false;
    }
    let aliases_to_remove = aliases.into_iter().map(|(alias, _)| alias).collect::<HashSet<_>>();
    let rewritten_use_path = remove_aliases_from_use_path(path, &aliases_to_remove);
    // A `None` rewritten path means the whole use item should be removed.
    let Some(use_edit) = build_use_item_rewrite_edit(
        ctx,
        item,
        rewritten_use_path.as_deref(),
        "RUST-STYLE-IMPORT-003",
    ) else {
        return false;
    };
    for (start, end, replacement) in rewrites {
        edits.push(Edit { start, end, replacement, rule: "RUST-STYLE-IMPORT-003" });
    }
    edits.push(use_edit);
    !referenced_aliases.is_empty() || rewritten_use_path != Some(path.to_owned())
}
/// Parse every alias binding (`path as Alias`) out of a use path, dropping
/// the `as _` keep-alive form.
fn collect_non_keep_alive_alias_bindings(path: &str) -> Vec<(String, String)> {
    let mut bindings = Vec::new();
    // Malformed segments are tolerated here; partial results are fine.
    let _ok = collect_alias_bindings_from_use_segment(path.trim(), &mut bindings);
    bindings.into_iter().filter(|(alias, _)| alias != "_").collect()
}
/// Recursively walk one use-path segment, appending `(alias, base_path)`
/// pairs to `out` for every `… as alias` binding found.
///
/// Returns `false` when the segment is malformed (unbalanced braces, text
/// after a brace group, a non-`::` prefix, or an empty alias/base).
fn collect_alias_bindings_from_use_segment(segment: &str, out: &mut Vec<(String, String)>) -> bool {
    let trimmed = segment.trim();
    if trimmed.is_empty() {
        return true;
    }
    // Locate the outermost brace group, if any, tracking nesting depth.
    let mut brace_start = None;
    let mut depth = 0_i32;
    let mut brace_end = None;
    for (idx, ch) in trimmed.char_indices() {
        if ch == '{' {
            if brace_start.is_none() {
                brace_start = Some(idx);
            }
            depth += 1;
        } else if ch == '}' {
            depth -= 1;
            if depth < 0 {
                return false;
            }
            if depth == 0 {
                brace_end = Some(idx);
            }
        }
    }
    if depth != 0 {
        return false;
    }
    if let (Some(open), Some(close)) = (brace_start, brace_end) {
        let prefix = trimmed[..open].trim();
        let inner = &trimmed[open + 1..close];
        let suffix = trimmed[close + 1..].trim();
        // A well-formed group is `prefix::{…}` with nothing after the brace.
        if !suffix.is_empty() {
            return false;
        }
        if !prefix.is_empty() && !prefix.ends_with("::") {
            return false;
        }
        let prefix = prefix.strip_suffix("::").unwrap_or(prefix).trim();
        for child in split_top_level_csv(inner) {
            let child = child.trim();
            if child.is_empty() {
                continue;
            }
            // Recurse with the prefix expanded onto each child segment so the
            // recorded base paths are fully qualified.
            let expanded =
                if prefix.is_empty() { child.to_owned() } else { format!("{prefix}::{child}") };
            if !collect_alias_bindings_from_use_segment(expanded.as_str(), out) {
                return false;
            }
        }
        return true;
    }
    // Leaf segment: record the alias binding when one is present.
    if let Some((base, alias)) = trimmed.rsplit_once(" as ") {
        let base = base.trim();
        let alias = alias.trim();
        if base.is_empty() || alias.is_empty() {
            return false;
        }
        out.push((alias.to_owned(), base.to_owned()));
    }
    true
}
/// Rewrite `path` with the given aliased leaves stripped out.
///
/// `None` means the path is malformed or nothing importable remains, in
/// which case the whole `use` item should be dropped.
fn remove_aliases_from_use_path(path: &str, aliases_to_remove: &HashSet<String>) -> Option<String> {
    let trimmed = path.trim();
    rewrite_use_segment_without_aliases(trimmed, aliases_to_remove)
}
/// Recursively rebuild a use-path segment with the given aliased leaves
/// removed.
///
/// Returns `None` when the segment is malformed, contains a removed alias,
/// or nothing remains after removal (the segment should be dropped).
fn rewrite_use_segment_without_aliases(
    segment: &str,
    aliases_to_remove: &HashSet<String>,
) -> Option<String> {
    let trimmed = segment.trim();
    if trimmed.is_empty() {
        return None;
    }
    // Locate the outermost brace group, if any, tracking nesting depth.
    let mut brace_start = None;
    let mut depth = 0_i32;
    let mut brace_end = None;
    for (idx, ch) in trimmed.char_indices() {
        if ch == '{' {
            if brace_start.is_none() {
                brace_start = Some(idx);
            }
            depth += 1;
        } else if ch == '}' {
            depth -= 1;
            if depth < 0 {
                return None;
            }
            if depth == 0 {
                brace_end = Some(idx);
            }
        }
    }
    if depth != 0 {
        return None;
    }
    if let (Some(open), Some(close)) = (brace_start, brace_end) {
        let prefix = trimmed[..open].trim();
        let inner = &trimmed[open + 1..close];
        let suffix = trimmed[close + 1..].trim();
        // A well-formed group is `prefix::{…}` with nothing after the brace.
        if !suffix.is_empty() {
            return None;
        }
        if !prefix.is_empty() && !prefix.ends_with("::") {
            return None;
        }
        // Rebuild the group from the children that survive removal.
        let mut rewritten_children = Vec::new();
        for child in split_top_level_csv(inner) {
            if let Some(rewritten_child) =
                rewrite_use_segment_without_aliases(child.as_str(), aliases_to_remove)
            {
                rewritten_children.push(rewritten_child);
            }
        }
        if rewritten_children.is_empty() {
            return None;
        }
        let prefix = prefix.strip_suffix("::").unwrap_or(prefix).trim();
        if prefix.is_empty() {
            return Some(format!("{{{}}}", rewritten_children.join(", ")));
        }
        return Some(format!("{prefix}::{{{}}}", rewritten_children.join(", ")));
    }
    // Leaf segment: drop it when its alias is scheduled for removal.
    if let Some((base, alias)) = trimmed.rsplit_once(" as ") {
        let base = base.trim();
        let alias = alias.trim();
        if aliases_to_remove.contains(alias) {
            return None;
        }
        if base.is_empty() || alias.is_empty() {
            return None;
        }
    }
    Some(trimmed.to_owned())
}
/// Heuristically decide whether an imported symbol is a trait (and therefore
/// eligible for `as _` keep-alive treatment).
///
/// A symbol qualifies when it is a well-known trait name, follows trait
/// naming conventions (`…Ext`, `…Trait`), or comes from a trait-ish module
/// path — and starts with an uppercase letter either way.
fn looks_like_trait_import(symbol: &str, full_path: &str) -> bool {
    let symbol_is_common_trait = matches!(
        symbol,
        "Read"
            | "Write" | "BufRead"
            | "Seek" | "AsyncRead"
            | "AsyncWrite"
            | "AsyncBufRead"
            | "AsyncSeek"
            | "Future"
            | "Stream"
            | "Sink" | "Iterator"
            | "IntoIterator"
            | "FromIterator"
            | "ParallelIterator"
            | "IntoParallelIterator"
            | "IntoParallelRefIterator"
            // Kept in sync with the rayon trait detection in this file, which
            // also recognizes `par_iter_mut` receivers.
            | "IntoParallelRefMutIterator"
            | "Serialize"
            | "Deserialize"
            | "Executor"
    );
    let symbol_is_trait_named =
        symbol.ends_with("Ext") || symbol.ends_with("Trait") || symbol_is_common_trait;
    let path_is_trait_named = full_path.contains("::trait::")
        || full_path.contains("::traits::")
        || full_path.contains("::prelude::")
        || full_path.ends_with("::eyre::Context");
    (symbol_is_trait_named || path_is_trait_named)
        && symbol.chars().next().is_some_and(char::is_uppercase)
}
/// Whether `symbol` is referenced anywhere outside `use` trees, either as a
/// path segment or inside a `derive(...)` attribute.
fn symbol_is_referenced_outside_use(ctx: &FileContext, symbol: &str) -> bool {
    let referenced_as_path =
        ctx.source_file.syntax().descendants().filter_map(ast::Path::cast).any(|path| {
            // Mentions inside import trees do not count as uses.
            if path.syntax().ancestors().any(|node| Use::can_cast(node.kind())) {
                return false;
            }
            path.segment()
                .and_then(|segment| segment.name_ref())
                .is_some_and(|name_ref| is_same_ident(name_ref.text().as_str(), symbol))
        });
    if referenced_as_path {
        return true;
    }
    // Derive attributes name traits without path nodes; scan their raw text.
    let derive_symbol_re = Regex::new(&format!(r"\b{}\b", regex::escape(symbol)))
        .expect("Expected operation to succeed.");
    ctx.source_file.syntax().descendants().filter_map(Attr::cast).any(|attr| {
        let text = attr.syntax().text().to_string();
        text.contains("derive") && derive_symbol_re.is_match(&text)
    })
}
/// Compute an IMPORT-003 rewrite of `path` that toggles trait leaves between
/// plain and `as _` keep-alive form.
///
/// Returns the rewritten path plus the set of affected trait symbols, or
/// `None` when nothing changes (glob paths are always skipped).
fn normalize_trait_keep_alive_use_path(
    ctx: &FileContext,
    path: &str,
) -> Option<(String, HashSet<String>)> {
    let has_child_module_declarations = has_non_inline_child_modules(ctx);
    // Glob imports cannot be normalized per-leaf.
    if path.contains('*') {
        return None;
    }
    // Simple (non-braced) paths are handled as a single leaf.
    if let Some((rewritten_leaf, affected_symbols, changed, _trait_key)) =
        normalize_trait_keep_alive_leaf(path.trim(), ctx, "", has_child_module_declarations)
        && changed
    {
        return Some((rewritten_leaf, affected_symbols));
    }
    let (prefix, close, segments) = parse_braced_path_parts_allow_alias(path)?;
    // Drop the last character of the prefix (the opening brace) and any
    // trailing `::` to recover the module path the segments hang off of —
    // see `parse_braced_path_parts_allow_alias` for the prefix shape.
    let module_prefix = prefix[..prefix.len().saturating_sub(1)]
        .trim()
        .strip_suffix("::")
        .unwrap_or(prefix[..prefix.len().saturating_sub(1)].trim())
        .to_owned();
    let (rewritten_segments, changed, affected_symbols) = normalize_trait_keep_alive_segments(
        segments,
        ctx,
        &module_prefix,
        has_child_module_declarations,
    );
    if !changed {
        return None;
    }
    // Reassemble: original prefix + rewritten segments + original close char.
    Some((
        format!("{prefix}{}{}", rewritten_segments.join(", "), &path[close..=close]),
        affected_symbols,
    ))
}
/// Apply keep-alive normalization to each comma-separated segment of a braced
/// use path, threading change/dedup state across all segments.
fn normalize_trait_keep_alive_segments(
    segments: Vec<String>,
    ctx: &FileContext,
    module_prefix: &str,
    has_child_module_declarations: bool,
) -> (Vec<String>, bool, HashSet<String>) {
    let mut rewritten_segments = Vec::new();
    let mut state = TraitKeepAliveNormalizationState::default();
    for segment in segments {
        let trimmed = segment.trim();
        if trimmed.is_empty() {
            continue;
        }
        // One level of nesting (`head::{…}`) is handled explicitly; anything
        // else falls through to the flat-leaf handler.
        if let Some((head, inner)) = parse_single_level_nested_use_segment(trimmed) {
            if let Some(rewritten_segment) = normalize_trait_keep_alive_nested_segment(
                trimmed,
                head,
                inner,
                ctx,
                module_prefix,
                has_child_module_declarations,
                &mut state,
            ) {
                rewritten_segments.push(rewritten_segment);
            }
            continue;
        }
        if let Some(rewritten_segment) = normalize_trait_keep_alive_leaf_segment(
            trimmed,
            ctx,
            module_prefix,
            has_child_module_declarations,
            &mut state,
        ) {
            rewritten_segments.push(rewritten_segment);
        }
    }
    (rewritten_segments, state.changed, state.affected_symbols)
}
/// Normalize the children of a single-level nested segment (`head::{…}`),
/// deduplicating trait imports across the whole use item via `state`.
///
/// Returns the segment to keep, or `None` when every child was removed.
fn normalize_trait_keep_alive_nested_segment(
    original_segment: &str,
    head: &str,
    inner: &str,
    ctx: &FileContext,
    module_prefix: &str,
    has_child_module_declarations: bool,
    state: &mut TraitKeepAliveNormalizationState,
) -> Option<String> {
    let nested_prefix =
        if module_prefix.is_empty() { head.to_owned() } else { format!("{module_prefix}::{head}") };
    let mut rewritten_children = Vec::new();
    let mut changed = false;
    for child in split_top_level_csv(inner) {
        let child_trimmed = child.trim();
        if child_trimmed.is_empty() {
            continue;
        }
        if let Some((rewritten_child, child_symbols, child_changed, trait_key)) =
            normalize_trait_keep_alive_leaf(
                child_trimmed,
                ctx,
                &nested_prefix,
                has_child_module_declarations,
            ) {
            for symbol in child_symbols {
                state.affected_symbols.insert(symbol);
            }
            if child_changed {
                changed = true;
            }
            if let Some(key) = trait_key {
                // Keep the first occurrence of a trait; drop duplicates.
                if state.seen_trait_keys.insert(key) {
                    rewritten_children.push(rewritten_child);
                } else {
                    changed = true;
                }
            } else {
                rewritten_children.push(rewritten_child);
            }
        } else {
            // Non-trait children pass through untouched.
            rewritten_children.push(child_trimmed.to_owned());
        }
    }
    if rewritten_children.is_empty() {
        // Everything inside was removed; drop the whole nested segment.
        state.changed = true;
        return None;
    }
    if !changed {
        // Preserve the original spelling when nothing was altered.
        return Some(original_segment.to_owned());
    }
    state.changed = true;
    let rewritten_segment = format!("{head}::{{{}}}", rewritten_children.join(", "));
    Some(rewritten_segment)
}
/// Normalize a single flat (non-nested) segment of a braced use path.
///
/// Returns the segment to keep (possibly rewritten), or `None` when it is a
/// duplicate trait import that should be dropped.
fn normalize_trait_keep_alive_leaf_segment(
    trimmed: &str,
    ctx: &FileContext,
    module_prefix: &str,
    has_child_module_declarations: bool,
    state: &mut TraitKeepAliveNormalizationState,
) -> Option<String> {
    // Non-trait leaves pass through untouched.
    let Some((rewritten_segment, segment_symbols, segment_changed, trait_key)) =
        normalize_trait_keep_alive_leaf(trimmed, ctx, module_prefix, has_child_module_declarations)
    else {
        return Some(trimmed.to_owned());
    };
    for symbol in segment_symbols {
        state.affected_symbols.insert(symbol);
    }
    if segment_changed {
        state.changed = true;
    }
    if let Some(key) = trait_key {
        // First sighting of this trait keeps the segment; repeats are dropped.
        if state.seen_trait_keys.insert(key) {
            return Some(rewritten_segment);
        }
        state.changed = true;
        return None;
    }
    Some(rewritten_segment)
}
/// Normalize one import leaf: decide whether a trait import should read
/// `Base as _` (keep-alive) or plain `Base`.
///
/// Returns `(rewritten_leaf, affected_symbols, changed, trait_key)`, where
/// `trait_key` identifies the trait+form for duplicate suppression. `None`
/// means the leaf is not a trait import this rule handles.
fn normalize_trait_keep_alive_leaf(
    leaf: &str,
    ctx: &FileContext,
    import_prefix: &str,
    has_child_module_declarations: bool,
) -> Option<(String, HashSet<String>, bool, Option<String>)> {
    if leaf.is_empty() || leaf == "self" || leaf.contains('{') || leaf.contains('}') {
        return None;
    }
    let (base, alias) = split_import_leaf_alias(leaf)?;
    let full_path =
        if import_prefix.is_empty() { base.to_owned() } else { format!("{import_prefix}::{base}") };
    let full_path = full_path.replace(' ', "");
    let symbol = symbol_from_full_import_path(&full_path)?;
    if !looks_like_trait_import(&symbol, &full_path) {
        return None;
    }
    // A named (non-`_`) alias that is actually referenced must be left alone.
    if let Some(alias_name) = alias.as_deref().map(str::trim)
        && alias_name != "_"
        && symbol_is_referenced_outside_use(ctx, alias_name)
    {
        return None;
    }
    // Keep-alive when the trait name is unused (or ambiguous across imports);
    // files with out-of-line child modules are excluded entirely —
    // NOTE(review): presumably because those modules may use the name; confirm.
    let symbol_is_ambiguous = imported_symbol_path_count(ctx, &symbol) > 1;
    let should_keep_alive = (!symbol_is_referenced_outside_use(ctx, &symbol)
        || symbol_is_ambiguous)
        && !has_child_module_declarations;
    let rewritten = if should_keep_alive { format!("{base} as _") } else { base.to_owned() };
    let changed = compact_path_for_match(leaf) != compact_path_for_match(&rewritten);
    let mut symbols = HashSet::new();
    symbols.insert(symbol.clone());
    Some((
        rewritten,
        symbols,
        changed,
        // Key = canonical path plus chosen form, used to drop duplicates.
        Some(format!(
            "{}|{}",
            compact_path_for_match(&full_path),
            if should_keep_alive { "_" } else { "" }
        )),
    ))
}
/// Number of distinct full import paths in this file that bind `symbol`.
/// A count above one means the symbol is ambiguously imported.
fn imported_symbol_path_count(ctx: &FileContext, symbol: &str) -> usize {
    let mut distinct_paths = HashSet::new();
    let use_paths = ctx
        .top_items
        .iter()
        .filter(|item| item.kind == TopKind::Use)
        .filter_map(|item| extract_use_path(ctx, item));
    for use_path in use_paths {
        for full_path in imported_full_paths_from_use_path(&use_path) {
            let binds_symbol = symbol_from_full_import_path(&full_path)
                .is_some_and(|name| is_same_ident(&name, symbol));
            if binds_symbol {
                distinct_paths.insert(full_path);
            }
        }
    }
    distinct_paths.len()
}
/// Split an import leaf into its base path and optional alias.
///
/// `"foo as Bar"` → `("foo", Some("Bar"))`; `"foo"` → `("foo", None)`.
/// Returns `None` for empty leaves or alias forms with an empty side.
fn split_import_leaf_alias(leaf: &str) -> Option<(&str, Option<String>)> {
    let trimmed = leaf.trim();
    if trimmed.is_empty() {
        return None;
    }
    // Split on the *last* ` as ` so any earlier alias text stays in the base.
    match trimmed.rsplit_once(" as ") {
        None => Some((trimmed, None)),
        Some((left, right)) => {
            let (base, alias) = (left.trim(), right.trim());
            if base.is_empty() || alias.is_empty() {
                return None;
            }
            Some((base, Some(alias.to_owned())))
        },
    }
}
/// True when the file declares any child module without an inline body
/// (`mod foo;`), i.e. a module whose contents live in another file.
fn has_non_inline_child_modules(ctx: &FileContext) -> bool {
    ctx.top_items
        .iter()
        .filter(|item| item.kind == TopKind::Mod)
        .any(|item| item.raw.trim_end().ends_with(';'))
}
/// IMPORT-004: free functions and macros must not be imported into scope;
/// qualify their call sites and rewrite the `use` item.
///
/// At most one symbol is fixed per invocation: the loop breaks after the
/// first emitted fix, since the use-item text changes underneath the rest.
/// Returns `true` when the use item was rewritten.
fn apply_import004_free_fn_macro_rule(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    item: &TopItem,
    path: &str,
) -> bool {
    if !path.contains("::") {
        return false;
    }
    let mut fixed = false;
    for symbol in imported_symbols_from_use_path(path) {
        // Only lowercase leaves can be free functions or macros.
        if symbol.is_empty() || !symbol.chars().next().is_some_and(char::is_lowercase) {
            continue;
        }
        let local_fn_defined = is_local_fn_defined(ctx, &symbol);
        let local_macro_defined = is_local_macro_defined(ctx, &symbol);
        let fn_ranges = unqualified_function_call_ranges(ctx, &symbol);
        let macro_ranges = unqualified_macro_call_ranges(ctx, &symbol);
        // A local definition shadows the import; macros imported from
        // std-like roots are exempt from the macro half of the rule.
        let needs_fn_fix = !fn_ranges.is_empty() && !local_fn_defined;
        let needs_macro_fix = !macro_ranges.is_empty()
            && !local_macro_defined
            && !symbol_imported_from_std_like_root(path, &symbol);
        if !(needs_fn_fix || needs_macro_fix) {
            continue;
        }
        let mut fixable = false;
        let mut qualified_symbol_path = String::new();
        let mut use_item_edit = None;
        if let Some((qualified_path, rewritten_use_path)) =
            import004_free_fn_fix_plan(ctx, item, path, &symbol)
        {
            qualified_symbol_path = qualified_path;
            fixable = true;
            if emit_edits {
                use_item_edit = build_use_item_rewrite_edit(
                    ctx,
                    item,
                    rewritten_use_path.as_deref(),
                    "RUST-STYLE-IMPORT-004",
                );
                if use_item_edit.is_none() {
                    fixable = false;
                }
            }
        }
        shared::push_violation(
            violations,
            ctx,
            item.line,
            "RUST-STYLE-IMPORT-004",
            "Do not import free functions or macros into scope; prefer qualified module paths.",
            fixable,
        );
        if !emit_edits || !fixable {
            continue;
        }
        // Qualify every unqualified call site of the symbol.
        for (start, end) in fn_ranges.iter().copied() {
            edits.push(Edit {
                start,
                end,
                replacement: qualified_symbol_path.clone(),
                rule: "RUST-STYLE-IMPORT-004",
            });
        }
        for (start, end) in macro_ranges.iter().copied() {
            edits.push(Edit {
                start,
                end,
                replacement: qualified_symbol_path.clone(),
                rule: "RUST-STYLE-IMPORT-004",
            });
        }
        if let Some(edit) = use_item_edit {
            edits.push(edit);
            fixed = true;
        }
        break;
    }
    fixed
}
/// IMPORT-004 (path shortening): rewrite fully qualified free-function calls
/// to parent-module form (`module::function`) when unambiguous, adding the
/// parent-module import when the access plan requires one.
///
/// Returns the set of lines touched by the emitted edits.
#[allow(clippy::too_many_arguments)]
fn apply_import004_qualified_function_path_rule(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    use_runs: &[Vec<&TopItem>],
    local_module_roots: &HashSet<String>,
    imported_full_paths_by_symbol: &HashMap<String, HashSet<String>>,
    skip_cfg_test_module_paths: bool,
) -> HashSet<usize> {
    let candidates =
        collect_import004_qualified_function_candidates(ctx, skip_cfg_test_module_paths);
    if candidates.is_empty() {
        return HashSet::new();
    }
    let mut touched_lines = HashSet::new();
    let mut pending_import_paths = BTreeSet::new();
    // Maps a planned short access path to the parent module it refers to, so
    // two different modules never claim the same shortened prefix.
    let mut planned_access_paths = HashMap::<String, String>::new();
    for candidate in candidates {
        let Some(module_access_plan) = import004_preferred_module_access_plan(
            ctx,
            None,
            None,
            &candidate.parent_module_path,
            &candidate.module_symbol,
        ) else {
            continue;
        };
        let shortened_path = format!("{}::{}", module_access_plan.access_path, candidate.leaf_text);
        // Already in the preferred form; nothing to do.
        if compact_path_for_match(&shortened_path) == compact_path_for_match(&candidate.full_path) {
            continue;
        }
        // Another candidate claimed this access path for a different parent
        // module; shortening here would introduce ambiguity.
        if let Some(existing_parent) = planned_access_paths.get(&module_access_plan.access_path)
            && compact_path_for_match(existing_parent)
                != compact_path_for_match(&candidate.parent_module_path)
        {
            continue;
        }
        shared::push_violation(
            violations,
            ctx,
            candidate.line,
            "RUST-STYLE-IMPORT-004",
            "Prefer parent-module qualified free-function paths when unambiguous.",
            true,
        );
        if !emit_edits {
            continue;
        }
        edits.push(Edit {
            start: candidate.start,
            end: candidate.end,
            replacement: shortened_path,
            rule: "RUST-STYLE-IMPORT-004",
        });
        touched_lines.insert(candidate.line);
        if module_access_plan.keep_parent_module_import {
            pending_import_paths.insert(candidate.parent_module_path.clone());
            planned_access_paths
                .entry(module_access_plan.access_path)
                .or_insert(candidate.parent_module_path);
        }
    }
    if !emit_edits {
        return touched_lines;
    }
    // Insert any parent-module imports the shortened paths now depend on.
    touched_lines.extend(apply_pending_module_import_path_edits(
        ctx,
        edits,
        use_runs,
        local_module_roots,
        &pending_import_paths,
        imported_full_paths_by_symbol,
        "RUST-STYLE-IMPORT-004",
    ));
    touched_lines
}
/// Whether `symbol` is imported (via `path`) from `std`, `core`, or `alloc`.
fn symbol_imported_from_std_like_root(path: &str, symbol: &str) -> bool {
    for full_path in imported_full_paths_from_use_path(path) {
        if symbol_from_full_import_path(&full_path).as_deref() != Some(symbol) {
            continue;
        }
        let root = full_path.split("::").next();
        if matches!(root, Some("std" | "core" | "alloc")) {
            return true;
        }
    }
    false
}
/// Whether any symbol imported by `path` would trigger an IMPORT-004 fix:
/// a lowercase free function or macro imported into scope and called
/// unqualified, without a local definition shadowing it.
fn use_path_needs_import004_fix(ctx: &FileContext, path: &str) -> bool {
    imported_symbols_from_use_path(path).into_iter().any(|symbol| {
        if symbol.is_empty() || !symbol.chars().next().is_some_and(char::is_lowercase) {
            return false;
        }
        let needs_fn_fix = !unqualified_function_call_ranges(ctx, &symbol).is_empty()
            && !is_local_fn_defined(ctx, &symbol);
        if needs_fn_fix {
            return true;
        }
        // Macros imported from std-like roots are exempt from the rule.
        !unqualified_macro_call_ranges(ctx, &symbol).is_empty()
            && !is_local_macro_defined(ctx, &symbol)
            && !symbol_imported_from_std_like_root(path, &symbol)
    })
}
/// Returns `true` when `symbol` is defined as a free function somewhere in
/// this file.
///
/// The probe is textual: each line (with string literals and line comments
/// stripped) is matched against a `fn` definition pattern. The optional
/// qualifiers follow the Rust grammar order `const? async? unsafe? extern?`
/// — the previous pattern put `async` before `const`, which missed
/// `const async fn` definitions, and did not allow `extern` at all.
fn is_local_fn_defined(ctx: &FileContext, symbol: &str) -> bool {
    let local_fn_def_re = Regex::new(&format!(
        r#"^\s*(?:pub(?:\([^)]*\))?\s+)?(?:const\s+)?(?:async\s+)?(?:unsafe\s+)?(?:extern\s+(?:"[^"]*"\s+)?)?fn\s+{}\b"#,
        regex::escape(symbol)
    ))
    .expect("Expected operation to succeed.");
    ctx.lines.iter().any(|line| {
        let code = shared::strip_string_and_line_comment(line, false).0;
        local_fn_def_re.is_match(&code)
    })
}
/// Returns `true` when `symbol` is defined as a macro (`macro_rules!` or a
/// `macro` item) somewhere in this file, judged by a line-wise textual scan.
fn is_local_macro_defined(ctx: &FileContext, symbol: &str) -> bool {
    let escaped = regex::escape(symbol);
    let pattern = format!(r"^\s*(?:macro_rules!\s*{escaped}\b|macro\s+{escaped}\b)");
    let local_macro_def_re = Regex::new(&pattern).expect("Expected operation to succeed.");
    ctx.lines
        .iter()
        .map(|line| shared::strip_string_and_line_comment(line, false).0)
        .any(|code| local_macro_def_re.is_match(&code))
}
/// Builds the per-symbol lookup maps (import paths, declaration lines, and
/// fully qualified paths) from the analyzed `use` items.
fn collect_imported_symbol_maps(use_item_analyses: &[UseItemAnalysis<'_>]) -> ImportedSymbolMaps {
    let mut maps = ImportedSymbolMaps::default();
    for analysis in use_item_analyses {
        for symbol in &analysis.imported_symbols {
            let paths = maps.symbol_paths.entry(symbol.clone()).or_default();
            paths.insert(analysis.path.clone());
            let lines = maps.symbol_lines.entry(symbol.clone()).or_default();
            lines.insert(analysis.item.line);
        }
        for full_path in &analysis.imported_full_paths {
            let Some(symbol) = symbol_from_full_import_path(full_path) else {
                continue;
            };
            // Record the full path only when the leaf symbol is actually
            // brought into scope, either directly or via a `self` import.
            let directly_imported =
                analysis.imported_symbol_set.contains(normalize_ident(&symbol));
            let self_imported = analysis.imported_self_full_paths.contains(full_path);
            if directly_imported || self_imported {
                maps.full_paths_by_symbol.entry(symbol).or_default().insert(full_path.clone());
            }
        }
    }
    maps
}
/// Flags every `use` line that imports a symbol which is also imported from a
/// different path; such symbols must be referenced via fully qualified paths.
fn push_import004_ambiguous_symbol_violations(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    maps: &ImportedSymbolMaps,
) {
    for (symbol, paths) in &maps.symbol_paths {
        // A symbol is ambiguous only when more than one path provides it.
        if paths.len() < 2 {
            continue;
        }
        let Some(lines) = maps.symbol_lines.get(symbol) else {
            continue;
        };
        let message = format!(
            "Ambiguous imported symbol `{symbol}` is not allowed; use fully qualified paths."
        );
        for line in lines {
            shared::push_violation(violations, ctx, *line, "RUST-STYLE-IMPORT-004", &message, false);
        }
    }
}
/// Collects the normalized names of all named top-level items in the file.
fn collect_local_defined_symbols(ctx: &FileContext) -> HashSet<String> {
    ctx.top_items
        .iter()
        .filter_map(|item| item.name.as_deref())
        .map(|name| normalize_ident(name).to_owned())
        .collect()
}
/// Applies the IMPORT-009 rule: a symbol imported from exactly one path but
/// also used via qualified paths should be used fully qualified consistently.
///
/// Violations are always reported; edits are emitted only when `emit_edits`
/// is set and the plan from `build_import009_plan` is fixable. Returns the
/// lines whose `use` items were rewritten or removed so later passes can
/// skip them. `skip_lines` holds lines already claimed by earlier passes.
fn apply_import009_rules(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    analysis: &ImportAnalysis<'_>,
    skip_lines: &HashSet<usize>,
) -> HashSet<usize> {
    let mut import009_fixed_lines = HashSet::new();
    // Byte spans of `use` items already rewritten, to avoid double edits.
    let mut locked_use_ranges = HashSet::new();
    for (symbol, imported_paths) in &analysis.imported_symbol_maps.full_paths_by_symbol {
        // Skip symbols whose import lines were touched by another rule.
        if analysis
            .imported_symbol_maps
            .symbol_lines
            .get(symbol)
            .is_some_and(|lines| lines.iter().any(|line| skip_lines.contains(line)))
        {
            continue;
        }
        // Only unambiguous, non-locally-defined symbols are in scope.
        if imported_paths.len() != 1 || analysis.local_defined_symbols.contains(symbol) {
            continue;
        }
        let Some(imported_path) = imported_paths.iter().next().cloned() else {
            continue;
        };
        let Some((fixable, type_rewrites, value_rewrites, use_item_plans)) = build_import009_plan(
            ctx,
            &analysis.use_item_analyses,
            symbol,
            &imported_path,
            &analysis.qualified_type_paths_by_symbol,
            &analysis.qualified_value_paths_by_symbol,
        ) else {
            continue;
        };
        // Report the violation on every line that imports the symbol.
        for line in analysis
            .imported_symbol_maps
            .symbol_lines
            .get(symbol)
            .into_iter()
            .flat_map(|lines| lines.iter())
        {
            shared::push_violation(
                violations,
                ctx,
                *line,
                "RUST-STYLE-IMPORT-009",
                &format!(
                    "Ambiguous symbol `{symbol}` should use fully qualified paths consistently."
                ),
                fixable,
            );
        }
        if !emit_edits || !fixable {
            continue;
        }
        // Never rewrite a `use` item that an earlier symbol already claimed.
        if use_item_plans
            .iter()
            .any(|(item, _, _)| locked_use_ranges.contains(&use_item_lock_key(item)))
        {
            continue;
        }
        // Stage the `use` rewrites first; abandon the whole plan if any one
        // of them cannot be built, so edits are applied all-or-nothing.
        let mut planned_use_item_edits = Vec::new();
        for (item, _qualified_symbol_path, rewritten_use_path) in &use_item_plans {
            let Some(edit) = build_use_item_rewrite_edit(
                ctx,
                item,
                rewritten_use_path.as_deref(),
                "RUST-STYLE-IMPORT-009",
            ) else {
                planned_use_item_edits.clear();
                break;
            };
            planned_use_item_edits.push((item.line, use_item_lock_key(item), edit));
        }
        if planned_use_item_edits.is_empty() {
            continue;
        }
        for (start, end, replacement) in type_rewrites {
            edits.push(Edit { start, end, replacement, rule: "RUST-STYLE-IMPORT-009" });
        }
        for (start, end, replacement) in value_rewrites {
            edits.push(Edit { start, end, replacement, rule: "RUST-STYLE-IMPORT-009" });
        }
        for (line, lock_key, edit) in planned_use_item_edits {
            edits.push(edit);
            import009_fixed_lines.insert(line);
            locked_use_ranges.insert(lock_key);
        }
    }
    import009_fixed_lines
}
/// Builds the IMPORT-009 rewrite plan for `symbol`, imported from
/// `imported_path`.
///
/// Returns `None` when the symbol is out of scope for the rule, otherwise
/// `Some((fixable, type_rewrites, value_rewrites, use_item_plans))`, where
/// `fixable == false` marks a violation that must be reported but cannot be
/// auto-fixed.
fn build_import009_plan<'a>(
    ctx: &FileContext,
    use_item_analyses: &'a [UseItemAnalysis<'a>],
    symbol: &str,
    imported_path: &str,
    qualified_type_paths_by_symbol: &HashMap<String, HashSet<String>>,
    qualified_value_paths_by_symbol: &HashMap<String, HashSet<String>>,
) -> Option<Import009Plan<'a>> {
    let equivalent_qualified_paths = import009_equivalent_qualified_paths(ctx, imported_path);
    // Rewrites for unqualified uses of the symbol in type position (including
    // derive attributes) and in value position.
    let mut type_rewrites = unqualified_type_path_rewrites(ctx, symbol, imported_path);
    type_rewrites.extend(unqualified_derive_attr_symbol_rewrites(ctx, symbol, imported_path));
    let value_rewrites = unqualified_value_path_rewrites(ctx, symbol, imported_path);
    let has_unqualified_type_uses = !type_rewrites.is_empty();
    let has_unqualified_value_uses = !value_rewrites.is_empty();
    let has_unqualified_uses = has_unqualified_type_uses || has_unqualified_value_uses;
    let has_any_qualified_type_path =
        qualified_type_paths_by_symbol.get(symbol).is_some_and(|paths| !paths.is_empty());
    // "Conflicting" means qualified under a path NOT equivalent to the import;
    // "same" means qualified under an equivalent path.
    let has_conflicting_qualified_value_path =
        qualified_value_paths_by_symbol.get(symbol).is_some_and(|paths| {
            paths.iter().any(|path| !equivalent_qualified_paths.contains(path.as_str()))
        });
    let has_qualified_value_same_path =
        qualified_value_paths_by_symbol.get(symbol).is_some_and(|paths| {
            paths.iter().any(|path| equivalent_qualified_paths.contains(path.as_str()))
        });
    let has_qualified_type_same_path =
        qualified_type_paths_by_symbol.get(symbol).is_some_and(|paths| {
            paths.iter().any(|path| equivalent_qualified_paths.contains(path.as_str()))
        });
    let value_uses_record_only =
        has_unqualified_value_uses && unqualified_value_paths_are_record_only(ctx, symbol);
    // Special case: value-only uses of a type-like symbol (e.g. enum variants
    // or associated items) that conflict with another qualified value path.
    let allow_value_only_type_like_conflict_rewrites = has_unqualified_value_uses
        && !has_unqualified_type_uses
        && !value_uses_record_only
        && is_import009_type_like_symbol(symbol)
        && has_conflicting_qualified_value_path;
    // Out of scope entirely: no type uses, no qualified type paths, and no
    // value-use pattern that the rule covers.
    if !has_unqualified_type_uses
        && !has_any_qualified_type_path
        && !value_uses_record_only
        && !allow_value_only_type_like_conflict_rewrites
        && !has_qualified_value_same_path
    {
        return None;
    }
    // Union of all qualified paths (type and value position) for the symbol.
    let mut qualified_paths = HashSet::new();
    if let Some(paths) = qualified_type_paths_by_symbol.get(symbol) {
        qualified_paths.extend(paths.iter().cloned());
    }
    if let Some(paths) = qualified_value_paths_by_symbol.get(symbol) {
        qualified_paths.extend(paths.iter().cloned());
    }
    let has_any_qualified_path = !qualified_paths.is_empty();
    let has_qualified_same_path =
        qualified_paths.iter().any(|path| equivalent_qualified_paths.contains(path.as_str()));
    let has_conflicting_qualified_path =
        qualified_paths.iter().any(|path| !equivalent_qualified_paths.contains(path.as_str()));
    let allow_value_only_record_rewrites =
        has_unqualified_value_uses && !has_unqualified_type_uses && value_uses_record_only;
    let allow_value_only_conflicting_qualified_paths = allow_value_only_record_rewrites;
    // NOTE(review): the guard below appears to prevent IMPORT-009 from
    // re-qualifying values that IMPORT-008 would immediately shorten again
    // (a fix cycle); `crate`/`self`/`super` roots are exempt — confirm.
    let imported_root = imported_path.split("::").next().unwrap_or_default();
    let import008_cycle_guard_enabled = !matches!(imported_root, "crate" | "self" | "super");
    let import008_cycle_risk = import008_cycle_guard_enabled
        && !has_conflicting_qualified_path
        && has_unqualified_value_uses
        && has_qualified_type_same_path;
    // With unqualified uses, some qualified use must exist to justify the
    // rewrite; without them, an equivalent qualified path must exist.
    if has_unqualified_uses {
        if !has_any_qualified_path {
            return None;
        }
    } else if !has_qualified_same_path {
        return None;
    }
    // Mixed or inconsistent qualification patterns are reported but not
    // auto-fixed (fixable = false).
    if (!has_unqualified_uses
        && has_conflicting_qualified_path
        && !allow_value_only_conflicting_qualified_paths)
        || (has_unqualified_uses
            && !has_qualified_same_path
            && !has_conflicting_qualified_path
            && !allow_value_only_record_rewrites)
    {
        return Some((false, Vec::new(), Vec::new(), Vec::new()));
    }
    if import008_cycle_risk {
        return Some((false, Vec::new(), Vec::new(), Vec::new()));
    }
    // Plan a rewrite (or removal) for every `use` item importing the symbol;
    // if any single plan fails the whole fix is marked unfixable.
    let mut use_item_plans = Vec::new();
    let mut fixable = true;
    for use_item_analysis in use_item_analyses {
        if !use_item_imports_symbol_path(&use_item_analysis.path, symbol, imported_path) {
            continue;
        }
        let removal_fix_plan = import004_free_fn_fix_plan(
            ctx,
            use_item_analysis.item,
            &use_item_analysis.path,
            symbol,
        );
        let Some((qualified_symbol_path, rewritten_use_path)) = removal_fix_plan else {
            fixable = false;
            break;
        };
        use_item_plans.push((use_item_analysis.item, qualified_symbol_path, rewritten_use_path));
    }
    if use_item_plans.is_empty() {
        fixable = false;
    }
    Some((fixable, type_rewrites, value_rewrites, use_item_plans))
}
fn import009_equivalent_qualified_paths<'a>(
ctx: &FileContext,
imported_path: &'a str,
) -> HashSet<Cow<'a, str>> {
let mut paths = HashSet::from([Cow::Borrowed(imported_path)]);
let Some((parent_module_path, symbol)) = imported_path.rsplit_once("::") else {
return paths;
};
let Some(module_symbol) = parent_module_path.rsplit("::").next() else {
return paths;
};
let module_symbol = normalize_ident(module_symbol);
if module_symbol.is_empty() || matches!(module_symbol, "crate" | "self" | "super") {
return paths;
}
if let Some(module_access_plan) =
import004_preferred_module_access_plan(ctx, None, None, parent_module_path, module_symbol)
{
paths.insert(Cow::Owned(format!("{}::{}", module_access_plan.access_path, symbol)));
}
paths
}
/// Builds the edit that rewrites a `use` item to `rewritten_use_path`, or
/// deletes it entirely when no replacement path is given.
fn build_use_item_rewrite_edit(
    ctx: &FileContext,
    item: &TopItem,
    rewritten_use_path: Option<&str>,
    rule: &'static str,
) -> Option<Edit> {
    let (item_start, item_end) = item_syntax_text_range(ctx, item)?;
    match rewritten_use_path {
        Some(new_use_path) => {
            // Rewrite in place; bail out when the raw text cannot be rewritten.
            let raw = ctx.text.get(item_start..item_end)?;
            let rewritten = rewrite_use_item_with_path(raw, new_use_path)?;
            Some(Edit { start: item_start, end: item_end, replacement: rewritten, rule })
        }
        None => {
            // Prefer deleting whole lines so no blank residue is left behind;
            // otherwise delete exactly the item's own span.
            let (start, end) = full_line_item_delete_range(ctx, item, item_start, item_end)
                .unwrap_or((item_start, item_end));
            Some(Edit { start, end, replacement: String::new(), rule })
        }
    }
}
fn use_item_lock_key(item: &TopItem) -> (usize, usize) {
(item.start_offset, item.end_offset)
}
/// Returns the validated `(start, end)` byte range of `item`, or `None` when
/// the recorded offsets are inconsistent with the file text.
fn item_syntax_text_range(ctx: &FileContext, item: &TopItem) -> Option<(usize, usize)> {
    let valid = item.start_offset <= item.end_offset && item.end_offset <= ctx.text.len();
    valid.then_some((item.start_offset, item.end_offset))
}
/// Widens `item`'s byte range to whole lines when the item is the only
/// non-whitespace content on those lines; otherwise returns `None`.
fn full_line_item_delete_range(
    ctx: &FileContext,
    item: &TopItem,
    item_start: usize,
    item_end: usize,
) -> Option<(usize, usize)> {
    let line_start = shared::offset_from_line(&ctx.line_starts, item.start_line)?;
    // Past-the-end line falls back to the end of the file text.
    let line_end =
        shared::offset_from_line(&ctx.line_starts, item.end_line + 1).unwrap_or(ctx.text.len());
    let prefix_is_blank = ctx.text.get(line_start..item_start)?.trim().is_empty();
    let suffix_is_blank = ctx.text.get(item_end..line_end)?.trim().is_empty();
    (prefix_is_blank && suffix_is_blank).then_some((line_start, line_end))
}
/// Runs the IMPORT-008 passes: shorten qualified paths whose leaf symbol can
/// be imported unambiguously, merge child imports into existing parent-module
/// imports, then materialize any newly required `use` lines.
///
/// Returns the lines touched by IMPORT-008 so later grouping/ordering passes
/// can skip them.
#[allow(clippy::too_many_arguments)]
fn apply_import008_rules(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    analysis: &ImportAnalysis<'_>,
) -> HashSet<usize> {
    let import008_candidates = collect_import008_candidates(ctx);
    let import008_use_recovery_candidates = collect_import008_use_recovery_candidates(
        &analysis.use_item_analyses,
        &analysis.imported_symbol_maps.full_paths_by_symbol,
    );
    // Symbols that are ambiguous, locally defined, or would conflict with
    // IMPORT-009 must not be shortened.
    let blocked_symbols = build_import008_blocked_symbols(
        &import008_candidates,
        &analysis.imported_symbol_maps.full_paths_by_symbol,
        &analysis.local_defined_symbols,
        &analysis.qualified_type_paths_by_symbol,
        &analysis.qualified_value_paths_by_symbol,
    );
    // `use` paths the shortening edits require; added in one batch below.
    let mut pending_import_paths = BTreeSet::new();
    let mut import008_group_skip_lines = HashSet::new();
    apply_import008_shorten_candidates(
        ctx,
        violations,
        edits,
        emit_edits,
        &import008_candidates,
        &blocked_symbols,
        &analysis.imported_symbol_maps.full_paths_by_symbol,
        &mut pending_import_paths,
        &mut import008_group_skip_lines,
    );
    apply_import008_use_recovery_edits(
        ctx,
        violations,
        edits,
        emit_edits,
        &import008_use_recovery_candidates,
        &blocked_symbols,
        &analysis.local_defined_symbols,
        &analysis.imported_symbol_maps.full_paths_by_symbol,
        &mut pending_import_paths,
        &mut import008_group_skip_lines,
    );
    if emit_edits {
        // Insert the queued `use` lines that the rewrites above rely on.
        import008_group_skip_lines.extend(apply_pending_module_import_path_edits(
            ctx,
            edits,
            &analysis.use_runs,
            &analysis.local_module_roots,
            &pending_import_paths,
            &analysis.imported_symbol_maps.full_paths_by_symbol,
            "RUST-STYLE-IMPORT-008",
        ));
    }
    import008_group_skip_lines
}
/// Determines which symbols must not receive IMPORT-008 shortening edits:
/// symbols resolvable to more than one path, symbols defined locally, and
/// type-like symbols whose shortening would conflict with IMPORT-009
/// value-path consistency.
fn build_import008_blocked_symbols(
    import008_candidates: &[Import008Candidate],
    imported_full_paths_by_symbol: &HashMap<String, HashSet<String>>,
    local_defined_symbols: &HashSet<String>,
    qualified_type_paths_by_symbol: &HashMap<String, HashSet<String>>,
    qualified_value_paths_by_symbol: &HashMap<String, HashSet<String>>,
) -> HashSet<String> {
    // Group candidate import paths per symbol, remembering which symbols have
    // at least one candidate that is not a value receiver.
    let mut candidate_paths_by_symbol: HashMap<String, HashSet<String>> = HashMap::new();
    let mut non_value_receiver_candidate_symbols = HashSet::new();
    for candidate in import008_candidates {
        let entry = candidate_paths_by_symbol.entry(candidate.symbol.clone()).or_default();
        entry.insert(candidate.import_path.clone());
        if !matches!(candidate.kind, Import008CandidateKind::ValueReceiver) {
            non_value_receiver_candidate_symbols.insert(candidate.symbol.clone());
        }
    }
    let mut blocked_symbols = HashSet::new();
    for (symbol, candidate_paths) in &candidate_paths_by_symbol {
        // Union every known path for this symbol: existing imports, the
        // candidates themselves, and already-qualified type/value references.
        let mut all_paths = imported_full_paths_by_symbol.get(symbol).cloned().unwrap_or_default();
        all_paths.extend(candidate_paths.iter().cloned());
        for qualified_paths in [
            qualified_type_paths_by_symbol.get(symbol),
            qualified_value_paths_by_symbol.get(symbol),
        ]
        .into_iter()
        .flatten()
        {
            all_paths.extend(qualified_paths.iter().cloned());
        }
        let import009_consistency_conflict = non_value_receiver_candidate_symbols
            .contains(symbol)
            && is_import009_type_like_symbol(symbol)
            && qualified_value_paths_by_symbol.get(symbol).is_some_and(|value_paths| {
                candidate_paths.iter().any(|path| value_paths.contains(path))
            });
        let is_blocked = all_paths.len() > 1
            || local_defined_symbols.contains(symbol)
            || import009_consistency_conflict;
        if is_blocked {
            blocked_symbols.insert(symbol.clone());
        }
    }
    blocked_symbols
}
/// Emits IMPORT-008 violations and path-shortening edits for every candidate
/// whose symbol is not blocked, queueing any import that must be added.
#[allow(clippy::too_many_arguments)]
fn apply_import008_shorten_candidates(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    import008_candidates: &[Import008Candidate],
    blocked_symbols: &HashSet<String>,
    imported_full_paths_by_symbol: &HashMap<String, HashSet<String>>,
    pending_import_paths: &mut BTreeSet<String>,
    import008_group_skip_lines: &mut HashSet<usize>,
) {
    let unblocked = import008_candidates
        .iter()
        .filter(|candidate| !blocked_symbols.contains(&candidate.symbol));
    for candidate in unblocked {
        shared::push_violation(
            violations,
            ctx,
            candidate.line,
            "RUST-STYLE-IMPORT-008",
            "Prefer importing non-function, non-macro symbols and using short paths when unambiguous.",
            true,
        );
        if !emit_edits {
            continue;
        }
        edits.push(Edit {
            start: candidate.start,
            end: candidate.end,
            replacement: candidate.replacement.clone(),
            rule: "RUST-STYLE-IMPORT-008",
        });
        import008_group_skip_lines.insert(candidate.line);
        // Queue a new `use` line only when this exact path is not yet imported.
        let already_imported = imported_full_paths_by_symbol
            .get(&candidate.symbol)
            .is_some_and(|paths| paths.contains(&candidate.import_path));
        if !already_imported {
            pending_import_paths.insert(candidate.import_path.clone());
        }
    }
}
/// Emits IMPORT-008 violations and edits that delete a child `use` item whose
/// path can instead be reached through an existing parent-module import,
/// queueing the merged import path when it is not already present.
#[allow(clippy::too_many_arguments)]
fn apply_import008_use_recovery_edits(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    import008_use_recovery_candidates: &[Import008UseRecoveryCandidate],
    blocked_symbols: &HashSet<String>,
    local_defined_symbols: &HashSet<String>,
    imported_full_paths_by_symbol: &HashMap<String, HashSet<String>>,
    pending_import_paths: &mut BTreeSet<String>,
    import008_group_skip_lines: &mut HashSet<usize>,
) {
    for candidate in import008_use_recovery_candidates {
        // Blocked or locally defined symbols must keep their explicit import.
        if blocked_symbols.contains(&candidate.symbol)
            || local_defined_symbols.contains(&candidate.symbol)
        {
            continue;
        }
        shared::push_violation(
            violations,
            ctx,
            candidate.line,
            "RUST-STYLE-IMPORT-008",
            "Prefer merging child imports into existing parent module imports.",
            true,
        );
        if !emit_edits {
            continue;
        }
        // Delete the candidate's whole line range, then queue the merged path.
        if let (Some(start), Some(end)) = (
            shared::offset_from_line(&ctx.line_starts, candidate.start_line),
            shared::offset_from_line(&ctx.line_starts, candidate.end_line + 1),
        ) {
            edits.push(Edit {
                start,
                end,
                replacement: String::new(),
                rule: "RUST-STYLE-IMPORT-008",
            });
            import008_group_skip_lines.insert(candidate.line);
            // Compare compacted paths so formatting differences don't cause
            // a duplicate import to be queued.
            let target_compact = compact_path_for_match(&candidate.import_path);
            let already_imported =
                imported_full_paths_by_symbol.get(&candidate.symbol).is_some_and(|paths| {
                    paths.iter().any(|path| compact_path_for_match(path) == target_compact)
                });
            if !already_imported {
                pending_import_paths.insert(candidate.import_path.clone());
            }
        }
    }
}
/// Finds `use` items of the form `prefix::Symbol` where `prefix` is a
/// single-segment module that is itself imported under a longer path, so the
/// child import can be merged into the parent-module import.
fn collect_import008_use_recovery_candidates(
    use_item_analyses: &[UseItemAnalysis<'_>],
    imported_full_paths_by_symbol: &HashMap<String, HashSet<String>>,
) -> Vec<Import008UseRecoveryCandidate> {
    let mut candidates = Vec::new();
    for use_item_analysis in use_item_analyses {
        let Some((prefix, symbol)) = simple_import_prefix_symbol(&use_item_analysis.path) else {
            continue;
        };
        // Only single-segment prefixes are candidates for merging.
        if prefix.contains("::") {
            continue;
        }
        // Std-like and keyword roots never need recovery.
        if matches!(prefix.as_str(), "std" | "core" | "alloc" | "crate" | "self" | "super" | "Self")
        {
            continue;
        }
        let root = normalize_ident(&prefix).to_owned();
        let symbol_normalized = normalize_ident(&symbol).to_owned();
        // The prefix module must itself be imported under exactly one path.
        let Some(root_full_paths) = imported_full_paths_by_symbol.get(&root) else {
            continue;
        };
        if root_full_paths.len() != 1 {
            continue;
        }
        let Some(root_full) = root_full_paths.iter().next() else {
            continue;
        };
        // If the module's import path is already just the prefix, there is
        // nothing to merge.
        let root_full_compact = compact_path_for_match(root_full);
        let prefix_compact = compact_path_for_match(&prefix);
        if root_full_compact == prefix_compact {
            continue;
        }
        let import_path = format!("{root_full}::{symbol}");
        // Skip when the symbol is already imported from some third,
        // unrelated path — merging would introduce ambiguity.
        let current_compact = compact_path_for_match(&use_item_analysis.path);
        let import_path_compact = compact_path_for_match(&import_path);
        if let Some(existing_symbol_paths) = imported_full_paths_by_symbol.get(&symbol_normalized)
            && existing_symbol_paths.iter().any(|existing| {
                let existing_compact = compact_path_for_match(existing);
                existing_compact != current_compact && existing_compact != import_path_compact
            }) {
            continue;
        }
        candidates.push(Import008UseRecoveryCandidate {
            line: use_item_analysis.item.line,
            start_line: use_item_analysis.item.start_line,
            end_line: use_item_analysis.item.end_line,
            symbol: symbol_normalized,
            import_path,
        });
    }
    candidates
}
/// Validates group ordering, blank-line separation, and header comments
/// between consecutive `use` items within each run of imports.
fn push_import_group_order_spacing_violations(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    use_runs: &[Vec<&TopItem>],
    fixable_import_group_lines: &HashSet<usize>,
    local_module_roots: &HashSet<String>,
) {
    for run in use_runs {
        for pair in run.windows(2) {
            let prev = pair[0];
            let curr = pair[1];
            let Some(prev_path) = extract_use_path(ctx, prev) else {
                continue;
            };
            let Some(curr_path) = extract_use_path(ctx, curr) else {
                continue;
            };
            let prev_origin = use_origin(&prev_path, local_module_roots);
            let curr_origin = use_origin(&curr_path, local_module_roots);
            let is_fixable = fixable_import_group_lines.contains(&curr.line);
            let between = separator_lines(ctx, prev, curr);
            let has_blank = between.iter().any(|line| line.trim().is_empty());
            let has_header_comment =
                between.iter().any(|line| line.trim_start().starts_with("//"));
            // Each check fires independently; all that apply are reported.
            let checks: [(bool, &'static str, &'static str); 4] = [
                (
                    curr_origin < prev_origin,
                    "RUST-STYLE-IMPORT-001",
                    "Import groups must be ordered: std, third-party, self/workspace.",
                ),
                (
                    curr_origin != prev_origin && !has_blank,
                    "RUST-STYLE-IMPORT-002",
                    "Separate import groups with one blank line.",
                ),
                (
                    curr_origin == prev_origin && has_blank,
                    "RUST-STYLE-IMPORT-002",
                    "Do not place blank lines inside an import group.",
                ),
                (
                    has_header_comment,
                    "RUST-STYLE-IMPORT-002",
                    "Do not use header comments for import groups.",
                ),
            ];
            for (violated, rule, message) in checks {
                if violated {
                    shared::push_violation(violations, ctx, curr.line, rule, message, is_fixable);
                }
            }
        }
    }
}
/// Applies IMPORT-011: entries inside `#[derive(...)]` must be grouped like
/// imports (std/core/alloc, third-party, workspace) and alphabetized within
/// each group. Attributes overlapping already-planned IMPORT-008/009 edits
/// are left untouched to avoid conflicting rewrites.
fn apply_import011_derive_order_rule(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
    local_module_roots: &HashSet<String>,
    local_defined_symbols: &HashSet<String>,
    imported_full_paths_by_symbol: &HashMap<String, HashSet<String>>,
) {
    let locked_ranges = collect_locked_derive_attr_ranges(edits);
    for candidate in collect_import011_candidates(
        ctx,
        local_module_roots,
        local_defined_symbols,
        imported_full_paths_by_symbol,
    ) {
        if candidate.entries.len() < 2
            || locked_ranges
                .iter()
                .any(|(start, end)| ranges_overlap(candidate.start, candidate.end, *start, *end))
        {
            continue;
        }
        // Sort by origin group, then name; original index breaks ties so the
        // sort is stable for duplicates.
        let mut ordered = candidate.entries.iter().collect::<Vec<_>>();
        ordered.sort_by(|left, right| {
            left.origin
                .cmp(&right.origin)
                .then(left.display_text.cmp(&right.display_text))
                .then(left.original_index.cmp(&right.original_index))
        });
        // Already in the required order: nothing to report.
        if ordered.iter().enumerate().all(|(idx, entry)| entry.original_index == idx) {
            continue;
        }
        shared::push_violation(
            violations,
            ctx,
            candidate.line,
            "RUST-STYLE-IMPORT-011",
            "Order `#[derive(...)]` entries like imports: std/core/alloc, third-party, then workspace; alphabetize within each group.",
            true,
        );
        if !emit_edits {
            continue;
        }
        // Rebuild the whole attribute text, preserving inner/outer form.
        let prefix = if candidate.is_inner { "#![derive(" } else { "#[derive(" };
        let replacement = format!(
            "{prefix}{})]",
            ordered.iter().map(|entry| entry.display_text.as_str()).collect::<Vec<_>>().join(", ")
        );
        edits.push(Edit {
            start: candidate.start,
            end: candidate.end,
            replacement,
            rule: "RUST-STYLE-IMPORT-011",
        });
    }
}
/// Walks the syntax tree for `#[derive(...)]` attributes and collects those
/// whose entries can all be resolved to an import-group origin; attributes
/// with any unresolvable entry are skipped entirely.
fn collect_import011_candidates(
    ctx: &FileContext,
    local_module_roots: &HashSet<String>,
    local_defined_symbols: &HashSet<String>,
    imported_full_paths_by_symbol: &HashMap<String, HashSet<String>>,
) -> Vec<DeriveOrderCandidate> {
    let mut candidates = Vec::new();
    for attr in ctx.source_file.syntax().descendants().filter_map(Attr::cast) {
        let Some(meta) = attr.meta() else {
            continue;
        };
        let Some(meta_path) = meta.path() else {
            continue;
        };
        let Some(meta_name) = meta_path.segment().and_then(|segment| segment.name_ref()) else {
            continue;
        };
        if meta_name.text() != "derive" {
            continue;
        }
        let Some(token_tree) = meta.token_tree() else {
            continue;
        };
        // Strip the surrounding parentheses from the token tree text.
        let tree_text = token_tree.syntax().text().to_string();
        let body = tree_text
            .strip_prefix('(')
            .and_then(|value| value.strip_suffix(')'))
            .unwrap_or_default();
        if body.trim().is_empty() {
            continue;
        }
        // Resolve each comma-separated entry to an origin; one failure
        // disqualifies the whole attribute.
        let mut entries = Vec::new();
        let mut resolvable = true;
        for (idx, segment) in split_top_level_csv(body).into_iter().enumerate() {
            let display_text = compact_path_for_match(segment.trim());
            if display_text.is_empty() {
                resolvable = false;
                break;
            }
            let Some(origin) = derive_entry_origin(
                &display_text,
                local_module_roots,
                local_defined_symbols,
                imported_full_paths_by_symbol,
            ) else {
                resolvable = false;
                break;
            };
            entries.push(DeriveOrderEntry { display_text, origin, original_index: idx });
        }
        if !resolvable || entries.len() < 2 {
            continue;
        }
        let range = attr.syntax().text_range();
        let start = usize::from(range.start());
        let end = usize::from(range.end());
        if start >= end {
            continue;
        }
        candidates.push(DeriveOrderCandidate {
            line: shared::line_from_offset(&ctx.line_starts, start),
            start,
            end,
            // Inner attributes (`#![derive(...)]`) keep their form on rewrite.
            is_inner: attr.syntax().text().to_string().starts_with("#!["),
            entries,
        });
    }
    candidates
}
/// Resolves the import-group origin of one `#[derive(...)]` entry, or `None`
/// when the entry cannot be resolved to a single import path.
fn derive_entry_origin(
    display_text: &str,
    local_module_roots: &HashSet<String>,
    local_defined_symbols: &HashSet<String>,
    imported_full_paths_by_symbol: &HashMap<String, HashSet<String>>,
) -> Option<usize> {
    // Fully qualified entries carry their own origin information.
    if let Some((root, _rest)) = display_text.split_once("::") {
        if is_non_importable_root(root) {
            return None;
        }
        return Some(use_origin(display_text, local_module_roots));
    }
    let symbol = normalize_ident(display_text);
    // Built-in derives always sort with the std group.
    if is_std_builtin_derive(symbol) {
        return Some(0);
    }
    if local_defined_symbols.contains(symbol) {
        return None;
    }
    // A bare name resolves only when exactly one import supplies it.
    let imported_paths = imported_full_paths_by_symbol.get(symbol)?;
    if imported_paths.len() == 1 {
        imported_paths.iter().next().map(|path| use_origin(path, local_module_roots))
    } else {
        None
    }
}
/// Returns `true` when `symbol` names one of the standard-prelude derives.
fn is_std_builtin_derive(symbol: &str) -> bool {
    const BUILTIN_DERIVES: [&str; 9] = [
        "Clone", "Copy", "Debug", "Default", "Eq", "Hash", "Ord", "PartialEq", "PartialOrd",
    ];
    BUILTIN_DERIVES.contains(&normalize_ident(symbol))
}
/// Collects byte ranges already claimed by IMPORT-008/009 edits so that
/// derive-reordering edits do not overlap them.
fn collect_locked_derive_attr_ranges(edits: &[Edit]) -> Vec<(usize, usize)> {
    let mut locked = Vec::new();
    for edit in edits {
        if edit.rule == "RUST-STYLE-IMPORT-008" || edit.rule == "RUST-STYLE-IMPORT-009" {
            locked.push((edit.start, edit.end));
        }
    }
    locked
}
/// Returns `true` when the half-open byte ranges `[start, end)` and
/// `[other_start, other_end)` intersect.
fn ranges_overlap(start: usize, end: usize, other_start: usize, other_end: usize) -> bool {
    // Equivalent (by De Morgan) to `start < other_end && other_start < end`.
    let disjoint = end <= other_start || other_end <= start;
    !disjoint
}
fn collect_import004_qualified_function_candidates(
ctx: &FileContext,
skip_cfg_test_module_paths: bool,
) -> Vec<Import004QualifiedFunctionCandidate> {
let mut candidates = Vec::new();
let mut seen_ranges = HashSet::new();
collect_import004_qualified_function_call_candidates(
ctx,
skip_cfg_test_module_paths,
&mut candidates,
&mut seen_ranges,
);
collect_import004_qualified_function_macro_candidates(
ctx,
skip_cfg_test_module_paths,
&mut candidates,
&mut seen_ranges,
);
candidates
}
/// Collects IMPORT-004 candidates from qualified call expressions
/// (`a::b::f(...)`) in the syntax tree, skipping paths inside `#[cfg(test)]`
/// modules when requested.
fn collect_import004_qualified_function_call_candidates(
    ctx: &FileContext,
    skip_cfg_test_module_paths: bool,
    candidates: &mut Vec<Import004QualifiedFunctionCandidate>,
    seen_ranges: &mut HashSet<(usize, usize)>,
) {
    for call_expr in ctx.source_file.syntax().descendants().filter_map(CallExpr::cast) {
        let Some(expr) = call_expr.expr() else {
            continue;
        };
        // Only direct path calls qualify (not method calls or closures).
        let Some(path_expr) = PathExpr::cast(expr.syntax().clone()) else {
            continue;
        };
        let Some(path) = path_expr.path() else {
            continue;
        };
        // The path must actually be qualified and not a sub-path of a longer
        // qualifier already handled elsewhere.
        if path.qualifier().is_none()
            || path_is_qualifier_subpath(&path)
            || (skip_cfg_test_module_paths && is_inside_cfg_test_module(&path))
        {
            continue;
        }
        let mut segments = Vec::new();
        if !collect_path_segment_texts(&path, &mut segments) {
            continue;
        }
        let Some(leaf_text) = path.segment().map(|segment| segment.syntax().text().to_string())
        else {
            continue;
        };
        // Final validation (segment count, lowercase leaf/module) and
        // deduplication happen in the shared push helper.
        let range = path.syntax().text_range();
        import004_push_qualified_function_candidate(
            ctx,
            candidates,
            seen_ranges,
            &segments,
            usize::from(range.start()),
            usize::from(range.end()),
            leaf_text,
        );
    }
}
/// Collects IMPORT-004 candidates from qualified paths that appear inside
/// macro token trees, found via regex since the tree has no parsed structure.
fn collect_import004_qualified_function_macro_candidates(
    ctx: &FileContext,
    skip_cfg_test_module_paths: bool,
    candidates: &mut Vec<Import004QualifiedFunctionCandidate>,
    seen_ranges: &mut HashSet<(usize, usize)>,
) {
    for macro_call in ctx.source_file.syntax().descendants().filter_map(MacroCall::cast) {
        if skip_cfg_test_module_paths && syntax_is_inside_cfg_test_module(macro_call.syntax()) {
            continue;
        }
        let Some(token_tree) = macro_call.token_tree() else {
            continue;
        };
        // Offsets from the regex are relative to the token tree text; keep
        // the tree's start so they can be mapped back to file offsets.
        let tree_text = token_tree.syntax().text().to_string();
        let tree_start = usize::from(token_tree.syntax().text_range().start());
        for found in qualified_macro_path_regex().find_iter(&tree_text) {
            // The match must look like a function call site and must not be
            // preceded by a further qualifier.
            if !macro_symbol_has_import004_function_follow(&tree_text, found.end())
                || !macro_symbol_is_import009_unqualified(&tree_text, found.start())
            {
                continue;
            }
            let segments = found
                .as_str()
                .split("::")
                .map(str::trim)
                .filter(|segment| !segment.is_empty())
                .map(ToOwned::to_owned)
                .collect::<Vec<_>>();
            let Some(leaf_text) = segments.last().cloned() else {
                continue;
            };
            import004_push_qualified_function_candidate(
                ctx,
                candidates,
                seen_ranges,
                &segments,
                tree_start + found.start(),
                tree_start + found.end(),
                leaf_text,
            );
        }
    }
}
/// Validates and records one IMPORT-004 candidate: the path needs at least
/// three segments, a lowercase leaf, and a lowercase parent-module segment.
fn import004_push_qualified_function_candidate(
    ctx: &FileContext,
    candidates: &mut Vec<Import004QualifiedFunctionCandidate>,
    seen_ranges: &mut HashSet<(usize, usize)>,
    segments: &[String],
    start: usize,
    end: usize,
    leaf_text: String,
) {
    // Cheap shape checks first; only then claim the range for deduplication.
    if segments.len() < 3 || start >= end {
        return;
    }
    if !seen_ranges.insert((start, end)) {
        return;
    }
    let starts_lowercase = |name: &str| name.chars().next().is_some_and(char::is_lowercase);
    let symbol = normalize_ident(&leaf_text).to_owned();
    if symbol.is_empty() || !starts_lowercase(&symbol) {
        return;
    }
    let parent_module_path = segments[..segments.len() - 1].join("::");
    let Some(raw_module_symbol) = parent_module_path.rsplit("::").next() else {
        return;
    };
    let module_symbol = normalize_ident(raw_module_symbol).to_owned();
    if module_symbol.is_empty() || !starts_lowercase(&module_symbol) {
        return;
    }
    candidates.push(Import004QualifiedFunctionCandidate {
        line: shared::line_from_offset(&ctx.line_starts, start),
        start,
        end,
        full_path: segments.join("::"),
        parent_module_path,
        module_symbol,
        leaf_text,
    });
}
fn collect_import008_candidates(ctx: &FileContext) -> Vec<Import008Candidate> {
let Ok(derive_path_re) = Regex::new(
r"(?:[A-Za-z_][A-Za-z0-9_]*|r#[A-Za-z_][A-Za-z0-9_]*)\s*(?:::\s*(?:[A-Za-z_][A-Za-z0-9_]*|r#[A-Za-z_][A-Za-z0-9_]*)\s*)+",
) else {
return Vec::new();
};
let mut candidates = Vec::new();
let mut seen_ranges = HashSet::new();
collect_import008_from_paths(ctx, &mut candidates, &mut seen_ranges);
collect_import008_from_value_receivers(ctx, &mut candidates, &mut seen_ranges);
collect_import008_from_macro_calls(ctx, &mut candidates, &mut seen_ranges);
collect_import008_from_derive_attrs(ctx, &derive_path_re, &mut candidates, &mut seen_ranges);
candidates
}
/// Collects IMPORT-008 candidates from qualified type paths (`a::b::Type`)
/// that could be shortened to an imported leaf symbol.
fn collect_import008_from_paths(
    ctx: &FileContext,
    candidates: &mut Vec<Import008Candidate>,
    seen_ranges: &mut HashSet<(usize, usize)>,
) {
    for path_type in ctx.source_file.syntax().descendants().filter_map(PathType::cast) {
        let Some(candidate) = path_type.path().and_then(|path| {
            classify_type_path_candidate(ctx, path, PathQualificationRequirement::Qualified)
        }) else {
            continue;
        };
        let mut segments = Vec::new();
        if !collect_path_segment_texts(&candidate.path, &mut segments) {
            continue;
        }
        if segments.len() < 2 {
            continue;
        }
        // Keyword leaves can never be imported as symbols.
        let symbol = normalize_ident(segments[segments.len() - 1].as_str()).to_owned();
        if matches!(symbol.as_str(), "" | "self" | "super" | "crate" | "Self") {
            continue;
        }
        let root = segments[0].as_str();
        if is_non_importable_root(root) {
            continue;
        }
        // `fmt::Result` / `result::Result` must stay qualified to avoid
        // clashing with the prelude `Result`.
        let import_path = segments.join("::");
        if matches!(
            import_path.as_str(),
            "std::fmt::Result"
                | "core::fmt::Result"
                | "std::result::Result"
                | "core::result::Result"
        ) {
            continue;
        }
        // The replacement is the final segment text (keeping generics etc.).
        let replacement =
            candidate.path.segment().map(|segment| segment.syntax().text().to_string());
        let Some(replacement) = replacement else {
            continue;
        };
        let range = candidate.path.syntax().text_range();
        let start = usize::from(range.start());
        let end = usize::from(range.end());
        if start >= end || !seen_ranges.insert((start, end)) {
            continue;
        }
        let line = shared::line_from_offset(&ctx.line_starts, start);
        candidates.push(Import008Candidate {
            line,
            start,
            end,
            kind: Import008CandidateKind::TypePath,
            symbol,
            import_path,
            replacement,
        });
    }
}
/// Collects IMPORT-008 candidates from qualified value paths whose receiver
/// is an associated-item holder (e.g. `a::b::Type::method` shortened to
/// `Type::method` after importing `a::b::Type`).
fn collect_import008_from_value_receivers(
    ctx: &FileContext,
    candidates: &mut Vec<Import008Candidate>,
    seen_ranges: &mut HashSet<(usize, usize)>,
) {
    for path in ctx.source_file.syntax().descendants().filter_map(ast::Path::cast) {
        let Some(candidate) = classify_value_path_candidate(
            ctx,
            path,
            PathQualificationRequirement::Qualified,
            false,
            true,
        ) else {
            continue;
        };
        let mut segments = Vec::new();
        if !collect_path_segment_texts(&candidate.path, &mut segments) {
            continue;
        }
        // The symbol to import is the receiver (next-to-last segment), not
        // the associated item itself.
        let Some((symbol, import_path)) = qualified_associated_receiver_symbol_path(&segments)
        else {
            continue;
        };
        let Some(receiver_leaf) = segments.get(segments.len().saturating_sub(2)) else {
            continue;
        };
        let Some(leaf) = segments.last() else {
            continue;
        };
        if matches!(symbol.as_str(), "" | "self" | "super" | "crate" | "Self") {
            continue;
        }
        let root = segments[0].as_str();
        if is_non_importable_root(root) {
            continue;
        }
        // Shorten to `Receiver::item` while importing the receiver's path.
        let replacement = format!("{receiver_leaf}::{leaf}");
        let range = candidate.path.syntax().text_range();
        let start = usize::from(range.start());
        let end = usize::from(range.end());
        if start >= end || !seen_ranges.insert((start, end)) {
            continue;
        }
        let line = shared::line_from_offset(&ctx.line_starts, start);
        candidates.push(Import008Candidate {
            line,
            start,
            end,
            kind: Import008CandidateKind::ValueReceiver,
            symbol,
            import_path,
            replacement,
        });
    }
}
/// Returns the lazily compiled regex matching `::`-separated path chains
/// inside macro token trees (plain or raw identifiers; `crate`/`self`/`super`
/// roots allowed).
fn qualified_macro_path_regex() -> &'static Regex {
    static RE: OnceLock<Regex> = OnceLock::new();
    RE.get_or_init(|| {
        let pattern = r"(?:\b(?:crate|self|super|[A-Za-z_][A-Za-z0-9_]*|r#[A-Za-z_][A-Za-z0-9_]*)(?:\s*::\s*(?:[A-Za-z_][A-Za-z0-9_]*|r#[A-Za-z_][A-Za-z0-9_]*)\s*)+)";
        Regex::new(pattern).expect("Compile qualified macro path regex.")
    })
}
/// Scans a macro token tree's text for qualified paths immediately followed
/// by `{` (record-literal shape, e.g. `a::b::Type { .. }`) and records
/// IMPORT008 value-receiver candidates for them.
///
/// `tree_text` is the token tree's text and `tree_start` its byte offset in
/// the file; matching is textual because token-tree contents are not parsed
/// as expressions here.
fn collect_import008_from_macro_record_receivers(
    ctx: &FileContext,
    candidates: &mut Vec<Import008Candidate>,
    seen_ranges: &mut HashSet<(usize, usize)>,
    tree_text: &str,
    tree_start: usize,
) {
    // Cheap pre-filter: a hit needs both a path separator and a brace.
    if !tree_text.contains("::") || !tree_text.contains('{') {
        return;
    }
    for found in qualified_macro_path_regex().find_iter(tree_text) {
        // Must be followed by `{` and not preceded by path/identifier
        // characters (i.e. the match is the whole path).
        if !macro_symbol_has_record_follow(tree_text, found.end())
            || !macro_symbol_is_import009_unqualified(tree_text, found.start())
        {
            continue;
        }
        let segments = found
            .as_str()
            .split("::")
            .map(str::trim)
            .filter(|segment| !segment.is_empty())
            .map(ToOwned::to_owned)
            .collect::<Vec<_>>();
        if segments.len() < 3 {
            continue;
        }
        let root = segments[0].as_str();
        if is_non_importable_root(root) {
            continue;
        }
        // Second-to-last segment is the receiver; it must look like a type
        // (leading ASCII uppercase after raw-ident normalization).
        let receiver_leaf = segments[segments.len() - 2].clone();
        let symbol = normalize_ident(&receiver_leaf).to_owned();
        if !is_import009_type_like_symbol(&symbol) {
            continue;
        }
        let Some(leaf) = segments.last().cloned() else {
            continue;
        };
        let import_path = segments[..segments.len() - 1].join("::");
        let replacement = format!("{receiver_leaf}::{leaf}");
        // Offsets are token-tree-relative; shift to file offsets.
        let start = tree_start + found.start();
        let end = tree_start + found.end();
        if start >= end || !seen_ranges.insert((start, end)) {
            continue;
        }
        let line = shared::line_from_offset(&ctx.line_starts, start);
        candidates.push(Import008Candidate {
            line,
            start,
            end,
            kind: Import008CandidateKind::ValueReceiver,
            symbol,
            import_path,
            replacement,
        });
    }
}
/// Collects IMPORT008 candidates from qualified macro invocations whose
/// macro name matches its parent module name (e.g. `a::foo::foo!(..)`),
/// recording a `foo::foo` replacement plus the module import path. Also
/// delegates record-receiver scanning for each macro's token tree.
fn collect_import008_from_macro_calls(
    ctx: &FileContext,
    candidates: &mut Vec<Import008Candidate>,
    seen_ranges: &mut HashSet<(usize, usize)>,
) {
    for macro_call in ctx.source_file.syntax().descendants().filter_map(MacroCall::cast) {
        // Code under `cfg(test)` modules is exempt from this rule.
        if syntax_is_inside_cfg_test_module(macro_call.syntax()) {
            continue;
        }
        // Token-tree contents are scanned textually for record receivers.
        if let Some(token_tree) = macro_call.token_tree() {
            let tree_text = token_tree.syntax().text().to_string();
            let tree_start = usize::from(token_tree.syntax().text_range().start());
            collect_import008_from_macro_record_receivers(
                ctx,
                candidates,
                seen_ranges,
                &tree_text,
                tree_start,
            );
        }
        let Some(path) = macro_call.path() else {
            continue;
        };
        if path.qualifier().is_none() {
            continue;
        }
        let mut segments = Vec::new();
        if !collect_path_segment_texts(&path, &mut segments) {
            continue;
        }
        if segments.len() < 3 {
            continue;
        }
        // Only `module::macro!` pairs where module and macro share a name.
        let module_name = segments[segments.len() - 2].clone();
        let macro_name = segments[segments.len() - 1].clone();
        if !is_same_ident(&module_name, &macro_name) {
            continue;
        }
        let symbol = normalize_ident(&module_name).to_owned();
        if matches!(symbol.as_str(), "" | "self" | "super" | "crate" | "Self") {
            continue;
        }
        let root = segments[0].as_str();
        if is_non_importable_root(root) {
            continue;
        }
        let import_path = segments[..segments.len() - 1].join("::");
        let replacement = format!("{module_name}::{macro_name}");
        let range = path.syntax().text_range();
        let start = usize::from(range.start());
        let end = usize::from(range.end());
        if start >= end || !seen_ranges.insert((start, end)) {
            continue;
        }
        let line = shared::line_from_offset(&ctx.line_starts, start);
        candidates.push(Import008Candidate {
            line,
            start,
            end,
            kind: Import008CandidateKind::MacroModule,
            symbol,
            import_path,
            replacement,
        });
    }
}
fn collect_import008_from_derive_attrs(
ctx: &FileContext,
derive_path_re: &Regex,
candidates: &mut Vec<Import008Candidate>,
seen_ranges: &mut HashSet<(usize, usize)>,
) {
for attr in ctx.source_file.syntax().descendants().filter_map(Attr::cast) {
let Some(attr_path) = attr.path() else {
continue;
};
let Some(attr_name) = attr_path.segment().and_then(|segment| segment.name_ref()) else {
continue;
};
if attr_name.text() != "derive" {
continue;
}
let is_cfg_test = attr.syntax().ancestors().filter_map(Module::cast).any(|module| {
module
.attrs()
.any(|attr| attr.syntax().text().to_string().replace(' ', "").contains("cfg(test)"))
});
if is_cfg_test {
continue;
}
let attr_text = attr.syntax().text().to_string();
let attr_start = usize::from(attr.syntax().text_range().start());
let mut derive_body = None;
if let Some(open_paren_offset) = attr_text.find('(') {
let body_start = open_paren_offset + 1;
let mut depth = 1_usize;
let mut close_offset = None;
for (idx, ch) in attr_text.as_bytes().iter().enumerate().skip(body_start) {
match ch {
b'(' => depth += 1,
b')' => {
depth -= 1;
if depth == 0 {
close_offset = Some(idx);
break;
}
},
_ => {},
}
}
if let Some(close_offset) = close_offset
&& close_offset >= body_start
{
derive_body = Some((body_start, close_offset));
}
}
let Some((body_start, body_end)) = derive_body else {
continue;
};
let body = &attr_text[body_start..body_end];
let body_offset = body_start;
for derive_match in derive_path_re.find_iter(body) {
let compact_match =
derive_match.as_str().chars().filter(|ch| !ch.is_whitespace()).collect::<String>();
let segments = compact_match.split("::").map(ToString::to_string).collect::<Vec<_>>();
if segments.len() < 2 {
continue;
}
let symbol = normalize_ident(segments[segments.len() - 1].as_str()).to_owned();
if matches!(symbol.as_str(), "" | "self" | "super" | "crate" | "Self") {
continue;
}
let root = normalize_ident(segments[0].as_str());
if is_non_importable_root(root) {
continue;
}
let import_path = compact_match;
if matches!(
import_path.as_str(),
"std::fmt::Result"
| "core::fmt::Result"
| "std::result::Result"
| "core::result::Result"
) {
continue;
}
let replacement = segments[segments.len() - 1].clone();
let start = attr_start + body_offset + derive_match.start();
let end = attr_start + body_offset + derive_match.end();
if start >= end || !seen_ranges.insert((start, end)) {
continue;
}
let line = shared::line_from_offset(&ctx.line_starts, start);
candidates.push(Import008Candidate {
line,
start,
end,
kind: Import008CandidateKind::Derive,
symbol,
import_path,
replacement,
});
}
}
}
/// Indexes every qualified type path in the file by its leaf symbol,
/// mapping symbol -> set of full paths.
fn collect_qualified_type_paths_by_symbol(ctx: &FileContext) -> HashMap<String, HashSet<String>> {
    let mut paths_by_symbol: HashMap<String, HashSet<String>> = HashMap::new();
    let qualified_type_paths = ctx
        .source_file
        .syntax()
        .descendants()
        .filter_map(PathType::cast)
        .filter_map(|path_type| path_type.path())
        .filter_map(|path| {
            classify_type_path_candidate(ctx, path, PathQualificationRequirement::Qualified)
        });
    for candidate in qualified_type_paths {
        let mut parts = Vec::new();
        if !collect_path_segment_texts(&candidate.path, &mut parts) || parts.len() < 2 {
            continue;
        }
        if is_non_importable_root(parts[0].as_str()) {
            continue;
        }
        let joined = parts.join("::");
        if let Some(symbol) = symbol_from_full_import_path(&joined) {
            paths_by_symbol.entry(symbol).or_default().insert(joined);
        }
    }
    paths_by_symbol
}
/// Indexes every qualified value path in the file by its leaf symbol,
/// mapping symbol -> set of full paths. When a path has a type-like receiver
/// (e.g. `Type` in `a::Type::new`), the receiver is indexed too.
fn collect_qualified_value_paths_by_symbol(ctx: &FileContext) -> HashMap<String, HashSet<String>> {
    let mut out: HashMap<String, HashSet<String>> = HashMap::new();
    for path in ctx.source_file.syntax().descendants().filter_map(ast::Path::cast) {
        // Qualified value paths only; macros excluded, qualifier sub-paths
        // skipped.
        let Some(candidate) = classify_value_path_candidate(
            ctx,
            path,
            PathQualificationRequirement::Qualified,
            false,
            true,
        ) else {
            continue;
        };
        let mut segments = Vec::new();
        if !collect_path_segment_texts(&candidate.path, &mut segments) {
            continue;
        }
        if segments.len() < 2 {
            continue;
        }
        let root = segments[0].as_str();
        if is_non_importable_root(root) {
            continue;
        }
        let full_path = segments.join("::");
        let Some(symbol) = symbol_from_full_import_path(&full_path) else {
            continue;
        };
        out.entry(symbol).or_default().insert(full_path);
        // Also index the receiver of an associated-item path, when present.
        if let Some((receiver_symbol, receiver_path)) =
            qualified_associated_receiver_symbol_path(&segments)
        {
            out.entry(receiver_symbol).or_default().insert(receiver_path);
        }
    }
    out
}
/// For a path of three or more segments, returns the symbol and full path of
/// the receiver (all segments but the last) when that receiver's leaf looks
/// like a type; otherwise `None`.
fn qualified_associated_receiver_symbol_path(segments: &[String]) -> Option<(String, String)> {
    let (_, receiver_segments) = segments.split_last()?;
    if receiver_segments.len() < 2 {
        return None;
    }
    let receiver_path = receiver_segments.join("::");
    let receiver_symbol = symbol_from_full_import_path(&receiver_path)?;
    if is_import009_type_like_symbol(&receiver_symbol) {
        Some((receiver_symbol, receiver_path))
    } else {
        None
    }
}
/// Reports whether `symbol` looks like a type name: its first character is
/// an ASCII uppercase letter. Empty strings are not type-like.
fn is_import009_type_like_symbol(symbol: &str) -> bool {
    match symbol.chars().next() {
        Some(first) => first.is_ascii_uppercase(),
        None => false,
    }
}
/// Reports whether `path` appears in a value position: under a path
/// expression, path pattern, record expression, or record pattern.
fn is_value_path_usage(path: &ast::Path) -> bool {
    for node in path.syntax().ancestors() {
        let is_value_context = PathExpr::cast(node.clone()).is_some()
            || PathPat::cast(node.clone()).is_some()
            || RecordExpr::cast(node.clone()).is_some()
            || RecordPat::cast(node).is_some();
        if is_value_context {
            return true;
        }
    }
    false
}
/// Reports whether the last non-whitespace byte before `name_ref` in the
/// file text is `.`, i.e. the name is a method-call segment.
fn name_ref_is_method_segment(ctx: &FileContext, name_ref: &ast::NameRef) -> bool {
    let bytes = ctx.text.as_bytes();
    let start = usize::from(name_ref.syntax().text_range().start());
    bytes[..start]
        .iter()
        .rposition(|byte| !byte.is_ascii_whitespace())
        .is_some_and(|idx| bytes[idx] == b'.')
}
/// Classifies `path` as a value-position path candidate, or `None` when it
/// does not qualify.
///
/// - `qualification`: require the path to be qualified or unqualified.
/// - `allow_macro_context`: accept paths inside macro invocations.
/// - `skip_qualifier_subpath`: reject paths that are the qualifier of an
///   enclosing path, so only the outermost path is considered.
///
/// Paths inside `cfg(test)` modules, `use` items, and type positions are
/// always rejected, as are method-call segments (preceded by `.`).
fn classify_value_path_candidate(
    ctx: &FileContext,
    path: ast::Path,
    qualification: PathQualificationRequirement,
    allow_macro_context: bool,
    skip_qualifier_subpath: bool,
) -> Option<ValuePathCandidate> {
    if skip_qualifier_subpath && path_is_qualifier_subpath(&path) {
        return None;
    }
    match qualification {
        PathQualificationRequirement::Qualified if path.qualifier().is_none() => return None,
        PathQualificationRequirement::Unqualified if path.qualifier().is_some() => return None,
        _ => {},
    }
    if is_inside_cfg_test_module(&path) {
        return None;
    }
    if path.syntax().ancestors().any(|node| Use::cast(node).is_some()) {
        return None;
    }
    if path.syntax().ancestors().any(|node| PathType::cast(node).is_some()) {
        return None;
    }
    // Outside macros the path must sit in an expression/pattern/record
    // context to count as a value use; inside macros the caller decides.
    let macro_context = path.syntax().ancestors().any(|node| MacroCall::cast(node).is_some());
    if macro_context {
        if !allow_macro_context {
            return None;
        }
    } else if !is_value_path_usage(&path) {
        return None;
    }
    let segment = path.segment()?;
    let name_ref = segment.name_ref()?;
    if name_ref_is_method_segment(ctx, &name_ref) {
        return None;
    }
    // Record context is reported so callers can special-case struct
    // literals/patterns.
    let is_record_context = path
        .syntax()
        .ancestors()
        .any(|node| RecordExpr::cast(node.clone()).is_some() || RecordPat::cast(node).is_some());
    Some(ValuePathCandidate { path, name_ref, is_record_context })
}
/// Classifies `path` as a type-position path candidate, or `None` when it
/// does not qualify. Paths inside `cfg(test)` modules and method-call
/// segments are rejected. The returned `suffix` is whatever text follows the
/// final name inside the last segment (e.g. generic arguments), letting
/// callers distinguish generic from plain type uses.
fn classify_type_path_candidate(
    ctx: &FileContext,
    path: ast::Path,
    qualification: PathQualificationRequirement,
) -> Option<TypePathCandidate> {
    match qualification {
        PathQualificationRequirement::Qualified if path.qualifier().is_none() => return None,
        PathQualificationRequirement::Unqualified if path.qualifier().is_some() => return None,
        _ => {},
    }
    if is_inside_cfg_test_module(&path) {
        return None;
    }
    let segment = path.segment()?;
    let name_ref = segment.name_ref()?;
    if name_ref_is_method_segment(ctx, &name_ref) {
        return None;
    }
    // The segment text must start with the name; the remainder is the
    // suffix (empty for plain, non-generic uses).
    let segment_text = segment.syntax().text().to_string();
    let name_text = name_ref.text().to_string();
    let suffix = segment_text.strip_prefix(&name_text)?.to_owned();
    Some(TypePathCandidate { path, name_ref, suffix })
}
/// Builds rewrite edits replacing each unqualified type-path use of `symbol`
/// (the name ref only) with `qualified_path`.
fn unqualified_type_path_rewrites(
    ctx: &FileContext,
    symbol: &str,
    qualified_path: &str,
) -> Vec<(usize, usize, String)> {
    let mut edits = Vec::new();
    for path_type in ctx.source_file.syntax().descendants().filter_map(PathType::cast) {
        let candidate = path_type.path().and_then(|path| {
            classify_type_path_candidate(ctx, path, PathQualificationRequirement::Unqualified)
        });
        let Some(candidate) = candidate else {
            continue;
        };
        if is_same_ident(candidate.name_ref.text().as_str(), symbol) {
            let range = candidate.name_ref.syntax().text_range();
            edits.push((
                usize::from(range.start()),
                usize::from(range.end()),
                qualified_path.to_owned(),
            ));
        }
    }
    edits
}
/// Builds rewrite edits for qualified type paths rooted at `alias`,
/// replacing the alias root with `qualified_path`.
fn alias_root_type_path_rewrites(
    ctx: &FileContext,
    alias: &str,
    qualified_path: &str,
) -> Vec<(usize, usize, String)> {
    let mut edits = Vec::new();
    let qualified_paths = ctx
        .source_file
        .syntax()
        .descendants()
        .filter_map(PathType::cast)
        .filter_map(|path_type| path_type.path())
        .filter_map(|path| {
            classify_type_path_candidate(ctx, path, PathQualificationRequirement::Qualified)
        });
    for candidate in qualified_paths {
        let path_text = candidate.path.syntax().text().to_string();
        let Some(rewritten) = rewrite_alias_root_path(&path_text, alias, qualified_path) else {
            continue;
        };
        let range = candidate.path.syntax().text_range();
        edits.push((usize::from(range.start()), usize::from(range.end()), rewritten));
    }
    edits
}
/// Rewrites a path whose root segment matches `alias` so the root becomes
/// `qualified_path`; returns `None` when the root is not `alias`.
fn rewrite_alias_root_path(path_text: &str, alias: &str, qualified_path: &str) -> Option<String> {
    let compact = compact_path_for_match(path_text);
    match compact.split_once("::") {
        Some((head, tail)) => {
            if !is_same_ident(head, alias) {
                return None;
            }
            Some(format!("{qualified_path}::{tail}"))
        },
        None => {
            if !is_same_ident(compact.as_str(), alias) {
                return None;
            }
            Some(qualified_path.to_owned())
        },
    }
}
/// Builds deduplicated rewrite edits replacing each unqualified value-path
/// name ref matching `alias` with `qualified_path`.
fn alias_root_path_name_ref_rewrites(
    ctx: &FileContext,
    alias: &str,
    qualified_path: &str,
) -> Vec<(usize, usize, String)> {
    let mut edits = Vec::new();
    let mut visited_spans = HashSet::new();
    for path in ctx.source_file.syntax().descendants().filter_map(ast::Path::cast) {
        let candidate = classify_value_path_candidate(
            ctx,
            path,
            PathQualificationRequirement::Unqualified,
            false,
            false,
        );
        let Some(candidate) = candidate else {
            continue;
        };
        if !is_same_ident(candidate.name_ref.text().as_str(), alias) {
            continue;
        }
        let range = candidate.name_ref.syntax().text_range();
        let span = (usize::from(range.start()), usize::from(range.end()));
        if visited_spans.insert(span) {
            edits.push((span.0, span.1, qualified_path.to_owned()));
        }
    }
    edits
}
/// Builds whole-token-tree rewrite edits replacing `alias::` prefixes inside
/// macro token trees with `qualified_path::`.
fn alias_macro_token_tree_rewrites(
    ctx: &FileContext,
    alias: &str,
    qualified_path: &str,
) -> Vec<(usize, usize, String)> {
    let alias_re = Regex::new(&format!(r"\b{}\b::", regex::escape(alias)))
        .expect("Compile alias macro path regex.");
    let qualified_prefix = format!("{qualified_path}::");
    let mut edits = Vec::new();
    let token_trees = ctx
        .source_file
        .syntax()
        .descendants()
        .filter_map(MacroCall::cast)
        .filter_map(|macro_call| macro_call.token_tree());
    for token_tree in token_trees {
        let original = token_tree.syntax().text().to_string();
        let rewritten = alias_re.replace_all(&original, qualified_prefix.as_str());
        if rewritten.as_ref() != original.as_str() {
            let range = token_tree.syntax().text_range();
            edits.push((
                usize::from(range.start()),
                usize::from(range.end()),
                rewritten.into_owned(),
            ));
        }
    }
    edits
}
/// Builds rewrite edits inside `#[derive(...)]` token trees replacing bare
/// occurrences of `symbol` with `qualified_path`. Only standalone list
/// entries qualify: not path-qualified, not identifier-adjacent, and
/// followed by a list delimiter (or end of the tree text).
fn unqualified_derive_attr_symbol_rewrites(
    ctx: &FileContext,
    symbol: &str,
    qualified_path: &str,
) -> Vec<(usize, usize, String)> {
    // Word-bounded match of the bare symbol.
    let symbol_re = Regex::new(&format!(r"\b{}\b", regex::escape(symbol)))
        .expect("Compile import009 derive symbol regex.");
    let mut rewrites = Vec::new();
    for attr in ctx.source_file.syntax().descendants().filter_map(Attr::cast) {
        let Some(meta) = attr.meta() else {
            continue;
        };
        let Some(meta_path) = meta.path() else {
            continue;
        };
        let Some(meta_name) = meta_path.segment().and_then(|segment| segment.name_ref()) else {
            continue;
        };
        if meta_name.text() != "derive" {
            continue;
        }
        let Some(token_tree) = meta.token_tree() else {
            continue;
        };
        let tree_text = token_tree.syntax().text().to_string();
        let tree_start = usize::from(token_tree.syntax().text_range().start());
        for found in symbol_re.find_iter(&tree_text) {
            let symbol_start = found.start();
            let symbol_end = found.end();
            // Must not be qualified/adjacent on the left and must be
            // followed by `,`/`)`/end on the right.
            if !derive_symbol_is_unqualified(&tree_text, symbol_start)
                || !derive_symbol_has_import009_follow(&tree_text, symbol_end)
            {
                continue;
            }
            // Shift token-tree offsets to file offsets.
            rewrites.push((
                tree_start + symbol_start,
                tree_start + symbol_end,
                qualified_path.to_owned(),
            ));
        }
    }
    rewrites
}
/// Reports whether the derive-body symbol starting at `symbol_start` is not
/// preceded (ignoring whitespace) by a character that would make it part of
/// a longer path or identifier (`:`, `#`, `_`, or an alphanumeric).
fn derive_symbol_is_unqualified(text: &str, symbol_start: usize) -> bool {
    let bytes = text.as_bytes();
    match bytes[..symbol_start].iter().rposition(|byte| !byte.is_ascii_whitespace()) {
        // Nothing but (possibly) whitespace precedes the symbol.
        None => true,
        Some(idx) => {
            let prev = bytes[idx];
            !(prev.is_ascii_alphanumeric() || matches!(prev, b'_' | b':' | b'#'))
        },
    }
}
/// Reports whether the derive-body symbol ending at `symbol_end` is followed
/// (ignoring whitespace) by a list delimiter (`,` or `)`) or end of text.
fn derive_symbol_has_import009_follow(text: &str, symbol_end: usize) -> bool {
    match text.as_bytes()[symbol_end..].iter().find(|byte| !byte.is_ascii_whitespace()) {
        None => true,
        Some(&next) => matches!(next, b',' | b')'),
    }
}
/// Builds rewrite edits replacing unqualified, non-generic type-path uses of
/// `symbol` (whole path, empty suffix) with `qualified_path`.
fn unqualified_nongeneric_type_path_rewrites(
    ctx: &FileContext,
    symbol: &str,
    qualified_path: &str,
) -> Vec<(usize, usize, String)> {
    let mut edits = Vec::new();
    for path_type in ctx.source_file.syntax().descendants().filter_map(PathType::cast) {
        let candidate = path_type.path().and_then(|path| {
            classify_type_path_candidate(ctx, path, PathQualificationRequirement::Unqualified)
        });
        let Some(candidate) = candidate else {
            continue;
        };
        let name_matches = is_same_ident(candidate.name_ref.text().as_str(), symbol);
        if !name_matches || !candidate.suffix.is_empty() {
            continue;
        }
        let range = candidate.path.syntax().text_range();
        edits.push((
            usize::from(range.start()),
            usize::from(range.end()),
            qualified_path.to_owned(),
        ));
    }
    edits
}
/// Reports whether `symbol` appears as an unqualified type path carrying a
/// non-empty suffix (e.g. generic arguments) anywhere in the file.
fn has_unqualified_generic_type_path_use(ctx: &FileContext, symbol: &str) -> bool {
    ctx.source_file
        .syntax()
        .descendants()
        .filter_map(PathType::cast)
        .filter_map(|path_type| path_type.path())
        .filter_map(|path| {
            classify_type_path_candidate(ctx, path, PathQualificationRequirement::Unqualified)
        })
        .any(|candidate| {
            is_same_ident(candidate.name_ref.text().as_str(), symbol)
                && !candidate.suffix.is_empty()
        })
}
/// Builds rewrite edits replacing unqualified value-path uses of `symbol`
/// with `qualified_path`, including textual occurrences inside macro token
/// trees. Byte ranges are deduplicated across both sources.
fn unqualified_value_path_rewrites(
    ctx: &FileContext,
    symbol: &str,
    qualified_path: &str,
) -> Vec<(usize, usize, String)> {
    let mut rewrites = Vec::new();
    let mut seen_ranges = HashSet::new();
    for path in ctx.source_file.syntax().descendants().filter_map(ast::Path::cast) {
        let Some(candidate) = classify_value_path_candidate(
            ctx,
            path,
            PathQualificationRequirement::Unqualified,
            false,
            false,
        ) else {
            continue;
        };
        if !is_same_ident(candidate.name_ref.text().as_str(), symbol) {
            continue;
        }
        let start = usize::from(candidate.name_ref.syntax().text_range().start());
        let end = usize::from(candidate.name_ref.syntax().text_range().end());
        if seen_ranges.insert((start, end)) {
            rewrites.push((start, end, qualified_path.to_owned()));
        }
    }
    // Macro token trees are not covered by the path walk above; handle them
    // textually, still deduplicating by range.
    for (start, end, replacement) in
        unqualified_macro_token_symbol_rewrites(ctx, symbol, qualified_path)
    {
        if seen_ranges.insert((start, end)) {
            rewrites.push((start, end, replacement));
        }
    }
    rewrites
}
/// Builds rewrite edits replacing bare textual occurrences of `symbol`
/// inside macro token trees with `qualified_path`. An occurrence qualifies
/// only when followed (ignoring whitespace) by `::` or `{` and not preceded
/// by path/identifier characters.
fn unqualified_macro_token_symbol_rewrites(
    ctx: &FileContext,
    symbol: &str,
    qualified_path: &str,
) -> Vec<(usize, usize, String)> {
    // Word-bounded match of the bare symbol.
    let symbol_re = Regex::new(&format!(r"\b{}\b", regex::escape(symbol)))
        .expect("Compile import009 macro symbol regex.");
    let mut rewrites = Vec::new();
    for macro_call in ctx.source_file.syntax().descendants().filter_map(MacroCall::cast) {
        let Some(token_tree) = macro_call.token_tree() else {
            continue;
        };
        let tree_text = token_tree.syntax().text().to_string();
        let tree_start = usize::from(token_tree.syntax().text_range().start());
        for found in symbol_re.find_iter(&tree_text) {
            let symbol_start = found.start();
            let symbol_end = found.end();
            if !macro_symbol_has_import009_follow(&tree_text, symbol_end)
                || !macro_symbol_is_import009_unqualified(&tree_text, symbol_start)
            {
                continue;
            }
            // Shift token-tree offsets to file offsets.
            rewrites.push((
                tree_start + symbol_start,
                tree_start + symbol_end,
                qualified_path.to_owned(),
            ));
        }
    }
    rewrites
}
/// Reports whether the macro-token symbol ending at `symbol_end` is followed
/// (ignoring whitespace) by `{` or `::`. End of text does not qualify.
fn macro_symbol_has_import009_follow(text: &str, symbol_end: usize) -> bool {
    let bytes = text.as_bytes();
    let Some(offset) = bytes[symbol_end..].iter().position(|byte| !byte.is_ascii_whitespace())
    else {
        return false;
    };
    let idx = symbol_end + offset;
    bytes[idx] == b'{' || bytes.get(idx..idx + 2) == Some(b"::".as_slice())
}
/// Reports whether the macro-token symbol ending at `symbol_end` is followed
/// (ignoring whitespace) by `{`, i.e. looks like a record literal.
fn macro_symbol_has_record_follow(text: &str, symbol_end: usize) -> bool {
    text.as_bytes()[symbol_end..]
        .iter()
        .find(|byte| !byte.is_ascii_whitespace())
        .is_some_and(|&byte| byte == b'{')
}
/// Reports whether the macro-token symbol starting at `symbol_start` is not
/// preceded (ignoring whitespace) by a character that would make it part of
/// a longer path, field access, or identifier (`:`, `.`, `_`, alphanumeric).
fn macro_symbol_is_import009_unqualified(text: &str, symbol_start: usize) -> bool {
    let bytes = text.as_bytes();
    let Some(idx) = bytes[..symbol_start].iter().rposition(|byte| !byte.is_ascii_whitespace())
    else {
        // Only whitespace (or nothing) precedes the symbol.
        return true;
    };
    let prev = bytes[idx];
    prev != b':' && prev != b'.' && prev != b'_' && !prev.is_ascii_alphanumeric()
}
/// Reports whether every unqualified value-path use of `symbol` occurs in a
/// record (struct-literal/pattern) context. Returns `false` when there are
/// no matching uses at all.
fn unqualified_value_paths_are_record_only(ctx: &FileContext, symbol: &str) -> bool {
    let mut found_any = false;
    for path in ctx.source_file.syntax().descendants().filter_map(ast::Path::cast) {
        let candidate = classify_value_path_candidate(
            ctx,
            path,
            PathQualificationRequirement::Unqualified,
            false,
            false,
        );
        let Some(candidate) = candidate else {
            continue;
        };
        if !is_same_ident(candidate.name_ref.text().as_str(), symbol) {
            continue;
        }
        // One non-record use is enough to disqualify.
        if !candidate.is_record_context {
            return false;
        }
        found_any = true;
    }
    found_any
}
/// Builds rewrite edits for qualified value paths rooted at `alias`,
/// replacing the alias root with `qualified_path`. Macro contexts are
/// accepted; qualifier sub-paths are skipped so each path is rewritten once.
fn alias_root_value_path_rewrites(
    ctx: &FileContext,
    alias: &str,
    qualified_path: &str,
) -> Vec<(usize, usize, String)> {
    let mut rewrites = Vec::new();
    for path in ctx.source_file.syntax().descendants().filter_map(ast::Path::cast) {
        let Some(candidate) = classify_value_path_candidate(
            ctx,
            path,
            PathQualificationRequirement::Qualified,
            true,
            true,
        ) else {
            continue;
        };
        // Only paths whose root segment matches `alias` yield a rewrite.
        let Some(replacement) = rewrite_alias_root_path(
            candidate.path.syntax().text().to_string().as_str(),
            alias,
            qualified_path,
        ) else {
            continue;
        };
        rewrites.push((
            usize::from(candidate.path.syntax().text_range().start()),
            usize::from(candidate.path.syntax().text_range().end()),
            replacement,
        ));
    }
    rewrites
}
/// Reports whether the use-path `path` imports `symbol` and, among its
/// expanded full paths, includes exactly `import_path`.
fn use_item_imports_symbol_path(path: &str, symbol: &str, import_path: &str) -> bool {
    let imports_symbol =
        imported_symbols_from_use_path(path).iter().any(|item_symbol| item_symbol == symbol);
    imports_symbol
        && imported_full_paths_from_use_path(path)
            .into_iter()
            .any(|full_path| full_path == import_path)
}
/// Convenience wrapper: checks the `cfg(test)` module ancestry of a path's
/// syntax node.
fn is_inside_cfg_test_module(path: &ast::Path) -> bool {
    let syntax = path.syntax();
    syntax_is_inside_cfg_test_module(syntax)
}
/// Reports whether `path` is itself a sub-path (qualifier) of an enclosing
/// path node.
fn path_is_qualifier_subpath(path: &ast::Path) -> bool {
    match path.syntax().parent() {
        Some(parent) => ast::Path::cast(parent).is_some(),
        None => false,
    }
}
fn collect_path_segment_texts(path: &ast::Path, out: &mut Vec<String>) -> bool {
if let Some(qualifier) = path.qualifier()
&& !collect_path_segment_texts(&qualifier, out)
{
return false;
}
let Some(segment) = path.segment() else {
return false;
};
if segment.type_anchor().is_some()
|| segment.parenthesized_arg_list().is_some()
|| segment.ret_type().is_some()
|| segment.return_type_syntax().is_some()
{
return false;
}
if let Some(name_ref) = segment.name_ref() {
out.push(name_ref.text().to_string());
return true;
}
let raw = segment.syntax().text().to_string();
let head = raw.split('<').next().unwrap_or(raw.as_str()).trim();
if matches!(head, "crate" | "self" | "super") {
out.push(head.to_owned());
true
} else {
false
}
}
/// Extracts the final identifier of a full import path, normalizing raw
/// identifiers; returns `None` for globs, keywords, and empty tails.
fn symbol_from_full_import_path(path: &str) -> Option<String> {
    let leaf = path.rsplit("::").next()?.trim();
    let leaf = normalize_ident(leaf);
    match leaf {
        "" | "*" | "self" | "super" | "crate" => None,
        _ => Some(leaf.to_owned()),
    }
}
/// Expands a use-path into the full paths it imports, with whitespace
/// stripped; glob entries are dropped and malformed input yields an empty
/// list.
fn imported_full_paths_from_use_path(path: &str) -> Vec<String> {
    let mut collected = Vec::new();
    if collect_full_paths_from_use_segment(path.trim(), &mut collected) {
        collected
            .into_iter()
            .map(|full_path| full_path.replace(' ', ""))
            .filter(|full_path| !full_path.is_empty() && !full_path.ends_with("::*"))
            .collect()
    } else {
        Vec::new()
    }
}
/// Expands a use-path into the set of module paths imported via explicit
/// `self` entries, with whitespace stripped; malformed input yields an empty
/// set.
fn imported_self_full_paths_from_use_path(path: &str) -> HashSet<String> {
    let mut collected = HashSet::new();
    let parsed_ok = collect_self_full_paths_from_use_segment(path.trim(), &mut collected);
    if parsed_ok {
        collected.into_iter().map(|full_path| full_path.replace(' ', "")).collect()
    } else {
        HashSet::new()
    }
}
/// Reports whether `syntax` has a module ancestor carrying an attribute
/// whose text (whitespace-insensitively) contains `cfg(test)`.
fn syntax_is_inside_cfg_test_module(syntax: &SyntaxNode) -> bool {
    let mut modules = syntax.ancestors().filter_map(Module::cast);
    modules.any(|module| {
        module.attrs().any(|attr| {
            let compact = attr.syntax().text().to_string().replace(' ', "");
            compact.contains("cfg(test)")
        })
    })
}
/// Recursively collects, from a use-tree segment, the module paths imported
/// via an explicit `self` inside braces (e.g. `a::b::{self, c}` yields
/// `a::b`). Glob segments are ignored. Returns `false` on malformed input
/// (unbalanced braces, text after a brace group, prefix not ending in `::`).
fn collect_self_full_paths_from_use_segment(segment: &str, out: &mut HashSet<String>) -> bool {
    let trimmed = segment.trim();
    if trimmed.is_empty() {
        return true;
    }
    // A glob cannot name a module via `self`.
    if trimmed.ends_with("::*") {
        return true;
    }
    // Locate the outermost balanced brace group, if any.
    let mut brace_start = None;
    let mut depth = 0_i32;
    let mut brace_end = None;
    for (idx, ch) in trimmed.char_indices() {
        if ch == '{' {
            if brace_start.is_none() {
                brace_start = Some(idx);
            }
            depth += 1;
        } else if ch == '}' {
            depth -= 1;
            if depth < 0 {
                return false;
            }
            if depth == 0 {
                brace_end = Some(idx);
            }
        }
    }
    if depth != 0 {
        return false;
    }
    if let (Some(open), Some(close)) = (brace_start, brace_end) {
        let prefix = trimmed[..open].trim();
        let inner = &trimmed[open + 1..close];
        let suffix = trimmed[close + 1..].trim();
        // Nothing may follow the brace group; a non-empty prefix must end
        // with `::`.
        if !suffix.is_empty() {
            return false;
        }
        if !prefix.is_empty() && !prefix.ends_with("::") {
            return false;
        }
        let prefix = prefix.strip_suffix("::").unwrap_or(prefix).trim();
        for child in split_top_level_csv(inner) {
            let child = child.trim();
            if child.is_empty() {
                continue;
            }
            // `self` names the prefix module itself.
            if child == "self" {
                if !prefix.is_empty() {
                    out.insert(prefix.to_owned());
                }
                continue;
            }
            // Recurse with the prefix prepended — children may nest braces.
            let expanded =
                if prefix.is_empty() { child.to_owned() } else { format!("{prefix}::{child}") };
            if !collect_self_full_paths_from_use_segment(&expanded, out) {
                return false;
            }
        }
    }
    true
}
/// Recursively collects every full path imported by a use-tree segment,
/// expanding brace groups and honoring `self` and `as` aliases (the original
/// path, not the alias, is recorded). Glob segments are ignored. Returns
/// `false` on malformed input (unbalanced braces, text after a brace group,
/// prefix not ending in `::`).
fn collect_full_paths_from_use_segment(segment: &str, out: &mut Vec<String>) -> bool {
    let trimmed = segment.trim();
    if trimmed.is_empty() {
        return true;
    }
    // Glob imports do not contribute concrete paths.
    if trimmed.ends_with("::*") {
        return true;
    }
    // Locate the outermost balanced brace group, if any.
    let mut brace_start = None;
    let mut depth = 0_i32;
    let mut brace_end = None;
    for (idx, ch) in trimmed.char_indices() {
        if ch == '{' {
            if brace_start.is_none() {
                brace_start = Some(idx);
            }
            depth += 1;
        } else if ch == '}' {
            depth -= 1;
            if depth < 0 {
                return false;
            }
            if depth == 0 {
                brace_end = Some(idx);
            }
        }
    }
    if depth != 0 {
        return false;
    }
    if let (Some(open), Some(close)) = (brace_start, brace_end) {
        let prefix = trimmed[..open].trim();
        let inner = &trimmed[open + 1..close];
        let suffix = trimmed[close + 1..].trim();
        // Nothing may follow the brace group; a non-empty prefix must end
        // with `::`.
        if !suffix.is_empty() {
            return false;
        }
        if !prefix.is_empty() && !prefix.ends_with("::") {
            return false;
        }
        let prefix = prefix.strip_suffix("::").unwrap_or(prefix).trim();
        for child in split_top_level_csv(inner) {
            let child = child.trim();
            if child.is_empty() {
                continue;
            }
            // `self` imports the prefix module itself.
            if child == "self" {
                if !prefix.is_empty() {
                    out.push(prefix.to_owned());
                }
                continue;
            }
            // Recurse with the prefix prepended — children may nest braces.
            let expanded =
                if prefix.is_empty() { child.to_owned() } else { format!("{prefix}::{child}") };
            if !collect_full_paths_from_use_segment(&expanded, out) {
                return false;
            }
        }
        return true;
    }
    // Leaf segment: record the path before any `as` alias; bare keywords and
    // globs are not importable names.
    let base = trimmed.split(" as ").next().unwrap_or(trimmed).trim();
    if base.is_empty() || base == "self" || base == "super" || base == "crate" || base == "*" {
        return true;
    }
    out.push(base.to_owned());
    true
}
/// Builds the insertion edit that adds `use` items for
/// `pending_import_paths`.
///
/// Paths are grouped by `use_origin`, rendered one `use` per line with a
/// blank line between groups, and inserted after the first existing run of
/// use items when one exists, otherwise at the file's computed import
/// insertion line. Returns the edit plus the use-item lines it is associated
/// with (empty when no run exists); `None` when there is nothing to insert.
fn build_import008_insert_edit(
    ctx: &FileContext,
    use_runs: &[Vec<&TopItem>],
    local_module_roots: &HashSet<String>,
    pending_import_paths: &BTreeSet<String>,
    rule: &'static str,
) -> Option<(Edit, HashSet<usize>)> {
    if pending_import_paths.is_empty() {
        return None;
    }
    // Group pending paths by origin; BTreeMap keeps group order stable.
    let mut grouped: BTreeMap<usize, Vec<String>> = BTreeMap::new();
    for path in pending_import_paths {
        grouped.entry(use_origin(path, local_module_roots)).or_default().push(path.clone());
    }
    for paths in grouped.values_mut() {
        paths.sort();
        paths.dedup();
    }
    // Render the block: one `use` per line, blank line between groups.
    let mut block = String::new();
    for (group_idx, paths) in grouped.values().enumerate() {
        if group_idx > 0 {
            block.push_str("\n\n");
        }
        for (idx, path) in paths.iter().enumerate() {
            if idx > 0 {
                block.push('\n');
            }
            block.push_str("use ");
            block.push_str(path);
            block.push(';');
        }
    }
    if block.is_empty() {
        return None;
    }
    block.push('\n');
    // Preferred position: directly after the last item of the first
    // existing use run.
    if let Some(run) = use_runs.first() {
        let (Some(first), Some(last)) = (run.first(), run.last()) else {
            return None;
        };
        let (_, run_end) = run_text_range(ctx, first, last)?;
        let insert_pos = item_syntax_text_range(ctx, last).map(|(_, end)| end).unwrap_or(run_end);
        // Add an extra blank line when the origin group changes between the
        // last existing use item and the first new one.
        let last_origin =
            extract_use_path(ctx, last).map(|path| use_origin(&path, local_module_roots));
        let first_new_origin = grouped.keys().next().copied();
        let mut replacement = String::from("\n");
        if let (Some(last_origin), Some(first_new_origin)) = (last_origin, first_new_origin)
            && last_origin != first_new_origin
        {
            replacement.push('\n');
        }
        replacement.push_str(&block);
        // Match the indentation of the enclosing scope, if any.
        if let Some(indent) = import006_scope_insert_indent(ctx, insert_pos)
            && !indent.is_empty()
        {
            replacement = import006_indent_insert_block(&replacement, &indent);
        }
        return Some((
            Edit { start: insert_pos, end: insert_pos, replacement, rule },
            run.iter().map(|item| item.line).collect::<HashSet<_>>(),
        ));
    }
    // No existing use items: insert at the computed import line (falling
    // back to end of file).
    let insert_line = import008_insert_line(ctx);
    let insert_pos =
        shared::offset_from_line(&ctx.line_starts, insert_line).unwrap_or(ctx.text.len());
    let mut replacement = block;
    if let Some(indent) = import006_scope_insert_indent(ctx, insert_pos)
        && !indent.is_empty()
    {
        replacement = import006_indent_insert_block(&replacement, &indent);
    }
    if !ctx.top_items.is_empty() {
        replacement.push('\n');
    }
    Some((Edit { start: insert_pos, end: insert_pos, replacement, rule }, HashSet::new()))
}
/// Attempts to satisfy pending import paths by merging them into existing
/// `use` items instead of inserting new ones.
///
/// For each pending path a merge target (an already-imported root plus a
/// child tail) is resolved; all targets anchored at the same use item are
/// merged in a single rewrite of that item. Returns the pending paths that
/// could not be merged plus the lines of the use items that were rewritten.
fn merge_import008_into_existing_module_use_items(
    ctx: &FileContext,
    edits: &mut Vec<Edit>,
    use_runs: &[Vec<&TopItem>],
    pending_import_paths: &BTreeSet<String>,
    imported_full_paths_by_symbol: &HashMap<String, HashSet<String>>,
    rule: &'static str,
) -> (BTreeSet<String>, HashSet<usize>) {
    if pending_import_paths.is_empty() {
        return (BTreeSet::new(), HashSet::new());
    }
    // A resolved merge destination: the line of the anchoring use item and
    // the full path of the root the child is merged under.
    #[derive(Clone)]
    struct MergeTarget {
        anchor_line: usize,
        root_full: String,
    }
    let mut remaining = BTreeSet::new();
    // anchor line -> root full path -> child tails to merge under that root.
    let mut plans: BTreeMap<usize, BTreeMap<String, BTreeSet<String>>> = BTreeMap::new();
    let mut touched_lines = HashSet::new();
    let mut use_items = HashMap::new();
    let mut merge_targets_by_pending: HashMap<String, MergeTarget> = HashMap::new();
    let mut successful_merge_roots: HashSet<(usize, String)> = HashSet::new();
    for item in use_runs.iter().flat_map(|run| run.iter().copied()) {
        use_items.insert(item.line, item);
    }
    // Phase 1: resolve a merge target for each pending path; unresolvable
    // paths go straight to `remaining`.
    for pending in pending_import_paths {
        let Some((root_full, child_tail)) =
            resolve_import008_merge_target_for_pending_path(pending, imported_full_paths_by_symbol)
        else {
            remaining.insert(pending.clone());
            continue;
        };
        let Some(anchor_line) = find_use_item_line_importing_full_path(ctx, use_runs, root_full)
        else {
            remaining.insert(pending.clone());
            continue;
        };
        plans
            .entry(anchor_line)
            .or_default()
            .entry(root_full.to_owned())
            .or_default()
            .insert(child_tail.to_owned());
        merge_targets_by_pending
            .insert(pending.clone(), MergeTarget { anchor_line, root_full: root_full.to_owned() });
    }
    // Phase 2: apply the merge plans, one rewrite per anchored use item.
    for (line, root_plans) in plans {
        let Some(item) = use_items.get(&line).copied() else {
            continue;
        };
        let Some((start, end)) = item_text_range(ctx, item) else {
            continue;
        };
        let Some(raw) = ctx.text.get(start..end) else {
            continue;
        };
        let Some(use_path) = extract_use_path_from_text(raw) else {
            continue;
        };
        // Fold each root's children into the item's use path, tracking
        // which roots actually merged.
        let mut merged_path = use_path;
        let mut merged_any = false;
        for (root_full, children) in root_plans {
            let Some(next_path) =
                merge_children_into_use_path_for_root(&merged_path, &root_full, &children)
            else {
                continue;
            };
            successful_merge_roots.insert((line, root_full));
            merged_any |= next_path != merged_path;
            merged_path = next_path;
        }
        if !merged_any {
            continue;
        }
        if let Some(rewritten) = rewrite_use_item_with_path(raw, &merged_path)
            && rewritten != raw
        {
            edits.push(Edit { start, end, replacement: rewritten, rule });
            touched_lines.insert(line);
        }
    }
    // Phase 3: pending paths whose target root never merged stay pending.
    for pending in pending_import_paths {
        let Some(target) = merge_targets_by_pending.get(pending) else {
            remaining.insert(pending.clone());
            continue;
        };
        let can_consume =
            successful_merge_roots.contains(&(target.anchor_line, target.root_full.clone()));
        if !can_consume {
            remaining.insert(pending.clone());
        }
    }
    (remaining, touched_lines)
}
/// Apply pending module-import-path fixes: first merge each pending path into
/// an existing `use` item, then insert a fresh `use` item for whatever could
/// not be merged. Returns the set of line numbers touched by generated edits.
fn apply_pending_module_import_path_edits(
    ctx: &FileContext,
    edits: &mut Vec<Edit>,
    use_runs: &[Vec<&TopItem>],
    local_module_roots: &HashSet<String>,
    pending_import_paths: &BTreeSet<String>,
    imported_full_paths_by_symbol: &HashMap<String, HashSet<String>>,
    rule: &'static str,
) -> HashSet<usize> {
    if pending_import_paths.is_empty() {
        return HashSet::new();
    }
    // Step 1: fold pending paths into use items that already import a
    // compatible root; whatever is left over must be freshly inserted.
    let (still_pending, mut touched) = merge_import008_into_existing_module_use_items(
        ctx,
        edits,
        use_runs,
        pending_import_paths,
        imported_full_paths_by_symbol,
        rule,
    );
    // Step 2: synthesize a brand-new `use` item for the remainder.
    match build_import008_insert_edit(ctx, use_runs, local_module_roots, &still_pending, rule) {
        Some((insert_edit, inserted_lines)) => {
            edits.push(insert_edit);
            touched.extend(inserted_lines);
        },
        None => {},
    }
    touched
}
/// Resolve where a pending import path can be merged into existing imports.
///
/// Returns `(root_full_path, child_tail)` where `child_tail` is the segment
/// text to add under the root — the literal `"self"` when the pending path is
/// itself a module whose children are already partially imported.
fn resolve_import008_merge_target_for_pending_path<'a>(
    pending: &'a str,
    imported_full_paths_by_symbol: &'a HashMap<String, HashSet<String>>,
) -> Option<(&'a str, &'a str)> {
    let pending = pending.trim();
    if pending.is_empty() {
        return None;
    }
    // Case 1: `root::tail` where `root` is a symbol imported from exactly one
    // place — merge the tail under that unique full path.
    if let Some((root, child_tail)) = pending.split_once("::")
        && let Some(root_full_paths) = imported_full_paths_by_symbol.get(root)
        && root_full_paths.len() == 1
        && let Some(root_full) = root_full_paths.iter().next()
    {
        let child_tail = child_tail.trim();
        if !child_tail.is_empty() {
            return Some((root_full.as_str(), child_tail));
        }
    }
    // Case 2: pending leaf repeats its parent module's name (`…::m::m`):
    // import the module via `{self}` when one of its siblings is already
    // imported from the same parent.
    if let Some((parent, module_name)) = pending.rsplit_once("::") {
        let module_name = module_name.trim();
        let module_root = parent.trim();
        if is_same_ident(module_root.rsplit("::").next().unwrap_or(module_root).trim(), module_name)
        {
            let module_compact = compact_path_for_match(module_root);
            if imported_full_paths_by_symbol.values().any(|full_paths| {
                full_paths.iter().any(|full_path| {
                    full_path.rsplit_once("::").is_some_and(|(parent_full, _)| {
                        compact_path_for_match(parent_full) == module_compact
                    })
                })
            }) {
                return Some((module_root, "self"));
            }
        }
    }
    // Case 3: the pending path is the parent of something already imported —
    // add `self` under it.
    let pending_compact = compact_path_for_match(pending);
    if imported_full_paths_by_symbol.values().any(|full_paths| {
        full_paths.iter().any(|full_path| {
            full_path.rsplit_once("::").is_some_and(|(parent_full, _)| {
                compact_path_for_match(parent_full) == pending_compact
            })
        })
    }) {
        return Some((pending, "self"));
    }
    // Case 4: the pending path extends some uniquely imported full path;
    // prefer the longest matching root so the child tail stays shortest.
    let mut best: Option<(&str, &str)> = None;
    for root_full in imported_full_paths_by_symbol.values().filter_map(|paths| {
        if paths.len() == 1 { paths.iter().next().map(String::as_str) } else { None }
    }) {
        let prefix = format!("{root_full}::");
        if !pending.starts_with(&prefix) {
            continue;
        }
        let child_tail = pending.strip_prefix(&prefix)?.trim();
        if child_tail.is_empty() {
            continue;
        }
        match best {
            Some((current_root, _)) if current_root.len() >= root_full.len() => {},
            _ => best = Some((root_full, child_tail)),
        }
    }
    best
}
/// Find the line of a `use` item that already imports `target_path`.
///
/// An exact whole-path match returns immediately; a braced item rooted at the
/// target, or one importing the target or a direct child of it, is only
/// remembered as a fallback (first such line wins).
fn find_use_item_line_importing_full_path(
    ctx: &FileContext,
    use_runs: &[Vec<&TopItem>],
    target_path: &str,
) -> Option<usize> {
    let target_compact = compact_path_for_match(target_path);
    let mut containing_line = None;
    for item in use_runs.iter().flat_map(|run| run.iter().copied()) {
        let Some(path) = extract_use_path(ctx, item) else {
            continue;
        };
        let compact_path = compact_path_for_match(&path);
        // Braced item whose prefix module is the target: containment match.
        if let Some((prefix, _, _)) = parse_braced_path_parts_allow_alias(&path) {
            let prefix_root = compact_path_for_match(prefix.trim_end_matches('{'));
            let prefix_root = prefix_root.strip_suffix("::").unwrap_or(&prefix_root);
            if prefix_root == target_compact {
                containing_line.get_or_insert(item.line);
            }
        }
        // Exact match: this item imports the target path itself.
        if compact_path == target_compact {
            return Some(item.line);
        }
        // The item imports the target, or a direct child of the target.
        if imported_full_paths_from_use_path(&path).into_iter().any(|full_path| {
            let full_path_compact = compact_path_for_match(&full_path);
            full_path_compact == target_compact
                || full_path.rsplit_once("::").is_some_and(|(parent_full, _)| {
                    compact_path_for_match(parent_full) == target_compact
                })
        }) {
            containing_line.get_or_insert(item.line);
        }
    }
    containing_line
}
fn merge_children_into_use_path_for_root(
use_path: &str,
root_full: &str,
children: &BTreeSet<String>,
) -> Option<String> {
let mut current = use_path.to_owned();
let mut changed = false;
for child in children {
let next = merge_single_child_into_use_path_for_root(¤t, root_full, child)?;
if next != current {
changed = true;
current = next;
}
}
if changed { Some(current) } else { None }
}
/// Merge one `child_tail` under `root_full` into `use_path`, trying the
/// whole-path, simple-import, direct-braced, and parent-braced shapes in
/// that order.
fn merge_single_child_into_use_path_for_root(
    use_path: &str,
    root_full: &str,
    child_tail: &str,
) -> Option<String> {
    let root_compact = compact_path_for_match(root_full);
    // `use a::b;` merged with child `c` becomes `use a::b::{self, c};`.
    if compact_path_for_match(use_path) == root_compact {
        return Some(format!("{root_full}::{{self, {child_tail}}}"));
    }
    // Otherwise fall through the remaining supported use-item shapes.
    try_merge_child_into_simple_root_use_path(use_path, &root_compact, child_tail)
        .or_else(|| {
            try_merge_child_into_direct_root_braced_use_path(use_path, &root_compact, child_tail)
        })
        .or_else(|| try_merge_child_into_parent_braced_use_path(use_path, root_full, child_tail))
}
/// Merge `child_tail` into a simple `use prefix::Symbol;` path whose prefix
/// matches `root_compact`, producing a braced list with both names.
fn try_merge_child_into_simple_root_use_path(
    use_path: &str,
    root_compact: &str,
    child_tail: &str,
) -> Option<String> {
    let (prefix, imported_symbol) = simple_import_prefix_symbol(use_path)?;
    if compact_path_for_match(&prefix) != *root_compact {
        return None;
    }
    // The child is already the imported symbol; keep the path untouched.
    if is_same_ident(&imported_symbol, child_tail) {
        return Some(use_path.to_owned());
    }
    // `self` is conventionally listed first inside the braces.
    let merged = match child_tail {
        "self" => format!("{prefix}::{{self, {imported_symbol}}}"),
        _ => format!("{prefix}::{{{imported_symbol}, {child_tail}}}"),
    };
    Some(merged)
}
/// Merge `child_tail` into a braced use path whose prefix module is exactly
/// the root (`root::{…}`), splicing the new child into the brace list.
fn try_merge_child_into_direct_root_braced_use_path(
    use_path: &str,
    root_compact: &str,
    child_tail: &str,
) -> Option<String> {
    let (prefix, close, mut segments) = parse_braced_path_parts_allow_alias(use_path)?;
    // Strip the trailing `{` and optional `::` to recover the prefix module.
    let prefix_root = prefix[..prefix.len().saturating_sub(1)].trim();
    let prefix_root = prefix_root.strip_suffix("::").unwrap_or(prefix_root).trim();
    if compact_path_for_match(prefix_root) != *root_compact {
        return None;
    }
    if merge_child_tail_into_braced_segments(&mut segments, child_tail) {
        // `close..=close` re-appends the original closing-brace byte.
        return Some(format!("{}{}{}", prefix, segments.join(", "), &use_path[close..=close]));
    }
    None
}
/// Merge `child_tail` into a braced use path rooted at the PARENT of
/// `root_full` — e.g. adding `x` under `b` in `a::{b, c}` yields
/// `a::{b::{self, x}, c}`. Returns `None` when the root is absent.
fn try_merge_child_into_parent_braced_use_path(
    use_path: &str,
    root_full: &str,
    child_tail: &str,
) -> Option<String> {
    let (parent_full, root_head) = root_full.rsplit_once("::")?;
    let (prefix, close, mut segments) = parse_braced_path_parts_allow_alias(use_path)?;
    let prefix_root = prefix[..prefix.len().saturating_sub(1)].trim();
    let prefix_root = prefix_root.strip_suffix("::").unwrap_or(prefix_root).trim();
    if compact_path_for_match(prefix_root) != compact_path_for_match(parent_full) {
        return None;
    }
    let root_head_compact = compact_path_for_match(root_head);
    let mut matched_root = false;
    let mut changed = false;
    for segment in &mut segments {
        let trimmed = segment.trim();
        // Bare `root` segment: expand it to `root::{self, child}`.
        if compact_path_for_match(trimmed) == root_head_compact {
            *segment = format!("{root_head}::{{self, {child_tail}}}");
            matched_root = true;
            changed = true;
            break;
        }
        // `root::Child` segment (single child, no braces/glob): pair it with
        // the new child inside fresh braces, `self` first.
        if let Some((head, child)) = trimmed.split_once("::")
            && compact_path_for_match(head) == root_head_compact
            && !child.contains('{')
            && !child.contains('}')
            && child != "*"
        {
            if is_same_ident(child.trim(), child_tail) {
                matched_root = true;
                break;
            }
            *segment = if child_tail == "self" {
                format!("{head}::{{self, {}}}", child.trim())
            } else {
                format!("{head}::{{{}, {child_tail}}}", child.trim())
            };
            matched_root = true;
            changed = true;
            break;
        }
        // `root::{a, b}` segment: merge into the nested brace list.
        let Some((head, inner)) = parse_single_level_nested_use_segment(trimmed) else {
            continue;
        };
        if compact_path_for_match(head) != root_head_compact {
            continue;
        }
        let mut nested_children = split_top_level_csv(inner);
        let nested_changed =
            merge_child_tail_into_braced_segments(&mut nested_children, child_tail);
        if !nested_changed {
            matched_root = true;
            break;
        }
        *segment = format!("{head}::{{{}}}", nested_children.join(", "));
        matched_root = true;
        changed = true;
        break;
    }
    if !matched_root {
        return None;
    }
    // Child was already present: report success without rewriting.
    if !changed {
        return Some(use_path.to_owned());
    }
    Some(format!("{}{}{}", prefix, segments.join(", "), &use_path[close..=close]))
}
/// Parse a segment of the form `head::{a, b}` with no deeper nesting,
/// returning the trimmed head and the raw inner list text.
fn parse_single_level_nested_use_segment(segment: &str) -> Option<(&str, &str)> {
    let (head, rest) = segment.split_once("::{")?;
    let inner = rest.strip_suffix('}')?;
    // Any further braces would mean deeper nesting, which we reject.
    if inner.contains('{') || inner.contains('}') {
        return None;
    }
    Some((head.trim(), inner))
}
/// Insert `child_tail` into a brace-segment list in place.
///
/// Returns `true` when the list changed (including upgrading an `as _`
/// keep-alive alias to the real name), `false` when the child was already
/// present. `self` children are inserted at the front by convention.
fn merge_child_tail_into_braced_segments(segments: &mut Vec<String>, child_tail: &str) -> bool {
    let child_compact = compact_path_for_match(child_tail);
    for segment in segments.iter_mut() {
        let trimmed = segment.trim();
        // Exact duplicate at the top level: nothing to do.
        if compact_path_for_match(trimmed) == child_compact {
            return false;
        }
        // `Child as _` keep-alive: replace the alias with the real import.
        if is_keep_alive_alias_for_child(trimmed, child_tail) {
            *segment = child_tail.to_owned();
            return true;
        }
        // `head::{…}` segment matching a `head::rest` child: recurse one
        // level into the nested list.
        let Some((head, inner)) = parse_single_level_nested_use_segment(trimmed) else {
            continue;
        };
        let Some((child_head, child_rest)) = child_tail.split_once("::") else {
            continue;
        };
        if compact_path_for_match(head) != compact_path_for_match(child_head) {
            continue;
        }
        let child_rest_compact = compact_path_for_match(child_rest);
        let mut children = split_top_level_csv(inner);
        let mut found_exact = false;
        let mut replaced_alias = false;
        for child in children.iter_mut() {
            let child_trimmed = child.trim();
            if compact_path_for_match(child_trimmed) == child_rest_compact {
                found_exact = true;
                break;
            }
            if is_keep_alive_alias_for_child(child_trimmed, child_rest) {
                *child = child_rest.to_owned();
                replaced_alias = true;
                break;
            }
        }
        if found_exact {
            return false;
        }
        if replaced_alias {
            *segment = format!("{head}::{{{}}}", children.join(", "));
            return true;
        }
        // Not present in the nested list: add it (self goes first).
        if child_rest_compact == "self" {
            children.insert(0, child_rest.to_owned());
        } else {
            children.push(child_rest.to_owned());
        }
        *segment = format!("{head}::{{{}}}", children.join(", "));
        return true;
    }
    // Not found anywhere: append at the top level (or prepend `self`).
    if child_compact == "self" {
        segments.insert(0, child_tail.to_owned());
    } else {
        segments.push(child_tail.to_owned());
    }
    true
}
/// True when `segment` is an underscore alias (`X as _`) for `child_tail`,
/// i.e. a keep-alive import that may be replaced with the real name.
fn is_keep_alive_alias_for_child(segment: &str, child_tail: &str) -> bool {
    match split_import_leaf_alias(segment) {
        Some((base, Some(alias))) if alias == "_" => {
            compact_path_for_match(base) == compact_path_for_match(child_tail)
        },
        _ => false,
    }
}
/// Strip every whitespace character so two paths compare structurally.
fn compact_path_for_match(path: &str) -> String {
    let mut compact = String::with_capacity(path.len());
    compact.extend(path.chars().filter(|ch| !ch.is_whitespace()));
    compact
}
/// Canonicalize a use path for equality checks: drop whitespace, then
/// repeatedly collapse trailing commas before closing braces (`{a,}` -> `{a}`).
fn normalize_use_path_for_equivalence(path: &str) -> String {
    let mut normalized = compact_path_for_match(path);
    loop {
        let collapsed = normalized.replace(",}", "}");
        if collapsed == normalized {
            break normalized;
        }
        normalized = collapsed;
    }
}
/// Pick the 1-based line where a new `use` item should be inserted: after any
/// leading non-`#[cfg(test)]` `mod` declarations, otherwise at the first item
/// (line 1 for an empty file).
fn import008_insert_line(ctx: &FileContext) -> usize {
    let Some(first_item) = ctx.top_items.first() else {
        return 1;
    };
    // Leading run of `mod` items that are not test-gated.
    let leading_mods = ctx
        .top_items
        .iter()
        .take_while(|item| item.kind == TopKind::Mod && !is_cfg_test_attrs(&item.attrs));
    match leading_mods.last() {
        Some(last_mod) => last_mod.end_line + 1,
        None => first_item.start_line,
    }
}
/// True when any attribute string is (after removing spaces) a `#[cfg(test)]`.
fn is_cfg_test_attrs(attrs: &[String]) -> bool {
    attrs
        .iter()
        .map(|attr| attr.replace(' ', ""))
        .any(|compact| compact.contains("#[cfg(test)]"))
}
/// Extract the trimmed path portion of a `use …;` statement from raw text.
fn extract_use_path_from_text(text: &str) -> Option<String> {
    let (start, end) = find_use_path_range(text)?;
    let path = text.get(start..end)?;
    Some(path.trim().to_owned())
}
/// Locate the byte range of the import path inside the first well-formed
/// `use …;` occurrence in `text`. Returns `None` when no candidate `use`
/// keyword is followed by a terminating semicolon.
fn find_use_path_range(text: &str) -> Option<(usize, usize)> {
    for (idx, _) in text.match_indices("use") {
        // Require a non-identifier character (or start of text) before `use`
        // and whitespace after it, so `refuse`/`used` do not match.
        let before = text[..idx].chars().next_back();
        let after = text.get(idx + 3..).and_then(|tail| tail.chars().next());
        let boundary_ok = before.is_none_or(|ch| !(ch.is_ascii_alphanumeric() || ch == '_'));
        if !boundary_ok || !after.is_some_and(char::is_whitespace) {
            continue;
        }
        // Skip the whitespace run after the keyword; the path ends at `;`.
        let bytes = text.as_bytes();
        let mut start = idx + 3;
        while bytes.get(start).is_some_and(u8::is_ascii_whitespace) {
            start += 1;
        }
        let semi = text.get(start..)?.find(';')?;
        return Some((start, start + semi));
    }
    None
}
/// Resolve the path of a `use` item, preferring the pre-parsed path, then the
/// raw item text, then the source line the item starts on.
fn extract_use_path(ctx: &FileContext, item: &TopItem) -> Option<String> {
    if let Some(path) = item.use_path.clone() {
        return Some(path);
    }
    if let Some(path) = extract_use_path_from_text(&item.raw) {
        return Some(path);
    }
    // `item.line` is 1-based while `ctx.lines` is 0-indexed.
    let line = ctx.lines.get(item.line.saturating_sub(1))?;
    extract_use_path_from_text(line)
}
/// Expand a use path into the flat list of symbol names it binds in scope.
///
/// Glob imports yield nothing; `as _` aliases and path keywords are skipped;
/// `as name` aliases report the alias; `r#` prefixes are stripped. Malformed
/// brace nesting makes the whole extraction yield an empty list.
fn imported_symbols_from_use_path(path: &str) -> Vec<String> {
    let compact = path.replace(' ', "");
    // A trailing glob binds unnamed symbols; report none.
    if compact.ends_with("::*") {
        return Vec::new();
    }
    // Reduce one leaf segment to the symbol it binds, or `None` when it
    // binds nothing nameable (glob, path keyword, `as _`).
    fn normalize_symbol(segment: &str) -> Option<String> {
        let mut symbol = segment.trim().to_owned();
        if symbol.is_empty() {
            return None;
        }
        if let Some((_left, right)) = symbol.split_once(" as ") {
            if right.trim() == "_" {
                return None;
            }
            symbol = right.trim().to_owned();
        }
        if matches!(symbol.as_str(), "*" | "self" | "super" | "crate") {
            return None;
        }
        if let Some((_, right)) = symbol.rsplit_once("::") {
            symbol = right.to_owned();
        }
        if let Some(stripped) = symbol.strip_prefix("r#") {
            symbol = stripped.to_owned();
        }
        // Re-check keywords: the leaf after `::` may itself be one.
        if matches!(symbol.as_str(), "*" | "self" | "super" | "crate") {
            return None;
        }
        if symbol.is_empty() { None } else { Some(symbol) }
    }
    // Recursively expand `prefix::{a, b::{c}}` segments into leaves; returns
    // `false` on malformed braces, aborting the whole extraction.
    fn collect_symbols_from_segment(
        segment: &str,
        out: &mut Vec<String>,
        normalize_symbol: &impl Fn(&str) -> Option<String>,
    ) -> bool {
        let trimmed = segment.trim();
        if trimmed.is_empty() {
            return true;
        }
        // Locate the first top-level brace pair while validating balance.
        let mut brace_start = None;
        let mut depth = 0_i32;
        let mut brace_end = None;
        for (idx, ch) in trimmed.char_indices() {
            if ch == '{' {
                if brace_start.is_none() {
                    brace_start = Some(idx);
                }
                depth += 1;
            } else if ch == '}' {
                depth -= 1;
                if depth < 0 {
                    return false;
                }
                if depth == 0 {
                    brace_end = Some(idx);
                }
            }
        }
        if depth != 0 {
            return false;
        }
        if let (Some(open), Some(close)) = (brace_start, brace_end) {
            let prefix = trimmed[..open].trim();
            let inner = &trimmed[open + 1..close];
            let suffix = trimmed[close + 1..].trim();
            // Braces must terminate the segment and follow a `::` prefix.
            if !suffix.is_empty() {
                return false;
            }
            if !prefix.is_empty() && !prefix.ends_with("::") {
                return false;
            }
            for child in split_top_level_csv(inner) {
                let expanded = if prefix.is_empty() { child } else { format!("{prefix}{child}") };
                if !collect_symbols_from_segment(&expanded, out, normalize_symbol) {
                    return false;
                }
            }
            return true;
        }
        if let Some(symbol) = normalize_symbol(trimmed) {
            out.push(symbol);
        }
        true
    }
    let mut out = Vec::new();
    if !collect_symbols_from_segment(path, &mut out, &normalize_symbol) {
        return Vec::new();
    }
    out
}
/// Drop a raw-identifier prefix (`r#`) when present.
fn normalize_ident(name: &str) -> &str {
    match name.strip_prefix("r#") {
        Some(stripped) => stripped,
        None => name,
    }
}
/// Compare two identifiers ignoring any `r#` raw prefix on either side.
fn is_same_ident(lhs: &str, rhs: &str) -> bool {
    let left = normalize_ident(lhs);
    let right = normalize_ident(rhs);
    left == right
}
/// A path root that cannot be turned into an import: `Self`, or any root
/// starting with an uppercase letter (a type rather than a module). The
/// keywords `self`/`super`/`crate` remain importable roots.
fn is_non_importable_root(root: &str) -> bool {
    match root {
        "Self" => true,
        "self" | "super" | "crate" => false,
        _ => root.chars().next().is_some_and(char::is_uppercase),
    }
}
/// A use-path root that must not be rewritten: `Self`, or a single uppercase
/// letter (a generic type parameter such as `T`).
fn is_non_importable_use_root(root: &str) -> bool {
    if root == "Self" {
        return true;
    }
    // Byte length 1 keeps the original's ASCII-only single-letter semantics.
    root.len() == 1 && root.chars().next().is_some_and(char::is_uppercase)
}
/// Split a plain `a::b::Symbol` use path (no braces, glob, or alias) into its
/// module prefix and imported leaf symbol.
fn simple_import_prefix_symbol(path: &str) -> Option<(String, String)> {
    let has_forbidden_syntax =
        path.contains('{') || path.contains('}') || path.contains('*') || path.contains(" as ");
    if has_forbidden_syntax {
        return None;
    }
    let compact = path.replace(' ', "");
    // Split on the LAST `::`; a path without one has no prefix to keep.
    let (prefix, symbol) = compact.rsplit_once("::")?;
    if prefix.is_empty() || symbol.is_empty() {
        return None;
    }
    Some((prefix.to_owned(), symbol.to_owned()))
}
/// Plan removing `symbol` from a braced use path: compute the qualified path
/// call sites should use and the rewritten use item (with the symbol dropped,
/// and `self` inserted when the module leaf shares the symbol's name).
fn braced_import_fix_plan(path: &str, symbol: &str) -> Option<(String, Option<String>)> {
    let (open, close) = top_level_brace_range(path)?;
    // Reject anything trailing after the closing brace.
    if !path[close + 1..].trim().is_empty() {
        return None;
    }
    let prefix = braced_import_prefix(path, open)?;
    let inside = &path[open + 1..close];
    let parsed_segments = parse_braced_import_segments(inside)?;
    let (qualified_symbol_path, mut kept) =
        extract_braced_import_target_and_kept(&prefix, &parsed_segments, symbol)?;
    let module_path = prefix.trim_end_matches("::");
    let use_module_alias = module_alias_from_parent_path(module_path);
    let module_leaf = module_path.rsplit("::").next().unwrap_or_default();
    // Re-root the qualified path through the module alias when one exists.
    let mut qualified_symbol_path = apply_parent_alias_to_qualified_path(
        module_path,
        use_module_alias.as_deref(),
        qualified_symbol_path,
    );
    // When the module leaf and the symbol share a name, keep `self` imported
    // and qualify through the module instead of the full path.
    if is_same_ident(module_leaf, symbol) {
        let module_access = use_module_alias.as_deref().unwrap_or(module_leaf);
        if !kept.iter().any(|segment| segment == "self") {
            kept.insert(0, "self".to_owned());
        }
        qualified_symbol_path = format!("{module_access}::{symbol}");
    }
    build_braced_import_rewritten_path(
        &prefix,
        module_path,
        use_module_alias.as_deref(),
        qualified_symbol_path,
        // FIX: `kept` is not used afterwards — pass it by value instead of
        // the redundant `kept.clone()` (clippy: redundant_clone).
        kept,
        false,
    )
}
/// Find the byte offsets of the first top-level `{` and its matching `}`.
/// Returns `None` for unbalanced braces.
/// NOTE(review): `.skip(open)` skips `open` CHARS while `open` is a BYTE
/// offset — identical to the original; only exact for ASCII prefixes.
fn top_level_brace_range(text: &str) -> Option<(usize, usize)> {
    let open = text.find('{')?;
    let mut depth = 0_i32;
    for (idx, ch) in text.char_indices().skip(open) {
        match ch {
            '{' => depth += 1,
            '}' => {
                depth -= 1;
                if depth == 0 {
                    return Some((open, idx));
                }
            },
            _ => {},
        }
    }
    None
}
/// Text before the opening brace, trimmed, with a guaranteed `::` suffix.
/// `None` when there is no prefix at all (a bare `{…}` list).
fn braced_import_prefix(path: &str, open: usize) -> Option<String> {
    let head = path[..open].trim();
    if head.is_empty() {
        return None;
    }
    if head.ends_with("::") {
        Some(head.to_owned())
    } else {
        Some(format!("{head}::"))
    }
}
/// Parse the inside of a top-level brace list into segments, accepting alias
/// entries and one level of nesting, rejecting globs and deeper braces.
fn parse_braced_import_segments(inside: &str) -> Option<Vec<BracedImportSegment>> {
    let segments = split_top_level_csv(inside);
    if segments.is_empty() {
        return None;
    }
    let mut out = Vec::new();
    for segment in segments {
        let trimmed = segment.trim();
        if trimmed.is_empty() {
            continue;
        }
        // Glob imports cannot be rewritten safely.
        if trimmed == "*" {
            return None;
        }
        // Aliased segments are kept verbatim as simple entries.
        if trimmed.contains(" as ") {
            out.push(BracedImportSegment::Simple(trimmed.to_owned()));
            continue;
        }
        if let Some((head, rest)) = trimmed.split_once("::{") {
            let nested = parse_nested_braced_segment(head, rest)?;
            out.push(nested);
            continue;
        }
        // Any remaining braces at this point are malformed.
        if trimmed.contains('{') || trimmed.contains('}') {
            return None;
        }
        out.push(BracedImportSegment::Simple(trimmed.to_owned()));
    }
    if out.is_empty() { None } else { Some(out) }
}
/// Parse the `rest` of a `head::{…}` segment into a nested segment, rejecting
/// globs, empty children, and any deeper brace nesting.
fn parse_nested_braced_segment(head: &str, rest: &str) -> Option<BracedImportSegment> {
    let nested_inside = rest.strip_suffix('}')?;
    let raw_children = split_top_level_csv(nested_inside);
    if raw_children.is_empty() {
        return None;
    }
    let mut children = Vec::with_capacity(raw_children.len());
    for raw in raw_children {
        let child = raw.trim();
        // Globs, empties, and further braces make the segment unusable.
        if child.is_empty() || child == "*" || child.contains('{') || child.contains('}') {
            return None;
        }
        children.push(child.to_owned());
    }
    Some(BracedImportSegment::Nested { head: head.trim().to_owned(), children })
}
/// Pull the FIRST segment matching `symbol` out of a parsed brace list.
///
/// Returns the extracted symbol's fully qualified path plus the remaining
/// segments re-serialized, or `None` when no segment matches.
fn extract_braced_import_target_and_kept(
    prefix: &str,
    segments: &[BracedImportSegment],
    symbol: &str,
) -> Option<(String, Vec<String>)> {
    let mut qualified_symbol_path = None::<String>;
    let mut kept = Vec::new();
    for segment in segments {
        match segment {
            BracedImportSegment::Simple(name) => {
                // Only the first match is extracted; later ones are kept.
                if qualified_symbol_path.is_none()
                    && braced_import_segment_matches_symbol(name, symbol)
                {
                    qualified_symbol_path = Some(format!("{prefix}{name}"));
                } else {
                    kept.push(name.clone());
                }
            },
            BracedImportSegment::Nested { head, children } => {
                let mut child_kept = Vec::new();
                for child in children {
                    if qualified_symbol_path.is_none()
                        && braced_import_segment_matches_symbol(child, symbol)
                    {
                        qualified_symbol_path = Some(format!("{prefix}{head}::{child}"));
                    } else {
                        child_kept.push(child.clone());
                    }
                }
                // Drop the nested group entirely when it emptied out.
                if !child_kept.is_empty() {
                    kept.push(format!("{head}::{{{}}}", child_kept.join(", ")));
                }
            },
        }
    }
    Some((qualified_symbol_path?, kept))
}
/// True when a braced segment names `symbol`, either directly or as the leaf
/// of a fully qualified path.
fn braced_import_segment_matches_symbol(segment: &str, symbol: &str) -> bool {
    if is_same_ident(segment, symbol) {
        return true;
    }
    let leaf = symbol_from_full_import_path(segment);
    leaf.as_deref().is_some_and(|name| is_same_ident(name, symbol))
}
/// Rewrite `module_path::tail` to `alias::tail` when a module alias is in
/// play; otherwise return the qualified path unchanged.
fn apply_parent_alias_to_qualified_path(
    module_path: &str,
    use_module_alias: Option<&str>,
    qualified_symbol_path: String,
) -> String {
    match use_module_alias {
        Some(alias) => {
            // Only rewrite when the path really starts with `module_path::`.
            let tail = qualified_symbol_path
                .strip_prefix(module_path)
                .and_then(|rest| rest.strip_prefix("::"));
            match tail {
                Some(tail) => format!("{alias}::{tail}"),
                None => qualified_symbol_path,
            }
        },
        None => qualified_symbol_path,
    }
}
/// Assemble the rewritten use path after extracting one symbol from a braced
/// import. Returns the qualified symbol path plus the replacement use path
/// (`None` when the whole use item can be dropped).
fn build_braced_import_rewritten_path(
    prefix: &str,
    module_path: &str,
    use_module_alias: Option<&str>,
    qualified_symbol_path: String,
    kept: Vec<String>,
    keep_module_access: bool,
) -> Option<(String, Option<String>)> {
    let needs_module_import = use_module_alias.is_some() || keep_module_access;
    if kept.is_empty() {
        // Nothing left in the braces: either keep a plain module import for
        // qualified access, or drop the use item entirely.
        let replacement = needs_module_import.then(|| module_path.to_owned());
        return Some((qualified_symbol_path, replacement));
    }
    let rewritten = if needs_module_import {
        // Force a single leading `self` so the module stays importable.
        let others: Vec<String> = kept.into_iter().filter(|segment| segment != "self").collect();
        if others.is_empty() {
            format!("{prefix}{{self}}")
        } else {
            format!("{prefix}{{self, {}}}", others.join(", "))
        }
    } else {
        format!("{prefix}{{{}}}", kept.join(", "))
    };
    Some((qualified_symbol_path, Some(rewritten)))
}
/// Split a braced use path into (prefix including the `{`, byte index of the
/// closing `}`, top-level comma-separated segments). Aliases are permitted.
fn parse_braced_path_parts_allow_alias(path: &str) -> Option<(String, usize, Vec<String>)> {
    let (open, close) = top_level_brace_range(path)?;
    // Reject trailing garbage after the closing brace.
    if !path[close + 1..].trim().is_empty() {
        return None;
    }
    let segments = split_top_level_csv(&path[open + 1..close]);
    if segments.is_empty() {
        return None;
    }
    Some((path[..=open].to_owned(), close, segments))
}
fn import004_fix_plan(path: &str, symbol: &str) -> Option<(String, Option<String>)> {
if let Some((prefix, imported_symbol)) = simple_import_prefix_symbol(path)
&& is_same_ident(&imported_symbol, symbol)
{
if let Some(alias) = module_alias_from_parent_path(&prefix) {
return Some((format!("{alias}::{imported_symbol}"), Some(prefix)));
}
return Some((format!("{prefix}::{imported_symbol}"), None));
}
braced_import_fix_plan(path, symbol)
}
/// Fix plan for an imported free function: prefer rewriting call sites to
/// `module::symbol` (keeping or adding the parent-module import) and fall
/// back to the plain fully qualified plan when that is not viable.
fn import004_free_fn_fix_plan(
    ctx: &FileContext,
    current_item: &TopItem,
    path: &str,
    symbol: &str,
) -> Option<(String, Option<String>)> {
    let (default_qualified_symbol_path, rewritten_use_path_without_symbol) =
        import004_fix_plan(path, symbol)?;
    // No unambiguous parent module: use the default plan.
    let Some((parent_module_path, module_symbol)) = import004_parent_module_target(path, symbol)
    else {
        return Some((default_qualified_symbol_path, rewritten_use_path_without_symbol));
    };
    // Module access would conflict with other names: use the default plan.
    let Some(module_access_plan) = import004_preferred_module_access_plan(
        ctx,
        Some(current_item),
        Some(path),
        &parent_module_path,
        &module_symbol,
    ) else {
        return Some((default_qualified_symbol_path, rewritten_use_path_without_symbol));
    };
    let (_, rewritten_use_path_with_parent_module) = import004_fix_plan_with_parent_module(
        ctx,
        path,
        symbol,
        &parent_module_path,
        &module_access_plan.access_path,
    )?;
    // Keep the parent module imported only when the plan requires it.
    let rewritten_use_path = if module_access_plan.keep_parent_module_import {
        rewritten_use_path_with_parent_module
    } else {
        rewritten_use_path_without_symbol
    };
    Some((format!("{}::{}", module_access_plan.access_path, symbol), rewritten_use_path))
}
fn import004_parent_module_target(path: &str, symbol: &str) -> Option<(String, String)> {
let mut parent_module_paths = imported_full_paths_from_use_path(path)
.into_iter()
.filter(|full_path| {
symbol_from_full_import_path(full_path)
.as_deref()
.is_some_and(|leaf| is_same_ident(leaf, symbol))
})
.filter_map(|full_path| {
full_path.rsplit_once("::").map(|(parent_module_path, _)| parent_module_path.to_owned())
})
.collect::<Vec<_>>();
parent_module_paths.sort();
parent_module_paths.dedup();
if parent_module_paths.len() != 1 {
return None;
}
let parent_module_path = parent_module_paths.into_iter().next()?;
let module_symbol = parent_module_path.rsplit("::").next()?.trim().to_owned();
if module_symbol.is_empty() || matches!(module_symbol.as_str(), "crate" | "self" | "super") {
return None;
}
Some((parent_module_path, normalize_ident(&module_symbol).to_owned()))
}
/// Decide how call sites should reach the parent module — via a `super`
/// alias or its plain name — and whether its import must be kept/added.
///
/// Returns `None` when a local item, another import, or an existing
/// root-qualified usage would make the module name ambiguous.
fn import004_preferred_module_access_plan(
    ctx: &FileContext,
    current_item: Option<&TopItem>,
    current_use_path: Option<&str>,
    parent_module_path: &str,
    module_symbol: &str,
) -> Option<Import004ModuleAccessPlan> {
    // `super::…` parents are accessed via their alias and always imported.
    if let Some(alias) = module_alias_from_parent_path(parent_module_path) {
        return Some(Import004ModuleAccessPlan {
            access_path: alias,
            keep_parent_module_import: true,
        });
    }
    let compact_parent_module_path = compact_path_for_match(parent_module_path);
    let mut keep_parent_module_import = true;
    // The current use path may already import the module itself.
    if current_use_path.is_some_and(|path| {
        import004_use_path_imports_parent_module(path, &compact_parent_module_path)
    }) {
        keep_parent_module_import = false;
    }
    for item in &ctx.top_items {
        let Some(other_path) = item.use_path.clone().or_else(|| extract_use_path(ctx, item)) else {
            // A non-use item named like the module would shadow it: bail.
            if item.kind != TopKind::Use
                && item.name.as_deref().is_some_and(|name| is_same_ident(name, module_symbol))
            {
                return None;
            }
            continue;
        };
        let is_current_item = current_item
            .is_some_and(|current_item| use_item_lock_key(item) == use_item_lock_key(current_item));
        if item.kind != TopKind::Use {
            if item.name.as_deref().is_some_and(|name| is_same_ident(name, module_symbol)) {
                return None;
            }
            continue;
        }
        if import004_use_path_imports_parent_module(&other_path, &compact_parent_module_path) {
            keep_parent_module_import = false;
            if is_current_item {
                continue;
            }
        }
        // The current item never conflicts with itself.
        if is_current_item {
            continue;
        }
        if import004_use_path_conflicts_with_parent_module(
            &other_path,
            &compact_parent_module_path,
            module_symbol,
        ) {
            return None;
        }
    }
    // Adding a module import must not clash with existing `module::…`
    // usages that resolve elsewhere.
    if keep_parent_module_import
        && import004_has_conflicting_root_qualified_path_usage(
            ctx,
            module_symbol,
            &compact_parent_module_path,
        ) {
        return None;
    }
    Some(Import004ModuleAccessPlan {
        access_path: module_symbol.to_owned(),
        keep_parent_module_import,
    })
}
/// True when `path` already imports the parent module itself, either as a
/// leaf import or via a `{self, …}` entry.
fn import004_use_path_imports_parent_module(path: &str, parent_module_path: &str) -> bool {
    let direct = imported_full_paths_from_use_path(path);
    let via_self = imported_self_full_paths_from_use_path(path);
    direct
        .into_iter()
        .chain(via_self)
        .any(|full_path| compact_path_for_match(&full_path) == parent_module_path)
}
/// True when importing the parent module by name would collide with another
/// name this use path already brings into scope.
fn import004_use_path_conflicts_with_parent_module(
    path: &str,
    parent_module_path: &str,
    module_symbol: &str,
) -> bool {
    // Check regular leaf imports first, then `{self}`-style imports.
    let full_conflict = imported_full_paths_from_use_path(path).into_iter().any(|full_path| {
        import004_imported_full_path_conflicts_with_parent_module(
            &full_path,
            parent_module_path,
            module_symbol,
        )
    });
    if full_conflict {
        return true;
    }
    imported_self_full_paths_from_use_path(path).into_iter().any(|full_path| {
        import004_imported_self_path_conflicts_with_parent_module(
            &full_path,
            parent_module_path,
            module_symbol,
        )
    })
}
/// A fully imported path conflicts when it lives outside the parent module
/// yet its root segment or leaf symbol shadows `module_symbol`.
fn import004_imported_full_path_conflicts_with_parent_module(
    full_path: &str,
    parent_module_path: &str,
    module_symbol: &str,
) -> bool {
    let compact = compact_path_for_match(full_path);
    // Paths inside the parent module itself never conflict.
    let within_parent =
        compact == parent_module_path || compact.starts_with(&format!("{parent_module_path}::"));
    if within_parent {
        return false;
    }
    if import004_root_module_symbol_matches_conflicting_path(&compact, module_symbol) {
        return true;
    }
    symbol_from_full_import_path(&compact)
        .as_deref()
        .is_some_and(|leaf| is_same_ident(leaf, module_symbol))
}
/// A `{self}`-style import conflicts when it lies outside the parent module
/// and its root or final segment shadows `module_symbol`.
fn import004_imported_self_path_conflicts_with_parent_module(
    full_path: &str,
    parent_module_path: &str,
    module_symbol: &str,
) -> bool {
    let compact = compact_path_for_match(full_path);
    // Paths inside the parent module itself never conflict.
    if compact == parent_module_path
        || compact.starts_with(&format!("{parent_module_path}::"))
    {
        return false;
    }
    if import004_root_module_symbol_matches_conflicting_path(&compact, module_symbol) {
        return true;
    }
    let last_segment = compact.rsplit("::").next();
    last_segment.is_some_and(|segment| is_same_ident(segment, module_symbol))
}
/// True when the (possibly `::`-prefixed) path's first segment is
/// `module_symbol`.
fn import004_root_module_symbol_matches_conflicting_path(path: &str, module_symbol: &str) -> bool {
    let without_leading_colons = path.trim_start_matches(':');
    match without_leading_colons.split("::").next() {
        Some(root) if !root.is_empty() => is_same_ident(root, module_symbol),
        _ => false,
    }
}
/// True when any root-qualified path usage — in the AST or inside macro
/// token trees — would clash with importing the parent module by name.
fn import004_has_conflicting_root_qualified_path_usage(
    ctx: &FileContext,
    module_symbol: &str,
    parent_module_path: &str,
) -> bool {
    if import004_has_conflicting_root_qualified_ast_path_usage(
        ctx,
        module_symbol,
        parent_module_path,
    ) {
        return true;
    }
    import004_has_conflicting_root_qualified_macro_path_usage(
        ctx,
        module_symbol,
        parent_module_path,
    )
}
/// Scan AST paths for a qualified usage rooted at `module_symbol` that does
/// NOT resolve inside `parent_module_path` — importing the module would then
/// change that usage's meaning.
fn import004_has_conflicting_root_qualified_ast_path_usage(
    ctx: &FileContext,
    module_symbol: &str,
    parent_module_path: &str,
) -> bool {
    for path in ctx.source_file.syntax().descendants().filter_map(ast::Path::cast) {
        // Only whole, qualified paths in the current (non-`mod`) scope count.
        if path_is_qualifier_subpath(&path)
            || path.qualifier().is_none()
            || !import004_is_in_current_scope(path.syntax())
        {
            continue;
        }
        // Paths inside `use` items are handled by the import analysis.
        if path.syntax().ancestors().any(|node| Use::cast(node).is_some()) {
            continue;
        }
        let mut segments = Vec::new();
        if !collect_path_segment_texts(&path, &mut segments) || segments.len() < 2 {
            continue;
        }
        if !segments.first().is_some_and(|root| is_same_ident(root, module_symbol)) {
            continue;
        }
        let compact_full_path = compact_path_for_match(&segments.join("::"));
        // Usages that stay inside the parent module are consistent.
        if compact_full_path == parent_module_path
            || compact_full_path.starts_with(&format!("{parent_module_path}::"))
        {
            continue;
        }
        return true;
    }
    false
}
fn import004_has_conflicting_root_qualified_macro_path_usage(
ctx: &FileContext,
module_symbol: &str,
parent_module_path: &str,
) -> bool {
let path_re = Regex::new(
r"(?:\b(?:crate|self|super|[A-Za-z_][A-Za-z0-9_]*|r#[A-Za-z_][A-Za-z0-9_]*)(?:\s*::\s*(?:[A-Za-z_][A-Za-z0-9_]*|r#[A-Za-z_][A-Za-z0-9_]*)\s*)+)",
)
.expect("Compile import004 root conflict macro path regex.");
for macro_call in ctx.source_file.syntax().descendants().filter_map(MacroCall::cast) {
if !import004_is_in_current_scope(macro_call.syntax()) {
continue;
}
let Some(token_tree) = macro_call.token_tree() else {
continue;
};
let tree_text = token_tree.syntax().text().to_string();
for found in path_re.find_iter(&tree_text) {
if !macro_symbol_is_import009_unqualified(&tree_text, found.start()) {
continue;
}
let segments = found
.as_str()
.split("::")
.map(str::trim)
.filter(|segment| !segment.is_empty())
.collect::<Vec<_>>();
if segments.len() < 2
|| !segments.first().is_some_and(|root| is_same_ident(root, module_symbol))
{
continue;
}
let compact_full_path = compact_path_for_match(&segments.join("::"));
if compact_full_path == parent_module_path
|| compact_full_path.starts_with(&format!("{parent_module_path}::"))
{
continue;
}
return true;
}
}
false
}
/// A node is "in the current scope" when no ancestor is an inline `mod`.
fn import004_is_in_current_scope(syntax: &SyntaxNode) -> bool {
    syntax.ancestors().all(|node| Module::cast(node).is_none())
}
/// Like `import004_fix_plan`, but qualifies the symbol through an explicit
/// parent-module access path instead of the full import prefix.
fn import004_fix_plan_with_parent_module(
    ctx: &FileContext,
    path: &str,
    symbol: &str,
    parent_module_path: &str,
    module_access: &str,
) -> Option<(String, Option<String>)> {
    match simple_import_prefix_symbol(path) {
        Some((prefix, imported_symbol)) if is_same_ident(&imported_symbol, symbol) => {
            Some((format!("{module_access}::{imported_symbol}"), Some(prefix)))
        },
        _ => import004_braced_fix_plan_with_parent_module(
            ctx,
            path,
            symbol,
            parent_module_path,
            module_access,
        ),
    }
}
/// Braced variant of the parent-module fix plan: drop `symbol` from the
/// braces, then make sure the parent module itself stays imported.
fn import004_braced_fix_plan_with_parent_module(
    ctx: &FileContext,
    path: &str,
    symbol: &str,
    parent_module_path: &str,
    module_access: &str,
) -> Option<(String, Option<String>)> {
    let (_unused_default_path, leftover_use_path) = braced_import_fix_plan(path, symbol)?;
    let rewritten_use_path = if let Some(path_without_symbol) = leftover_use_path {
        // Something remains in the braces: merge the parent module into it.
        import004_merge_parent_module_use_path(ctx, &path_without_symbol, parent_module_path)?
    } else {
        // The braces emptied out: import the parent module directly.
        parent_module_path.to_owned()
    };
    Some((format!("{module_access}::{symbol}"), Some(rewritten_use_path)))
}
/// Ensure `path_without_symbol` also imports the parent module, splicing a
/// relative segment (or `self`) into its brace list when it does not yet.
fn import004_merge_parent_module_use_path(
    ctx: &FileContext,
    path_without_symbol: &str,
    parent_module_path: &str,
) -> Option<String> {
    // Already imported: nothing to merge.
    if import004_use_path_imports_parent_module(
        path_without_symbol,
        &compact_path_for_match(parent_module_path),
    ) {
        return Some(path_without_symbol.to_owned());
    }
    let (prefix, close, mut segments) = parse_braced_path_parts_allow_alias(path_without_symbol)?;
    let parent_segment = import004_parent_module_relative_segment(&prefix, parent_module_path)?;
    // Insert the parent segment first unless it is already present.
    if !segments
        .iter()
        .any(|segment| compact_path_for_match(segment) == compact_path_for_match(&parent_segment))
    {
        segments.insert(0, parent_segment);
    }
    let combined_path =
        format!("{prefix}{}{}", segments.join(", "), &path_without_symbol[close..=close]);
    // Normalize `{self, child}` mixes, falling back to the raw combination.
    Some(
        normalize_mixed_self_child_use_path_preserve_self(ctx, &combined_path)
            .unwrap_or(combined_path),
    )
}
/// Express `parent_module_path` relative to a braced prefix: `self` when they
/// name the same module, otherwise the remaining path suffix.
fn import004_parent_module_relative_segment(
    braced_prefix: &str,
    parent_module_path: &str,
) -> Option<String> {
    // Peel the `{` and trailing `::` off the prefix to get its module path.
    let prefix_module_path =
        braced_prefix.trim_end_matches('{').trim().trim_end_matches("::").trim();
    let compact_prefix = compact_path_for_match(prefix_module_path);
    let compact_parent = compact_path_for_match(parent_module_path);
    if compact_prefix == compact_parent {
        return Some("self".to_owned());
    }
    let suffix = compact_parent.strip_prefix(&format!("{compact_prefix}::"))?;
    let relative = suffix.trim();
    (!relative.is_empty()).then(|| relative.to_owned())
}
/// Derive a module alias from a `super::`-rooted parent path: the final
/// non-empty path segment. Paths not rooted at `super` yield `None`.
fn module_alias_from_parent_path(path: &str) -> Option<String> {
    if !path.starts_with("super::") {
        return None;
    }
    let leaf = path.rsplit("::").next()?;
    if leaf.is_empty() { None } else { Some(leaf.to_owned()) }
}
/// Collect unique byte ranges of unqualified calls to `symbol`, both as real
/// call expressions and as best-effort matches inside macro token trees.
fn unqualified_function_call_ranges(ctx: &FileContext, symbol: &str) -> Vec<(usize, usize)> {
    let mut ranges = Vec::new();
    let mut seen_ranges = HashSet::new();
    for call_expr in ctx.source_file.syntax().descendants().filter_map(CallExpr::cast) {
        let callee_path = call_expr
            .expr()
            .and_then(|expr| PathExpr::cast(expr.syntax().clone()))
            .and_then(|path_expr| path_expr.path());
        let Some(path) = callee_path else {
            continue;
        };
        // Qualified calls (`module::symbol(..)`) are not this rule's concern.
        if path.qualifier().is_some() {
            continue;
        }
        let matches_symbol = path
            .segment()
            .and_then(|seg| seg.name_ref())
            .is_some_and(|name_ref| is_same_ident(name_ref.text().as_str(), symbol));
        if !matches_symbol {
            continue;
        }
        let text_range = path.syntax().text_range();
        let range = (usize::from(text_range.start()), usize::from(text_range.end()));
        if seen_ranges.insert(range) {
            ranges.push(range);
        }
    }
    // Macro bodies are opaque to the AST; scan their token text as a fallback.
    for range in unqualified_macro_token_function_call_ranges(ctx, symbol) {
        if seen_ranges.insert(range) {
            ranges.push(range);
        }
    }
    ranges
}
/// Scan macro token trees for whole-word occurrences of `symbol` that look
/// like unqualified function calls, returning their absolute byte ranges.
fn unqualified_macro_token_function_call_ranges(
    ctx: &FileContext,
    symbol: &str,
) -> Vec<(usize, usize)> {
    let pattern = format!(r"\b{}\b", regex::escape(symbol));
    let symbol_re = Regex::new(&pattern).expect("Compile import004 macro function regex.");
    let mut ranges = Vec::new();
    for macro_call in ctx.source_file.syntax().descendants().filter_map(MacroCall::cast) {
        let Some(token_tree) = macro_call.token_tree() else {
            continue;
        };
        let tree_text = token_tree.syntax().text().to_string();
        let tree_start = usize::from(token_tree.syntax().text_range().start());
        for found in symbol_re.find_iter(&tree_text) {
            let (symbol_start, symbol_end) = (found.start(), found.end());
            // Keep only call-shaped, unqualified occurrences.
            if macro_symbol_has_import004_function_follow(&tree_text, symbol_end)
                && macro_symbol_is_import009_unqualified(&tree_text, symbol_start)
            {
                ranges.push((tree_start + symbol_start, tree_start + symbol_end));
            }
        }
    }
    ranges
}
/// Report whether the text after `symbol_end` looks like a function call:
/// an opening paren, or a `::< …` turbofish, after optional ASCII whitespace.
fn macro_symbol_has_import004_function_follow(text: &str, symbol_end: usize) -> bool {
    let bytes = text.as_bytes();
    let skip_ws = |mut pos: usize| {
        while pos < bytes.len() && bytes[pos].is_ascii_whitespace() {
            pos += 1;
        }
        pos
    };
    let idx = skip_ws(symbol_end);
    match bytes.get(idx) {
        Some(b'(') => true,
        Some(b':') if bytes.get(idx + 1) == Some(&b':') => {
            // `sym::<T>(…)` — a turbofish counts as a call follow.
            let after = skip_ws(idx + 2);
            bytes.get(after) == Some(&b'<')
        },
        _ => false,
    }
}
/// Collect byte ranges of unqualified macro invocations named `symbol`.
fn unqualified_macro_call_ranges(ctx: &FileContext, symbol: &str) -> Vec<(usize, usize)> {
    let mut ranges = Vec::new();
    for macro_call in ctx.source_file.syntax().descendants().filter_map(MacroCall::cast) {
        let Some(path) = macro_call.path() else {
            continue;
        };
        // Skip qualified invocations such as `module::mac!()`.
        if path.qualifier().is_some() {
            continue;
        }
        let name_matches = path
            .segment()
            .and_then(|seg| seg.name_ref())
            .is_some_and(|name_ref| is_same_ident(name_ref.text().as_str(), symbol));
        if name_matches {
            let text_range = path.syntax().text_range();
            ranges.push((usize::from(text_range.start()), usize::from(text_range.end())));
        }
    }
    ranges
}
/// Rank a use path into its import group: 0 for the standard library,
/// 2 for crate-local / workspace paths, and 1 for external crates.
fn use_origin(path: &str, local_module_roots: &HashSet<String>) -> usize {
    let trimmed = path.replace("pub ", "");
    let root = trimmed.trim_start_matches(':').split("::").next().unwrap_or_default();
    let normalized_root = normalize_ident(root);
    match root {
        "std" | "core" | "alloc" => 0,
        "crate" | "self" | "super" => 2,
        // Workspace roots may be written with dashes; compare both spellings.
        _ if local_module_roots.contains(normalized_root)
            || WORKSPACE_IMPORT_ROOTS.contains(normalized_root)
            || WORKSPACE_IMPORT_ROOTS.contains(&normalized_root.replace('-', "_")) =>
        {
            2
        },
        _ => 1,
    }
}
/// Gather the normalized names of all top-level `mod` items in the file.
fn collect_local_module_roots(ctx: &FileContext) -> HashSet<String> {
    let mut roots = HashSet::new();
    for item in &ctx.top_items {
        if item.kind != TopKind::Mod {
            continue;
        }
        if let Some(name) = item.name.as_deref() {
            roots.insert(normalize_ident(name).to_owned());
        }
    }
    roots
}
/// Split `text` on commas that sit at the top level, i.e. outside any
/// `{…}` or `<…>` nesting, trimming each piece and dropping empty ones.
///
/// Depth counters are clamped at zero, so stray closers are tolerated.
fn split_top_level_csv(text: &str) -> Vec<String> {
    let mut out = Vec::new();
    let mut start = 0_usize;
    let mut depth_brace = 0_i32;
    let mut depth_angle = 0_i32;
    // Iterate char indices lazily; collecting them into a Vec first (as the
    // previous version did) allocated a full index buffer for no benefit.
    for (idx, ch) in text.char_indices() {
        match ch {
            '{' => depth_brace += 1,
            '}' => depth_brace = (depth_brace - 1).max(0),
            '<' => depth_angle += 1,
            '>' => depth_angle = (depth_angle - 1).max(0),
            ',' if depth_brace == 0 && depth_angle == 0 => {
                let segment = text[start..idx].trim();
                if !segment.is_empty() {
                    out.push(segment.to_owned());
                }
                // `,` is one byte, so the next segment starts right after it.
                start = idx + 1;
            },
            _ => {},
        }
    }
    let tail = text[start..].trim();
    if !tail.is_empty() {
        out.push(tail.to_owned());
    }
    out
}
/// Normalize a mixed `self`/child braced use path without forcing existing
/// `self` entries to be kept; returns `None` when nothing changes.
fn normalize_mixed_self_child_use_path(ctx: &FileContext, path: &str) -> Option<String> {
    normalize_mixed_self_child_use_path_with_options(ctx, path, false)
}
/// Like [`normalize_mixed_self_child_use_path`], but always keeps existing
/// `self` entries in the rewritten path.
fn normalize_mixed_self_child_use_path_preserve_self(
    ctx: &FileContext,
    path: &str,
) -> Option<String> {
    normalize_mixed_self_child_use_path_with_options(ctx, path, true)
}
/// Rewrite a braced use path so that `head` / `head::{…}` segment pairs merge
/// into one group; returns `None` when no rewrite applies.
fn normalize_mixed_self_child_use_path_with_options(
    ctx: &FileContext,
    path: &str,
    force_keep_self: bool,
) -> Option<String> {
    let (prefix, close, segments) = parse_braced_path_parts_allow_alias(path)?;
    let root_path = prefix
        .trim()
        .trim_start_matches("pub ")
        .trim()
        .trim_end_matches('{')
        .trim()
        .trim_end_matches("::")
        .trim();
    let root = root_path.split("::").next().unwrap_or_default();
    // Dropping an unused `self` is only attempted for crate-relative roots.
    let allow_drop_unused_self = matches!(root, "crate" | "self" | "super");
    let (groups, parsed_heads) = build_mixed_use_groups(&segments);
    let rewritten = rewrite_mixed_use_segments(
        ctx,
        &segments,
        &groups,
        &parsed_heads,
        allow_drop_unused_self,
        force_keep_self,
    )?;
    let closer = &path[close..=close];
    let rewritten_path = format!("{prefix}{}{closer}", rewritten.join(", "));
    if rewritten_path == path { None } else { Some(rewritten_path) }
}
/// Parse every segment into `groups`, recording for each segment the head it
/// was grouped under (if any) in the returned parallel vector.
fn build_mixed_use_groups(
    segments: &[String],
) -> (HashMap<String, MixedUseGroup>, Vec<Option<String>>) {
    let mut groups: HashMap<String, MixedUseGroup> = HashMap::new();
    let parsed_heads = segments
        .iter()
        .enumerate()
        .map(|(idx, segment)| parse_mixed_use_segment_into_group(idx, segment, &mut groups))
        .collect::<Vec<_>>();
    (groups, parsed_heads)
}
/// Record one use segment into its head's group: `head::child`, a nested
/// `head::{…}` list, or a bare `head` (which marks the group's `self`).
/// Returns the head the segment was grouped under, or `None` when unusable.
fn parse_mixed_use_segment_into_group(
    idx: usize,
    segment: &str,
    groups: &mut HashMap<String, MixedUseGroup>,
) -> Option<String> {
    let head_is_usable =
        |head: &str| !head.is_empty() && !head.contains('{') && !head.contains('}');
    match segment.split_once("::") {
        Some((raw_head, rest)) => {
            let head = raw_head.trim();
            if !head_is_usable(head) {
                return None;
            }
            if rest.starts_with('{') && rest.ends_with('}') {
                return parse_mixed_nested_group(idx, head, rest, groups);
            }
            let child = rest.trim();
            // Deeper paths (`head::a::b`) are left alone.
            if child.is_empty() || child.contains("::") {
                return None;
            }
            let group = groups.entry(head.to_owned()).or_default();
            group.indices.push(idx);
            group.children.push(child.to_owned());
            Some(head.to_owned())
        },
        None => {
            let head = segment.trim();
            if !head_is_usable(head) {
                return None;
            }
            let group = groups.entry(head.to_owned()).or_default();
            group.indices.push(idx);
            group.has_self = true;
            Some(head.to_owned())
        },
    }
}
/// Fold a nested `head::{a, b, self}` segment into `head`'s group, splitting
/// the brace contents on top-level commas.
fn parse_mixed_nested_group(
    idx: usize,
    head: &str,
    rest: &str,
    groups: &mut HashMap<String, MixedUseGroup>,
) -> Option<String> {
    // Strip the surrounding braces; `rest` starts with `{` and ends with `}`.
    let inner = &rest[1..rest.len().saturating_sub(1)];
    let group = groups.entry(head.to_owned()).or_default();
    group.indices.push(idx);
    for child in split_top_level_csv(inner) {
        if child == "self" {
            group.has_self = true;
        } else {
            group.children.push(child);
        }
    }
    Some(head.to_owned())
}
/// Rewrite the segment list by collapsing each mergeable head group into a
/// single `head::{self, …}` segment emitted at the group's first position;
/// groups that cannot merge may instead have an unused `self` entry dropped
/// (when `allow_drop_unused_self`). Returns `None` when no segment changed.
fn rewrite_mixed_use_segments(
    ctx: &FileContext,
    segments: &[String],
    groups: &HashMap<String, MixedUseGroup>,
    parsed_heads: &[Option<String>],
    allow_drop_unused_self: bool,
    force_keep_self: bool,
) -> Option<Vec<String>> {
    // `emit[idx]` is cleared for segments absorbed into an earlier merge.
    let mut emit = vec![true; segments.len()];
    let mut merged = false;
    let mut rewritten = Vec::new();
    for (idx, segment) in segments.iter().enumerate() {
        if !emit[idx] {
            continue;
        }
        // Segments that did not parse into a group pass through unchanged.
        let Some(head) = parsed_heads.get(idx).cloned().flatten() else {
            rewritten.push(segment.to_owned());
            continue;
        };
        let Some(group) = groups.get(&head) else {
            rewritten.push(segment.to_owned());
            continue;
        };
        if !can_merge_mixed_group(group, idx) {
            // Unmergeable group: optionally drop a `self` entry whose symbol
            // is never referenced outside use items.
            if allow_drop_unused_self
                && group.indices.first().copied() == Some(idx)
                && group.has_self
                && !symbol_is_referenced_outside_use(ctx, &head)
                && let Some(rewritten_segment) =
                    drop_unused_self_from_nested_use_segment(segment, &head)
            {
                rewritten.push(rewritten_segment);
                merged = true;
                continue;
            }
            rewritten.push(segment.to_owned());
            continue;
        }
        let children = dedup_mixed_group_children(group);
        // Keep `self` when forced, or when the bare module name is referenced
        // outside use items.
        let keep_self = force_keep_self || symbol_is_referenced_outside_use(ctx, &head);
        let combined = if keep_self {
            format!("{head}::{{self, {}}}", children.join(", "))
        } else {
            format!("{head}::{{{}}}", children.join(", "))
        };
        rewritten.push(combined);
        merged = true;
        // Suppress the group's later segments; they now live in `combined`.
        for original_idx in group.indices.iter().skip(1) {
            emit[*original_idx] = false;
        }
    }
    if merged { Some(rewritten) } else { None }
}
/// A group merges only at its first segment, and only when it has a `self`
/// entry plus children spread across more than one segment.
fn can_merge_mixed_group(group: &MixedUseGroup, idx: usize) -> bool {
    let anchored_here = group.indices.first().copied() == Some(idx);
    anchored_here && group.has_self && group.indices.len() > 1 && !group.children.is_empty()
}
/// Return the group's children with duplicates removed, preserving order.
fn dedup_mixed_group_children(group: &MixedUseGroup) -> Vec<String> {
    let mut seen = HashSet::new();
    let mut children = Vec::new();
    for child in &group.children {
        // Track seen names by `&str` so duplicates are never cloned; the
        // previous version cloned every child just to probe the set.
        if seen.insert(child.as_str()) {
            children.push(child.clone());
        }
    }
    children
}
/// Remove the `self` entry from a single-level nested segment for `head`,
/// collapsing `head::{self}` to plain `head`; `None` when nothing changes.
fn drop_unused_self_from_nested_use_segment(segment: &str, head: &str) -> Option<String> {
    let (nested_head, inner) = parse_single_level_nested_use_segment(segment.trim())?;
    if !is_same_ident(nested_head, head) {
        return None;
    }
    let mut children = split_top_level_csv(inner);
    let count_before = children.len();
    children.retain(|child| child.trim() != "self");
    // If nothing was removed, there was no `self` entry to drop.
    if children.len() == count_before {
        return None;
    }
    match children.is_empty() {
        true => Some(head.to_owned()),
        false => Some(format!("{head}::{{{}}}", children.join(", "))),
    }
}
/// Splice `new_path` into a raw use item, returning `None` when the old and
/// new paths are already equivalent after normalization.
fn rewrite_use_item_with_path(raw: &str, new_path: &str) -> Option<String> {
    let (start, end) = find_use_path_range(raw)?;
    let original_path = raw.get(start..end)?;
    let unchanged = normalize_use_path_for_equivalence(original_path)
        == normalize_use_path_for_equivalence(new_path);
    if unchanged {
        return None;
    }
    Some(format!("{}{new_path}{}", &raw[..start], &raw[end..]))
}
/// Group consecutive non-`pub` use items into runs, breaking a run on any
/// other kind of top-level item.
fn collect_non_pub_use_runs(ctx: &FileContext) -> Vec<Vec<&TopItem>> {
    let mut runs: Vec<Vec<&TopItem>> = Vec::new();
    let mut current: Vec<&TopItem> = Vec::new();
    for item in &ctx.top_items {
        let belongs = item.kind == TopKind::Use && !item.is_pub;
        if belongs {
            current.push(item);
        } else if !current.is_empty() {
            runs.push(mem::take(&mut current));
        }
    }
    if !current.is_empty() {
        runs.push(current);
    }
    runs
}
/// Group consecutive `pub` use items into runs, breaking a run on any other
/// kind of top-level item.
fn collect_pub_use_runs(ctx: &FileContext) -> Vec<Vec<&TopItem>> {
    let mut runs: Vec<Vec<&TopItem>> = Vec::new();
    let mut current: Vec<&TopItem> = Vec::new();
    for item in &ctx.top_items {
        let belongs = item.kind == TopKind::Use && item.is_pub;
        if belongs {
            current.push(item);
        } else if !current.is_empty() {
            runs.push(mem::take(&mut current));
        }
    }
    if !current.is_empty() {
        runs.push(current);
    }
    runs
}
/// Enforce RUST-STYLE-IMPORT-002 across pub-use runs: converge eligible
/// segments into a single `pub use self::{…};` item where possible, and
/// otherwise flag (and, when `emit_edits`, delete) blank lines inside a run.
fn apply_pub_use_group_rules(
    ctx: &FileContext,
    violations: &mut Vec<Violation>,
    edits: &mut Vec<Edit>,
    emit_edits: bool,
) {
    let local_module_roots = collect_local_module_roots(ctx);
    let pub_use_runs = collect_pub_use_runs(ctx);
    for run in pub_use_runs {
        let mut applied_self_group_edit = false;
        for segment in collect_pub_use_self_group_segments(&run) {
            if let Some((line, start, end, replacement)) =
                build_pub_use_self_group_edit(ctx, segment, &local_module_roots)
            {
                shared::push_violation(
                    violations,
                    ctx,
                    line,
                    "RUST-STYLE-IMPORT-002",
                    "Prefer converging local module re-exports into `pub use self::{...};`.",
                    true,
                );
                if emit_edits {
                    edits.push(Edit { start, end, replacement, rule: "RUST-STYLE-IMPORT-002" });
                }
                applied_self_group_edit = true;
            }
        }
        // A self-group rewrite already replaces the run's text, so the
        // blank-line pass below would produce overlapping edits; skip it.
        if applied_self_group_edit {
            continue;
        }
        for pair in run.windows(2) {
            let prev = pair[0];
            let curr = pair[1];
            let between = separator_lines(ctx, prev, curr);
            // Only act on gaps that consist purely of blank lines; gaps with
            // any non-blank content (e.g. comments) are left alone.
            if between.is_empty() || !between.iter().all(|line| line.trim().is_empty()) {
                continue;
            }
            shared::push_violation(
                violations,
                ctx,
                curr.line,
                "RUST-STYLE-IMPORT-002",
                "Do not place blank lines inside a pub use group.",
                true,
            );
            if !emit_edits {
                continue;
            }
            let Some((prev_start, prev_end)) = item_text_range(ctx, prev) else {
                continue;
            };
            let Some((curr_start, _curr_end)) = item_text_range(ctx, curr) else {
                continue;
            };
            // Guard against inverted or degenerate ranges before deleting.
            if curr_start <= prev_end || prev_start >= prev_end {
                continue;
            }
            edits.push(Edit {
                start: prev_end,
                end: curr_start,
                replacement: String::new(),
                rule: "RUST-STYLE-IMPORT-002",
            });
        }
    }
}
/// Partition a pub-use run into maximal sub-slices (length ≥ 2) whose items
/// all share the same non-empty visibility and the same attributes.
fn collect_pub_use_self_group_segments<'a>(run: &'a [&'a TopItem]) -> Vec<&'a [&'a TopItem]> {
    let mut segments = Vec::new();
    let mut start = 0_usize;
    while start < run.len() {
        let visibility = run[start].visibility.trim();
        // Items without an explicit visibility never anchor a segment.
        if visibility.is_empty() {
            start += 1;
            continue;
        }
        let mut end = start + 1;
        while end < run.len()
            && run[end].visibility.trim() == visibility
            && run[end].attrs == run[start].attrs
        {
            end += 1;
        }
        if end - start >= 2 {
            segments.push(&run[start..end]);
        }
        start = end;
    }
    segments
}
/// Build the `(line, start, end, replacement)` edit that collapses a uniform
/// run of local-module re-exports into one `<attrs> <vis> use self::{…};`
/// item. Returns `None` when the run is too short, separated by non-blank
/// lines, mixed in visibility/attributes, not rooted at modules declared in
/// this file, or when the rewrite would be a no-op.
fn build_pub_use_self_group_edit(
    ctx: &FileContext,
    run: &[&TopItem],
    local_module_roots: &HashSet<String>,
) -> Option<(usize, usize, usize, String)> {
    if run.len() < 2 {
        return None;
    }
    // Any non-blank line between items makes the run ineligible.
    if !run.windows(2).all(|pair| {
        separator_lines(ctx, pair[0], pair[1]).iter().all(|line| line.trim().is_empty())
    }) {
        return None;
    }
    let first_visibility = run[0].visibility.trim();
    let first_attrs = &run[0].attrs;
    if first_visibility.is_empty() {
        return None;
    }
    // Every item must share the first item's visibility and attributes.
    if run
        .iter()
        .any(|item| item.visibility.trim() != first_visibility || item.attrs != *first_attrs)
    {
        return None;
    }
    let mut grouped_paths = Vec::with_capacity(run.len());
    for item in run {
        let path = extract_use_path(ctx, item)?;
        let root =
            path.trim_start_matches(':').split("::").next().map(str::trim).unwrap_or_default();
        let normalized_root = normalize_ident(root);
        // Only re-exports rooted at a module declared in this file qualify;
        // `crate`/`self`/`super`-rooted paths are left untouched.
        if matches!(normalized_root, "crate" | "self" | "super")
            || !local_module_roots.contains(normalized_root)
        {
            return None;
        }
        grouped_paths.push(path);
    }
    let (start, end) = run_text_range(ctx, run[0], run[run.len() - 1])?;
    let original = ctx.text.get(start..end)?;
    let attrs_prefix = if first_attrs.is_empty() {
        String::new()
    } else {
        let attrs = first_attrs
            .iter()
            .map(|attr| attr.trim())
            .filter(|attr| !attr.is_empty())
            .collect::<Vec<_>>();
        if attrs.is_empty() { String::new() } else { format!("{} ", attrs.join(" ")) }
    };
    let mut replacement =
        format!("{attrs_prefix}{first_visibility} use self::{{{}}};", grouped_paths.join(", "));
    // Preserve the run's trailing newline so surrounding layout is unchanged.
    if original.ends_with('\n') {
        replacement.push('\n');
    }
    if replacement == original {
        return None;
    }
    Some((run[0].line, start, end, replacement))
}
/// Return the raw source lines strictly between two top items, or an empty
/// slice when the items are adjacent or the range falls outside the file.
fn separator_lines<'a>(ctx: &'a FileContext, prev: &TopItem, curr: &TopItem) -> &'a [String] {
    let start = prev.end_line;
    let end = curr.start_line.saturating_sub(1);
    if start < end && end <= ctx.lines.len() { &ctx.lines[start..end] } else { &[] }
}
/// Byte range covering a single top item: from the start of its first line to
/// the start of the line after its last (or end of file).
fn item_text_range(ctx: &FileContext, item: &TopItem) -> Option<(usize, usize)> {
    let start = shared::offset_from_line(&ctx.line_starts, item.start_line)?;
    let end =
        shared::offset_from_line(&ctx.line_starts, item.end_line + 1).unwrap_or(ctx.text.len());
    (start <= end).then_some((start, end))
}
/// Byte range covering a run of top items: from the start of the first item's
/// first line to the start of the line after the last item (or end of file).
fn run_text_range(ctx: &FileContext, first: &TopItem, last: &TopItem) -> Option<(usize, usize)> {
    let start = shared::offset_from_line(&ctx.line_starts, first.start_line)?;
    let end =
        shared::offset_from_line(&ctx.line_starts, last.end_line + 1).unwrap_or(ctx.text.len());
    (start <= end).then_some((start, end))
}
/// Plan IMPORT-002 reorder edits for every eligible use run, returning the
/// edits together with the set of item lines those edits cover.
fn build_import_group_fix_plans(
    ctx: &FileContext,
    analysis: &ImportAnalysis<'_>,
    skip_lines: &HashSet<usize>,
) -> (Vec<Edit>, HashSet<usize>) {
    let mut planned_edits = Vec::new();
    let mut fixable_lines = HashSet::new();
    for run in &analysis.use_runs {
        let eligible = is_use_run_rewrite_candidate(run, skip_lines)
            && use_run_has_blank_only_separators(ctx, run);
        if !eligible {
            continue;
        }
        let Some(entries) = collect_use_run_entries(ctx, run, analysis) else {
            continue;
        };
        let Some((run_start, run_end)) = run_text_range(ctx, run[0], run[run.len() - 1]) else {
            continue;
        };
        let Some(original) = ctx.text.get(run_start..run_end) else {
            continue;
        };
        let replacement = build_use_run_replacement(original, &entries);
        // An identical replacement means the run is already ordered correctly.
        if replacement == original {
            continue;
        }
        fixable_lines.extend(entries.iter().map(|entry| entry.item.line));
        planned_edits.push(Edit {
            start: run_start,
            end: run_end,
            replacement,
            rule: "RUST-STYLE-IMPORT-002",
        });
    }
    (planned_edits, fixable_lines)
}
/// A run is rewritable when it has at least two items, none of which sit on a
/// line the caller asked us to skip.
fn is_use_run_rewrite_candidate(run: &[&TopItem], skip_lines: &HashSet<usize>) -> bool {
    run.len() >= 2 && run.iter().all(|item| !skip_lines.contains(&item.line))
}
/// Check that every gap between consecutive run items holds only blank lines.
fn use_run_has_blank_only_separators(ctx: &FileContext, run: &[&TopItem]) -> bool {
    for pair in run.windows(2) {
        let gap = separator_lines(ctx, pair[0], pair[1]);
        if gap.iter().any(|line| !line.trim().is_empty()) {
            return false;
        }
    }
    true
}
/// Build a `UseEntry` per run item, failing when any item lacks analysis
/// data, a resolvable text range, or a normalizable text block.
fn collect_use_run_entries<'a>(
    ctx: &'a FileContext,
    run: &'a [&'a TopItem],
    analysis: &'a ImportAnalysis<'a>,
) -> Option<Vec<UseEntry<'a>>> {
    run.iter()
        .enumerate()
        .map(|(order, item)| {
            let use_item_analysis = analysis.use_item_analysis(item)?;
            let (start, end) = item_text_range(ctx, item)?;
            let block = normalize_use_item_block(ctx.text.get(start..end)?)?;
            Some(UseEntry {
                item,
                origin: use_origin(&use_item_analysis.path, &analysis.local_module_roots),
                order,
                block,
            })
        })
        .collect()
}
/// Trim leading blank lines from a use-item text block, preserving a trailing
/// newline when the block had one. Blocks whose first non-blank line is a
/// `//` comment are rejected, as are blocks with no content at all.
fn normalize_use_item_block(block: &str) -> Option<String> {
    let mut kept = Vec::new();
    let mut started = false;
    for line in block.lines() {
        if !started {
            if line.trim().is_empty() {
                continue;
            }
            if line.trim_start().starts_with("//") {
                return None;
            }
            started = true;
        }
        kept.push(line);
    }
    if kept.is_empty() {
        return None;
    }
    let mut normalized = kept.join("\n");
    if block.ends_with('\n') {
        normalized.push('\n');
    }
    Some(normalized)
}
/// Concatenate entry blocks ordered by (origin, original order), separated by
/// a single newline within an origin group and a blank line between groups.
fn build_use_run_replacement(original: &str, entries: &[UseEntry<'_>]) -> String {
    let mut ordered = entries.iter().collect::<Vec<_>>();
    ordered.sort_by_key(|entry| (entry.origin, entry.order));
    let mut replacement = String::new();
    let mut prev_origin: Option<usize> = None;
    for entry in &ordered {
        match prev_origin {
            None => {},
            Some(origin) if origin == entry.origin => replacement.push('\n'),
            Some(_) => replacement.push_str("\n\n"),
        }
        replacement.push_str(entry.block.trim_end_matches('\n'));
        prev_origin = Some(entry.origin);
    }
    if original.ends_with('\n') {
        replacement.push('\n');
    }
    replacement
}