use std::path::Path;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Mutex;
use std::time::SystemTime;
use super::file_metadata;
use crate::lang::treesitter::{
definition_weight, elixir_definition_weight, extract_definition_name,
extract_elixir_definition_name, extract_impl_trait, extract_impl_type,
extract_implemented_interfaces, is_elixir_definition, DEFINITION_KINDS,
};
use crate::error::SrcwalkError;
use crate::lang::detect_file_type;
use crate::lang::outline::outline_language;
use crate::search::rank;
use crate::types::{FileType, Match, SearchResult};
use grep_regex::RegexMatcher;
use grep_searcher::sinks::UTF8;
use grep_searcher::Searcher;
/// Runs `search` for several queries in a single pair of directory walks.
///
/// Definitions and usages are collected for all queries at once (an
/// Aho-Corasick automaton prefilters files for the definition pass; one
/// word-bounded alternation regex drives the usage pass), then merged and
/// ranked per query. Falls back to the single-query `search` when only one
/// query is given.
///
/// Returns one `SearchResult` per query, in the same order as `queries`.
pub fn search_batch(
    queries: &[&str],
    scope: &Path,
    cache: Option<&crate::cache::OutlineCache>,
    context: Option<&Path>,
    glob: Option<&str>,
) -> Result<Vec<SearchResult>, SrcwalkError> {
    if queries.is_empty() {
        return Ok(Vec::new());
    }
    if queries.len() == 1 {
        return Ok(vec![search(queries[0], scope, cache, context, glob)?]);
    }
    // Byte-level multi-pattern prefilter used by the definition pass.
    let ac = aho_corasick::AhoCorasick::new(queries).map_err(|e| SrcwalkError::InvalidQuery {
        query: queries.join(","),
        reason: e.to_string(),
    })?;
    // Word-bounded alternation of all (escaped) queries for the usage pass.
    let alt = queries
        .iter()
        .map(|q| regex_syntax::escape(q))
        .collect::<Vec<_>>()
        .join("|");
    let pattern = format!(r"\b(?:{alt})\b");
    let matcher = RegexMatcher::new(&pattern).map_err(|e| SrcwalkError::InvalidQuery {
        query: queries.join(","),
        reason: e.to_string(),
    })?;
    // The two passes are independent; run them in parallel.
    let (defs_by_q, usages_by_q) = rayon::join(
        || find_definitions_batch(queries, &ac, scope, glob, cache),
        || find_usages_batch(queries, &matcher, scope, glob),
    );
    let defs_by_q = defs_by_q?;
    let usages_by_q = usages_by_q?;
    let mut out = Vec::with_capacity(queries.len());
    // Consume the per-query buckets directly (zip) instead of cloning each
    // Vec<Match> out of the result vectors.
    for ((query, defs), usages) in queries.iter().zip(defs_by_q).zip(usages_by_q) {
        let mut merged: Vec<Match> = defs;
        let def_count = merged.len();
        // Drop usages that point at a line already reported as a definition.
        for m in usages {
            let dominated = merged[..def_count]
                .iter()
                .any(|d| d.path == m.path && d.line == m.line);
            if !dominated {
                merged.push(m);
            }
        }
        let total = merged.len();
        let usage_count = total - def_count;
        rank::sort(&mut merged, query, scope, context);
        out.push(SearchResult {
            query: (*query).to_string(),
            scope: scope.to_path_buf(),
            matches: merged,
            total_found: total,
            definitions: def_count,
            usages: usage_count,
            has_more: false,
            offset: 0,
        });
    }
    Ok(out)
}
/// Scans `scope` once and collects definition matches for every query,
/// returning one bucket of matches per query (same order as `queries`).
///
/// The Aho-Corasick automaton `ac` is used as a cheap byte-level prefilter:
/// a file is parsed only when at least one query occurs somewhere in its
/// bytes. Tree-sitter parsing is used where a grammar is available, with a
/// line-based heuristic fallback otherwise.
fn find_definitions_batch(
    queries: &[&str],
    ac: &aho_corasick::AhoCorasick,
    scope: &Path,
    glob: Option<&str>,
    cache: Option<&crate::cache::OutlineCache>,
) -> Result<Vec<Vec<Match>>, SrcwalkError> {
    // One bucket per query; parallel walker threads merge into it under a lock.
    let buckets: Mutex<Vec<Vec<Match>>> = Mutex::new(vec![Vec::new(); queries.len()]);
    let walker = super::walker(scope, glob)?;
    walker.run(|| {
        let buckets = &buckets;
        Box::new(move |entry| {
            let Ok(entry) = entry else {
                return ignore::WalkState::Continue;
            };
            if !entry.file_type().is_some_and(|ft| ft.is_file()) {
                return ignore::WalkState::Continue;
            }
            let path = entry.path();
            // Skip very large files outright (>500 KB).
            let file_size = match std::fs::metadata(path) {
                Ok(meta) => {
                    if meta.len() > 500_000 {
                        return ignore::WalkState::Continue;
                    }
                    meta.len()
                }
                Err(_) => return ignore::WalkState::Continue,
            };
            if super::io::is_minified_filename(path) {
                return ignore::WalkState::Continue;
            }
            let Some(bytes) = super::read_file_bytes(path, file_size) else {
                return ignore::WalkState::Continue;
            };
            // Prefilter: mark which queries occur anywhere in this file.
            // These are raw substring hits; word/identifier boundaries are
            // enforced later by the actual definition extraction.
            let mut hit_mask = vec![false; queries.len()];
            let mut any_hit = false;
            for m in ac.find_iter(&bytes[..]) {
                hit_mask[m.pattern().as_usize()] = true;
                any_hit = true;
            }
            if !any_hit {
                return ignore::WalkState::Continue;
            }
            // Content-based minified check only for files above the threshold.
            if file_size >= super::io::MINIFIED_CHECK_THRESHOLD && super::io::looks_minified(&bytes)
            {
                return ignore::WalkState::Continue;
            }
            let Ok(content) = std::str::from_utf8(&bytes) else {
                return ignore::WalkState::Continue;
            };
            let (file_lines, mtime) = file_metadata(path);
            let file_type = detect_file_type(path);
            let lang = match file_type {
                FileType::Code(l) => Some(l),
                _ => None,
            };
            let ts_language = lang.and_then(outline_language);
            // Per-file buckets; merged into the shared ones only when non-empty
            // to keep lock contention low.
            let mut local: Vec<Vec<Match>> = vec![Vec::new(); queries.len()];
            if let Some(ref ts_lang) = ts_language {
                // Tree-sitter path: precise, AST-based definition detection.
                for (i, q) in queries.iter().enumerate() {
                    if !hit_mask[i] {
                        continue;
                    }
                    let defs = find_defs_treesitter(
                        path, q, ts_lang, lang, content, file_lines, mtime, cache,
                    );
                    if !defs.is_empty() {
                        local[i] = defs;
                    }
                }
            } else {
                // Fallback: line-based heuristic for languages without a grammar.
                for (i, q) in queries.iter().enumerate() {
                    if !hit_mask[i] {
                        continue;
                    }
                    let defs = find_defs_heuristic_buf(path, q, content, file_lines, mtime);
                    if !defs.is_empty() {
                        local[i] = defs;
                    }
                }
            }
            let any_local = local.iter().any(|v| !v.is_empty());
            if any_local {
                // Keep working even if another thread panicked holding the lock.
                let mut all = buckets
                    .lock()
                    .unwrap_or_else(std::sync::PoisonError::into_inner);
                for (i, v) in local.into_iter().enumerate() {
                    if !v.is_empty() {
                        all[i].extend(v);
                    }
                }
            }
            ignore::WalkState::Continue
        })
    });
    Ok(buckets
        .into_inner()
        .unwrap_or_else(std::sync::PoisonError::into_inner))
}
/// Collects word-boundary usage matches for every query, one bucket per query
/// (same order as `queries`).
///
/// The ripgrep `matcher` (a word-bounded alternation of all queries) selects
/// candidate lines; a manual byte-level word-boundary scan then attributes
/// each matching line to the individual queries it contains.
fn find_usages_batch(
    queries: &[&str],
    matcher: &RegexMatcher,
    scope: &Path,
    glob: Option<&str>,
) -> Result<Vec<Vec<Match>>, SrcwalkError> {
    let buckets: Mutex<Vec<Vec<Match>>> = Mutex::new(vec![Vec::new(); queries.len()]);
    let walker = super::walker(scope, glob)?;
    walker.run(|| {
        let buckets = &buckets;
        Box::new(move |entry| {
            let Ok(entry) = entry else {
                return ignore::WalkState::Continue;
            };
            if !entry.file_type().is_some_and(|ft| ft.is_file()) {
                return ignore::WalkState::Continue;
            }
            let path = entry.path();
            // Skip very large files (>500 KB).
            if let Ok(meta) = std::fs::metadata(path) {
                if meta.len() > 500_000 {
                    return ignore::WalkState::Continue;
                }
            }
            let (file_lines, mtime) = file_metadata(path);
            let mut local: Vec<Vec<Match>> = vec![Vec::new(); queries.len()];
            let mut searcher = Searcher::new();
            // Errors from search_path (unreadable file, non-UTF8, ...) are
            // deliberately ignored: best-effort per file.
            let _ = searcher.search_path(
                matcher,
                path,
                UTF8(|line_num, line| {
                    let bytes = line.as_bytes();
                    // The matcher only says "some query matched this line";
                    // re-check each query for a word-bounded occurrence.
                    for (i, q) in queries.iter().enumerate() {
                        let qb = q.as_bytes();
                        let mut start = 0;
                        let mut hit = false;
                        while let Some(pos) = memchr::memmem::find(&bytes[start..], qb) {
                            let abs = start + pos;
                            // Word boundary: the neighbouring bytes (if any)
                            // must not be identifier characters.
                            let before_ok = abs == 0 || !is_word_byte(bytes[abs - 1]);
                            let after = abs + qb.len();
                            let after_ok = after >= bytes.len() || !is_word_byte(bytes[after]);
                            if before_ok && after_ok {
                                hit = true;
                                break;
                            }
                            // Advance past this occurrence and keep scanning.
                            start = abs + 1;
                        }
                        if hit {
                            local[i].push(Match {
                                path: path.to_path_buf(),
                                line: line_num as u32,
                                text: line.trim_end().to_string(),
                                is_definition: false,
                                exact: true,
                                file_lines,
                                mtime,
                                def_range: None,
                                def_name: None,
                                def_weight: 0,
                                impl_target: None,
                            });
                        }
                    }
                    Ok(true)
                }),
            );
            let any_local = local.iter().any(|v| !v.is_empty());
            if any_local {
                let mut all = buckets
                    .lock()
                    .unwrap_or_else(std::sync::PoisonError::into_inner);
                for (i, v) in local.into_iter().enumerate() {
                    if !v.is_empty() {
                        all[i].extend(v);
                    }
                }
            }
            ignore::WalkState::Continue
        })
    });
    Ok(buckets
        .into_inner()
        .unwrap_or_else(std::sync::PoisonError::into_inner))
}
/// Searches `scope` for `query`, returning ranked definition and usage matches.
///
/// Definition and usage passes run in parallel; a usage hit on a line already
/// reported as a definition is dropped before ranking.
pub fn search(
    query: &str,
    scope: &Path,
    cache: Option<&crate::cache::OutlineCache>,
    context: Option<&Path>,
    glob: Option<&str>,
) -> Result<SearchResult, SrcwalkError> {
    // Whole-word matcher for the usage pass.
    let word_pattern = format!(r"\b{}\b", regex_syntax::escape(query));
    let matcher = RegexMatcher::new(&word_pattern).map_err(|e| SrcwalkError::InvalidQuery {
        query: query.to_string(),
        reason: e.to_string(),
    })?;
    let (defs, usages) = rayon::join(
        || find_definitions(query, scope, glob, cache),
        || find_usages(query, &matcher, scope, glob),
    );
    let defs = defs?;
    let usages = usages?;
    // Definitions come first so ranking can favor them.
    let mut merged: Vec<Match> = defs;
    let def_count = merged.len();
    for m in usages {
        // A definition line dominates any usage hit on the same line.
        let dominated = merged[..def_count]
            .iter()
            .any(|d| d.path == m.path && d.line == m.line);
        if !dominated {
            merged.push(m);
        }
    }
    let total = merged.len();
    let usage_count = total - def_count;
    rank::sort(&mut merged, query, scope, context);
    Ok(SearchResult {
        query: query.to_string(),
        scope: scope.to_path_buf(),
        matches: merged,
        total_found: total,
        definitions: def_count,
        usages: usage_count,
        has_more: false,
        offset: 0,
    })
}
/// Collects definition matches for `query` across `scope`.
///
/// Each file is prefiltered with a raw byte search for the query before any
/// parsing happens; tree-sitter is used where a grammar is available, with a
/// line-heuristic fallback otherwise.
///
/// Note: the previous revision kept a `found_count` atomic counter that was
/// incremented but never read; it has been removed.
fn find_definitions(
    query: &str,
    scope: &Path,
    glob: Option<&str>,
    cache: Option<&crate::cache::OutlineCache>,
) -> Result<Vec<Match>, SrcwalkError> {
    let matches: Mutex<Vec<Match>> = Mutex::new(Vec::new());
    let needle = query.as_bytes();
    let walker = super::walker(scope, glob)?;
    walker.run(|| {
        let matches = &matches;
        Box::new(move |entry| {
            let Ok(entry) = entry else {
                return ignore::WalkState::Continue;
            };
            if !entry.file_type().is_some_and(|ft| ft.is_file()) {
                return ignore::WalkState::Continue;
            }
            let path = entry.path();
            // Skip very large files outright (>500 KB).
            let file_size = match std::fs::metadata(path) {
                Ok(meta) => {
                    if meta.len() > 500_000 {
                        return ignore::WalkState::Continue;
                    }
                    meta.len()
                }
                Err(_) => return ignore::WalkState::Continue,
            };
            if super::io::is_minified_filename(path) {
                return ignore::WalkState::Continue;
            }
            let Some(bytes) = super::read_file_bytes(path, file_size) else {
                return ignore::WalkState::Continue;
            };
            // Cheap substring prefilter before any parsing.
            if memchr::memmem::find(&bytes, needle).is_none() {
                return ignore::WalkState::Continue;
            }
            // Content-based minified check only for files above the threshold.
            if file_size >= super::io::MINIFIED_CHECK_THRESHOLD && super::io::looks_minified(&bytes)
            {
                return ignore::WalkState::Continue;
            }
            let Ok(content) = std::str::from_utf8(&bytes) else {
                return ignore::WalkState::Continue;
            };
            let (file_lines, mtime) = file_metadata(path);
            let file_type = detect_file_type(path);
            let lang = match file_type {
                FileType::Code(l) => Some(l),
                _ => None,
            };
            let ts_language = lang.and_then(outline_language);
            let mut file_defs = if let Some(ref ts_lang) = ts_language {
                find_defs_treesitter(
                    path, query, ts_lang, lang, content, file_lines, mtime, cache,
                )
            } else {
                Vec::new()
            };
            // Heuristic fallback only when no tree-sitter grammar exists; an
            // empty tree-sitter result for a supported language is final.
            if file_defs.is_empty() && ts_language.is_none() {
                file_defs = find_defs_heuristic_buf(path, query, content, file_lines, mtime);
            }
            if !file_defs.is_empty() {
                // Tolerate a poisoned lock: a panic elsewhere should not lose
                // the matches gathered so far.
                let mut all = matches
                    .lock()
                    .unwrap_or_else(std::sync::PoisonError::into_inner);
                all.extend(file_defs);
            }
            ignore::WalkState::Continue
        })
    });
    Ok(matches
        .into_inner()
        .unwrap_or_else(std::sync::PoisonError::into_inner))
}
/// Parses `content` with tree-sitter (preferring the shared outline cache,
/// keyed by path + mtime, when one is provided) and walks the AST collecting
/// definitions whose name equals `query`.
///
/// Returns an empty vec if the language cannot be loaded or parsing fails.
fn find_defs_treesitter(
    path: &Path,
    query: &str,
    ts_lang: &tree_sitter::Language,
    lang: Option<crate::types::Lang>,
    content: &str,
    file_lines: u32,
    mtime: SystemTime,
    cache: Option<&crate::cache::OutlineCache>,
) -> Vec<Match> {
    let tree = if let Some(c) = cache {
        // Cache hit avoids re-parsing unchanged files.
        let Some(tree) = c.get_or_parse(path, mtime, content, ts_lang) else {
            return Vec::new();
        };
        tree
    } else {
        let mut parser = tree_sitter::Parser::new();
        if parser.set_language(ts_lang).is_err() {
            return Vec::new();
        }
        let Some(tree) = parser.parse(content, None) else {
            return Vec::new();
        };
        tree
    };
    // Line table for extracting display text by row index.
    let lines: Vec<&str> = content.lines().collect();
    let root = tree.root_node();
    let mut defs = Vec::new();
    walk_for_definitions(
        root, query, path, &lines, file_lines, mtime, &mut defs, lang, 0,
    );
    defs
}
/// Recursively walks the AST under `node` (at most 3 levels deep) and pushes a
/// `Match` into `defs` for every definition whose name equals `query`.
///
/// Three kinds of hits are recorded:
/// - ordinary named definitions (kind listed in `DEFINITION_KINDS`),
/// - Rust `impl Trait for Type` blocks where the trait name matches,
/// - class declarations implementing an interface whose name matches,
/// plus Elixir definitions handled via the Elixir-specific extractors.
///
/// The previous revision repeated the ~15-line `Match` construction four
/// times; it is now centralized in the `push_def` closure.
fn walk_for_definitions(
    node: tree_sitter::Node,
    query: &str,
    path: &Path,
    lines: &[&str],
    file_lines: u32,
    mtime: SystemTime,
    defs: &mut Vec<Match>,
    lang: Option<crate::types::Lang>,
    depth: usize,
) {
    // Bound recursion: definitions nested deeper than 3 levels are ignored.
    if depth > 3 {
        return;
    }
    let kind = node.kind();
    // Shared constructor for a definition match anchored at `n`'s first line.
    let mut push_def = |n: tree_sitter::Node,
                        def_name: String,
                        def_weight,
                        impl_target: Option<String>| {
        let row = n.start_position().row;
        defs.push(Match {
            path: path.to_path_buf(),
            line: row as u32 + 1,
            text: lines.get(row).unwrap_or(&"").trim_end().to_string(),
            is_definition: true,
            exact: true,
            file_lines,
            mtime,
            def_range: Some((row as u32 + 1, n.end_position().row as u32 + 1)),
            def_name: Some(def_name),
            def_weight,
            impl_target,
        });
    };
    if DEFINITION_KINDS.contains(&kind) {
        if let Some(name) = extract_definition_name(node, lines) {
            if name == query {
                push_def(node, query.to_string(), definition_weight(kind), None);
            }
        }
        if kind == "impl_item" {
            // `impl Query for Type`: record the impl block as a definition of
            // the trait, pointing at the implementing type.
            if let Some(trait_name) = extract_impl_trait(node, lines) {
                if trait_name == query {
                    let impl_type =
                        extract_impl_type(node, lines).unwrap_or_else(|| "<unknown>".to_string());
                    push_def(
                        node,
                        format!("impl {query} for {impl_type}"),
                        80,
                        Some(query.to_string()),
                    );
                }
            }
        } else if kind == "class_declaration" || kind == "class_definition" {
            // `class X implements Query`: record the class as an implementor.
            let interfaces = extract_implemented_interfaces(node, lines);
            if interfaces.iter().any(|i| i == query) {
                let class_name = extract_definition_name(node, lines)
                    .unwrap_or_else(|| "<anonymous>".to_string());
                push_def(
                    node,
                    format!("{class_name} implements {query}"),
                    80,
                    Some(query.to_string()),
                );
            }
        }
    } else if lang == Some(crate::types::Lang::Elixir) && is_elixir_definition(node, lines) {
        // Elixir defs (def/defp/defmacro/defmodule/...) use dedicated extractors.
        if let Some(name) = extract_elixir_definition_name(node, lines) {
            if name == query {
                push_def(
                    node,
                    query.to_string(),
                    elixir_definition_weight(node, lines),
                    None,
                );
            }
        }
    }
    let mut cursor = node.walk();
    for child in node.children(&mut cursor) {
        walk_for_definitions(
            child,
            query,
            path,
            lines,
            file_lines,
            mtime,
            defs,
            lang,
            depth + 1,
        );
    }
}
/// Line-based fallback definition finder for languages without a tree-sitter
/// grammar: a line counts as a definition when it contains `query` and starts
/// with a known definition keyword (see `is_definition_line`).
fn find_defs_heuristic_buf(
    path: &Path,
    query: &str,
    content: &str,
    file_lines: u32,
    mtime: SystemTime,
) -> Vec<Match> {
    content
        .lines()
        .enumerate()
        .filter(|(_, line)| line.contains(query) && is_definition_line(line))
        .map(|(idx, line)| Match {
            path: path.to_path_buf(),
            line: (idx + 1) as u32,
            text: line.trim_end().to_string(),
            is_definition: true,
            exact: true,
            file_lines,
            mtime,
            def_range: None,
            def_name: Some(query.to_string()),
            // Heuristic hits rank below tree-sitter definitions (weight 60).
            def_weight: 60,
            impl_target: None,
        })
        .collect()
}
/// Collects whole-word usage matches for `query` across `scope`, using the
/// pre-built word-boundary ripgrep `matcher`.
///
/// Note: the previous revision kept a `found_count` atomic counter that was
/// incremented but never read; it has been removed.
fn find_usages(
    query: &str,
    matcher: &RegexMatcher,
    scope: &Path,
    glob: Option<&str>,
) -> Result<Vec<Match>, SrcwalkError> {
    let matches: Mutex<Vec<Match>> = Mutex::new(Vec::new());
    let walker = super::walker(scope, glob)?;
    walker.run(|| {
        let matches = &matches;
        Box::new(move |entry| {
            let Ok(entry) = entry else {
                return ignore::WalkState::Continue;
            };
            if !entry.file_type().is_some_and(|ft| ft.is_file()) {
                return ignore::WalkState::Continue;
            }
            let path = entry.path();
            // Skip very large files (>500 KB).
            if let Ok(meta) = std::fs::metadata(path) {
                if meta.len() > 500_000 {
                    return ignore::WalkState::Continue;
                }
            }
            let (file_lines, mtime) = file_metadata(path);
            let mut file_matches = Vec::new();
            let mut searcher = Searcher::new();
            // Errors (unreadable/non-UTF8 files) are ignored: best-effort.
            let _ = searcher.search_path(
                matcher,
                path,
                UTF8(|line_num, line| {
                    file_matches.push(Match {
                        path: path.to_path_buf(),
                        line: line_num as u32,
                        text: line.trim_end().to_string(),
                        is_definition: false,
                        // `matcher` is \bquery\b, so this is true for any
                        // matched line; kept for parity with other call sites.
                        exact: line.contains(query),
                        file_lines,
                        mtime,
                        def_range: None,
                        def_name: None,
                        def_weight: 0,
                        impl_target: None,
                    });
                    Ok(true)
                }),
            );
            if !file_matches.is_empty() {
                let mut all = matches
                    .lock()
                    .unwrap_or_else(std::sync::PoisonError::into_inner);
                all.extend(file_matches);
            }
            ignore::WalkState::Continue
        })
    });
    Ok(matches
        .into_inner()
        .unwrap_or_else(std::sync::PoisonError::into_inner))
}
/// Returns true if `b` is an identifier ("word") byte: ASCII letter, digit,
/// or underscore. Used for manual word-boundary checks.
fn is_word_byte(b: u8) -> bool {
    matches!(b, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_')
}
/// Heuristic: does this line start (after leading whitespace) with a known
/// definition keyword from Rust, JS/TS, Python, or Go?
fn is_definition_line(line: &str) -> bool {
    // Keyword prefixes that introduce a definition, across languages.
    const DEF_PREFIXES: &[&str] = &[
        // Rust
        "fn ",
        "pub fn ",
        "pub(crate) fn ",
        "async fn ",
        "pub async fn ",
        "struct ",
        "pub struct ",
        "enum ",
        "pub enum ",
        "trait ",
        "pub trait ",
        "impl ",
        // JavaScript / TypeScript
        "function ",
        "export function ",
        "export default function ",
        "export async function ",
        "async function ",
        "const ",
        "export const ",
        "let ",
        "export let ",
        "var ",
        "export var ",
        "class ",
        "export class ",
        "interface ",
        "export interface ",
        "type ",
        "export type ",
        // Python
        "def ",
        "async def ",
        // Go
        "func ",
    ];
    let trimmed = line.trim();
    DEF_PREFIXES.iter().any(|p| trimmed.starts_with(p))
}
/// Normalizes an identifier for fuzzy comparison: drops underscores and
/// lowercases each byte, so `snake_case`, `camelCase`, and `PascalCase`
/// spellings of the same name compare equal. Operates byte-wise, matching the
/// byte-based length filter used by `suggest`.
fn normalize_ident(s: &str) -> String {
    s.bytes()
        .filter(|&b| b != b'_')
        .map(|b| b.to_ascii_lowercase() as char)
        .collect()
}
/// Suggests identifiers in `scope` that are close to `query`: identical under
/// case/underscore normalization, or within a small edit distance of it.
///
/// Returns up to `top_n` `(spelling, path, line)` tuples, ordered by edit
/// distance to the normalized query with alphabetical tie-breaking. Only
/// files detected as source code are scanned.
pub fn suggest(
    query: &str,
    scope: &Path,
    glob: Option<&str>,
    top_n: usize,
) -> Vec<(String, std::path::PathBuf, u32)> {
    if query.is_empty() {
        return Vec::new();
    }
    let q_norm = normalize_ident(query);
    if q_norm.is_empty() {
        return Vec::new();
    }
    // Longer identifiers get more typo slack (2 edits vs 1).
    let max_dist: usize = if q_norm.len() >= 6 { 2 } else { 1 };
    let Ok(walker) = super::walker(scope, glob) else {
        return Vec::new();
    };
    // spelling -> first (path, line) where it was seen.
    let hits: Mutex<std::collections::HashMap<String, (std::path::PathBuf, u32)>> =
        Mutex::new(std::collections::HashMap::new());
    walker.run(|| {
        let hits = &hits;
        let q_norm = q_norm.clone();
        Box::new(move |entry| {
            let Ok(entry) = entry else {
                return ignore::WalkState::Continue;
            };
            if !entry.file_type().is_some_and(|ft| ft.is_file()) {
                return ignore::WalkState::Continue;
            }
            let path = entry.path();
            // Only source-code files produce suggestions (skips JSON, txt, ...).
            if !matches!(
                crate::lang::detect_file_type(path),
                crate::types::FileType::Code(_)
            ) {
                return ignore::WalkState::Continue;
            }
            // Skip very large files (>500 KB).
            if let Ok(meta) = std::fs::metadata(path) {
                if meta.len() > 500_000 {
                    return ignore::WalkState::Continue;
                }
            }
            let Ok(content) = std::fs::read_to_string(path) else {
                return ignore::WalkState::Continue;
            };
            let mut local: Vec<(String, u32)> = Vec::new();
            // Each candidate spelling is recorded at most once per file.
            let mut seen_on_path: std::collections::HashSet<String> =
                std::collections::HashSet::new();
            for (line_idx, line) in content.lines().enumerate() {
                let bytes = line.as_bytes();
                let mut i = 0;
                // Hand-rolled identifier tokenizer: [A-Za-z_][A-Za-z0-9_]*.
                while i < bytes.len() {
                    let b = bytes[i];
                    if !(b.is_ascii_alphabetic() || b == b'_') {
                        i += 1;
                        continue;
                    }
                    let start = i;
                    while i < bytes.len() && (bytes[i].is_ascii_alphanumeric() || bytes[i] == b'_')
                    {
                        i += 1;
                    }
                    let word = &line[start..i];
                    // Cheap length filter (normalized byte length, matching
                    // normalize_ident's byte-wise behavior) before computing
                    // the edit distance.
                    let w_norm_len = word.bytes().filter(|&c| c != b'_').count();
                    if w_norm_len == 0
                        || w_norm_len + max_dist < q_norm.len()
                        || w_norm_len > q_norm.len() + max_dist
                    {
                        continue;
                    }
                    if seen_on_path.contains(word) {
                        continue;
                    }
                    let w_norm = normalize_ident(word);
                    // An exact same-spelling occurrence of the query itself is
                    // not a suggestion; case/underscore variants still are.
                    if w_norm == q_norm && word == query {
                        continue;
                    }
                    let d = crate::read::edit_distance(&q_norm, &w_norm);
                    if d <= max_dist {
                        seen_on_path.insert(word.to_string());
                        local.push((word.to_string(), line_idx as u32 + 1));
                    }
                }
            }
            if !local.is_empty() {
                let mut h = hits
                    .lock()
                    .unwrap_or_else(std::sync::PoisonError::into_inner);
                for (spelling, line) in local {
                    // First occurrence across the whole walk wins.
                    h.entry(spelling)
                        .or_insert_with(|| (path.to_path_buf(), line));
                }
            }
            ignore::WalkState::Continue
        })
    });
    let mut all: Vec<(String, std::path::PathBuf, u32)> = hits
        .into_inner()
        .unwrap_or_else(std::sync::PoisonError::into_inner)
        .into_iter()
        .filter(|(s, _)| s != query)
        .map(|(s, (p, l))| (s, p, l))
        .collect();
    // Closest spellings first; alphabetical for equal distances.
    all.sort_by(|a, b| {
        let an = normalize_ident(&a.0);
        let bn = normalize_ident(&b.0);
        let da = crate::read::edit_distance(&q_norm, &an);
        let db = crate::read::edit_distance(&q_norm, &bn);
        da.cmp(&db).then_with(|| a.0.cmp(&b.0))
    });
    all.truncate(top_n);
    all
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::time::SystemTime;

    // Tree-sitter definition detection over a small Rust source: plain fn,
    // struct, and pub(crate) fn.
    #[test]
    fn rust_definitions_detected() {
        let code = r#"pub fn hello(name: &str) -> String {
format!("Hello, {}", name)
}
pub struct Foo {
bar: i32,
}
pub(crate) fn dispatch_tool(tool: &str) -> Result<String, String> {
match tool {
"read" => Ok("read".to_string()),
_ => Err("unknown".to_string()),
}
}
"#;
        let ts_lang = crate::lang::outline::outline_language(crate::types::Lang::Rust).unwrap();
        let defs = find_defs_treesitter(
            std::path::Path::new("test.rs"),
            "hello",
            &ts_lang,
            Some(crate::types::Lang::Rust),
            code,
            15,
            SystemTime::now(),
            None,
        );
        assert!(!defs.is_empty(), "should find 'hello' definition");
        assert!(defs[0].is_definition);
        assert!(defs[0].def_range.is_some());
        let defs = find_defs_treesitter(
            std::path::Path::new("test.rs"),
            "Foo",
            &ts_lang,
            Some(crate::types::Lang::Rust),
            code,
            15,
            SystemTime::now(),
            None,
        );
        assert!(!defs.is_empty(), "should find 'Foo' definition");
        let defs = find_defs_treesitter(
            std::path::Path::new("test.rs"),
            "dispatch_tool",
            &ts_lang,
            Some(crate::types::Lang::Rust),
            code,
            15,
            SystemTime::now(),
            None,
        );
        assert!(!defs.is_empty(), "should find 'dispatch_tool' definition");
    }

    // Test helper: run tree-sitter definition search for `name` over Elixir
    // source `code`.
    fn elixir_find(code: &str, name: &str) -> Vec<Match> {
        let ts_lang = crate::lang::outline::outline_language(crate::types::Lang::Elixir).unwrap();
        let lines = code.lines().count() as u32;
        find_defs_treesitter(
            std::path::Path::new("test.ex"),
            name,
            &ts_lang,
            Some(crate::types::Lang::Elixir),
            code,
            lines,
            SystemTime::now(),
            None,
        )
    }

    // Elixir basics: defmodule, def, defp, defmacro.
    #[test]
    fn elixir_definitions_detected() {
        let code = r#"defmodule MyApp.Greeter do
@type t :: %{name: String.t()}
def hello(name) do
"Hello, #{name}!"
end
defp private_helper(x), do: x + 1
defmacro my_macro(expr) do
quote do: unquote(expr)
end
end
"#;
        let defs = elixir_find(code, "MyApp.Greeter");
        assert!(!defs.is_empty(), "should find 'MyApp.Greeter' module def");
        assert!(defs[0].is_definition);
        assert!(
            !elixir_find(code, "hello").is_empty(),
            "should find 'hello'"
        );
        assert!(
            !elixir_find(code, "private_helper").is_empty(),
            "should find 'private_helper'"
        );
        assert!(
            !elixir_find(code, "my_macro").is_empty(),
            "should find 'my_macro'"
        );
    }

    // Guard clauses (`when ...`) and defguard must not hide the definition name.
    #[test]
    fn elixir_guard_clause_definitions() {
        let code = r#"defmodule Guards do
def safe_div(a, b) when b != 0 do
a / b
end
defp checked(x) when is_integer(x), do: x
defguard is_positive(x) when x > 0
end
"#;
        assert!(
            !elixir_find(code, "safe_div").is_empty(),
            "should find 'safe_div' with guard clause"
        );
        assert!(
            !elixir_find(code, "checked").is_empty(),
            "should find 'checked' with guard clause"
        );
        assert!(
            !elixir_find(code, "is_positive").is_empty(),
            "should find 'is_positive' defguard"
        );
    }

    // Multiple clauses of the same function and zero-arity defs.
    #[test]
    fn elixir_multi_clause_and_no_arg() {
        let code = r#"defmodule Dispatch do
def handle(:ok), do: :success
def handle(:error), do: :failure
def version, do: "1.0"
end
"#;
        let defs = elixir_find(code, "handle");
        assert!(
            defs.len() >= 2,
            "should find both 'handle' clauses, got {}: {defs:?}",
            defs.len()
        );
        assert!(
            !elixir_find(code, "version").is_empty(),
            "should find no-arg 'version'"
        );
    }

    // defprotocol + defimpl both count as definitions of the protocol name;
    // defexception modules are found too.
    #[test]
    fn elixir_protocol_impl_exception() {
        let code = r#"defprotocol Printable do
@callback format(t) :: String.t()
def to_string(data)
end
defimpl Printable, for: User do
def to_string(user), do: user.name
end
defmodule MyError do
defexception [:message, :code]
end
"#;
        let defs = elixir_find(code, "Printable");
        assert!(
            defs.len() >= 2,
            "should find both defprotocol and defimpl for 'Printable', got {}",
            defs.len()
        );
        assert!(
            !elixir_find(code, "defexception").is_empty(),
            "should find 'defexception'"
        );
        assert!(
            !elixir_find(code, "MyError").is_empty(),
            "should find 'MyError' module"
        );
    }

    // defdelegate and modules nested inside other modules.
    #[test]
    fn elixir_delegate_and_nested_modules() {
        let code = r#"defmodule Outer do
defdelegate count(list), to: Enum
defmodule Inner do
def nested_func, do: :ok
end
end
"#;
        assert!(
            !elixir_find(code, "count").is_empty(),
            "should find 'count' defdelegate"
        );
        assert!(
            !elixir_find(code, "Inner").is_empty(),
            "should find nested 'Inner' module"
        );
    }

    // suggest(): case-variant spellings are suggested; unrelated queries are not.
    // Uses a throwaway temp dir keyed by pid to avoid collisions.
    #[test]
    fn suggest_finds_case_variant() {
        let dir = std::env::temp_dir().join(format!("srcwalk_p13_suggest_{}", std::process::id()));
        std::fs::create_dir_all(&dir).unwrap();
        let path = dir.join("foo.rs");
        std::fs::write(
            &path,
            "pub fn orderExists() -> bool { true }\nfn other() {}\n",
        )
        .unwrap();
        let hits = suggest("OrderExists", &dir, None, 3);
        assert!(
            hits.iter().any(|(s, _, _)| s == "orderExists"),
            "expected case-variant suggestion, got: {hits:?}"
        );
        let no_match = suggest("CompletelyUnrelatedXyz", &dir, None, 3);
        assert!(
            no_match.is_empty(),
            "no fuzzy hit expected, got: {no_match:?}"
        );
        let _ = std::fs::remove_dir_all(&dir);
    }

    // suggest(): snake_case <-> camelCase and PascalCase <-> lowercase match
    // via underscore/case normalization.
    #[test]
    fn suggest_crosses_naming_convention() {
        let dir = std::env::temp_dir().join(format!("srcwalk_p13fix_conv_{}", std::process::id()));
        std::fs::create_dir_all(&dir).unwrap();
        let path = dir.join("foo.rs");
        std::fs::write(
            &path,
            "pub fn search_symbol() -> bool { true }\npub fn HotReloadProcessor() {}\n",
        )
        .unwrap();
        let hits = suggest("searchSymbol", &dir, None, 3);
        assert!(
            hits.iter().any(|(s, _, _)| s == "search_symbol"),
            "expected snake_case suggestion for camelCase query, got: {hits:?}"
        );
        let hits2 = suggest("hotreloadprocessor", &dir, None, 3);
        assert!(
            hits2.iter().any(|(s, _, _)| s == "HotReloadProcessor"),
            "expected PascalCase suggestion for lowercase query, got: {hits2:?}"
        );
        let _ = std::fs::remove_dir_all(&dir);
    }

    // suggest(): tolerates an edit-distance-1 typo.
    #[test]
    fn suggest_handles_lev1_typo() {
        let dir = std::env::temp_dir().join(format!("srcwalk_p13fix_typo_{}", std::process::id()));
        std::fs::create_dir_all(&dir).unwrap();
        let path = dir.join("foo.rs");
        std::fs::write(&path, "pub fn run_inner() {}\n").unwrap();
        let hits = suggest("run_iner", &dir, None, 3);
        assert!(
            hits.iter().any(|(s, _, _)| s == "run_inner"),
            "expected typo-tolerant suggestion, got: {hits:?}"
        );
        let _ = std::fs::remove_dir_all(&dir);
    }

    // suggest(): only files detected as source code are scanned — JSON and
    // plain-text files must not contribute suggestions.
    #[test]
    fn suggest_skips_non_source_files() {
        let dir = std::env::temp_dir().join(format!("srcwalk_p13fix_skip_{}", std::process::id()));
        std::fs::create_dir_all(&dir).unwrap();
        std::fs::write(dir.join("es.json"), r#"{"sesion": "iniciar"}"#).unwrap();
        std::fs::write(dir.join("SOURCES.txt"), "src/foo/sesion.py\n").unwrap();
        std::fs::write(dir.join("real.py"), "def session(): pass\n").unwrap();
        let hits = suggest("Sesion", &dir, None, 5);
        assert!(
            hits.iter().all(|(_, p, _)| {
                let n = p.file_name().and_then(|s| s.to_str()).unwrap_or("");
                n != "es.json" && n != "SOURCES.txt"
            }),
            "expected non-source files filtered out, got: {hits:?}"
        );
        assert!(
            hits.iter().any(|(s, _, _)| s == "session"),
            "expected real .py hit, got: {hits:?}"
        );
        let _ = std::fs::remove_dir_all(&dir);
    }
}