use std::collections::{HashMap, HashSet};
use tower_lsp::lsp_types::*;
use crate::Backend;
use crate::types::*;
use crate::util::{short_name, strip_fqn_prefix};
use super::class_completion::{
ClassItemCtx, ClassItemTexts, build_affinity_table, class_edit_texts, expand_alias_prefix,
is_anonymous_class, matches_class_prefix,
};
use crate::completion::builder::analyze_use_block;
use crate::completion::source::comment_position::position_to_byte_offset;
use crate::completion::source::throws_analysis;
/// Snapshot of a `catch (...)` clause at the cursor, plus exception types
/// inferred from the matching `try` block.
#[derive(Debug)]
pub(crate) struct CatchContext {
    /// Text already typed for the current type name (leading `\` stripped).
    pub partial: String,
    /// Short exception names found in the try body, plus a trailing
    /// `\Throwable` fallback; types already listed in earlier arms of a
    /// multi-catch are removed.
    pub suggested_types: Vec<String>,
    /// True when at least one concrete thrown type was found (i.e. the list
    /// is not only the `\Throwable` fallback).
    pub has_specific_types: bool,
}
pub(crate) fn detect_catch_context(content: &str, position: Position) -> Option<CatchContext> {
let byte_offset = position_to_byte_offset(content, position);
let before_cursor = &content[..byte_offset.min(content.len())];
let (catch_paren_offset, partial, already_listed) = find_catch_paren(before_cursor)?;
let before_paren = &content[..catch_paren_offset];
let trimmed = before_paren.trim_end();
if !trimmed.ends_with("catch") {
return None;
}
let catch_end = trimmed.len();
let catch_start = catch_end - 5;
if catch_start > 0 {
let prev_byte = trimmed.as_bytes()[catch_start - 1];
if prev_byte.is_ascii_alphanumeric() || prev_byte == b'_' {
return None;
}
}
let before_catch = trimmed[..catch_start].trim_end();
if !before_catch.ends_with('}') {
return None;
}
let try_body = find_try_block_body(content, before_catch)?;
let mut suggested_types = Vec::new();
let mut seen = std::collections::HashSet::new();
let throws = throws_analysis::find_throw_statements(&try_body);
for throw in &throws {
let short_name = throw
.type_name
.trim_start_matches('\\')
.rsplit('\\')
.next()
.unwrap_or(&throw.type_name);
if !short_name.is_empty() && seen.insert(short_name.to_lowercase()) {
suggested_types.push(short_name.to_string());
}
}
let inline_throws = throws_analysis::find_inline_throws_annotations(&try_body);
for info in &inline_throws {
let short_name = info
.type_name
.trim_start_matches('\\')
.rsplit('\\')
.next()
.unwrap_or(&info.type_name);
if !short_name.is_empty() && seen.insert(short_name.to_lowercase()) {
suggested_types.push(short_name.to_string());
}
}
let propagated = throws_analysis::find_propagated_throws(&try_body, content);
let propagated: Vec<String> = propagated.iter().map(|t| t.type_name.clone()).collect();
for exc_type in &propagated {
let short_name = exc_type
.trim_start_matches('\\')
.rsplit('\\')
.next()
.unwrap_or(exc_type);
if !short_name.is_empty() && seen.insert(short_name.to_lowercase()) {
suggested_types.push(short_name.to_string());
}
}
let throw_expr_types = throws_analysis::find_throw_expression_types(&try_body, content);
let throw_expr_types: Vec<String> = throw_expr_types
.iter()
.map(|t| t.type_name.clone())
.collect();
for exc_type in &throw_expr_types {
let short_name = exc_type
.trim_start_matches('\\')
.rsplit('\\')
.next()
.unwrap_or(exc_type);
if !short_name.is_empty() && seen.insert(short_name.to_lowercase()) {
suggested_types.push(short_name.to_string());
}
}
let has_specific_types = !suggested_types.is_empty();
if seen.insert("throwable".to_string()) {
suggested_types.push("\\Throwable".to_string());
}
let already_lower: Vec<String> = already_listed.iter().map(|s| s.to_lowercase()).collect();
suggested_types.retain(|t| !already_lower.contains(&t.to_lowercase()));
Some(CatchContext {
partial,
suggested_types,
has_specific_types,
})
}
/// Resolve a (possibly relative) exception name to a fully-qualified class
/// name: an explicit `\Fqn` wins, then a `use`-map entry, then the file's
/// namespace; otherwise the name is returned as-is.
fn resolve_exception_fqn(
    name: &str,
    use_map: &HashMap<String, String>,
    file_namespace: &Option<String>,
) -> String {
    let bare = name.trim_start_matches('\\');
    // Already fully qualified: just drop the root separator.
    if name.starts_with('\\') {
        return bare.to_string();
    }
    use_map
        .get(bare)
        .cloned()
        .unwrap_or_else(|| match file_namespace {
            Some(ns) => format!("{}\\{}", ns, bare),
            None => bare.to_string(),
        })
}
/// Turn the suggested exception types of a [`CatchContext`] into completion
/// items, filtered by the partial the user has typed. Items keep their
/// original order via the index embedded in `sort_text`.
pub(crate) fn build_catch_completions(
    ctx: &CatchContext,
    use_map: &HashMap<String, String>,
    file_namespace: &Option<String>,
) -> Vec<CompletionItem> {
    let needle = ctx.partial.to_lowercase();
    ctx.suggested_types
        .iter()
        .enumerate()
        .filter_map(|(idx, exc_type)| {
            let fqn = resolve_exception_fqn(exc_type, use_map, file_namespace);
            let sn = short_name(&fqn);
            // Match the typed partial against either the short or full name.
            let keep = needle.is_empty()
                || sn.to_lowercase().starts_with(&needle)
                || fqn.to_lowercase().starts_with(&needle);
            if !keep {
                return None;
            }
            // Names written with a leading backslash (the `\Throwable`
            // fallback) sort after the specific suggestions.
            let bucket = if exc_type.starts_with('\\') { '1' } else { '0' };
            Some(CompletionItem {
                label: fqn.clone(),
                kind: Some(CompletionItemKind::CLASS),
                detail: Some("Exception thrown in try block".to_string()),
                sort_text: Some(format!("{}_{:03}_{}", bucket, idx, sn)),
                filter_text: Some(fqn),
                ..CompletionItem::default()
            })
        })
        .collect()
}
/// Scan backwards from the cursor for the `(` that opens the catch clause
/// the cursor is inside of. Returns the byte offset of that `(` plus the
/// parsed paren content; `None` if a `;` or `{` is hit first (cursor is not
/// inside a parenthesized clause).
fn find_catch_paren(before_cursor: &str) -> Option<(usize, String, Vec<String>)> {
    let mut nesting = 0i32;
    for (idx, byte) in before_cursor.bytes().enumerate().rev() {
        match byte {
            b')' => nesting += 1,
            // The first unbalanced `(` is the clause opener.
            b'(' if nesting == 0 => {
                let (partial, already_listed) =
                    parse_catch_paren_content(&before_cursor[idx + 1..]);
                return Some((idx, partial, already_listed));
            }
            b'(' => nesting -= 1,
            b';' | b'{' => return None,
            _ => {}
        }
    }
    None
}
/// Split the text inside `catch (...)` on `|` (PHP multi-catch). The final
/// segment is the partial the user is still typing; every earlier segment
/// contributes its first token (minus leading `\`, skipping `$variables`)
/// to the already-listed types.
fn parse_catch_paren_content(inside: &str) -> (String, Vec<String>) {
    let mut segments: Vec<&str> = inside.split('|').collect();
    // `split` always yields at least one segment, so `pop` cannot fail.
    let partial = segments
        .pop()
        .unwrap_or("")
        .trim()
        .trim_start_matches('\\')
        .to_string();
    let already_listed = segments
        .iter()
        .filter_map(|seg| {
            let name = seg.trim().trim_start_matches('\\');
            let token = name.split_whitespace().next().unwrap_or(name);
            (!token.is_empty() && !token.starts_with('$')).then(|| token.to_string())
        })
        .collect();
    (partial, already_listed)
}
/// Given `before_catch` (file text ending at the `}` that closes the block
/// preceding a `catch` keyword), walk backwards to locate the originating
/// `try { ... }` and return its body text.
///
/// The block just closed may itself be a previous `catch (...) { ... }` or
/// `finally { ... }` arm; in those cases recurse past it. Returns `None`
/// when the structure does not match a try/catch/finally chain.
fn find_try_block_body(_content: &str, before_catch: &str) -> Option<String> {
    // Callers guarantee `before_catch` ends with '}' (ASCII, so -1 is a
    // valid char boundary).
    let close_brace_offset = before_catch.len() - 1;
    let block_open =
        crate::util::find_matching_backward(before_catch, close_brace_offset, b'{', b'}')?;
    let before_block = before_catch[..block_open].trim_end();
    // Case 1: `... catch ( ... ) {` — skip over an earlier catch arm.
    if before_block.ends_with(')') {
        let close_paren = before_block.len() - 1;
        let open_paren =
            crate::util::find_matching_backward(before_block, close_paren, b'(', b')')?;
        let before_paren = before_block[..open_paren].trim_end();
        if before_paren.ends_with("catch") {
            let kw_start = before_paren.len() - 5;
            // "catch" must be a whole word, not an identifier suffix.
            if kw_start == 0
                || (!before_paren.as_bytes()[kw_start - 1].is_ascii_alphanumeric()
                    && before_paren.as_bytes()[kw_start - 1] != b'_')
            {
                let before_kw = before_paren[..kw_start].trim_end();
                if before_kw.ends_with('}') {
                    return find_try_block_body(_content, before_kw);
                }
            }
        }
        return None;
    }
    // Case 2: `... finally {` — skip over a finally arm.
    if before_block.ends_with("finally") {
        let kw_start = before_block.len() - 7;
        if kw_start == 0
            || (!before_block.as_bytes()[kw_start - 1].is_ascii_alphanumeric()
                && before_block.as_bytes()[kw_start - 1] != b'_')
        {
            let before_kw = before_block[..kw_start].trim_end();
            if before_kw.ends_with('}') {
                return find_try_block_body(_content, before_kw);
            }
        }
        return None;
    }
    // Case 3: `try {` — found it; return the brace-delimited body.
    if before_block.ends_with("try") {
        let kw_start = before_block.len() - 3;
        if kw_start == 0
            || (!before_block.as_bytes()[kw_start - 1].is_ascii_alphanumeric()
                && before_block.as_bytes()[kw_start - 1] != b'_')
        {
            let body = &before_catch[block_open + 1..close_brace_offset];
            return Some(body.to_string());
        }
    }
    None
}
impl Backend {
    /// Return `true` when `class_name` — or any ancestor class / interface,
    /// followed transitively — resolves to one of the catchable roots:
    /// `Throwable`, `Exception`, or `Error`. The check compares short
    /// (unqualified) names only.
    ///
    /// `depth` guards against inheritance cycles and pathological
    /// hierarchies; recursion stops after 20 levels and reports `false`.
    fn is_throwable_descendant(&self, class_name: &str, depth: u32) -> bool {
        if depth > 20 {
            return false; }
        let short = short_name(class_name);
        if matches!(short, "Throwable" | "Exception" | "Error") {
            return true;
        }
        match self.find_class_in_ast_map(class_name) {
            Some(ci) => {
                // Check the parent chain first, then each implemented interface.
                if let Some(parent) = &ci.parent_class
                    && self.is_throwable_descendant(parent, depth + 1)
                {
                    return true;
                }
                for iface in &ci.interfaces {
                    if self.is_throwable_descendant(iface, depth + 1) {
                        return true;
                    }
                }
                false
            }
            // Unknown classes cannot be proven throwable; exclude them.
            None => false, }
    }

    /// True when `class_name` is known in the AST map as a class or an
    /// interface (other class-like kinds are not valid catch types).
    fn is_class_or_interface_in_ast_map(&self, class_name: &str) -> bool {
        self.find_class_in_ast_map(class_name)
            .is_some_and(|c| matches!(c.kind, ClassLikeKind::Class | ClassLikeKind::Interface))
    }

    /// Collect the fully-qualified names of every class parsed into the AST
    /// map, qualifying each with its file's namespace. Used later to skip
    /// classmap/stub entries that duplicate already-loaded classes.
    fn collect_loaded_fqns(&self) -> HashSet<String> {
        let mut loaded = HashSet::new();
        let amap = self.ast_map.read();
        let nmap = self.namespace_map.read();
        for (uri, classes) in amap.iter() {
            let file_ns = nmap.get(uri).and_then(|opt| opt.as_deref());
            for cls in classes {
                let fqn = if let Some(ns) = file_ns {
                    format!("{}\\{}", ns, cls.name)
                } else {
                    cls.name.clone()
                };
                loaded.insert(fqn);
            }
        }
        loaded
    }

    /// Build class-name completions for the type position inside
    /// `catch (...)`.
    ///
    /// Candidates come from four sources, each tagged with a sort bucket so
    /// closer matches rank first:
    /// - `'0'`: classes imported via `use` in the current file,
    /// - `'1'`: classes declared in the same namespace,
    /// - `'2'`: the workspace class index, then the classmap and the stub
    ///   index (the last two demoted unless the short name ends in
    ///   `Exception` or `Error`).
    ///
    /// The first three sources are filtered to verified Throwable
    /// descendants; classmap/stub entries have no hierarchy info available,
    /// so the name-suffix heuristic stands in for that check.
    ///
    /// Returns the items plus a flag indicating the list was truncated
    /// (maps to LSP `isIncomplete`).
    pub(crate) fn build_catch_class_name_completions(
        &self,
        ctx: &crate::types::FileContext,
        prefix: &str,
        content: &str,
        is_new: bool,
        position: Position,
        uri: &str,
    ) -> (Vec<CompletionItem>, bool) {
        let file_use_map = &ctx.use_map;
        let file_namespace = &ctx.namespace;
        let has_leading_backslash = prefix.starts_with('\\');
        let normalized = strip_fqn_prefix(prefix);
        let prefix_lower = normalized.to_lowercase();
        let is_fqn_prefix = has_leading_backslash || normalized.contains('\\');
        // A prefix like `Alias\Sub` may expand through a `use` alias.
        let expanded = expand_alias_prefix(normalized, file_use_map);
        let expanded_lower = expanded.as_deref().map(|s| s.to_lowercase());
        let expanded_prefix_lower = expanded_lower.as_deref();
        // FQN-style prefixes replace the whole typed span with the edit.
        let fqn_replace_range = if is_fqn_prefix {
            Some(Range {
                start: Position {
                    line: position.line,
                    character: position
                        .character
                        .saturating_sub(prefix.chars().count() as u32),
                },
                end: position,
            })
        } else {
            None
        };
        let mut seen_fqns: HashSet<String> = HashSet::new();
        let mut items: Vec<CompletionItem> = Vec::new();
        // Last path segment of the prefix, used for match-quality scoring.
        let quality_prefix = match normalized.rfind('\\') {
            Some(pos) => normalized[pos + 1..].to_string(),
            None => normalized.to_string(),
        };
        let affinity_table = build_affinity_table(file_use_map, file_namespace);
        let prefix_has_namespace = normalized.contains('\\');
        // NOTE: shadows the `FileContext` parameter from here on.
        let ctx = ClassItemCtx {
            is_fqn_prefix,
            is_new,
            is_attribute: false,
            fqn_replace_range,
            file_use_map,
            use_block: analyze_use_block(content),
            file_namespace,
            affinity_table,
            quality_prefix,
            prefix_has_namespace,
            uri,
        };
        let loaded_fqns = self.collect_loaded_fqns();
        // Source 1 ('0'): classes imported with `use` in this file.
        for (short_name, fqn) in file_use_map {
            if !matches_class_prefix(
                short_name,
                fqn,
                &prefix_lower,
                is_fqn_prefix,
                expanded_prefix_lower,
            ) {
                continue;
            }
            if !seen_fqns.insert(fqn.clone()) {
                continue;
            }
            if !self.is_class_or_interface_in_ast_map(fqn) {
                continue;
            }
            if !self.is_throwable_descendant(fqn, 0) {
                continue;
            }
            let (base_name, filter, _use_import) = class_edit_texts(
                short_name,
                fqn,
                is_fqn_prefix,
                has_leading_backslash,
                file_namespace,
            );
            // Already imported, so no `use` statement edit is needed.
            let texts = ClassItemTexts {
                base_name,
                filter,
                use_import: None,
            };
            items.push(ctx.build_item(texts, fqn, '0', false, None, false));
        }
        // Source 2 ('1'): classes declared in the same namespace as this file.
        {
            let nmap = self.namespace_map.read();
            let same_ns_uris: Vec<String> = nmap
                .iter()
                .filter_map(|(uri, opt_ns)| {
                    let uri_ns = opt_ns.as_deref();
                    let file_ns = file_namespace.as_deref();
                    if uri_ns == file_ns {
                        Some(uri.clone())
                    } else {
                        None
                    }
                })
                .collect();
            drop(nmap);
            // Collect candidates while holding the ast_map lock, then run the
            // recursive throwable check only after releasing it.
            let mut candidates: Vec<(String, String, Option<String>)> = Vec::new();
            {
                let amap = self.ast_map.read();
                for uri in &same_ns_uris {
                    if let Some(classes) = amap.get(uri) {
                        for cls in classes {
                            if is_anonymous_class(&cls.name) {
                                continue;
                            }
                            if !matches!(cls.kind, ClassLikeKind::Class | ClassLikeKind::Interface)
                            {
                                continue;
                            }
                            let cls_fqn = match file_namespace {
                                Some(ns) => format!("{}\\{}", ns, cls.name),
                                None => cls.name.clone(),
                            };
                            if !matches_class_prefix(
                                &cls.name,
                                &cls_fqn,
                                &prefix_lower,
                                is_fqn_prefix,
                                expanded_prefix_lower,
                            ) {
                                continue;
                            }
                            if !seen_fqns.insert(cls_fqn.clone()) {
                                continue;
                            }
                            candidates.push((
                                cls.name.clone(),
                                cls_fqn,
                                cls.deprecation_message.clone(),
                            ));
                        }
                    }
                }
            }
            for (name, fqn, deprecation_message) in candidates {
                if !self.is_throwable_descendant(&fqn, 0) {
                    continue;
                }
                let (base_name, filter, _use_import) = class_edit_texts(
                    &name,
                    &fqn,
                    is_fqn_prefix,
                    has_leading_backslash,
                    file_namespace,
                );
                // Same namespace, so no import is required.
                let texts = ClassItemTexts {
                    base_name,
                    filter,
                    use_import: None,
                };
                items.push(ctx.build_item(
                    texts,
                    &fqn,
                    '1',
                    false,
                    None,
                    deprecation_message.is_some(),
                ));
            }
        }
        // Source 3 ('2'): the workspace-wide class index.
        {
            let idx = self.class_index.read();
            for fqn in idx.keys() {
                let sn = short_name(fqn);
                if !matches_class_prefix(
                    sn,
                    fqn,
                    &prefix_lower,
                    is_fqn_prefix,
                    expanded_prefix_lower,
                ) {
                    continue;
                }
                if !seen_fqns.insert(fqn.clone()) {
                    continue;
                }
                if !self.is_class_or_interface_in_ast_map(fqn) {
                    continue;
                }
                if !self.is_throwable_descendant(fqn, 0) {
                    continue;
                }
                let (base_name, filter, use_import) = class_edit_texts(
                    sn,
                    fqn,
                    is_fqn_prefix,
                    has_leading_backslash,
                    file_namespace,
                );
                let mut texts = ClassItemTexts {
                    base_name,
                    filter,
                    use_import,
                };
                ctx.apply_import_fixups(&mut texts.base_name, &mut texts.use_import, false);
                items.push(ctx.build_item(texts, fqn, '2', false, None, false));
            }
        }
        // Source 4 ('2', possibly demoted): the classmap. No hierarchy info
        // is available here, so the Exception/Error name-suffix heuristic
        // stands in for the throwable check.
        {
            let cmap = self.classmap.read();
            for fqn in cmap.keys() {
                if loaded_fqns.contains(fqn) {
                    continue;
                }
                let sn = short_name(fqn);
                if !matches_class_prefix(
                    sn,
                    fqn,
                    &prefix_lower,
                    is_fqn_prefix,
                    expanded_prefix_lower,
                ) {
                    continue;
                }
                if !seen_fqns.insert(fqn.clone()) {
                    continue;
                }
                let demoted = !sn.ends_with("Exception") && !sn.ends_with("Error");
                let (base_name, filter, use_import) = class_edit_texts(
                    sn,
                    fqn,
                    is_fqn_prefix,
                    has_leading_backslash,
                    file_namespace,
                );
                let mut texts = ClassItemTexts {
                    base_name,
                    filter,
                    use_import,
                };
                ctx.apply_import_fixups(&mut texts.base_name, &mut texts.use_import, false);
                items.push(ctx.build_item(texts, fqn, '2', demoted, None, false));
            }
        }
        // Source 5 ('2', possibly demoted): stub classes, with the same
        // name-suffix heuristic as the classmap.
        let stub_idx = self.stub_index.read();
        for &name in stub_idx.keys() {
            if loaded_fqns.contains(name) {
                continue;
            }
            let sn = short_name(name);
            if !matches_class_prefix(
                sn,
                name,
                &prefix_lower,
                is_fqn_prefix,
                expanded_prefix_lower,
            ) {
                continue;
            }
            if !seen_fqns.insert(name.to_string()) {
                continue;
            }
            let demoted = !sn.ends_with("Exception") && !sn.ends_with("Error");
            let (base_name, filter, use_import) = class_edit_texts(
                sn,
                name,
                is_fqn_prefix,
                has_leading_backslash,
                file_namespace,
            );
            let mut texts = ClassItemTexts {
                base_name,
                filter,
                use_import,
            };
            ctx.apply_import_fixups(&mut texts.base_name, &mut texts.use_import, false);
            items.push(ctx.build_item(texts, name, '2', demoted, None, false));
        }
        // Over the cap: keep only the best-sorted items and signal
        // incompleteness so the client re-queries as the user types.
        let is_incomplete = items.len() > Self::MAX_CLASS_COMPLETIONS;
        if is_incomplete {
            items.sort_by(|a, b| a.sort_text.cmp(&b.sort_text));
            items.truncate(Self::MAX_CLASS_COMPLETIONS);
        }
        (items, is_incomplete)
    }
}
#[cfg(test)]
#[path = "catch_completion_tests.rs"]
mod tests;