use std::collections::{HashMap, HashSet};
use tower_lsp::lsp_types::*;
use crate::Backend;
use crate::completion::named_args::position_to_char_offset;
use crate::completion::use_edit;
use crate::types::*;
use crate::util::{short_name, strip_fqn_prefix};
use crate::completion::builder::{
analyze_use_block, build_callable_snippet, build_use_edit, use_import_conflicts,
};
/// Syntactic position in which a class name is being completed; used to
/// filter and rank class-like candidates.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ClassNameContext {
    /// No restricting construct found — offer everything.
    Any,
    /// After `new` — only concrete (instantiable) classes.
    New,
    /// After `extends` on a class declaration.
    ExtendsClass,
    /// After `extends` on an interface declaration.
    ExtendsInterface,
    /// After `implements` — interfaces only.
    Implements,
    /// `use` inside a class body — trait import.
    TraitUse,
    /// After `instanceof`.
    Instanceof,
    /// Type position (traits excluded, same rule as `Instanceof`).
    TypeHint,
    /// Top-level `use` import.
    UseImport,
    /// `use function` import — class names never apply.
    UseFunction,
    /// `use const` import — class names never apply.
    UseConst,
    /// `namespace` declaration — class names never apply.
    NamespaceDeclaration,
    /// Inside `#[...]`; payload is a bitmask of `attribute_target` flags
    /// inferred from the declaration below the attribute.
    Attribute(u8),
}
impl ClassNameContext {
    /// Whether a known class-like symbol is a valid completion in this
    /// context.
    pub(crate) fn matches(&self, cls: &ClassInfo) -> bool {
        match self {
            Self::Any => true,
            // `new` needs a concrete class.
            Self::New => cls.kind == ClassLikeKind::Class && !cls.is_abstract,
            // A final class cannot be extended.
            Self::ExtendsClass => cls.kind == ClassLikeKind::Class && !cls.is_final,
            Self::ExtendsInterface => cls.kind == ClassLikeKind::Interface,
            Self::Implements => cls.kind == ClassLikeKind::Interface,
            Self::TraitUse => cls.kind == ClassLikeKind::Trait,
            // Traits are not types, so they can't appear in type positions.
            Self::Instanceof | Self::TypeHint => cls.kind != ClassLikeKind::Trait,
            Self::UseImport => true,
            // Class names never complete in these contexts.
            Self::UseFunction | Self::UseConst | Self::NamespaceDeclaration => false,
            Self::Attribute(target) => {
                // Must be usable as an attribute and cover at least one of
                // the requested target bits. (The `!= 0` guard is implied by
                // the AND, but kept as-is.)
                cls.attribute_targets != 0 && (cls.attribute_targets & target) != 0
            }
        }
    }
    /// Test helper mirroring `matches` from bare kind/flags. Note it differs
    /// from `matches` for `Attribute(_)`, which always matches here.
    #[cfg(test)]
    pub(crate) fn matches_kind_flags(
        &self,
        kind: ClassLikeKind,
        is_abstract: bool,
        is_final: bool,
    ) -> bool {
        match self {
            Self::Any | Self::UseImport => true,
            Self::New => kind == ClassLikeKind::Class && !is_abstract,
            Self::ExtendsClass => kind == ClassLikeKind::Class && !is_final,
            Self::ExtendsInterface => kind == ClassLikeKind::Interface,
            Self::Implements => kind == ClassLikeKind::Interface,
            Self::TraitUse => kind == ClassLikeKind::Trait,
            Self::Instanceof | Self::TypeHint => kind != ClassLikeKind::Trait,
            Self::UseFunction | Self::UseConst | Self::NamespaceDeclaration => false,
            Self::Attribute(_) => true,
        }
    }
    /// Contexts where only class-like symbols (no functions/constants) are
    /// valid completions.
    pub(crate) fn is_class_only(&self) -> bool {
        matches!(
            self,
            Self::New
                | Self::ExtendsClass
                | Self::ExtendsInterface
                | Self::Implements
                | Self::TraitUse
                | Self::Instanceof
                | Self::TypeHint
                | Self::UseImport
                | Self::Attribute(_)
        )
    }
    /// `new` expression context.
    pub(crate) fn is_new(&self) -> bool {
        matches!(self, Self::New)
    }
    /// `#[...]` attribute context.
    pub(crate) fn is_attribute(&self) -> bool {
        matches!(self, Self::Attribute(_))
    }
    /// Contexts that accept exactly one kind (trait or interface), where
    /// symbols of unknown kind should be filtered rather than offered.
    pub(crate) fn is_narrow_kind(&self) -> bool {
        matches!(
            self,
            Self::TraitUse | Self::Implements | Self::ExtendsInterface
        )
    }
    /// Name-based guess that `short_name` does NOT fit this context; used to
    /// demote (not drop) candidates whose metadata is unavailable.
    pub(crate) fn likely_mismatch(&self, short_name: &str) -> bool {
        match self {
            Self::New => likely_non_instantiable(short_name),
            Self::ExtendsClass => likely_interface_name(short_name),
            Self::Implements | Self::ExtendsInterface => likely_non_interface_name(short_name),
            Self::TraitUse => likely_non_instantiable(short_name),
            Self::Attribute(_) => !likely_attribute_name(short_name),
            _ => false,
        }
    }
}
/// True when `chars[..end]` ends with `keyword` as a whole word — the
/// character just before it must not be part of an identifier (so `renew`
/// never matches `new`). Matching is ASCII-case-insensitive, so `keyword` is
/// expected to be ASCII.
fn keyword_ends_at(chars: &[char], end: usize, keyword: &str) -> bool {
    // Measure in chars, not bytes: `chars` is a char-indexed buffer, so a
    // byte length would mis-slice for any non-ASCII keyword.
    let kw_len = keyword.chars().count();
    if end < kw_len {
        return false;
    }
    let start = end - kw_len;
    // Word-boundary check: reject matches glued to an identifier.
    if start > 0 && (chars[start - 1].is_alphanumeric() || chars[start - 1] == '_') {
        return false;
    }
    let candidate: String = chars[start..end].iter().collect();
    candidate.eq_ignore_ascii_case(keyword)
}
/// Decides whether an `extends` belongs to a `class` or an `interface`
/// declaration by walking backwards from the keyword, past the declared
/// name, to the declaration keyword itself.
fn determine_extends_context(chars: &[char], extends_start: usize) -> ClassNameContext {
    let skip_ws = |mut pos: usize| {
        while pos > 0 && chars[pos - 1].is_ascii_whitespace() {
            pos -= 1;
        }
        pos
    };
    let skip_ident = |mut pos: usize| {
        while pos > 0 && (chars[pos - 1].is_alphanumeric() || chars[pos - 1] == '_') {
            pos -= 1;
        }
        pos
    };
    // Back over whitespace, the declared name, and more whitespace.
    let mut i = skip_ws(skip_ident(skip_ws(extends_start)));
    if keyword_ends_at(chars, i, "interface") {
        return ClassNameContext::ExtendsInterface;
    }
    if keyword_ends_at(chars, i, "class") {
        return ClassNameContext::ExtendsClass;
    }
    // Scan a few more tokens back (modifiers etc.) looking for `class`.
    for _ in 0..5 {
        i = skip_ws(skip_ident(i));
        if keyword_ends_at(chars, i, "class") {
            return ClassNameContext::ExtendsClass;
        }
    }
    // Default: extending a class is by far the common case.
    ClassNameContext::ExtendsClass
}
/// Net `{`/`}` nesting depth of the text before `pos`. Purely character
/// based — strings and comments are not skipped (heuristic).
fn brace_depth_at(chars: &[char], pos: usize) -> i32 {
    chars[..pos]
        .iter()
        .map(|&c| match c {
            '{' => 1,
            '}' => -1,
            _ => 0,
        })
        .sum()
}
/// Classifies the syntactic context at `position` — which keyword precedes
/// the partially-typed class name — so completion can filter and rank
/// candidates. Returns `Any` when no restricting construct is found.
pub(crate) fn detect_class_name_context(content: &str, position: Position) -> ClassNameContext {
    let chars: Vec<char> = content.chars().collect();
    let Some(offset) = position_to_char_offset(&chars, position) else {
        return ClassNameContext::Any;
    };
    // Back up over the (possibly namespace-qualified) partial name.
    let mut i = offset;
    while i > 0 && (chars[i - 1].is_alphanumeric() || chars[i - 1] == '_' || chars[i - 1] == '\\') {
        i -= 1;
    }
    // Attribute check runs before whitespace skipping: `#[` may be directly
    // adjacent to the name being typed.
    if let Some(target) = detect_attribute_context(&chars, i, content, position) {
        return ClassNameContext::Attribute(target);
    }
    while i > 0 && chars[i - 1].is_ascii_whitespace() {
        i -= 1;
    }
    // Step back over earlier comma-separated names (`implements A, B|`,
    // `use T1, T2|`, …) so the keyword before the whole list is classified.
    while i > 0 && chars[i - 1] == ',' {
        i -= 1;
        while i > 0 && chars[i - 1].is_ascii_whitespace() {
            i -= 1;
        }
        while i > 0
            && (chars[i - 1].is_alphanumeric() || chars[i - 1] == '_' || chars[i - 1] == '\\')
        {
            i -= 1;
        }
        while i > 0 && chars[i - 1].is_ascii_whitespace() {
            i -= 1;
        }
    }
    if keyword_ends_at(&chars, i, "instanceof") {
        return ClassNameContext::Instanceof;
    }
    if keyword_ends_at(&chars, i, "new") {
        return ClassNameContext::New;
    }
    if keyword_ends_at(&chars, i, "implements") {
        return ClassNameContext::Implements;
    }
    if keyword_ends_at(&chars, i, "extends") {
        // Class-extends vs interface-extends depends on the declaration.
        let extends_start = i - "extends".len();
        return determine_extends_context(&chars, extends_start);
    }
    if keyword_ends_at(&chars, i, "function") {
        // `use function Foo` — only at brace depth 0, where `use` is an
        // import (inside a body it would be a closure `use` clause).
        let kw_start = i - "function".len();
        let mut j = kw_start;
        while j > 0 && chars[j - 1].is_ascii_whitespace() {
            j -= 1;
        }
        if keyword_ends_at(&chars, j, "use") && brace_depth_at(&chars, j) < 1 {
            return ClassNameContext::UseFunction;
        }
    }
    if keyword_ends_at(&chars, i, "const") {
        // `use const Foo`, same top-level restriction as above.
        let kw_start = i - "const".len();
        let mut j = kw_start;
        while j > 0 && chars[j - 1].is_ascii_whitespace() {
            j -= 1;
        }
        if keyword_ends_at(&chars, j, "use") && brace_depth_at(&chars, j) < 1 {
            return ClassNameContext::UseConst;
        }
    }
    if keyword_ends_at(&chars, i, "use") {
        // Inside a class body `use` imports a trait; at top level it is a
        // namespace import.
        if brace_depth_at(&chars, i) >= 1 {
            return ClassNameContext::TraitUse;
        }
        return ClassNameContext::UseImport;
    }
    if keyword_ends_at(&chars, i, "namespace") && brace_depth_at(&chars, i) < 1 {
        return ClassNameContext::NamespaceDeclaration;
    }
    ClassNameContext::Any
}
/// True when the cursor sits on the NAME of a `class` / `interface` /
/// `trait` / `enum` declaration (as opposed to a usage). Anonymous classes
/// (`new class`) are excluded.
pub(crate) fn is_class_declaration_name(content: &str, position: Position) -> bool {
    let chars: Vec<char> = content.chars().collect();
    let Some(offset) = position_to_char_offset(&chars, position) else {
        return false;
    };
    // Back up over the identifier under the cursor, then over whitespace.
    let mut idx = offset;
    while idx > 0 && (chars[idx - 1].is_alphanumeric() || chars[idx - 1] == '_') {
        idx -= 1;
    }
    while idx > 0 && chars[idx - 1].is_ascii_whitespace() {
        idx -= 1;
    }
    let is_decl = ["class", "interface", "trait", "enum"]
        .iter()
        .any(|kw| keyword_ends_at(&chars, idx, kw));
    if !is_decl {
        return false;
    }
    // `new class (...)` is an anonymous class expression, not a declaration.
    if keyword_ends_at(&chars, idx, "class") {
        let mut before = idx - "class".len();
        while before > 0 && chars[before - 1].is_ascii_whitespace() {
            before -= 1;
        }
        if keyword_ends_at(&chars, before, "new") {
            return false;
        }
    }
    true
}
#[cfg(test)]
pub(crate) fn detect_stub_class_kind(
class_name: &str,
source: &str,
) -> Option<(ClassLikeKind, bool, bool)> {
let sn = short_name(class_name);
if !source.contains(sn) {
return None;
}
for line in source.lines() {
let trimmed = line.trim();
if trimmed.is_empty()
|| trimmed.starts_with("//")
|| trimmed.starts_with('*')
|| trimmed.starts_with("/*")
{
continue;
}
let tokens: Vec<&str> = trimmed.split_whitespace().collect();
for (idx, token) in tokens.iter().enumerate() {
let kind = match token.to_lowercase().as_str() {
"class" => Some(ClassLikeKind::Class),
"interface" => Some(ClassLikeKind::Interface),
"trait" => Some(ClassLikeKind::Trait),
"enum" => Some(ClassLikeKind::Enum),
_ => None,
};
if let Some(kind) = kind {
if let Some(name_token) = tokens.get(idx + 1) {
let name = name_token.trim_end_matches(['{', ':']);
if name == sn {
let prefix = &tokens[..idx];
let is_abstract = prefix.iter().any(|t| t.eq_ignore_ascii_case("abstract"));
let is_final = prefix.iter().any(|t| t.eq_ignore_ascii_case("final"));
return Some((kind, is_abstract, is_final));
}
}
}
}
}
None
}
/// Heuristic: names containing "attribute" or matching a well-known built-in
/// PHP attribute are probably attribute classes.
fn likely_attribute_name(short_name: &str) -> bool {
    const BUILTINS: [&str; 5] = [
        "Override",
        "Deprecated",
        "SensitiveParameter",
        "AllowDynamicProperties",
        "ReturnTypeWillChange",
    ];
    short_name.to_lowercase().contains("attribute") || BUILTINS.contains(&short_name)
}
/// Heuristic: an "Interface" suffix or Hungarian-style `IFoo` (capital `I`
/// followed by another uppercase letter) suggests an interface.
fn likely_interface_name(name: &str) -> bool {
    if name.ends_with("Interface") {
        return true;
    }
    let mut letters = name.chars();
    matches!(
        (letters.next(), letters.next()),
        (Some('I'), Some(second)) if second.is_uppercase()
    )
}
/// Heuristic: `Abstract…`/`…Abstract` or `Base<Upper>…` naming suggests a
/// base class rather than an interface.
fn likely_non_interface_name(name: &str) -> bool {
    let lower = name.to_ascii_lowercase();
    lower.starts_with("abstract")
        || lower.ends_with("abstract")
        || name
            .strip_prefix("Base")
            .and_then(|rest| rest.bytes().next())
            .is_some_and(|b| b.is_ascii_uppercase())
}
/// Heuristic: interfaces, abstract classes, and traits cannot be `new`ed;
/// guess from common naming conventions.
fn likely_non_instantiable(name: &str) -> bool {
    likely_interface_name(name)
        || name.starts_with("Abstract")
        || name.ends_with("Abstract")
        || name.ends_with("Trait")
        || name
            .strip_prefix("Base")
            .and_then(|rest| rest.bytes().next())
            .is_some_and(|b| b.is_ascii_uppercase())
}
/// If the identifier starting at `j` is being typed inside a `#[...]`
/// attribute list, returns the inferred attribute-target bitmask; `None`
/// otherwise.
///
/// Walks backwards from `j`, skipping earlier comma-separated attribute
/// entries (names with optional balanced `(...)` argument lists), until it
/// either reaches the opening `#[` or sees something that rules the context
/// out (a bare `[`, or any other character).
fn detect_attribute_context(
    chars: &[char],
    j: usize,
    content: &str,
    position: Position,
) -> Option<u8> {
    let mut k = j;
    loop {
        while k > 0 && chars[k - 1].is_ascii_whitespace() {
            k -= 1;
        }
        if k == 0 {
            return None;
        }
        // Reached the attribute opener.
        if k >= 2 && chars[k - 2] == '#' && chars[k - 1] == '[' {
            let target = infer_attribute_target(content, position);
            return Some(target);
        }
        // A bare `[` is array/index syntax, not an attribute list.
        if chars[k - 1] == '[' {
            return None;
        }
        if chars[k - 1] == ')' {
            // Skip a previous attribute's balanced argument list …
            k -= 1;
            let mut depth = 1i32;
            while k > 0 && depth > 0 {
                k -= 1;
                match chars[k] {
                    ')' => depth += 1,
                    '(' => depth -= 1,
                    _ => {}
                }
            }
            // … then its (possibly qualified) name …
            while k > 0
                && (chars[k - 1].is_alphanumeric() || chars[k - 1] == '_' || chars[k - 1] == '\\')
            {
                k -= 1;
            }
            while k > 0 && chars[k - 1].is_ascii_whitespace() {
                k -= 1;
            }
            // … and the separating comma, if present.
            if k > 0 && chars[k - 1] == ',' {
                k -= 1;
                continue;
            }
            continue;
        }
        if chars[k - 1] == ',' {
            // Separator before the current entry: skip it, then either let
            // the `)` branch above handle a parenthesized entry or skip a
            // bare attribute name.
            k -= 1;
            while k > 0 && chars[k - 1].is_ascii_whitespace() {
                k -= 1;
            }
            if k > 0 && chars[k - 1] == ')' {
                continue;
            }
            while k > 0
                && (chars[k - 1].is_alphanumeric() || chars[k - 1] == '_' || chars[k - 1] == '\\')
            {
                k -= 1;
            }
            continue;
        }
        // Anything else means we are not inside `#[...]`.
        return None;
    }
}
/// Guesses which attribute-target flags apply by inspecting the declaration
/// on the lines just below the `#[...]` the cursor is in.
///
/// Scans up to nine lines after the cursor line for the next non-comment
/// declaration (class-like / `function` / `const` / property modifiers) and
/// maps it to the corresponding `attribute_target` bit. When no declaration
/// is found, falls back to a broad mask chosen by brace depth: class-member
/// targets inside a body, class + function at top level.
fn infer_attribute_target(content: &str, position: Position) -> u8 {
    use crate::types::attribute_target;
    let lines: Vec<&str> = content.lines().collect();
    let cursor_line = position.line as usize;
    // Brace depth of everything above the cursor line. depth >= 1 is taken
    // as "inside a class body" (raw-character heuristic; strings and
    // comments are not skipped).
    let depth = {
        let mut d = 0i32;
        for (idx, line) in lines.iter().enumerate() {
            if idx >= cursor_line {
                break;
            }
            for ch in line.chars() {
                match ch {
                    '{' => d += 1,
                    '}' => d -= 1,
                    _ => {}
                }
            }
        }
        d
    };
    // `take` bounds the absolute index, so this visits lines
    // cursor_line+1 ..= cursor_line+9, clamped to the file length.
    for line in lines
        .iter()
        .take(lines.len().min(cursor_line + 10))
        .skip(cursor_line + 1)
    {
        let trimmed = line.trim();
        // Skip blanks, further attribute lines, and line comments.
        if trimmed.is_empty() || trimmed.starts_with('#') || trimmed.starts_with("//") {
            continue;
        }
        let words = declaration_keywords(trimmed);
        if words.contains(&"class")
            || words.contains(&"interface")
            || words.contains(&"trait")
            || words.contains(&"enum")
        {
            return attribute_target::TARGET_CLASS;
        }
        if words.contains(&"function") {
            // A function inside a class body is a method.
            return if depth >= 1 {
                attribute_target::TARGET_METHOD
            } else {
                attribute_target::TARGET_FUNCTION
            };
        }
        if words.contains(&"const") {
            return attribute_target::TARGET_CLASS_CONSTANT;
        }
        if depth >= 1 {
            // A bare modifier line inside a class body is a property.
            let has_modifier = words.iter().any(|w| {
                matches!(
                    *w,
                    "public"
                        | "protected"
                        | "private"
                        | "readonly"
                        | "static"
                        | "var"
                        | "abstract"
                        | "final"
                )
            });
            if has_modifier {
                return attribute_target::TARGET_PROPERTY;
            }
        }
        // The first substantive line decides; stop either way.
        break;
    }
    if depth >= 1 {
        attribute_target::TARGET_METHOD
            | attribute_target::TARGET_PROPERTY
            | attribute_target::TARGET_CLASS_CONSTANT
    } else {
        attribute_target::TARGET_CLASS | attribute_target::TARGET_FUNCTION
    }
}
/// Collects the run of leading PHP declaration keywords (modifiers plus the
/// declaration kind) from a trimmed line, stopping at the first token that
/// is not one of them.
fn declaration_keywords(line: &str) -> Vec<&str> {
    let mut result = Vec::new();
    for word in line.split_whitespace() {
        // A variable, paren, brace, or comment ends the keyword run.
        if word.starts_with(['$', '(', '{', '/', '#']) {
            break;
        }
        // Strip trailing punctuation once (e.g. `class{`, enum `Foo:`) —
        // previously this closure was evaluated twice per matching word.
        let cleaned = word.trim_end_matches(|c: char| !c.is_alphanumeric() && c != '_');
        match cleaned {
            "public" | "protected" | "private" | "static" | "abstract" | "final" | "readonly"
            | "function" | "class" | "interface" | "trait" | "enum" | "const" | "var" => {
                result.push(cleaned);
            }
            _ => break,
        }
    }
    result
}
/// Whether `name` is the synthetic name given to an anonymous class
/// (`new class { … }`) during indexing; such names are never completable.
pub(in crate::completion) fn is_anonymous_class(name: &str) -> bool {
    name.starts_with("__anonymous@")
}
/// Expands a leading import alias in a qualified prefix: for `Alias\Rest`,
/// looks `Alias` up in the use map and returns `FQN\Rest` (or `FQN\` when
/// nothing follows the backslash yet). Returns `None` for unqualified
/// prefixes or unknown aliases.
pub(in crate::completion) fn expand_alias_prefix(
    normalized_prefix: &str,
    use_map: &HashMap<String, String>,
) -> Option<String> {
    // `split_once` replaces the manual find + index-slicing.
    let (first_segment, rest) = normalized_prefix.split_once('\\')?;
    let fqn_ns = use_map.get(first_segment)?;
    // An empty `rest` still yields the right `FQN\` expansion, so the two
    // previous format branches collapse into one.
    Some(format!("{fqn_ns}\\{rest}"))
}
/// Case-insensitive substring match of the typed prefix against a candidate.
/// The short name is always checked; for namespace-qualified prefixes the
/// FQN is checked too, both as typed and with a leading alias expanded
/// (`expanded_prefix_lower`).
pub(in crate::completion) fn matches_class_prefix(
    short_name: &str,
    fqn: &str,
    prefix_lower: &str,
    is_fqn: bool,
    expanded_prefix_lower: Option<&str>,
) -> bool {
    if short_name.to_lowercase().contains(prefix_lower) {
        return true;
    }
    if !is_fqn {
        return false;
    }
    // Lowercase the FQN once instead of once per comparison.
    let fqn_lower = fqn.to_lowercase();
    fqn_lower.contains(prefix_lower)
        || expanded_prefix_lower.is_some_and(|exp| fqn_lower.contains(exp))
}
/// Finds the shortest way to spell `fqn` using the file's `use` imports:
/// either an alias that matches exactly, or `alias\suffix` when an import
/// covers a namespace prefix of `fqn`. `None` when no import applies.
fn shorten_fqn_via_use_map(fqn: &str, use_map: &HashMap<String, String>) -> Option<String> {
    let mut best: Option<String> = None;
    for (alias, import_fqn) in use_map {
        let candidate = if fqn == import_fqn {
            alias.clone()
        } else if let Some(suffix) = fqn
            .strip_prefix(import_fqn.as_str())
            .and_then(|rest| rest.strip_prefix('\\'))
        {
            format!("{alias}\\{suffix}")
        } else {
            continue;
        };
        // Keep the shortest spelling seen so far.
        if best.as_ref().is_none_or(|b| candidate.len() < b.len()) {
            best = Some(candidate);
        }
    }
    best
}
/// Counts how often each namespace prefix occurs among the file's own
/// namespace and the namespaces of its imports. The resulting table ranks
/// completions from "nearby" namespaces higher (see `affinity_score`).
pub(crate) fn build_affinity_table(
    use_map: &HashMap<String, String>,
    file_namespace: &Option<String>,
) -> HashMap<String, u32> {
    let mut table: HashMap<String, u32> = HashMap::new();
    // Namespace portion of every imported FQN (the part before the last `\`).
    let import_namespaces = use_map
        .values()
        .filter_map(|fqn| fqn.rfind('\\').map(|pos| &fqn[..pos]));
    for ns in file_namespace
        .iter()
        .map(String::as_str)
        .chain(import_namespaces)
    {
        // Bump every prefix of the namespace, from one segment up to all.
        let parts: Vec<&str> = ns.split('\\').collect();
        for depth in 1..=parts.len() {
            *table.entry(parts[..depth].join("\\")).or_insert(0) += 1;
        }
    }
    table
}
/// Sums the affinity-table counts of every namespace prefix of `fqn`; higher
/// means "closer" to the file's own imports/namespace. A bare (top-level)
/// name scores 0.
pub(crate) fn affinity_score(fqn: &str, table: &HashMap<String, u32>) -> u32 {
    let Some(pos) = fqn.rfind('\\') else {
        return 0;
    };
    let ns = &fqn[..pos];
    let mut score = 0u32;
    // Look up prefixes as borrowed slices ending at each `\`, plus the full
    // namespace — no per-depth `join` allocations.
    for (bs, _) in ns.match_indices('\\') {
        if let Some(&count) = table.get(&ns[..bs]) {
            score += count;
        }
    }
    if let Some(&count) = table.get(ns) {
        score += count;
    }
    score
}
/// Grades how well `short_name` matches the typed `prefix`, case-insensitively:
/// 'a' = exact, 'b' = prefix match (or nothing typed), 'c' = substring only.
/// The grades sort lexicographically, a < b < c.
pub(in crate::completion) fn match_quality(short_name: &str, prefix: &str) -> char {
    if prefix.is_empty() {
        return 'b';
    }
    let name_lower = short_name.to_lowercase();
    let prefix_lower = prefix.to_lowercase();
    if name_lower == prefix_lower {
        'a'
    } else if name_lower.starts_with(&prefix_lower) {
        'b'
    } else {
        'c'
    }
}
/// Builds the composite sort key for a class completion: match quality,
/// source tier ('0' imports < '1' same namespace < '2' indexes/stubs),
/// inverted affinity (higher affinity sorts earlier), a demotion flag, how
/// much longer the name is than the prefix, and the lowercased name as a
/// stable tie-breaker.
pub(in crate::completion) fn class_sort_text(
    short_name: &str,
    fqn: &str,
    prefix: &str,
    source_tier: char,
    demoted: bool,
    affinity_table: &HashMap<String, u32>,
) -> String {
    let quality = match_quality(short_name, prefix);
    // Invert so a higher score becomes a smaller (earlier) key.
    let inverted_affinity = 9999_u32.saturating_sub(affinity_score(fqn, affinity_table).min(9999));
    let length_gap = short_name.len().saturating_sub(prefix.len()).min(999);
    let demote_flag = if demoted { '1' } else { '0' };
    format!(
        "{quality}{source_tier}{inverted_affinity:04}{demote_flag}{length_gap:03}_{}",
        short_name.to_lowercase()
    )
}
/// Computes `(insert text, filter text, FQN to import via a use edit)` for
/// one candidate. Short-name completions insert the short name and schedule
/// an import; FQN-style completions insert a namespace-relative path when
/// possible, otherwise the full FQN (preserving a typed leading `\`).
pub(in crate::completion) fn class_edit_texts(
    short_name: &str,
    fqn: &str,
    is_fqn: bool,
    has_leading_backslash: bool,
    file_namespace: &Option<String>,
) -> (String, String, Option<String>) {
    if !is_fqn {
        // Plain short-name completion: the `use` import carries the FQN.
        return (
            short_name.to_string(),
            short_name.to_string(),
            Some(fqn.to_string()),
        );
    }
    // Prefer a path relative to the file's own namespace.
    if let Some(ns) = file_namespace {
        if let Some(relative) = fqn.strip_prefix(&format!("{}\\", ns)) {
            let filter = if has_leading_backslash {
                format!("\\{}", fqn)
            } else {
                fqn.to_string()
            };
            return (relative.to_string(), filter, None);
        }
    }
    // Fall back to the full FQN, echoing the user's leading backslash.
    let insert = if has_leading_backslash {
        format!("\\{}", fqn)
    } else {
        fqn.to_string()
    };
    (insert.clone(), insert, None)
}
/// Shared per-request state for constructing class `CompletionItem`s.
pub(in crate::completion) struct ClassItemCtx<'a> {
    /// The typed prefix is namespace-qualified (contains `\`, starts with
    /// `\`, or is inside a `use` import).
    pub(in crate::completion) is_fqn_prefix: bool,
    /// Completing after `new` — insert a constructor-call snippet.
    pub(in crate::completion) is_new: bool,
    /// Completing inside `#[...]` — insert an attribute snippet.
    pub(in crate::completion) is_attribute: bool,
    /// Range covering the typed prefix, replaced via `text_edit` for
    /// FQN-style completions; `None` for plain short-name completion.
    pub(in crate::completion) fqn_replace_range: Option<Range>,
    /// Alias → FQN map from the file's `use` statements.
    pub(in crate::completion) file_use_map: &'a HashMap<String, String>,
    /// Location info of the file's `use` block, for inserting new imports.
    pub(in crate::completion) use_block: use_edit::UseBlockInfo,
    /// Namespace edits are generated against (cleared for use-imports).
    pub(in crate::completion) file_namespace: &'a Option<String>,
    /// Namespace-prefix frequency table for proximity ranking.
    pub(in crate::completion) affinity_table: HashMap<String, u32>,
    /// Last segment of the typed prefix, used for match-quality grading.
    pub(in crate::completion) quality_prefix: String,
    /// Whether item labels should show the full FQN.
    pub(in crate::completion) prefix_has_namespace: bool,
    /// Document URI, embedded in the completion-resolve payload.
    pub(in crate::completion) uri: &'a str,
}
/// Inputs to `Backend::build_class_name_completions`.
pub(crate) struct ClassCompletionParams<'a> {
    /// Alias → FQN map from the file's `use` statements.
    pub(crate) file_use_map: &'a HashMap<String, String>,
    /// The file's declared namespace, if any.
    pub(crate) file_namespace: &'a Option<String>,
    /// The partially-typed class name (may be namespace-qualified).
    pub(crate) prefix: &'a str,
    /// Full document text.
    pub(crate) content: &'a str,
    /// Syntactic context the name is being completed in.
    pub(crate) context: ClassNameContext,
    /// Cursor position.
    pub(crate) position: Position,
    /// Precomputed affinity table; when `None` one is built from the use map
    /// and namespace.
    pub(crate) affinity_table_override: Option<HashMap<String, u32>>,
    /// Document URI.
    pub(crate) uri: &'a str,
}
/// Per-candidate text fragments for one completion item.
pub(in crate::completion) struct ClassItemTexts {
    /// Text inserted at the cursor (short name, relative path, or `\FQN`).
    pub(in crate::completion) base_name: String,
    /// Filter text matched against what the user typed.
    pub(in crate::completion) filter: String,
    /// FQN to add as a `use` import via an additional text edit, if any.
    pub(in crate::completion) use_import: Option<String>,
}
impl ClassItemCtx<'_> {
    /// Post-processes the insert text and pending import for alias conflicts:
    ///
    /// * if the planned `use` import clashes with an existing alias, insert
    ///   the fully-qualified `\FQN` instead and drop the import;
    /// * if an FQN-style insert's first segment collides (case-insensitively)
    ///   with an existing alias, prefix `\` so PHP does not resolve the name
    ///   through that alias.
    pub(in crate::completion) fn apply_import_fixups(
        &self,
        base_name: &mut String,
        use_import: &mut Option<String>,
        was_shortened: bool,
    ) {
        if let Some(ref import_fqn) = *use_import
            && use_import_conflicts(import_fqn, self.file_use_map)
        {
            *base_name = format!("\\{}", import_fqn);
            *use_import = None;
        }
        if self.is_fqn_prefix
            && !was_shortened
            && !base_name.starts_with('\\')
            && let Some(first_seg) = base_name.split('\\').next()
            && self
                .file_use_map
                .keys()
                .any(|a| a.eq_ignore_ascii_case(first_seg))
        {
            *base_name = format!("\\{}", base_name);
        }
    }
    /// Assembles the final `CompletionItem` for one class candidate.
    ///
    /// `source_tier` feeds the sort key ('0' imports, '1' same namespace,
    /// '2' indexes/stubs); `demoted` pushes heuristic mismatches down;
    /// `ctor_params` enables constructor/attribute argument snippets.
    pub(in crate::completion) fn build_item(
        &self,
        texts: ClassItemTexts,
        fqn: &str,
        source_tier: char,
        demoted: bool,
        ctor_params: Option<&[ParameterInfo]>,
        is_deprecated: bool,
    ) -> CompletionItem {
        let short_name = crate::util::short_name(fqn);
        let sort_text = class_sort_text(
            short_name,
            fqn,
            &self.quality_prefix,
            source_tier,
            demoted,
            &self.affinity_table,
        );
        // Insert text: attribute snippet, `new` snippet, or the plain name.
        let (insert_text, insert_text_format) = if self.is_attribute {
            let snippet = crate::completion::builder::build_attribute_snippet(
                &texts.base_name,
                ctor_params.unwrap_or(&[]),
            );
            // Only mark as a snippet when there is an actual tab stop.
            if snippet.contains("$0") {
                (snippet, Some(InsertTextFormat::SNIPPET))
            } else {
                (snippet, None)
            }
        } else if self.is_new {
            Backend::build_new_insert(&texts.base_name, ctor_params)
        } else {
            (texts.base_name, None)
        };
        // Qualified prefix: label with the FQN. Otherwise label with the
        // short name and show the namespace as a label detail.
        let (label, filter_text, label_details) = if self.prefix_has_namespace {
            (fqn.to_string(), texts.filter, None)
        } else {
            let ns = fqn.rsplit_once('\\').map(|(ns, _)| ns.to_string());
            (
                short_name.to_string(),
                short_name.to_string(),
                ns.map(|desc| CompletionItemLabelDetails {
                    detail: None,
                    description: Some(desc),
                }),
            )
        };
        // Payload consumed by completionItem/resolve.
        let data = serde_json::to_value(crate::completion::resolve::CompletionItemData {
            class_name: String::new(),
            member_name: fqn.to_string(),
            kind: "class".to_string(),
            uri: self.uri.to_string(),
            extra_class_names: vec![],
        })
        .ok();
        CompletionItem {
            label,
            label_details,
            kind: Some(CompletionItemKind::CLASS),
            detail: Some(fqn.to_string()),
            insert_text: Some(insert_text.clone()),
            insert_text_format,
            filter_text: Some(filter_text),
            sort_text: Some(sort_text),
            tags: if is_deprecated {
                Some(vec![CompletionItemTag::DEPRECATED])
            } else {
                None
            },
            // For FQN-style prefixes the whole typed prefix is replaced.
            text_edit: self.fqn_replace_range.map(|range| {
                CompletionTextEdit::Edit(TextEdit {
                    range,
                    new_text: insert_text.clone(),
                })
            }),
            // The `use` import is applied alongside the insertion, if planned.
            additional_text_edits: texts.use_import.as_ref().and_then(|import_fqn| {
                build_use_edit(import_fqn, &self.use_block, self.file_namespace)
            }),
            data,
            ..CompletionItem::default()
        }
    }
}
impl Backend {
/// Returns the partially-typed class name immediately before the cursor, or
/// `None` when the cursor is not in a class-name position (nothing typed, or
/// preceded by `$`, `->`, `::`, or `<?`).
pub fn extract_partial_class_name(content: &str, position: Position) -> Option<String> {
    let lines: Vec<&str> = content.lines().collect();
    if position.line as usize >= lines.len() {
        return None;
    }
    let line = lines[position.line as usize];
    let chars: Vec<char> = line.chars().collect();
    let col = (position.character as usize).min(chars.len());
    // Back up over identifier characters and namespace separators.
    let mut i = col;
    while i > 0
        && (chars[i - 1].is_alphanumeric() || chars[i - 1] == '_' || chars[i - 1] == '\\')
    {
        i -= 1;
    }
    // Nothing typed yet.
    if i == col {
        return None;
    }
    // `$name` is a variable, not a class name.
    if i > 0 && chars[i - 1] == '$' {
        return None;
    }
    // `->name` and `::name` are member accesses.
    if i >= 2 && chars[i - 2] == '-' && chars[i - 1] == '>' {
        return None;
    }
    if i >= 2 && chars[i - 2] == ':' && chars[i - 1] == ':' {
        return None;
    }
    // `<?` — the PHP open tag.
    if i >= 2 && chars[i - 2] == '<' && chars[i - 1] == '?' {
        return None;
    }
    let partial: String = chars[i..col].iter().collect();
    if partial.is_empty() {
        return None;
    }
    Some(partial)
}
pub(crate) fn is_throw_new_context(content: &str, position: Position) -> bool {
let lines: Vec<&str> = content.lines().collect();
if position.line as usize >= lines.len() {
return false;
}
let line = lines[position.line as usize];
let chars: Vec<char> = line.chars().collect();
let col = (position.character as usize).min(chars.len());
let mut i = col;
while i > 0
&& (chars[i - 1].is_alphanumeric() || chars[i - 1] == '_' || chars[i - 1] == '\\')
{
i -= 1;
}
while i > 0 && chars[i - 1].is_ascii_whitespace() {
i -= 1;
}
if i < 3 {
return false;
}
let new_candidate: String = chars[i - 3..i].iter().collect();
if !new_candidate.eq_ignore_ascii_case("new") {
return false;
}
let j = i - 3;
let mut k = j;
while k > 0 && chars[k - 1].is_ascii_whitespace() {
k -= 1;
}
if k < 5 {
return false;
}
let throw_candidate: String = chars[k - 5..k].iter().collect();
throw_candidate.eq_ignore_ascii_case("throw")
}
/// Insert text for a `new Name(...)` completion: a parameter snippet when
/// constructor parameters are known and non-empty, otherwise `Name()` with
/// the final cursor position (`$0`) after the call.
pub(in crate::completion) fn build_new_insert(
    name: &str,
    ctor_params: Option<&[ParameterInfo]>,
) -> (String, Option<InsertTextFormat>) {
    if let Some(params) = ctor_params
        && !params.is_empty()
    {
        let snippet = build_callable_snippet(name, params);
        (snippet, Some(InsertTextFormat::SNIPPET))
    } else {
        // `$0` is the LSP snippet final-tab-stop placeholder.
        (format!("{name}()$0"), Some(InsertTextFormat::SNIPPET))
    }
}
/// Constructor parameters of `class_name`, resolved via `load_stub_class`;
/// `None` when the class or its `__construct` is unknown.
fn ctor_params_for(&self, class_name: &str) -> Option<Vec<ParameterInfo>> {
    let cls = self.load_stub_class(class_name)?;
    let ctor = cls
        .methods
        .iter()
        .find(|m| m.name.eq_ignore_ascii_case("__construct"))?;
    Some(ctor.parameters.clone())
}
/// Hard cap on returned class completions; exceeding it marks the response
/// incomplete so the client re-queries as the user types.
pub(in crate::completion) const MAX_CLASS_COMPLETIONS: usize = 100;
/// Builds class-name completions for `params.prefix` at `params.position`.
///
/// Candidates are gathered and deduplicated by FQN from four tiers, in
/// order: the file's own `use` imports (sort tier '0'), classes declared in
/// other files of the same namespace ('1'), the project class index and
/// composer classmap ('2'), and the PHP stub index ('2'). For qualified
/// prefixes, intermediate namespace segments are also offered as MODULE
/// items. Returns the sorted items plus an `is_incomplete` flag set when the
/// list was truncated to `MAX_CLASS_COMPLETIONS`.
pub(crate) fn build_class_name_completions(
    &self,
    params: ClassCompletionParams<'_>,
) -> (Vec<CompletionItem>, bool) {
    let ClassCompletionParams {
        file_use_map,
        file_namespace,
        prefix,
        content,
        context,
        position,
        affinity_table_override,
        uri,
    } = params;
    let is_new = context.is_new();
    let is_attribute = context.is_attribute();
    let is_use_import = matches!(context, ClassNameContext::UseImport);
    // Inside a `use` statement names must stay fully qualified.
    let should_shorten_via_imports = !is_use_import;
    let has_leading_backslash = prefix.starts_with('\\');
    let normalized = strip_fqn_prefix(prefix);
    let prefix_lower = normalized.to_lowercase();
    let is_fqn_prefix = has_leading_backslash || normalized.contains('\\') || is_use_import;
    // `Alias\Rest` with `Alias` imported also matches under its expansion.
    let expanded = expand_alias_prefix(normalized, file_use_map);
    let expanded_lower = expanded.as_deref().map(|s| s.to_lowercase());
    let expanded_prefix_lower = expanded_lower.as_deref();
    // `use` imports resolve from the global namespace, not the file's.
    let no_namespace: Option<String> = None;
    let effective_namespace = if is_use_import {
        &no_namespace
    } else {
        file_namespace
    };
    // For a qualified prefix, completions replace the whole typed prefix.
    let fqn_replace_range = if is_fqn_prefix {
        Some(Range {
            start: Position {
                line: position.line,
                character: position
                    .character
                    .saturating_sub(prefix.chars().count() as u32),
            },
            end: position,
        })
    } else {
        None
    };
    let mut seen_fqns: HashSet<String> = HashSet::new();
    let mut items: Vec<CompletionItem> = Vec::new();
    let affinity_table = affinity_table_override
        .unwrap_or_else(|| build_affinity_table(file_use_map, file_namespace));
    // Match quality is judged against the last typed segment only.
    let quality_prefix = match normalized.rfind('\\') {
        Some(pos) => normalized[pos + 1..].to_string(),
        None => normalized.to_string(),
    };
    let prefix_has_namespace =
        normalized.contains('\\') || has_leading_backslash || is_use_import;
    // Constructor parameters are only needed for snippet contexts.
    let needs_ctor = is_new || is_attribute;
    let ctx = ClassItemCtx {
        is_fqn_prefix,
        is_new,
        is_attribute,
        fqn_replace_range,
        file_use_map,
        use_block: analyze_use_block(content),
        file_namespace: effective_namespace,
        affinity_table,
        quality_prefix,
        prefix_has_namespace,
        uri,
    };
    // Tier '0': classes this file already imports.
    for (sn, fqn) in file_use_map {
        if !matches_class_prefix(sn, fqn, &prefix_lower, is_fqn_prefix, expanded_prefix_lower) {
            continue;
        }
        // `use Foo\Bar;` may import a namespace rather than a class.
        if self.is_likely_namespace_not_class(fqn) {
            continue;
        }
        if !seen_fqns.insert(fqn.clone()) {
            continue;
        }
        // Drop candidates positively known not to fit the context.
        if context.is_class_only()
            && let Some(false) = self.check_context_match(fqn, context)
        {
            continue;
        }
        // Narrow contexts only accept symbols known to be class-like.
        if context.is_narrow_kind() && !self.is_known_class_like(fqn) {
            continue;
        }
        let (mut base_name, filter, _use_import) = class_edit_texts(
            sn,
            fqn,
            is_fqn_prefix,
            has_leading_backslash,
            effective_namespace,
        );
        if should_shorten_via_imports
            && let Some(shortened) = shorten_fqn_via_use_map(fqn, file_use_map)
        {
            base_name = shortened;
        }
        let texts = ClassItemTexts {
            base_name,
            filter,
            // Already imported — no additional `use` edit needed.
            use_import: None,
        };
        // Unknown metadata: demote (don't drop) on name heuristics.
        let demoted = context.is_class_only()
            && self.check_context_match(fqn, context).is_none()
            && context.likely_mismatch(sn);
        let ctor = if needs_ctor {
            self.ctor_params_for(fqn)
        } else {
            None
        };
        items.push(ctx.build_item(texts, fqn, '0', demoted, ctor.as_deref(), false));
    }
    // Tier '1': classes declared in other files of the same namespace.
    if !is_use_import && let Some(ns) = file_namespace {
        let nmap = self.namespace_map.read();
        let same_ns_uris: Vec<String> = nmap
            .iter()
            .filter_map(|(uri, opt_ns)| {
                if opt_ns.as_deref() == Some(ns.as_str()) {
                    Some(uri.clone())
                } else {
                    None
                }
            })
            .collect();
        // Release the namespace-map lock before taking the AST-map lock.
        drop(nmap);
        {
            let amap = self.ast_map.read();
            for uri in &same_ns_uris {
                if let Some(classes) = amap.get(uri) {
                    for cls in classes {
                        if is_anonymous_class(&cls.name) {
                            continue;
                        }
                        let cls_fqn = format!("{}\\{}", ns, cls.name);
                        if !matches_class_prefix(
                            &cls.name,
                            &cls_fqn,
                            &prefix_lower,
                            is_fqn_prefix,
                            expanded_prefix_lower,
                        ) {
                            continue;
                        }
                        // Full ClassInfo is available here: filter strictly.
                        if context.is_class_only() && !context.matches(cls) {
                            continue;
                        }
                        if !seen_fqns.insert(cls_fqn.clone()) {
                            continue;
                        }
                        let (mut base_name, filter, _use_import) = class_edit_texts(
                            &cls.name,
                            &cls_fqn,
                            is_fqn_prefix,
                            has_leading_backslash,
                            effective_namespace,
                        );
                        if should_shorten_via_imports
                            && let Some(shortened) =
                                shorten_fqn_via_use_map(&cls_fqn, file_use_map)
                        {
                            base_name = shortened;
                        }
                        // Constructor params come straight from the AST.
                        let ctor_params: Option<Vec<ParameterInfo>> = if needs_ctor {
                            cls.methods
                                .iter()
                                .find(|m| m.name.eq_ignore_ascii_case("__construct"))
                                .map(|m| m.parameters.clone())
                        } else {
                            None
                        };
                        let texts = ClassItemTexts {
                            base_name,
                            filter,
                            // Same namespace — no import required.
                            use_import: None,
                        };
                        items.push(ctx.build_item(
                            texts,
                            &cls_fqn,
                            '1',
                            false,
                            ctor_params.as_deref(),
                            cls.deprecation_message.is_some(),
                        ));
                    }
                }
            }
        }
    }
    // Tier '2': the project-wide class index.
    {
        let idx = self.class_index.read();
        for fqn in idx.keys() {
            let sn = short_name(fqn);
            if !matches_class_prefix(
                sn,
                fqn,
                &prefix_lower,
                is_fqn_prefix,
                expanded_prefix_lower,
            ) {
                continue;
            }
            if !seen_fqns.insert(fqn.clone()) {
                continue;
            }
            // Skip known mismatches; keep `None` (= unknown) so the
            // candidate can be demoted below instead of dropped.
            let ctx_match = if context.is_class_only() {
                let m = self.check_context_match(fqn, context);
                if m == Some(false) {
                    continue;
                }
                m
            } else {
                None
            };
            let (mut base_name, filter, mut use_import) = class_edit_texts(
                sn,
                fqn,
                is_fqn_prefix,
                has_leading_backslash,
                effective_namespace,
            );
            let mut was_shortened = false;
            if should_shorten_via_imports
                && let Some(shortened) = shorten_fqn_via_use_map(fqn, file_use_map)
            {
                base_name = shortened;
                use_import = None;
                was_shortened = true;
            }
            let mut texts = ClassItemTexts {
                base_name,
                filter,
                use_import,
            };
            ctx.apply_import_fixups(&mut texts.base_name, &mut texts.use_import, was_shortened);
            let demoted = ctx_match.is_none() && context.likely_mismatch(sn);
            let ctor = if needs_ctor {
                self.ctor_params_for(fqn)
            } else {
                None
            };
            items.push(ctx.build_item(texts, fqn, '2', demoted, ctor.as_deref(), false));
        }
    }
    // Tier '2': the composer classmap.
    {
        let cmap = self.classmap.read();
        for fqn in cmap.keys() {
            let sn = short_name(fqn);
            if !matches_class_prefix(
                sn,
                fqn,
                &prefix_lower,
                is_fqn_prefix,
                expanded_prefix_lower,
            ) {
                continue;
            }
            if !seen_fqns.insert(fqn.clone()) {
                continue;
            }
            let ctx_match = if context.is_class_only() {
                let m = self.check_context_match(fqn, context);
                if m == Some(false) {
                    continue;
                }
                m
            } else {
                None
            };
            let (mut base_name, filter, mut use_import) = class_edit_texts(
                sn,
                fqn,
                is_fqn_prefix,
                has_leading_backslash,
                effective_namespace,
            );
            let mut was_shortened = false;
            if should_shorten_via_imports
                && let Some(shortened) = shorten_fqn_via_use_map(fqn, file_use_map)
            {
                base_name = shortened;
                use_import = None;
                was_shortened = true;
            }
            let mut texts = ClassItemTexts {
                base_name,
                filter,
                use_import,
            };
            ctx.apply_import_fixups(&mut texts.base_name, &mut texts.use_import, was_shortened);
            let demoted = ctx_match.is_none() && context.likely_mismatch(sn);
            let ctor = if needs_ctor {
                self.ctor_params_for(fqn)
            } else {
                None
            };
            items.push(ctx.build_item(texts, fqn, '2', demoted, ctor.as_deref(), false));
        }
    }
    // Tier '2': bundled PHP stubs.
    let stub_idx = self.stub_index.read();
    for &name in stub_idx.keys() {
        let sn = short_name(name);
        if !matches_class_prefix(
            sn,
            name,
            &prefix_lower,
            is_fqn_prefix,
            expanded_prefix_lower,
        ) {
            continue;
        }
        if !seen_fqns.insert(name.to_string()) {
            continue;
        }
        let ctx_match = if context.is_class_only() {
            let m = self.check_context_match(name, context);
            if m == Some(false) {
                continue;
            }
            m
        } else {
            None
        };
        let (mut base_name, filter, mut use_import) = class_edit_texts(
            sn,
            name,
            is_fqn_prefix,
            has_leading_backslash,
            effective_namespace,
        );
        let mut was_shortened = false;
        if should_shorten_via_imports
            && let Some(shortened) = shorten_fqn_via_use_map(name, file_use_map)
        {
            base_name = shortened;
            use_import = None;
            was_shortened = true;
        }
        let mut texts = ClassItemTexts {
            base_name,
            filter,
            use_import,
        };
        ctx.apply_import_fixups(&mut texts.base_name, &mut texts.use_import, was_shortened);
        let demoted = ctx_match.is_none() && context.likely_mismatch(sn);
        let ctor = if needs_ctor {
            self.ctor_params_for(name)
        } else {
            None
        };
        items.push(ctx.build_item(texts, name, '2', demoted, ctor.as_deref(), false));
    }
    // Namespace-segment (MODULE) items for qualified prefixes, derived from
    // the FQNs collected above.
    if is_fqn_prefix {
        let ns_prefix_end = normalized.rfind('\\').map(|p| p + 1).unwrap_or(0);
        if ns_prefix_end > 0 {
            let raw_ns_prefix = &normalized[..ns_prefix_end];
            let expanded_ns = expand_alias_prefix(raw_ns_prefix, file_use_map);
            let ns_prefix_lower = expanded_ns
                .as_deref()
                .unwrap_or(raw_ns_prefix)
                .to_lowercase();
            let after_ns_lower = normalized[ns_prefix_end..].to_lowercase();
            let mut seen_segments: HashSet<String> = HashSet::new();
            for fqn in &seen_fqns {
                let fqn_lower = fqn.to_lowercase();
                if !fqn_lower.starts_with(&ns_prefix_lower) {
                    continue;
                }
                // NOTE(review): `rest` slices at the RAW prefix length even
                // when the (longer) alias expansion was what matched above,
                // which would cut mid-namespace — confirm intended.
                let rest = &fqn[ns_prefix_end..];
                if let Some(next_bs) = rest.find('\\') {
                    let segment_short = &rest[..next_bs];
                    if !after_ns_lower.is_empty()
                        && !segment_short.to_lowercase().starts_with(&after_ns_lower)
                    {
                        continue;
                    }
                    let segment = fqn[..ns_prefix_end + next_bs].to_string();
                    seen_segments.insert(segment);
                }
            }
            for segment in &seen_segments {
                let short = segment.rsplit('\\').next().unwrap_or(segment);
                // Prefer a path relative to the current namespace.
                let (label, insert_ns) = if let Some(ns) = effective_namespace {
                    let ns_with_slash = format!("{}\\", ns);
                    if let Some(relative) = segment.strip_prefix(&ns_with_slash) {
                        (relative.to_string(), relative.to_string())
                    } else if has_leading_backslash {
                        (segment.clone(), format!("\\{}", segment))
                    } else {
                        (segment.clone(), segment.clone())
                    }
                } else if has_leading_backslash {
                    (segment.clone(), format!("\\{}", segment))
                } else {
                    (segment.clone(), segment.clone())
                };
                let filter = if has_leading_backslash {
                    format!("\\{}", segment)
                } else {
                    segment.clone()
                };
                items.push(CompletionItem {
                    label,
                    kind: Some(CompletionItemKind::MODULE),
                    detail: Some(format!("namespace {}", segment)),
                    insert_text: Some(insert_ns.clone()),
                    filter_text: Some(filter),
                    // "0!…" sorts ahead of every class key ('a'/'b'/'c'…).
                    sort_text: Some(format!("0!_{}", short.to_lowercase())),
                    text_edit: fqn_replace_range.map(|range| {
                        CompletionTextEdit::Edit(TextEdit {
                            range,
                            new_text: insert_ns,
                        })
                    }),
                    ..CompletionItem::default()
                });
            }
        }
    }
    // Final ordering is carried entirely by sort_text; truncate past the cap.
    items.sort_by(|a, b| a.sort_text.cmp(&b.sort_text));
    let is_incomplete = items.len() > Self::MAX_CLASS_COMPLETIONS;
    if is_incomplete {
        items.truncate(Self::MAX_CLASS_COMPLETIONS);
    }
    (items, is_incomplete)
}
/// Heuristic: `fqn` names a namespace rather than a class. True only when no
/// index knows `fqn` itself as a class AND either some open file declares
/// `fqn` as its namespace or some indexed class lives under `fqn\…`.
fn is_likely_namespace_not_class(&self, fqn: &str) -> bool {
    // Known directly as a class anywhere → definitely not a bare namespace.
    if self.find_class_in_ast_map(fqn).is_some() {
        return false;
    }
    if self.class_index.read().contains_key(fqn) {
        return false;
    }
    if self.classmap.read().contains_key(fqn) {
        return false;
    }
    if self.stub_index.read().contains_key(fqn) {
        return false;
    }
    // A file declaring `namespace fqn;` proves it is a namespace.
    {
        let nmap = self.namespace_map.read();
        for ns in nmap.values().flatten() {
            if ns == fqn {
                return true;
            }
        }
    }
    // Any indexed class underneath `fqn\` also proves it.
    let prefix = format!("{}\\", fqn);
    if self
        .class_index
        .read()
        .keys()
        .any(|k| k.starts_with(&prefix))
    {
        return true;
    }
    if self.classmap.read().keys().any(|k| k.starts_with(&prefix)) {
        return true;
    }
    if self
        .stub_index
        .read()
        .keys()
        .any(|k| k.starts_with(&prefix))
    {
        return true;
    }
    false
}
/// `Some(fits)` when the class is known (via `load_stub_class`) and can be
/// judged against `context`; `None` when there is no metadata to judge by.
fn check_context_match(&self, class_name: &str, context: ClassNameContext) -> Option<bool> {
    // `Option::map` replaces the `if let … return Some(..); None` pattern.
    self.load_stub_class(class_name)
        .map(|cls| context.matches(&cls))
}
/// Whether `class_name` is known as a class-like symbol in any source: open
/// workspace ASTs, the stub index, the project class index, or the classmap.
fn is_known_class_like(&self, class_name: &str) -> bool {
    // Same check order as before; `||` short-circuits identically to the
    // early returns it replaces.
    self.find_class_in_ast_map(class_name).is_some()
        || self.stub_index.read().contains_key(class_name)
        || self.class_index.read().contains_key(class_name)
        || self.classmap.read().contains_key(class_name)
}
}
#[cfg(test)]
#[path = "class_completion_tests.rs"]
mod tests;