use std::sync::Arc;
use mir_codebase::storage::{Location, TemplateParam};
use mir_issues::Issue;
use mir_types::Union;
use rustc_hash::FxHashMap;
use super::*;
/// Declaration-kind flags for a class-like symbol (class, interface, trait, or enum).
#[derive(Debug, Clone, Copy)]
pub struct ClassKind {
    /// Set when the symbol was declared as an interface.
    pub is_interface: bool,
    /// Set when the symbol was declared as a trait.
    pub is_trait: bool,
    /// Set when the symbol was declared as an enum.
    pub is_enum: bool,
    /// Set when the class was declared abstract.
    pub is_abstract: bool,
}
/// Looks up the class node for `fqcn` and reports its declaration kind.
///
/// Returns `None` when the symbol is unknown or its node is inactive.
pub fn class_kind_via_db(db: &dyn MirDatabase, fqcn: &str) -> Option<ClassKind> {
    let node = db.lookup_class_node(fqcn)?;
    if !node.active(db) {
        return None;
    }
    Some(ClassKind {
        is_interface: node.is_interface(db),
        is_trait: node.is_trait(db),
        is_enum: node.is_enum(db),
        is_abstract: node.is_abstract(db),
    })
}
/// True when a class-like symbol named `fqcn` exists and is active.
pub fn type_exists_via_db(db: &dyn MirDatabase, fqcn: &str) -> bool {
    match db.lookup_class_node(fqcn) {
        Some(node) => node.active(db),
        None => false,
    }
}
/// True when a function named `fqn` exists and is active.
#[allow(dead_code)]
pub fn function_exists_via_db(db: &dyn MirDatabase, fqn: &str) -> bool {
    matches!(db.lookup_function_node(fqn), Some(n) if n.active(db))
}
/// True when a global constant named `fqn` exists and is active.
#[allow(dead_code)]
pub fn constant_exists_via_db(db: &dyn MirDatabase, fqn: &str) -> bool {
    matches!(db.lookup_global_constant_node(fqn), Some(n) if n.active(db))
}
/// Resolves a possibly-relative PHP name to a fully-qualified name using the
/// importing file's `use` statements and namespace.
///
/// Resolution order mirrors PHP's rules:
/// 1. A leading `\` means the name is already fully qualified; strip it.
/// 2. `self` / `static` / `parent` are returned untouched.
/// 3. Qualified names (`Foo\Bar`): try the import alias of the first segment,
///    then the name as-is, then the current-namespace prefix, then fall back
///    to the bare name.
/// 4. Unqualified names: try imports (exact match, then case-insensitive),
///    then the current namespace, then the bare name.
pub fn resolve_name_via_db(db: &dyn MirDatabase, file: &str, name: &str) -> String {
    if name.starts_with('\\') {
        return name.trim_start_matches('\\').to_string();
    }
    let lower = name.to_ascii_lowercase();
    if matches!(lower.as_str(), "self" | "static" | "parent") {
        return name.to_string();
    }
    if name.contains('\\') {
        // The first segment of a qualified relative name may be an import
        // alias. (The `!name.starts_with('\\')` guard that used to wrap this
        // was dead: the early return above already excluded leading `\`.)
        if let Some((first, rest)) = name.split_once('\\') {
            if let Some(base) = db.file_imports(file).get(first) {
                return format!("{base}\\{rest}");
            }
        }
        if type_exists_via_db(db, name) {
            return name.to_string();
        }
        if let Some(ns) = db.file_namespace(file) {
            let qualified = format!("{}\\{}", ns, name);
            if type_exists_via_db(db, &qualified) {
                return qualified;
            }
        }
        return name.to_string();
    }
    let imports = db.file_imports(file);
    if let Some(fqcn) = imports.get(name) {
        return fqcn.clone();
    }
    // Fallback: PHP import aliases are case-insensitive.
    if let Some((_, fqcn)) = imports
        .iter()
        .find(|(alias, _)| alias.eq_ignore_ascii_case(name))
    {
        return fqcn.clone();
    }
    if let Some(ns) = db.file_namespace(file) {
        return format!("{}\\{}", ns, name);
    }
    name.to_string()
}
/// Returns the declared template parameters of `fqcn`, or `None` when the
/// class is unknown or inactive.
pub fn class_template_params_via_db(
    db: &dyn MirDatabase,
    fqcn: &str,
) -> Option<Arc<[TemplateParam]>> {
    db.lookup_class_node(fqcn)
        .filter(|n| n.active(db))
        .map(|node| node.template_params(db))
}
/// Walks the `extends` chain of `fqcn` and collects template-parameter
/// bindings supplied via explicit `extends` type arguments.
///
/// The most-derived binding for a given parameter name wins; cycles in the
/// parent chain terminate the walk.
pub fn inherited_template_bindings_via_db(
    db: &dyn MirDatabase,
    fqcn: &str,
) -> std::collections::HashMap<Arc<str>, Union> {
    let mut bindings: std::collections::HashMap<Arc<str>, Union> =
        std::collections::HashMap::new();
    let mut seen: rustc_hash::FxHashSet<Arc<str>> = rustc_hash::FxHashSet::default();
    let mut cursor: Arc<str> = Arc::from(fqcn);
    // `insert` returning false means we revisited a class: a cycle — stop.
    while seen.insert(cursor.clone()) {
        let Some(node) = db
            .lookup_class_node(cursor.as_ref())
            .filter(|n| n.active(db))
        else {
            break;
        };
        let Some(parent) = node.parent(db) else {
            break;
        };
        let args = node.extends_type_args(db);
        if !args.is_empty() {
            if let Some(params) = class_template_params_via_db(db, parent.as_ref()) {
                // Pair parameters with arguments positionally; extra
                // parameters or arguments are ignored by `zip`.
                for (param, ty) in params.iter().zip(args.iter()) {
                    bindings
                        .entry(param.name.clone())
                        .or_insert_with(|| ty.clone());
                }
            }
        }
        cursor = parent;
    }
    bindings
}
/// True when any ancestor of `fqcn` refers to a type that does not exist
/// (or is inactive) in the codebase.
pub fn has_unknown_ancestor_via_db(db: &dyn MirDatabase, fqcn: &str) -> bool {
    match db.lookup_class_node(fqcn).filter(|n| n.active(db)) {
        Some(node) => class_ancestors(db, node)
            .0
            .iter()
            .any(|ancestor| !type_exists_via_db(db, ancestor)),
        // Unknown class: report no unknown ancestors rather than guessing.
        None => false,
    }
}
/// True when `fqcn` — or one of its non-interface ancestors or used traits —
/// supplies a non-abstract body for `method_name` (case-insensitive).
pub fn method_is_concretely_implemented(
    db: &dyn MirDatabase,
    fqcn: &str,
    method_name: &str,
) -> bool {
    let lower = method_name.to_lowercase();
    let Some(self_node) = db.lookup_class_node(fqcn).filter(|n| n.active(db)) else {
        return false;
    };
    // Interfaces never carry concrete implementations.
    if self_node.is_interface(db) {
        return false;
    }
    // Does `class` itself declare a non-abstract method with this name?
    let concrete_on = |class: &str| {
        db.lookup_method_node(class, &lower)
            .filter(|m| m.active(db))
            .is_some_and(|m| !m.is_abstract(db))
    };
    if concrete_on(fqcn) {
        return true;
    }
    let mut visited_traits: rustc_hash::FxHashSet<String> = rustc_hash::FxHashSet::default();
    if self_node
        .traits(db)
        .iter()
        .any(|t| trait_provides_method(db, t.as_ref(), &lower, &mut visited_traits))
    {
        return true;
    }
    for ancestor in class_ancestors(db, self_node).0.iter() {
        let Some(anc) = db
            .lookup_class_node(ancestor.as_ref())
            .filter(|n| n.active(db))
        else {
            continue;
        };
        if anc.is_interface(db) {
            continue;
        }
        if anc.is_trait(db) {
            // A trait in the ancestor list provides the method directly
            // (or through the traits it uses itself).
            if trait_provides_method(db, ancestor.as_ref(), &lower, &mut visited_traits) {
                return true;
            }
        } else {
            if concrete_on(ancestor.as_ref()) {
                return true;
            }
            for t in anc.traits(db).iter() {
                if trait_provides_method(db, t.as_ref(), &lower, &mut visited_traits) {
                    return true;
                }
            }
        }
    }
    false
}
/// Recursively checks whether `trait_fqcn` (or any trait it uses) defines a
/// non-abstract method named `method_lower`. `visited` guards against
/// trait-use cycles and repeat work.
fn trait_provides_method(
    db: &dyn MirDatabase,
    trait_fqcn: &str,
    method_lower: &str,
    visited: &mut rustc_hash::FxHashSet<String>,
) -> bool {
    if !visited.insert(trait_fqcn.to_string()) {
        // Already examined on this search — nothing new to find.
        return false;
    }
    let concrete_here = db
        .lookup_method_node(trait_fqcn, method_lower)
        .filter(|m| m.active(db))
        .is_some_and(|m| !m.is_abstract(db));
    if concrete_here {
        return true;
    }
    let Some(node) = db.lookup_class_node(trait_fqcn).filter(|n| n.active(db)) else {
        return false;
    };
    node.is_trait(db)
        && node
            .traits(db)
            .iter()
            .any(|t| trait_provides_method(db, t.as_ref(), method_lower, visited))
}
/// Finds the method node for `method_name` anywhere in `fqcn`'s resolution
/// chain (the class itself, its mixins, its traits, and its ancestors).
/// Method lookup is case-insensitive.
pub fn lookup_method_in_chain(
    db: &dyn MirDatabase,
    fqcn: &str,
    method_name: &str,
) -> Option<MethodNode> {
    let lower = method_name.to_lowercase();
    let mut seen_mixins: rustc_hash::FxHashSet<String> = rustc_hash::FxHashSet::default();
    lookup_method_in_chain_inner(db, fqcn, &lower, &mut seen_mixins)
}
/// Recursive worker for `lookup_method_in_chain`.
///
/// Search order (first active hit wins):
/// 1. a method declared directly on the class,
/// 2. the class's mixins (recursively),
/// 3. the class's own traits (recursively through trait uses),
/// 4. each ancestor: its own methods, then — if the ancestor is itself a
///    trait — its trait chain, otherwise its traits and then its mixins.
///
/// `visited_mixins` is threaded through the recursion so each mixin is
/// descended into at most once; a fresh trait-visited set is created per
/// call level for the trait searches.
fn lookup_method_in_chain_inner(
    db: &dyn MirDatabase,
    fqcn: &str,
    lower: &str,
    visited_mixins: &mut rustc_hash::FxHashSet<String>,
) -> Option<MethodNode> {
    let self_node = db.lookup_class_node(fqcn).filter(|n| n.active(db))?;
    // 1) Declared directly on this class.
    if let Some(node) = db.lookup_method_node(fqcn, lower).filter(|n| n.active(db)) {
        return Some(node);
    }
    // 2) Mixins, searched depth-first; `insert` returning false means this
    //    mixin was already explored somewhere in the recursion.
    for m in self_node.mixins(db).iter() {
        if visited_mixins.insert(m.to_string()) {
            if let Some(node) = lookup_method_in_chain_inner(db, m.as_ref(), lower, visited_mixins)
            {
                return Some(node);
            }
        }
    }
    // 3) The class's own traits.
    let mut visited_traits: rustc_hash::FxHashSet<String> = rustc_hash::FxHashSet::default();
    for t in self_node.traits(db).iter() {
        if let Some(node) = trait_provides_method_node(db, t.as_ref(), lower, &mut visited_traits) {
            return Some(node);
        }
    }
    // 4) Ancestors, in `class_ancestors` order.
    for ancestor in class_ancestors(db, self_node).0.iter() {
        if let Some(node) = db
            .lookup_method_node(ancestor.as_ref(), lower)
            .filter(|n| n.active(db))
        {
            return Some(node);
        }
        if let Some(anc_node) = db
            .lookup_class_node(ancestor.as_ref())
            .filter(|n| n.active(db))
        {
            if anc_node.is_trait(db) {
                // The ancestor itself is a trait: search its trait chain.
                if let Some(node) =
                    trait_provides_method_node(db, ancestor.as_ref(), lower, &mut visited_traits)
                {
                    return Some(node);
                }
            } else {
                // Regular ancestor class: its traits first, then its mixins.
                for t in anc_node.traits(db).iter() {
                    if let Some(node) =
                        trait_provides_method_node(db, t.as_ref(), lower, &mut visited_traits)
                    {
                        return Some(node);
                    }
                }
                for m in anc_node.mixins(db).iter() {
                    if visited_mixins.insert(m.to_string()) {
                        if let Some(node) =
                            lookup_method_in_chain_inner(db, m.as_ref(), lower, visited_mixins)
                        {
                            return Some(node);
                        }
                    }
                }
            }
        }
    }
    None
}
/// Node-returning variant of `trait_provides_method`: finds the active
/// method node named `method_lower` in `trait_fqcn` or any trait it uses.
fn trait_provides_method_node(
    db: &dyn MirDatabase,
    trait_fqcn: &str,
    method_lower: &str,
    visited: &mut rustc_hash::FxHashSet<String>,
) -> Option<MethodNode> {
    if !visited.insert(trait_fqcn.to_string()) {
        // Already examined (or a use-cycle) — nothing new here.
        return None;
    }
    let direct = db
        .lookup_method_node(trait_fqcn, method_lower)
        .filter(|n| n.active(db));
    if direct.is_some() {
        return direct;
    }
    let class = db.lookup_class_node(trait_fqcn).filter(|n| n.active(db))?;
    if !class.is_trait(db) {
        return None;
    }
    class
        .traits(db)
        .iter()
        .find_map(|t| trait_provides_method_node(db, t.as_ref(), method_lower, visited))
}
/// Like `trait_provides_method`, but abstract declarations also count:
/// checks whether `trait_fqcn` or any trait it uses declares `method_lower`.
#[allow(dead_code)]
fn trait_declares_method(
    db: &dyn MirDatabase,
    trait_fqcn: &str,
    method_lower: &str,
    visited: &mut rustc_hash::FxHashSet<String>,
) -> bool {
    if !visited.insert(trait_fqcn.to_string()) {
        // Already examined (or a use-cycle); stop the recursion.
        return false;
    }
    let declared_here = db
        .lookup_method_node(trait_fqcn, method_lower)
        .is_some_and(|m| m.active(db));
    if declared_here {
        return true;
    }
    let Some(node) = db.lookup_class_node(trait_fqcn).filter(|n| n.active(db)) else {
        return false;
    };
    node.is_trait(db)
        && node
            .traits(db)
            .iter()
            .any(|t| trait_declares_method(db, t.as_ref(), method_lower, visited))
}
/// True when `method_name` is declared anywhere in `fqcn`'s hierarchy —
/// directly, via a trait, or via an ancestor. Abstract declarations count;
/// lookup is case-insensitive.
#[allow(dead_code)]
pub fn method_exists_via_db(db: &dyn MirDatabase, fqcn: &str, method_name: &str) -> bool {
    let lower = method_name.to_lowercase();
    let Some(self_node) = db.lookup_class_node(fqcn).filter(|n| n.active(db)) else {
        return false;
    };
    // Does `class` declare a method with this (lowercased) name at all?
    let declared_on = |class: &str| {
        db.lookup_method_node(class, &lower)
            .is_some_and(|m| m.active(db))
    };
    if declared_on(fqcn) {
        return true;
    }
    let mut visited: rustc_hash::FxHashSet<String> = rustc_hash::FxHashSet::default();
    if self_node
        .traits(db)
        .iter()
        .any(|t| trait_declares_method(db, t.as_ref(), &lower, &mut visited))
    {
        return true;
    }
    for ancestor in class_ancestors(db, self_node).0.iter() {
        if declared_on(ancestor.as_ref()) {
            return true;
        }
        let Some(anc) = db
            .lookup_class_node(ancestor.as_ref())
            .filter(|n| n.active(db))
        else {
            continue;
        };
        let found = if anc.is_trait(db) {
            trait_declares_method(db, ancestor.as_ref(), &lower, &mut visited)
        } else {
            anc.traits(db)
                .iter()
                .any(|t| trait_declares_method(db, t.as_ref(), &lower, &mut visited))
        };
        if found {
            return true;
        }
    }
    false
}
/// Finds the property node for `prop_name` anywhere in `fqcn`'s chain
/// (self, mixins, ancestors). The name is looked up as given — no case
/// folding is applied here.
pub fn lookup_property_in_chain(
    db: &dyn MirDatabase,
    fqcn: &str,
    prop_name: &str,
) -> Option<PropertyNode> {
    let mut seen_mixins: rustc_hash::FxHashSet<String> = rustc_hash::FxHashSet::default();
    lookup_property_in_chain_inner(db, fqcn, prop_name, &mut seen_mixins)
}
/// Recursive worker for `lookup_property_in_chain`: checks the class itself,
/// then its mixins, then each ancestor followed by that ancestor's mixins.
fn lookup_property_in_chain_inner(
    db: &dyn MirDatabase,
    fqcn: &str,
    prop_name: &str,
    visited_mixins: &mut rustc_hash::FxHashSet<String>,
) -> Option<PropertyNode> {
    let self_node = db.lookup_class_node(fqcn).filter(|n| n.active(db))?;
    let own = db
        .lookup_property_node(fqcn, prop_name)
        .filter(|n| n.active(db));
    if own.is_some() {
        return own;
    }
    for mixin in self_node.mixins(db).iter() {
        // Each mixin is descended into at most once across the whole search.
        if !visited_mixins.insert(mixin.to_string()) {
            continue;
        }
        let found = lookup_property_in_chain_inner(db, mixin.as_ref(), prop_name, visited_mixins);
        if found.is_some() {
            return found;
        }
    }
    for ancestor in class_ancestors(db, self_node).0.iter() {
        let inherited = db
            .lookup_property_node(ancestor.as_ref(), prop_name)
            .filter(|n| n.active(db));
        if inherited.is_some() {
            return inherited;
        }
        if let Some(anc) = db
            .lookup_class_node(ancestor.as_ref())
            .filter(|n| n.active(db))
        {
            for mixin in anc.mixins(db).iter() {
                if !visited_mixins.insert(mixin.to_string()) {
                    continue;
                }
                let found =
                    lookup_property_in_chain_inner(db, mixin.as_ref(), prop_name, visited_mixins);
                if found.is_some() {
                    return found;
                }
            }
        }
    }
    None
}
/// True when `fqcn` or any of its ancestors declares an active class
/// constant named `const_name`.
pub fn class_constant_exists_in_chain(db: &dyn MirDatabase, fqcn: &str, const_name: &str) -> bool {
    let has_const = |class: &str| {
        db.lookup_class_constant_node(class, const_name)
            .is_some_and(|n| n.active(db))
    };
    if has_const(fqcn) {
        return true;
    }
    match db.lookup_class_node(fqcn).filter(|n| n.active(db)) {
        Some(node) => class_ancestors(db, node)
            .0
            .iter()
            .any(|ancestor| has_const(ancestor.as_ref())),
        None => false,
    }
}
/// Resolves the source location of `member_name` on `fqcn`, trying methods,
/// then properties, then class constants (own, then inherited).
///
/// A member node without a recorded location does not stop the search — the
/// next category / ancestor is still consulted.
pub fn member_location_via_db(
    db: &dyn MirDatabase,
    fqcn: &str,
    member_name: &str,
) -> Option<Location> {
    if let Some(loc) = lookup_method_in_chain(db, fqcn, member_name).and_then(|n| n.location(db)) {
        return Some(loc);
    }
    if let Some(loc) = lookup_property_in_chain(db, fqcn, member_name).and_then(|n| n.location(db))
    {
        return Some(loc);
    }
    let own_const = db
        .lookup_class_constant_node(fqcn, member_name)
        .filter(|n| n.active(db))
        .and_then(|n| n.location(db));
    if own_const.is_some() {
        return own_const;
    }
    let class_node = db.lookup_class_node(fqcn).filter(|n| n.active(db))?;
    class_ancestors(db, class_node)
        .0
        .iter()
        .find_map(|ancestor| {
            db.lookup_class_constant_node(ancestor.as_ref(), member_name)
                .filter(|n| n.active(db))
                .and_then(|n| n.location(db))
        })
}
/// True when `child` is `ancestor` itself, extends it, or implements it.
///
/// Enums are handled specially: they match their declared interfaces plus
/// the built-in `UnitEnum` marker (and `BackedEnum` when backed).
pub fn extends_or_implements_via_db(db: &dyn MirDatabase, child: &str, ancestor: &str) -> bool {
    if child == ancestor {
        return true;
    }
    let Some(node) = db.lookup_class_node(child).filter(|n| n.active(db)) else {
        return false;
    };
    if node.is_enum(db) {
        return node.interfaces(db).iter().any(|i| i.as_ref() == ancestor)
            || matches!(ancestor, "UnitEnum" | "\\UnitEnum")
            || (matches!(ancestor, "BackedEnum" | "\\BackedEnum") && node.is_backed_enum(db));
    }
    class_ancestors(db, node)
        .0
        .iter()
        .any(|p| p.as_ref() == ancestor)
}
/// Parses `file` and collects its symbol definitions plus any parse- and
/// collection-time issues. Not cached — see `collect_file_definitions` for
/// the salsa-tracked wrapper.
pub fn collect_file_definitions_uncached(
    db: &dyn MirDatabase,
    file: SourceFile,
) -> FileDefinitions {
    let path = file.path(db);
    let text = file.text(db);
    let arena = crate::arena::create_parse_arena(text.len());
    let parsed = php_rs_parser::parse(&arena, &text);
    // Surface every parser error as a ParseError issue pinned to line 1.
    let mut all_issues: Vec<Issue> = Vec::with_capacity(parsed.errors.len());
    for err in parsed.errors.iter() {
        all_issues.push(Issue::new(
            mir_issues::IssueKind::ParseError {
                message: err.to_string(),
            },
            mir_issues::Location {
                file: path.clone(),
                line: 1,
                line_end: 1,
                col_start: 0,
                col_end: 0,
            },
        ));
    }
    let collector =
        crate::collector::DefinitionCollector::new_for_slice(path, &text, &parsed.source_map);
    let (slice, collector_issues) = collector.collect_slice(&parsed.program);
    all_issues.extend(collector_issues);
    FileDefinitions {
        slice: Arc::new(slice),
        issues: Arc::new(all_issues),
    }
}
/// Salsa-cached wrapper around `collect_file_definitions_uncached`.
#[salsa::tracked]
pub fn collect_file_definitions(db: &dyn MirDatabase, file: SourceFile) -> FileDefinitions {
    collect_file_definitions_uncached(db, file)
}
/// Map from (class FQCN, lowercased method name) to the method's inferred return type.
type MethodInferMap = FxHashMap<(Arc<str>, Arc<str>), Arc<Union>>;
/// Inferred return types for the functions and methods defined in one file.
#[derive(Clone, Debug)]
pub struct InferredFileTypes {
    /// Function FQN -> inferred return type.
    pub functions: Arc<FxHashMap<Arc<str>, Arc<Union>>>,
    /// (class FQCN, lowercased method name) -> inferred return type.
    pub methods: Arc<MethodInferMap>,
}
impl InferredFileTypes {
    /// Returns a value with no inferred types (used e.g. when the file fails to parse).
    pub fn empty() -> Self {
        Self {
            functions: Arc::new(FxHashMap::default()),
            methods: Arc::new(MethodInferMap::default()),
        }
    }
}
impl PartialEq for InferredFileTypes {
    /// Structural equality with two fast paths: pointer-equal `Arc`s are
    /// equal without inspecting contents, and differing lengths are unequal
    /// without comparing entries.
    fn eq(&self, other: &Self) -> bool {
        let same_arcs = Arc::ptr_eq(&self.functions, &other.functions)
            && Arc::ptr_eq(&self.methods, &other.methods);
        if same_arcs {
            return true;
        }
        if self.functions.len() != other.functions.len()
            || self.methods.len() != other.methods.len()
        {
            return false;
        }
        // Lengths match, so one-directional containment implies equality.
        self.functions
            .iter()
            .all(|(k, v)| other.functions.get(k).is_some_and(|ov| ov == v))
            && self
                .methods
                .iter()
                .all(|(k, v)| other.methods.get(k).is_some_and(|ov| ov == v))
    }
}
// NOTE(review): `salsa::Update` lets salsa patch a cached value in place and
// report whether it actually changed, cutting off downstream recomputation
// when it did not.
unsafe impl salsa::Update for InferredFileTypes {
    /// Overwrites `*old_ptr` with `new_val`; returns `true` iff the stored
    /// value changed according to `PartialEq`.
    unsafe fn maybe_update(old_ptr: *mut Self, new_val: Self) -> bool {
        // SAFETY: per the `salsa::Update` contract the caller passes a valid,
        // exclusively-accessible pointer to the old value — TODO confirm
        // against the salsa version in use.
        let old = unsafe { &mut *old_ptr };
        if *old == new_val {
            return false;
        }
        *old = new_val;
        true
    }
}
/// Runs inference-only body analysis on `file` and returns the inferred
/// return types for its functions and methods.
///
/// Returns `InferredFileTypes::empty()` when the file has parse errors,
/// since a partial AST would yield misleading types.
#[salsa::tracked]
pub fn infer_file_return_types(db: &dyn MirDatabase, file: SourceFile) -> InferredFileTypes {
    use std::str::FromStr as _;
    let path = file.path(db);
    let text = file.text(db);
    let php_version = crate::php_version::PhpVersion::from_str(db.php_version_str().as_ref())
        .unwrap_or(crate::php_version::PhpVersion::LATEST);
    let arena = crate::arena::create_parse_arena(text.len());
    let parsed = php_rs_parser::parse(&arena, text.as_ref());
    if !parsed.errors.is_empty() {
        return InferredFileTypes::empty();
    }
    let driver = crate::pass2::Pass2Driver::new_inference_only(db, php_version);
    driver.analyze_bodies(&parsed.program, path, text.as_ref(), &parsed.source_map);
    let inferred = driver.take_inferred_types();
    let mut functions: FxHashMap<Arc<str>, Arc<Union>> =
        FxHashMap::with_capacity_and_hasher(inferred.functions.len(), Default::default());
    for (fqn, ty) in inferred.functions {
        functions.insert(fqn, Arc::new(ty));
    }
    let mut methods: FxHashMap<(Arc<str>, Arc<str>), Arc<Union>> =
        FxHashMap::with_capacity_and_hasher(inferred.methods.len(), Default::default());
    for (fqcn, name, ty) in inferred.methods {
        // Method keys are stored lowercased; reuse the existing Arc when the
        // name has no uppercase characters to avoid an allocation.
        let key: Arc<str> = if name.chars().any(char::is_uppercase) {
            Arc::from(name.to_lowercase().as_str())
        } else {
            name
        };
        methods.insert((fqcn, key), Arc::new(ty));
    }
    InferredFileTypes {
        functions: Arc::new(functions),
        methods: Arc::new(methods),
    }
}
/// Analyzes each file under `php_version` and gathers every issue the
/// analysis accumulated, in file order.
#[allow(dead_code)]
pub(crate) fn collect_accumulated_issues(
    db: &dyn MirDatabase,
    files: &[(Arc<str>, SourceFile)],
    php_version: &str,
) -> Vec<Issue> {
    let input = AnalyzeFileInput::new(db, Arc::from(php_version));
    let mut issues = Vec::new();
    for (_path, file) in files {
        // Run the analysis so its accumulator is populated, then drain it.
        analyze_file(db, *file, input);
        for acc in analyze_file::accumulated(db, *file, input) {
            issues.push(acc.0.clone());
        }
    }
    issues
}