1use std::collections::{HashSet, VecDeque};
2use std::sync::Arc;
3
4use cairo_lang_defs::ids::{LanguageElementId, ModuleId};
5use cairo_lang_defs::plugin::PluginDiagnostic;
6use cairo_lang_filesystem::db::{ext_as_virtual, get_parent_and_mapping, translate_location};
7use cairo_lang_filesystem::ids::{CodeOrigin, FileId, FileLongId};
8use cairo_lang_parser::db::ParserGroup;
9use cairo_lang_syntax::node::SyntaxNode;
10use cairo_lang_syntax::node::helpers::QueryAttrs;
11use cairo_lang_syntax::node::kind::SyntaxKind;
12use cairo_lang_utils::ordered_hash_set::OrderedHashSet;
13use if_chain::if_chain;
14
15use crate::context::{
16 get_all_checking_functions, get_name_for_diagnostic_message, is_lint_enabled_by_default,
17};
18use crate::{CairoLintToolMetadata, CorelibContext};
19
20use crate::mappings::{get_origin_module_item_as_syntax_node, get_origin_syntax_node};
21
22mod db;
23use cairo_lang_defs::db::DefsGroup;
24pub use db::{LinterAnalysisDatabase, LinterAnalysisDatabaseBuilder};
25use salsa::Database;
26
/// Parameters controlling a single linter-diagnostics computation.
///
/// Used as a salsa query key, hence the `PartialEq + Eq + Hash + Clone` derives.
#[derive(PartialEq, Eq, Hash, Debug, Clone)]
pub struct LinterDiagnosticParams {
    // When `true`, plugin-generated (virtual/external) items are linted directly;
    // when `false`, diagnostics on generated items are mapped back to their
    // origin syntax nodes (see `linter_diagnostics`).
    pub only_generated_files: bool,
    // Per-lint enable/disable configuration, keyed by lint name; consulted as a
    // global allowlist when filtering diagnostics.
    pub tool_metadata: CairoLintToolMetadata,
}
32
/// Convenience trait exposing the linter's salsa-tracked queries as methods on
/// any [`Database`]. Each method is a thin delegation to the corresponding
/// free tracked function below, so results are memoized by salsa.
pub trait LinterGroup: Database {
    /// Returns the (cached) lint diagnostics for `module_id` under `params`.
    fn linter_diagnostics<'db>(
        &'db self,
        params: LinterDiagnosticParams,
        module_id: ModuleId<'db>,
    ) -> &'db Vec<PluginDiagnostic<'db>> {
        linter_diagnostics(self.as_dyn_database(), params, module_id)
    }

    /// Returns the resultant nodes generated from `node` in descendant files,
    /// or `None` if the file/module structure could not be resolved.
    fn node_resultants<'db>(&'db self, node: SyntaxNode<'db>) -> &'db Option<Vec<SyntaxNode<'db>>> {
        node_resultants(self.as_dyn_database(), node)
    }

    /// Returns `file` plus all transitively generated sub-files, together with
    /// the modules defined in them.
    fn file_and_subfiles_with_corresponding_modules<'db>(
        &'db self,
        file: FileId<'db>,
    ) -> &'db Option<(HashSet<FileId<'db>>, HashSet<ModuleId<'db>>)> {
        file_and_subfiles_with_corresponding_modules(self.as_dyn_database(), file)
    }

    /// Recursively finds the nodes generated from `node` inside
    /// `node_descendant_files` (see `find_generated_nodes` for the algorithm).
    fn find_generated_nodes<'db>(
        &'db self,
        node_descendant_files: Arc<[FileId<'db>]>,
        node: SyntaxNode<'db>,
    ) -> &'db OrderedHashSet<SyntaxNode<'db>> {
        find_generated_nodes(self.as_dyn_database(), node_descendant_files, node)
    }

    /// Returns the lazily built, cached corelib lookup context.
    fn corelib_context<'db>(&'db self) -> &'db CorelibContext<'db> {
        corelib_context(self.as_dyn_database())
    }
}
65
/// Blanket implementation: every salsa [`Database`] (sized or not) gets
/// [`LinterGroup`] for free, since all trait methods have default bodies.
impl<T: Database + ?Sized> LinterGroup for T {}
67
68#[tracing::instrument(skip_all, level = "trace")]
69#[salsa::tracked(returns(ref))]
70fn linter_diagnostics<'db>(
71 db: &'db dyn Database,
72 params: LinterDiagnosticParams,
73 module_id: ModuleId<'db>,
74) -> Vec<PluginDiagnostic<'db>> {
75 let mut diags: Vec<(PluginDiagnostic, FileId)> = Vec::new();
76 let Ok(module_data) = module_id.module_data(db) else {
77 return Vec::default();
78 };
79 for item in module_data.items(db) {
80 let mut item_diagnostics = Vec::new();
81 let module_file = db.module_main_file(module_id).unwrap();
82 let item_file = item.stable_location(db).file_id(db).long(db);
83 let is_generated_item =
84 matches!(item_file, FileLongId::Virtual(_) | FileLongId::External(_));
85
86 if is_generated_item && !params.only_generated_files {
87 let item_syntax_node = item.stable_location(db).stable_ptr().lookup(db);
88 let origin_node = get_origin_module_item_as_syntax_node(db, item);
89
90 if_chain! {
91 if let Some(node) = origin_node;
92 if let Some(resultants) = db.node_resultants(node);
93 if resultants.len() == 1;
98 if node.get_text_without_trivia(db).long(db).as_str().contains(item_syntax_node.get_text_without_trivia(db).long(db).as_str());
102 then {
103 let checking_functions = get_all_checking_functions();
104 for checking_function in checking_functions {
105 checking_function(db, item, &mut item_diagnostics);
106 }
107
108 diags.extend(item_diagnostics.into_iter().filter_map(|mut diag| {
109 let ptr = diag.stable_ptr;
110 diag.stable_ptr = get_origin_syntax_node(db, &ptr)?.stable_ptr(db);
111 Some((diag, module_file))}));
112 }
113 }
114 } else if !is_generated_item || params.only_generated_files {
115 let checking_functions = get_all_checking_functions();
116 for checking_function in checking_functions {
117 checking_function(db, item, &mut item_diagnostics);
118 }
119
120 diags.extend(item_diagnostics.into_iter().filter_map(|diag| {
121 get_origin_syntax_node(db, &diag.stable_ptr).map(|_| (diag, module_file))
123 }));
124 }
125 }
126
127 diags
128 .into_iter()
129 .filter(|diag: &(PluginDiagnostic, FileId)| {
130 let diagnostic = &diag.0;
131 let node = diagnostic.stable_ptr.lookup(db);
132 let allowed_name = get_name_for_diagnostic_message(&diagnostic.message).unwrap();
133 let default_allowed = is_lint_enabled_by_default(&diagnostic.message).unwrap();
134 let is_rule_allowed_globally = *params
135 .tool_metadata
136 .get(allowed_name)
137 .unwrap_or(&default_allowed);
138 !node_has_ascendants_with_allow_name_attr(db, node, allowed_name)
139 && is_rule_allowed_globally
140 })
141 .map(|diag| diag.0)
142 .collect()
143}
144
145#[tracing::instrument(level = "trace", skip(db))]
146#[salsa::tracked(returns(ref))]
147fn node_resultants<'db>(
148 db: &'db dyn Database,
149 node: SyntaxNode<'db>,
150) -> Option<Vec<SyntaxNode<'db>>> {
151 let main_file = node.stable_ptr(db).file_id(db);
152
153 let (files, _) = db
154 .file_and_subfiles_with_corresponding_modules(main_file)
155 .as_ref()?;
156
157 let files: Arc<[FileId]> = files
158 .iter()
159 .filter(|file| **file != main_file)
160 .cloned()
161 .collect();
162 let resultants = db.find_generated_nodes(files, node);
163
164 Some(resultants.into_iter().cloned().collect())
165}
166
167#[tracing::instrument(level = "trace", skip(db))]
168#[salsa::tracked(returns(ref))]
169pub fn file_and_subfiles_with_corresponding_modules<'db>(
170 db: &'db dyn Database,
171 file: FileId<'db>,
172) -> Option<(HashSet<FileId<'db>>, HashSet<ModuleId<'db>>)> {
173 let mut modules: HashSet<_> = db.file_modules(file).ok()?.iter().copied().collect();
174 let mut files = HashSet::from([file]);
175 let mut modules_queue: VecDeque<_> = modules.iter().copied().collect();
191 while let Some(module_id) = modules_queue.pop_front() {
192 for file_id in db.module_files(module_id).ok()?.iter() {
193 if files.insert(*file_id) {
194 for module_id in db.file_modules(*file_id).ok()?.iter() {
195 if modules.insert(*module_id) {
196 modules_queue.push_back(*module_id);
197 }
198 }
199 }
200 }
201 }
202 Some((files, modules))
203}
204
/// Recursively finds the nodes that `node` expands to inside the generated
/// files listed in `node_descendant_files`.
///
/// For each listed file whose parent mapping points back at `node`'s file, the
/// code mappings overlapping `node`'s span are used to locate generated nodes,
/// which are then expanded recursively through further generated files. If no
/// generated file *replaces* the original item, `node` itself is also part of
/// the result.
#[tracing::instrument(level = "trace", skip(db))]
#[salsa::tracked(returns(ref))]
pub fn find_generated_nodes<'db>(
    db: &'db dyn Database,
    node_descendant_files: Arc<[FileId<'db>]>,
    node: SyntaxNode<'db>,
) -> OrderedHashSet<SyntaxNode<'db>> {
    let start_file = node.stable_ptr(db).file_id(db);

    let mut result = OrderedHashSet::default();

    // Set to true once some generated file both produced nodes for `node` and
    // is marked as removing the original item.
    let mut is_replaced = false;

    for file in node_descendant_files.iter().cloned() {
        // Only consider files generated directly from `node`'s own file.
        let Some((parent, mappings)) = get_parent_and_mapping(db, file) else {
            continue;
        };

        if parent != start_file {
            continue;
        }

        let Ok(file_syntax) = db.file_syntax(file) else {
            continue;
        };

        // Keep only the mappings whose origin relates to `node`'s span:
        // call-site origins are always kept; start/span origins must match or
        // fall inside `node`'s span.
        let mappings: Vec<_> = mappings
            .iter()
            .filter(|mapping| match mapping.origin {
                CodeOrigin::CallSite(_) => true,
                CodeOrigin::Start(start) => start == node.span(db).start,
                CodeOrigin::Span(span) => node.span(db).contains(span),
            })
            .cloned()
            .collect();
        if mappings.is_empty() {
            continue;
        }

        // Whether this generated file removes the original item it was
        // produced from (virtual-file metadata).
        let is_replacing_og_item = match file.long(db) {
            FileLongId::Virtual(vfs) => vfs.original_item_removed,
            FileLongId::External(id) => ext_as_virtual(db, *id).original_item_removed,
            // Only virtual/external files can appear as generated descendants.
            _ => unreachable!(),
        };

        let mut new_nodes: OrderedHashSet<_> = Default::default();

        for mapping in &mappings {
            for token in file_syntax.lookup_offset(db, mapping.span.start).tokens(db) {
                // The EOF terminal carries no generated content.
                if token.kind(db) == SyntaxKind::TerminalEndOfFile {
                    continue;
                }
                // Walk from the token upwards, pairing each ancestor with its
                // translated span in the parent file, and stop as soon as an
                // ancestor no longer maps inside `node`'s span.
                let nodes: Vec<_> = token
                    .ancestors_with_self(db)
                    .map_while(|new_node| {
                        translate_location(&mappings, new_node.span(db))
                            .map(|span_in_parent| (new_node, span_in_parent))
                    })
                    .take_while(|(_, span_in_parent)| node.span(db).contains(*span_in_parent))
                    .collect();

                if let Some((last_node, _)) = nodes.last().cloned() {
                    // Among the outermost ancestors sharing the widest span,
                    // pick the deepest one (closest to the token).
                    let (new_node, _) = nodes
                        .into_iter()
                        .rev()
                        .take_while(|(node, _)| node.span(db) == last_node.span(db))
                        .last()
                        .unwrap();

                    new_nodes.insert(new_node);
                }
            }
        }

        if !new_nodes.is_empty() {
            is_replaced = is_replaced || is_replacing_og_item;
        }

        // Recurse: each generated node may itself be expanded in further
        // generated files.
        for new_node in new_nodes {
            result.extend(
                find_generated_nodes(db, Arc::clone(&node_descendant_files), new_node)
                    .into_iter()
                    .cloned(),
            );
        }
    }

    // If nothing replaced the original item, the node itself is a resultant.
    if !is_replaced {
        result.insert(node);
    }

    result
}
303
/// Builds the [`CorelibContext`] once and caches it via salsa; subsequent
/// callers get a reference to the memoized value.
#[salsa::tracked(returns(ref))]
fn corelib_context<'db>(db: &'db dyn Database) -> CorelibContext<'db> {
    CorelibContext::new(db)
}
308
309#[tracing::instrument(skip_all, level = "trace")]
310fn node_has_ascendants_with_allow_name_attr<'db>(
311 db: &'db dyn Database,
312 node: SyntaxNode<'db>,
313 allowed_name: &'static str,
314) -> bool {
315 for node in node.ancestors_with_self(db) {
316 if node.has_attr_with_arg(db, "allow", allowed_name) {
317 return true;
318 }
319 }
320 false
321}