use crate::{
    GrimoireCssError, Spell,
    config::{ConfigFs, ConfigInMemory, ConfigInMemoryEntry},
    core::{Filesystem, parser::Parser},
};
use serde::Serialize;
use std::{
    collections::{HashMap, HashSet},
    fs,
    path::{Path, PathBuf},
};

use glob::glob;

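/// Result of expanding a single class token: the spells it resolves to and the CSS compiled from it.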
#[derive(Debug, Clone, Serialize)]
pub struct ExplainClassTokenResult {
    pub class_token: String,
    pub expanded_spells: Vec<String>,
    pub css: String,
}

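/// Read-only analysis entry points over a Grimoire CSS project: config summaries, indexing, reference lookups, and lint checks.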
pub struct Analyzer;

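/// A class-attribute group recorded for DRY (duplication) analysis, identified by its file position and normalized tokens.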
#[derive(Debug, Clone, Serialize)]
pub struct DryOccurrence {
    pub file: String,
    pub line: usize,
    pub column: usize,
    pub tokens: Vec<String>,
}

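/// A repeated token set found across several class occurrences, with the occurrences that support it.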
#[derive(Debug, Clone, Serialize)]
pub struct DryCandidate {
    pub tokens: Vec<String>,
    pub support: usize,
    pub occurrences: Vec<DryOccurrence>,
}

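/// Aggregate output of [`Analyzer::dry_candidates`].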
#[derive(Debug, Clone, Serialize)]
pub struct DryCandidatesResult {
    pub files_scanned: usize,
    pub class_occurrences: usize,
    pub candidates: Vec<DryCandidate>,
}

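/// Location of a single class token inside a scanned source file: byte span plus 1-based line and column.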
#[derive(Debug, Clone, Serialize)]
pub struct TokenOccurrence {
    pub token: String,
    pub file: String,
    pub byte_offset: usize,
    pub byte_len: usize,
    pub line: usize,
    pub column: usize,
}

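/// A scroll invocation found in project sources, together with the number of arguments it was invoked with.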
#[derive(Debug, Clone, Serialize)]
pub struct ScrollReference {
    pub scroll: String,
    pub arity: usize,
    pub occurrence: TokenOccurrence,
}

#[derive(Debug, Clone, Serialize)]
pub struct SpellReference {
    pub spell: String,
    pub occurrence: TokenOccurrence,
}

#[derive(Debug, Clone, Serialize)]
pub struct SpellFrequency {
    pub spell: String,
    pub count: u64,
}

#[derive(Debug, Clone, Serialize)]
pub struct IndexError {
    pub file: String,
    pub byte_offset: usize,
    pub byte_len: usize,
    pub message: String,
}

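/// Aggregate output of [`Analyzer::index`]: scroll references, top expanded spells, CSS variable usage, and errors hit while scanning.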
#[derive(Debug, Clone, Serialize)]
pub struct IndexResult {
    pub files_scanned: usize,
    pub token_occurrences: usize,
    pub scroll_references: Vec<ScrollReference>,
    pub top_expanded_spells: Vec<SpellFrequency>,
    pub css_variables_read: Vec<String>,
    pub css_variables_written: Vec<String>,
    pub errors: Vec<IndexError>,
}

#[derive(Debug, Clone, Serialize)]
pub struct VariableReference {
    pub variable: String,
    pub kind: String,
    pub spell: String,
    pub occurrence: TokenOccurrence,
}

#[derive(Debug, Clone, Serialize)]
pub struct GrimoireVariableDefinition {
    pub name: String,
    pub value: String,
}

#[derive(Debug, Clone, Serialize)]
pub struct GrimoireVariableReference {
    pub variable: String,
    pub spell: String,
    pub occurrence: TokenOccurrence,
}

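/// A single lint finding with its severity, machine-readable code, and optional source location.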
#[derive(Debug, Clone, Serialize)]
pub struct LintMessage {
    pub level: String,
    pub code: String,
    pub message: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub occurrence: Option<TokenOccurrence>,
}

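/// Lint findings grouped by severity.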
#[derive(Debug, Clone, Serialize)]
pub struct LintResult {
    pub errors: Vec<LintMessage>,
    pub warnings: Vec<LintMessage>,
    pub notes: Vec<LintMessage>,
}

#[derive(Debug, Clone, Serialize)]
pub struct ConfigProjectSummary {
    pub name: String,
    pub input_paths: Vec<String>,
    pub output_dir_path: Option<String>,
    pub single_output_file_name: Option<String>,
}

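/// Snapshot of the loaded configuration: projects, scrolls, variables, shared spells, animations, and external config files.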
#[derive(Debug, Clone, Serialize)]
pub struct ConfigSummary {
    pub config_path: String,
    pub projects: Vec<ConfigProjectSummary>,
    pub scrolls: Vec<String>,
    pub variables: Vec<GrimoireVariableDefinition>,
    pub shared_spells: Vec<String>,
    pub custom_animations: Vec<String>,
    pub css_custom_properties: Vec<String>,
    pub external_scroll_files: Vec<String>,
    pub external_variable_files: Vec<String>,
}

impl Analyzer {
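    /// Loads the Grimoire CSS configuration from disk for the given project root.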
    pub fn load_config(current_dir: &Path) -> Result<ConfigFs, GrimoireCssError> {
        ConfigFs::load(current_dir)
    }

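    /// Expands a single class token into its constituent spells and compiles it in isolation to show the resulting CSS.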
    pub fn explain_class_token(
        current_dir: &Path,
        class_token: &str,
    ) -> Result<ExplainClassTokenResult, GrimoireCssError> {
        let config_fs = Self::load_config(current_dir)?;

        let shared_spells = config_fs.shared_spells.clone();
        let spell = Spell::new(
            class_token,
            &shared_spells,
            &config_fs.scrolls,
            (0, 0),
            None,
        )?
        .ok_or_else(|| {
            GrimoireCssError::InvalidInput(format!(
                "Could not parse '{class_token}' as a spell or scroll invocation"
            ))
        })?;

        let expanded_spells: Vec<String> = if let Some(scroll_spells) = &spell.scroll_spells {
            scroll_spells
                .iter()
                .map(|s| s.raw_spell.clone())
                .collect::<Vec<String>>()
        } else {
            vec![spell.raw_spell.clone()]
        };

        let html = format!("<div class=\"{class_token}\"></div>");

        let config_in_memory = ConfigInMemory {
            projects: vec![ConfigInMemoryEntry {
                name: "explain".to_string(),
                content: vec![html],
            }],
            variables: config_fs.variables.clone(),
            scrolls: config_fs.scrolls.clone(),
            custom_animations: config_fs.custom_animations.clone(),
            browserslist_content: None,
        };

        let compiled = crate::start_in_memory_pretty(&config_in_memory)?;
        let css = compiled
            .into_iter()
            .next()
            .map(|c| c.content)
            .unwrap_or_default();

        Ok(ExplainClassTokenResult {
            class_token: class_token.to_string(),
            expanded_spells,
            css,
        })
    }

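    /// Summarizes the loaded configuration, including externally defined scroll and variable files found next to the config.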
    pub fn config_summary(current_dir: &Path) -> Result<ConfigSummary, GrimoireCssError> {
        let config_fs = Self::load_config(current_dir)?;
        let config_path = Filesystem::get_config_path(current_dir)?;
        let config_dir = config_path.parent().unwrap_or(current_dir);

        let projects = config_fs
            .projects
            .iter()
            .map(|p| ConfigProjectSummary {
                name: p.project_name.clone(),
                input_paths: p.input_paths.clone(),
                output_dir_path: p.output_dir_path.clone(),
                single_output_file_name: p.single_output_file_name.clone(),
            })
            .collect::<Vec<_>>();

        let mut scrolls = config_fs
            .scrolls
            .as_ref()
            .map(|m| m.keys().cloned().collect::<Vec<_>>())
            .unwrap_or_default();
        scrolls.sort();

        let variables = config_fs
            .variables
            .clone()
            .unwrap_or_default()
            .into_iter()
            .map(|(name, value)| GrimoireVariableDefinition { name, value })
            .collect::<Vec<_>>();

        let shared_spells = Self::sorted_set(config_fs.shared_spells.clone());

        let mut custom_animations = config_fs
            .custom_animations
            .keys()
            .cloned()
            .collect::<Vec<_>>();
        custom_animations.sort();

        let css_custom_properties =
            Self::sorted_set(Self::defined_css_custom_properties(&config_fs));

        let scroll_pattern = config_dir
            .join("grimoire.*.scrolls.json")
            .to_string_lossy()
            .to_string();
        let variable_pattern = config_dir
            .join("grimoire.*.variables.json")
            .to_string_lossy()
            .to_string();

        let mut external_scroll_files = Self::glob_paths(&scroll_pattern)?
            .into_iter()
            .map(|p| Self::to_rel(current_dir, &p))
            .collect::<Vec<_>>();
        external_scroll_files.sort();

        let mut external_variable_files = Self::glob_paths(&variable_pattern)?
            .into_iter()
            .map(|p| Self::to_rel(current_dir, &p))
            .collect::<Vec<_>>();
        external_variable_files.sort();

        Ok(ConfigSummary {
            config_path: Self::to_rel(current_dir, &config_path),
            projects,
            scrolls,
            variables,
            shared_spells,
            custom_animations,
            css_custom_properties,
            external_scroll_files,
            external_variable_files,
        })
    }

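    /// Scans all project input files and builds an index of scroll references, expanded spell frequencies, and CSS variable reads/writes.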
    pub fn index(current_dir: &Path, top: usize) -> Result<IndexResult, GrimoireCssError> {
        let config_fs = Self::load_config(current_dir)?;
        let parser = Parser::new();

        let mut files = HashSet::<PathBuf>::new();
        for project in &config_fs.projects {
            for pattern in &project.input_paths {
                for path in Self::expand_input_pattern(current_dir, pattern)? {
                    if path.is_file() {
                        files.insert(path);
                    }
                }
            }
        }

        let mut scroll_references: Vec<ScrollReference> = Vec::new();
        let mut errors: Vec<IndexError> = Vec::new();
        let mut token_occurrences: usize = 0;

        let mut expanded_spell_counts: HashMap<String, u64> = HashMap::new();
        let mut css_variables_read: HashSet<String> = HashSet::new();
        let mut css_variables_written: HashSet<String> = HashSet::new();

        let mut file_list: Vec<PathBuf> = files.into_iter().collect();
        file_list.sort();

        for file_path in &file_list {
            let content = match fs::read_to_string(file_path) {
                Ok(c) => c,
                Err(e) => {
                    errors.push(IndexError {
                        file: Self::to_rel(current_dir, file_path),
                        byte_offset: 0,
                        byte_len: 0,
                        message: format!("Failed to read file: {e}"),
                    });
                    continue;
                }
            };

            let line_index = LineIndex::new(&content);

            let mut candidates: Vec<(String, (usize, usize))> = Vec::new();
            parser.collect_candidates_all(&content, &mut candidates)?;

            for (token, (byte_offset, byte_len)) in candidates {
                token_occurrences += 1;

                let (line, column) = line_index.line_col(byte_offset);
                let occurrence = TokenOccurrence {
                    token: token.clone(),
                    file: Self::to_rel(current_dir, file_path),
                    byte_offset,
                    byte_len,
                    line,
                    column,
                };

                let parsed = Spell::new(
                    &token,
                    &config_fs.shared_spells,
                    &config_fs.scrolls,
                    (byte_offset, byte_len),
                    None,
                );

                let spell = match parsed {
                    Ok(Some(s)) => s,
                    Ok(None) => continue,
                    Err(e) => {
                        errors.push(IndexError {
                            file: occurrence.file.clone(),
                            byte_offset,
                            byte_len,
                            message: e.to_string(),
                        });
                        continue;
                    }
                };

                if let Some(expanded_spells) = &spell.scroll_spells {
                    let scroll_name = spell.component().to_string();
                    if !scroll_name.is_empty() {
                        let arity = if spell.component_target().is_empty() {
                            0
                        } else {
                            spell.component_target().split('_').count()
                        };

                        scroll_references.push(ScrollReference {
                            scroll: scroll_name,
                            arity,
                            occurrence: occurrence.clone(),
                        });
                    }

                    for inner in expanded_spells {
                        Self::collect_css_variable_usage(
                            &inner.raw_spell,
                            &mut css_variables_read,
                            &mut css_variables_written,
                        );
                        *expanded_spell_counts
                            .entry(inner.raw_spell.clone())
                            .or_default() += 1;
                    }
                } else {
                    Self::collect_css_variable_usage(
                        &spell.raw_spell,
                        &mut css_variables_read,
                        &mut css_variables_written,
                    );
                    *expanded_spell_counts
                        .entry(spell.raw_spell.clone())
                        .or_default() += 1;
                }
            }
        }

        let mut top_expanded_spells = Self::top_counts(expanded_spell_counts, top);
        top_expanded_spells
            .sort_by(|a, b| b.count.cmp(&a.count).then_with(|| a.spell.cmp(&b.spell)));

        Ok(IndexResult {
            files_scanned: file_list.len(),
            token_occurrences,
            scroll_references,
            top_expanded_spells,
            css_variables_read: Self::sorted_set(css_variables_read),
            css_variables_written: Self::sorted_set(css_variables_written),
            errors,
        })
    }

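    /// Finds repeated token sets across class attributes (DRY candidates) that appear in at least `min_support` occurrences and contain at least `min_items` tokens.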
    pub fn dry_candidates(
        current_dir: &Path,
        min_support: usize,
        min_items: usize,
    ) -> Result<DryCandidatesResult, GrimoireCssError> {
        let config_fs = Self::load_config(current_dir)?;
        let parser = Parser::new();

        let mut files = HashSet::<PathBuf>::new();
        for project in &config_fs.projects {
            for pattern in &project.input_paths {
                for path in Self::expand_input_pattern(current_dir, pattern)? {
                    if path.is_file() {
                        files.insert(path);
                    }
                }
            }
        }

        let mut file_list: Vec<PathBuf> = files.into_iter().collect();
        file_list.sort();

        let mut occurrences: Vec<DryOccurrence> = Vec::new();
        for file_path in &file_list {
            let content = match fs::read_to_string(file_path) {
                Ok(c) => c,
                Err(_) => continue,
            };

            let line_index = LineIndex::new(&content);

            let mut groups: Vec<crate::core::parser::RegularClassGroup> = Vec::new();
            parser.collect_regular_class_groups(&content, &mut groups)?;

            for g in groups {
                let mut toks: Vec<(String, (usize, usize))> = Vec::new();
                for (t, span) in g.tokens {
                    if t.is_empty() {
                        continue;
                    }

                    let parsed =
                        Spell::new(&t, &config_fs.shared_spells, &config_fs.scrolls, span, None)?;

                    if parsed.is_some() {
                        toks.push((t, span));
                    }
                }

                if toks.len() < min_items {
                    continue;
                }

                let mut norm: Vec<String> = toks.iter().map(|(t, _)| t.clone()).collect();
                norm.sort();
                norm.dedup();
                if norm.len() < min_items {
                    continue;
                }

                let (line, column) = line_index.line_col(toks[0].1.0);

                occurrences.push(DryOccurrence {
                    file: Self::to_rel(current_dir, file_path),
                    line,
                    column,
                    tokens: norm,
                });
            }
        }

        let mut candidate_support: HashMap<String, (Vec<String>, HashSet<usize>)> = HashMap::new();

        for i in 0..occurrences.len() {
            for j in (i + 1)..occurrences.len() {
                let inter = intersect_sorted(&occurrences[i].tokens, &occurrences[j].tokens);
                if inter.len() < min_items {
                    continue;
                }

                let key = inter.join("\u{1f}");
                let entry = candidate_support
                    .entry(key)
                    .or_insert_with(|| (inter.clone(), HashSet::new()));
                entry.1.insert(i);
                entry.1.insert(j);
            }
        }

        for (_, (tokens, support)) in candidate_support.iter_mut() {
            for (idx, occ) in occurrences.iter().enumerate() {
                if is_subset(tokens, &occ.tokens) {
                    support.insert(idx);
                }
            }
        }

        let mut candidates: Vec<(Vec<String>, Vec<usize>)> = candidate_support
            .into_values()
            .filter_map(|(tokens, support)| {
                if support.len() >= min_support {
                    let mut v: Vec<usize> = support.into_iter().collect();
                    v.sort();
                    Some((tokens, v))
                } else {
                    None
                }
            })
            .collect();

        candidates.sort_by(|a, b| b.0.len().cmp(&a.0.len()));
        let mut kept: Vec<(Vec<String>, Vec<usize>)> = Vec::new();
        'outer: for (toks, supp) in candidates {
            for (kt, ks) in &kept {
                if ks == &supp && is_subset(&toks, kt) {
                    continue 'outer;
                }
            }
            kept.push((toks, supp));
        }

        let mut out: Vec<DryCandidate> = Vec::new();
        for (tokens, support) in kept {
            let occs = support
                .iter()
                .map(|&i| occurrences[i].clone())
                .collect::<Vec<_>>();
            out.push(DryCandidate {
                support: occs.len(),
                tokens,
                occurrences: occs,
            });
        }

        out.sort_by(|a, b| {
            b.tokens
                .len()
                .cmp(&a.tokens.len())
                .then_with(|| b.support.cmp(&a.support))
        });

        Ok(DryCandidatesResult {
            files_scanned: file_list.len(),
            class_occurrences: occurrences.len(),
            candidates: out,
        })
    }

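    /// Returns every location where the given scroll is invoked in project sources.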
    pub fn refs_scroll(
        current_dir: &Path,
        scroll_name: &str,
    ) -> Result<Vec<ScrollReference>, GrimoireCssError> {
        let mut index = Self::index(current_dir, 0)?;
        index.scroll_references.retain(|r| r.scroll == scroll_name);
        Ok(index.scroll_references)
    }

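    /// Returns every location whose token matches `raw_spell`, either directly or through a scroll expansion.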
    pub fn refs_spell(
        current_dir: &Path,
        raw_spell: &str,
    ) -> Result<Vec<SpellReference>, GrimoireCssError> {
        let config_fs = Self::load_config(current_dir)?;
        let parser = Parser::new();

        let mut files = HashSet::<PathBuf>::new();
        for project in &config_fs.projects {
            for pattern in &project.input_paths {
                for path in Self::expand_input_pattern(current_dir, pattern)? {
                    if path.is_file() {
                        files.insert(path);
                    }
                }
            }
        }

        let mut file_list: Vec<PathBuf> = files.into_iter().collect();
        file_list.sort();

        let mut refs: Vec<SpellReference> = Vec::new();

        for file_path in &file_list {
            let content = fs::read_to_string(file_path)?;
            let line_index = LineIndex::new(&content);

            let mut candidates: Vec<(String, (usize, usize))> = Vec::new();
            parser.collect_candidates_all(&content, &mut candidates)?;

            for (token, (byte_offset, byte_len)) in candidates {
                let parsed = Spell::new(
                    &token,
                    &config_fs.shared_spells,
                    &config_fs.scrolls,
                    (byte_offset, byte_len),
                    None,
                );

                let spell = match parsed {
                    Ok(Some(s)) => s,
                    _ => continue,
                };

                let (line, column) = line_index.line_col(byte_offset);
                let occurrence = TokenOccurrence {
                    token: token.clone(),
                    file: Self::to_rel(current_dir, file_path),
                    byte_offset,
                    byte_len,
                    line,
                    column,
                };

                if let Some(scroll_spells) = &spell.scroll_spells {
                    for inner in scroll_spells {
                        if inner.raw_spell == raw_spell {
                            refs.push(SpellReference {
                                spell: raw_spell.to_string(),
                                occurrence: occurrence.clone(),
                            });
                        }
                    }
                } else if spell.raw_spell == raw_spell {
                    refs.push(SpellReference {
                        spell: raw_spell.to_string(),
                        occurrence,
                    });
                }
            }
        }

        Ok(refs)
    }

    pub fn spell_count(current_dir: &Path, raw_spell: &str) -> Result<u64, GrimoireCssError> {
        Ok(Self::refs_spell(current_dir, raw_spell)?.len() as u64)
    }

    pub fn stats_spells(
        current_dir: &Path,
        top: usize,
    ) -> Result<Vec<SpellFrequency>, GrimoireCssError> {
        let index = Self::index(current_dir, top)?;
        Ok(index.top_expanded_spells)
    }

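    /// Returns every read or write of the given CSS custom property, in project sources and in scroll configuration files.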
    pub fn refs_variable(
        current_dir: &Path,
        variable: &str,
    ) -> Result<Vec<VariableReference>, GrimoireCssError> {
        let config_fs = Self::load_config(current_dir)?;
        let parser = Parser::new();

        let variable = if variable.starts_with("--") {
            variable.to_string()
        } else {
            format!("--{variable}")
        };

        let mut files = HashSet::<PathBuf>::new();
        for project in &config_fs.projects {
            for pattern in &project.input_paths {
                for path in Self::expand_input_pattern(current_dir, pattern)? {
                    if path.is_file() {
                        files.insert(path);
                    }
                }
            }
        }

        let mut file_list: Vec<PathBuf> = files.into_iter().collect();
        file_list.sort();

        let mut refs: Vec<VariableReference> = Vec::new();

        for file_path in &file_list {
            let content = fs::read_to_string(file_path)?;
            let line_index = LineIndex::new(&content);

            let mut candidates: Vec<(String, (usize, usize))> = Vec::new();
            parser.collect_candidates_all(&content, &mut candidates)?;

            for (token, (byte_offset, byte_len)) in candidates {
                let parsed = Spell::new(
                    &token,
                    &config_fs.shared_spells,
                    &config_fs.scrolls,
                    (byte_offset, byte_len),
                    None,
                );

                let spell = match parsed {
                    Ok(Some(s)) => s,
                    _ => continue,
                };

                let (line, column) = line_index.line_col(byte_offset);
                let occurrence = TokenOccurrence {
                    token: token.clone(),
                    file: Self::to_rel(current_dir, file_path),
                    byte_offset,
                    byte_len,
                    line,
                    column,
                };

                let expanded: Vec<&str> = if let Some(scroll_spells) = &spell.scroll_spells {
                    scroll_spells.iter().map(|s| s.raw_spell.as_str()).collect()
                } else {
                    vec![spell.raw_spell.as_str()]
                };

                for raw_spell in expanded {
                    let mut reads = Vec::new();
                    let mut writes = Vec::new();
                    Self::extract_css_variable_usage(raw_spell, &mut reads, &mut writes);

                    if reads.iter().any(|v| v == &variable) {
                        refs.push(VariableReference {
                            variable: variable.clone(),
                            kind: "read".to_string(),
                            spell: raw_spell.to_string(),
                            occurrence: occurrence.clone(),
                        });
                    }
                    if writes.iter().any(|v| v == &variable) {
                        refs.push(VariableReference {
                            variable: variable.clone(),
                            kind: "write".to_string(),
                            spell: raw_spell.to_string(),
                            occurrence: occurrence.clone(),
                        });
                    }
                }
            }
        }

        for file_path in Self::scroll_config_files(current_dir) {
            let content = fs::read_to_string(&file_path)?;
            let line_index = LineIndex::new(&content);
            let json: serde_json::Value = serde_json::from_str(&content).map_err(|e| {
                GrimoireCssError::InvalidInput(format!(
                    "Failed to parse JSON in {}: {e}",
                    Self::to_rel(current_dir, &file_path)
                ))
            })?;

            let mut search_from: usize = 0;

            let Some(scrolls) = json.get("scrolls").and_then(|v| v.as_array()) else {
                continue;
            };

            for scroll in scrolls {
                if let Some(spells) = scroll.get("spells").and_then(|v| v.as_array()) {
                    for s in spells.iter().filter_map(|v| v.as_str()) {
                        Self::push_css_var_ref_if_match(
                            current_dir,
                            &file_path,
                            &content,
                            &line_index,
                            &variable,
                            s,
                            &mut search_from,
                            &mut refs,
                        )?;
                    }
                }

                if let Some(obj) = scroll.get("spellsByArgs").and_then(|v| v.as_object()) {
                    for (_k, arr) in obj {
                        let Some(spells) = arr.as_array() else {
                            continue;
                        };
                        for s in spells.iter().filter_map(|v| v.as_str()) {
                            Self::push_css_var_ref_if_match(
                                current_dir,
                                &file_path,
                                &content,
                                &line_index,
                                &variable,
                                s,
                                &mut search_from,
                                &mut refs,
                            )?;
                        }
                    }
                }
            }
        }

        Ok(refs)
    }

    #[allow(clippy::too_many_arguments)]
    fn push_css_var_ref_if_match(
        current_dir: &Path,
        file_path: &Path,
        content: &str,
        line_index: &LineIndex,
        variable: &str,
        raw_spell: &str,
        search_from: &mut usize,
        out: &mut Vec<VariableReference>,
    ) -> Result<(), GrimoireCssError> {
        let mut reads = Vec::new();
        let mut writes = Vec::new();
        Self::extract_css_variable_usage(raw_spell, &mut reads, &mut writes);

        let matched_reads = reads
            .into_iter()
            .filter(|v| v == variable)
            .collect::<Vec<_>>();
        let matched_writes = writes
            .into_iter()
            .filter(|v| v == variable)
            .collect::<Vec<_>>();

        if matched_reads.is_empty() && matched_writes.is_empty() {
            return Ok(());
        }

        let json_string = serde_json::to_string(raw_spell).map_err(|e| {
            GrimoireCssError::InvalidInput(format!(
                "Failed to encode JSON string for spell in {}: {e}",
                Self::to_rel(current_dir, file_path)
            ))
        })?;

        let mut found = None;
        if *search_from < content.len()
            && let Some(rel) = content[*search_from..].find(&json_string)
        {
            found = Some(*search_from + rel);
        }
        if found.is_none() {
            found = content.find(&json_string);
        }

        let Some(byte_offset) = found else {
            for v in matched_reads {
                out.push(VariableReference {
                    variable: v,
                    kind: "read".to_string(),
                    spell: raw_spell.to_string(),
                    occurrence: TokenOccurrence {
                        token: raw_spell.to_string(),
                        file: Self::to_rel(current_dir, file_path),
                        byte_offset: 0,
                        byte_len: 0,
                        line: 1,
                        column: 1,
                    },
                });
            }
            for v in matched_writes {
                out.push(VariableReference {
                    variable: v,
                    kind: "write".to_string(),
                    spell: raw_spell.to_string(),
                    occurrence: TokenOccurrence {
                        token: raw_spell.to_string(),
                        file: Self::to_rel(current_dir, file_path),
                        byte_offset: 0,
                        byte_len: 0,
                        line: 1,
                        column: 1,
                    },
                });
            }
            return Ok(());
        };

        *search_from = byte_offset + json_string.len();
        let byte_len = json_string.len();
        let (line, column) = line_index.line_col(byte_offset);
        let occurrence = TokenOccurrence {
            token: raw_spell.to_string(),
            file: Self::to_rel(current_dir, file_path),
            byte_offset,
            byte_len,
            line,
            column,
        };

        for v in matched_reads {
            out.push(VariableReference {
                variable: v,
                kind: "read".to_string(),
                spell: raw_spell.to_string(),
                occurrence: occurrence.clone(),
            });
        }
        for v in matched_writes {
            out.push(VariableReference {
                variable: v,
                kind: "write".to_string(),
                spell: raw_spell.to_string(),
                occurrence: occurrence.clone(),
            });
        }

        Ok(())
    }

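    /// Lists the Grimoire variables defined in the configuration, sorted by name.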
    pub fn list_grimoire_variables(
        current_dir: &Path,
    ) -> Result<Vec<GrimoireVariableDefinition>, GrimoireCssError> {
        let config_fs = Self::load_config(current_dir)?;
        let mut out = config_fs
            .variables
            .unwrap_or_default()
            .into_iter()
            .map(|(name, value)| GrimoireVariableDefinition { name, value })
            .collect::<Vec<_>>();
        out.sort_by(|a, b| a.name.cmp(&b.name));
        Ok(out)
    }

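    /// Returns every usage of the given Grimoire variable (`$variable`), in project sources and in scroll configuration files.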
    pub fn refs_grimoire_variable(
        current_dir: &Path,
        variable: &str,
    ) -> Result<Vec<GrimoireVariableReference>, GrimoireCssError> {
        let config_fs = Self::load_config(current_dir)?;
        let parser = Parser::new();

        let mut files = HashSet::<PathBuf>::new();
        for project in &config_fs.projects {
            for pattern in &project.input_paths {
                for path in Self::expand_input_pattern(current_dir, pattern)? {
                    if path.is_file() {
                        files.insert(path);
                    }
                }
            }
        }

        let mut file_list: Vec<PathBuf> = files.into_iter().collect();
        file_list.sort();

        let needle = format!("${variable}");
        let mut refs: Vec<GrimoireVariableReference> = Vec::new();

        for file_path in &file_list {
            let content = fs::read_to_string(file_path)?;
            let line_index = LineIndex::new(&content);

            let mut candidates: Vec<(String, (usize, usize))> = Vec::new();
            parser.collect_candidates_all(&content, &mut candidates)?;

            for (token, (byte_offset, byte_len)) in candidates {
                let parsed = Spell::new(
                    &token,
                    &config_fs.shared_spells,
                    &config_fs.scrolls,
                    (byte_offset, byte_len),
                    None,
                );

                let spell = match parsed {
                    Ok(Some(s)) => s,
                    _ => continue,
                };

                let (line, column) = line_index.line_col(byte_offset);
                let occurrence = TokenOccurrence {
                    token: token.clone(),
                    file: Self::to_rel(current_dir, file_path),
                    byte_offset,
                    byte_len,
                    line,
                    column,
                };

                let expanded: Vec<&str> = if let Some(scroll_spells) = &spell.scroll_spells {
                    scroll_spells.iter().map(|s| s.raw_spell.as_str()).collect()
                } else {
                    vec![spell.raw_spell.as_str()]
                };

                for raw_spell in expanded {
                    if raw_spell.contains(&needle) {
                        refs.push(GrimoireVariableReference {
                            variable: variable.to_string(),
                            spell: raw_spell.to_string(),
                            occurrence: occurrence.clone(),
                        });
                    }
                }
            }
        }

        for file_path in Self::scroll_config_files(current_dir) {
            let content = fs::read_to_string(&file_path)?;
            let line_index = LineIndex::new(&content);
            let json: serde_json::Value = serde_json::from_str(&content).map_err(|e| {
                GrimoireCssError::InvalidInput(format!(
                    "Failed to parse JSON in {}: {e}",
                    Self::to_rel(current_dir, &file_path)
                ))
            })?;

            let mut search_from: usize = 0;

            let Some(scrolls) = json.get("scrolls").and_then(|v| v.as_array()) else {
                continue;
            };

            for scroll in scrolls {
                if let Some(spells) = scroll.get("spells").and_then(|v| v.as_array()) {
                    for s in spells.iter().filter_map(|v| v.as_str()) {
                        Self::push_gvar_ref_if_match(
                            current_dir,
                            &file_path,
                            &content,
                            &line_index,
                            variable,
                            &needle,
                            s,
                            &mut search_from,
                            &mut refs,
                        )?;
                    }
                }

                if let Some(obj) = scroll.get("spellsByArgs").and_then(|v| v.as_object()) {
                    for (_k, arr) in obj {
                        let Some(spells) = arr.as_array() else {
                            continue;
                        };
                        for s in spells.iter().filter_map(|v| v.as_str()) {
                            Self::push_gvar_ref_if_match(
                                current_dir,
                                &file_path,
                                &content,
                                &line_index,
                                variable,
                                &needle,
                                s,
                                &mut search_from,
                                &mut refs,
                            )?;
                        }
                    }
                }
            }
        }

        Ok(refs)
    }

    fn scroll_config_files(current_dir: &Path) -> Vec<PathBuf> {
        let config_dir = current_dir.join("grimoire").join("config");
        if !config_dir.exists() {
            return Vec::new();
        }

        let mut out = Vec::new();
        let main = config_dir.join("grimoire.config.json");
        if main.is_file() {
            out.push(main);
        }

        let pattern = config_dir
            .join("grimoire.*.scrolls.json")
            .to_string_lossy()
            .to_string();
        if let Ok(entries) = glob(&pattern) {
            for p in entries.flatten() {
                if p.is_file() {
                    out.push(p);
                }
            }
        }

        out.sort();
        out.dedup();
        out
    }

    #[allow(clippy::too_many_arguments)]
    fn push_gvar_ref_if_match(
        current_dir: &Path,
        file_path: &Path,
        content: &str,
        line_index: &LineIndex,
        variable: &str,
        needle: &str,
        raw_spell: &str,
        search_from: &mut usize,
        out: &mut Vec<GrimoireVariableReference>,
    ) -> Result<(), GrimoireCssError> {
        if !raw_spell.contains(needle) {
            return Ok(());
        }

        let json_string = serde_json::to_string(raw_spell).map_err(|e| {
            GrimoireCssError::InvalidInput(format!(
                "Failed to encode JSON string for spell in {}: {e}",
                Self::to_rel(current_dir, file_path)
            ))
        })?;

        let mut found = None;
        if *search_from < content.len()
            && let Some(rel) = content[*search_from..].find(&json_string)
        {
            found = Some(*search_from + rel);
        }
        if found.is_none() {
            found = content.find(&json_string);
        }

        let Some(byte_offset) = found else {
            out.push(GrimoireVariableReference {
                variable: variable.to_string(),
                spell: raw_spell.to_string(),
                occurrence: TokenOccurrence {
                    token: raw_spell.to_string(),
                    file: Self::to_rel(current_dir, file_path),
                    byte_offset: 0,
                    byte_len: 0,
                    line: 1,
                    column: 1,
                },
            });
            return Ok(());
        };

        *search_from = byte_offset + json_string.len();

        let byte_len = json_string.len();
        let (line, column) = line_index.line_col(byte_offset);

        out.push(GrimoireVariableReference {
            variable: variable.to_string(),
            spell: raw_spell.to_string(),
            occurrence: TokenOccurrence {
                token: raw_spell.to_string(),
                file: Self::to_rel(current_dir, file_path),
                byte_offset,
                byte_len,
                line,
                column,
            },
        });

        Ok(())
    }

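    /// Runs lint checks: parse errors, scroll arity mismatches, unused shared styles, and unused CSS custom properties.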
    pub fn lint(current_dir: &Path) -> Result<LintResult, GrimoireCssError> {
        let config_fs = Self::load_config(current_dir)?;
        let index = Self::index(current_dir, 200)?;
        let _config_path = Filesystem::get_config_path(current_dir)?;

        let mut errors: Vec<LintMessage> = Vec::new();
        let mut warnings: Vec<LintMessage> = Vec::new();
        let notes: Vec<LintMessage> = Vec::new();

        if !index.errors.is_empty() {
            let occurrence = index.errors.first().and_then(|e| {
                let abs = current_dir.join(&e.file);
                let content = fs::read_to_string(&abs).ok()?;
                let (line, column) = line_col_from_byte_offset(&content, e.byte_offset);
                Some(TokenOccurrence {
                    token: "parse_error".to_string(),
                    file: e.file.clone(),
                    byte_offset: e.byte_offset,
                    byte_len: e.byte_len,
                    line,
                    column,
                })
            });

            errors.push(LintMessage {
                level: "error".to_string(),
                code: "parse_error".to_string(),
                message: format!(
                    "Encountered {} parse/compile errors while scanning project files",
                    index.errors.len()
                ),
                occurrence,
            });
        }

        if let Some(scrolls) = &config_fs.scrolls {
            for r in &index.scroll_references {
                if r.arity == 0 {
                    continue;
                }
                if let Some(def) = scrolls.get(&r.scroll)
                    && let Some(map) = &def.spells_by_args
                    && !map.is_empty()
                {
                    let key = r.arity.to_string();
                    if !map.contains_key(&key) {
                        errors.push(LintMessage {
                            level: "error".to_string(),
                            code: "missing_overload".to_string(),
                            message: format!(
                                "Scroll '{}' is used with arity {}, but spellsByArgs['{}'] is not defined",
                                r.scroll, r.arity, key
                            ),
                            occurrence: Some(r.occurrence.clone()),
                        });
                    }
                }
            }
        }

        if let Some(shared) = &config_fs.shared {
            let parser = Parser::new();
            let mut files = HashSet::<PathBuf>::new();
            for project in &config_fs.projects {
                for pattern in &project.input_paths {
                    for path in Self::expand_input_pattern(current_dir, pattern)? {
                        if path.is_file() {
                            files.insert(path);
                        }
                    }
                }
            }

            let mut file_list: Vec<PathBuf> = files.into_iter().collect();
            file_list.sort();

            let mut used_tokens: HashSet<String> = HashSet::new();
            for file_path in &file_list {
                let Ok(content) = fs::read_to_string(file_path) else {
                    continue;
                };

                let mut candidates: Vec<(String, (usize, usize))> = Vec::new();
                parser.collect_candidates_all(&content, &mut candidates)?;
                for (token, _span) in candidates {
                    if token.is_empty() {
                        continue;
                    }
                    used_tokens.insert(token);
                }
            }

            let mut unused_shared: Vec<String> = Vec::new();
            for s in shared {
                let Some(styles) = &s.styles else {
                    continue;
                };
                for t in styles {
                    if t.is_empty() {
                        continue;
                    }

                    let parsed = Spell::new(
                        t,
                        &config_fs.shared_spells,
                        &config_fs.scrolls,
                        (0, 0),
                        None,
                    );
                    let Ok(Some(_)) = parsed else {
                        continue;
                    };

                    if !used_tokens.contains(t) {
                        unused_shared.push(t.clone());
                    }
                }
            }

            unused_shared.sort();
            unused_shared.dedup();

            if !unused_shared.is_empty() {
                warnings.push(LintMessage {
                    level: "warning".to_string(),
                    code: "unused_shared_style".to_string(),
                    message: format!(
                        "{} shared style(s) are configured but never used in scanned project inputs: {}",
                        unused_shared.len(),
                        unused_shared.join(", ")
                    ),
                    occurrence: None,
                });
            }
        }

        let defined_tokens = Self::defined_css_custom_properties(&config_fs);
        if !defined_tokens.is_empty() {
            let used_tokens: HashSet<String> = index.css_variables_read.iter().cloned().collect();

            let mut unused_tokens: Vec<String> = defined_tokens
                .iter()
                .filter(|t| !used_tokens.contains(*t))
                .cloned()
                .collect();
            unused_tokens.sort();

            if !unused_tokens.is_empty() {
                warnings.push(LintMessage {
                    level: "warning".to_string(),
                    code: "unused_token".to_string(),
                    message: format!(
                        "{} token(s) are defined in cssCustomProperties but never read via var(--token): {}",
                        unused_tokens.len(),
                        unused_tokens.join(", ")
                    ),
                    occurrence: None,
                });
            }
        }

        Ok(LintResult {
            errors,
            warnings,
            notes,
        })
    }

    fn expand_input_pattern(
        current_dir: &Path,
        pattern: &str,
    ) -> Result<Vec<PathBuf>, GrimoireCssError> {
        let abs = current_dir.join(pattern);

        if abs.exists() && abs.is_dir() {
            let mut dir_pattern = abs.to_string_lossy().to_string();
            if !dir_pattern.ends_with('/') {
                dir_pattern.push('/');
            }
            dir_pattern.push_str("**/*");
            return Self::glob_paths(&dir_pattern);
        }

        Self::glob_paths(&abs.to_string_lossy())
    }

    fn glob_paths(pattern: &str) -> Result<Vec<PathBuf>, GrimoireCssError> {
        let mut out = Vec::new();
        let entries = glob(pattern).map_err(|e| {
            GrimoireCssError::InvalidInput(format!("Invalid glob pattern '{pattern}': {e}"))
        })?;
        for entry in entries {
            match entry {
                Ok(path) => out.push(path),
                Err(e) => {
                    return Err(GrimoireCssError::InvalidInput(format!(
                        "Failed to expand glob '{pattern}': {e}"
                    )));
                }
            }
        }
        Ok(out)
    }

    fn to_rel(current_dir: &Path, p: &Path) -> String {
        p.strip_prefix(current_dir)
            .unwrap_or(p)
            .to_string_lossy()
            .to_string()
    }

    fn sorted_set(set: HashSet<String>) -> Vec<String> {
        let mut out: Vec<String> = set.into_iter().collect();
        out.sort();
        out
    }

    fn top_counts(map: HashMap<String, u64>, top: usize) -> Vec<SpellFrequency> {
        if top == 0 {
            return Vec::new();
        }

        let mut items: Vec<(String, u64)> = map.into_iter().collect();
        items.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
        items
            .into_iter()
            .take(top)
            .map(|(spell, count)| SpellFrequency { spell, count })
            .collect()
    }

    fn defined_css_custom_properties(config_fs: &ConfigFs) -> HashSet<String> {
        let mut set = HashSet::new();

        if let Some(shared) = &config_fs.shared {
            for s in shared {
                if let Some(props) = &s.css_custom_properties {
                    for p in props {
                        for (k, _) in &p.css_variables {
                            if k.starts_with("--") {
                                set.insert(k.clone());
                            } else {
                                set.insert(format!("--{k}"));
                            }
                        }
                    }
                }
            }
        }
        if let Some(critical) = &config_fs.critical {
            for c in critical {
                if let Some(props) = &c.css_custom_properties {
                    for p in props {
                        for (k, _) in &p.css_variables {
                            if k.starts_with("--") {
                                set.insert(k.clone());
                            } else {
                                set.insert(format!("--{k}"));
                            }
                        }
                    }
                }
            }
        }

        set
    }

    fn collect_css_variable_usage(
        raw_spell: &str,
        reads: &mut HashSet<String>,
        writes: &mut HashSet<String>,
    ) {
        let mut r = Vec::new();
        let mut w = Vec::new();
        Self::extract_css_variable_usage(raw_spell, &mut r, &mut w);
        for v in r {
            reads.insert(v);
        }
        for v in w {
            writes.insert(v);
        }
    }

    fn extract_css_variable_usage(
        raw_spell: &str,
        reads: &mut Vec<String>,
        writes: &mut Vec<String>,
    ) {
        if let Some(name) = Self::extract_css_variable_write(raw_spell) {
            writes.push(name);
        }

        let bytes = raw_spell.as_bytes();
        let mut i = 0;
        while i + 6 < bytes.len() {
            if bytes[i] == b'v'
                && bytes[i + 1] == b'a'
                && bytes[i + 2] == b'r'
                && bytes[i + 3] == b'('
                && bytes[i + 4] == b'-'
                && bytes[i + 5] == b'-'
            {
                let start = i + 4;
                let mut j = start;
                while j < bytes.len() {
                    let c = bytes[j];
                    let ok = c.is_ascii_lowercase()
                        || c.is_ascii_uppercase()
                        || c.is_ascii_digit()
                        || c == b'-'
                        || c == b'_';
                    if !ok {
                        break;
                    }
                    j += 1;
                }
                if j > start {
                    reads.push(String::from_utf8_lossy(&bytes[start..j]).to_string());
                }
                i = j;
                continue;
            }
            i += 1;
        }
    }

    fn extract_css_variable_write(raw_spell: &str) -> Option<String> {
        if !raw_spell.starts_with("--") {
            return None;
        }
        let eq = raw_spell.find('=')?;
        if eq <= 2 {
            return None;
        }
        let name = &raw_spell[..eq];
        if name.as_bytes().iter().all(|c| {
            (*c >= b'a' && *c <= b'z')
                || (*c >= b'A' && *c <= b'Z')
                || (*c >= b'0' && *c <= b'9')
                || *c == b'-'
                || *c == b'_'
        }) {
            Some(name.to_string())
        } else {
            None
        }
    }
}

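/// Computes a (line, column) pair directly from file content, without building a `LineIndex`.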
fn line_col_from_byte_offset(content: &str, byte_offset: usize) -> (usize, usize) {
    let mut i = byte_offset.min(content.len());
    while i > 0 && !content.is_char_boundary(i) {
        i -= 1;
    }

    let prefix = &content[..i];
    // 1-based line/column, consistent with `LineIndex::line_col`.
    let line = prefix.bytes().filter(|b| *b == b'\n').count() + 1;

    let last_nl = prefix.rfind('\n').map(|p| p + 1).unwrap_or(0);
    let col = prefix[last_nl..].chars().count() + 1;

    (line, col)
}

fn intersect_sorted(a: &[String], b: &[String]) -> Vec<String> {
    let mut out: Vec<String> = Vec::new();
    let mut i = 0usize;
    let mut j = 0usize;
    while i < a.len() && j < b.len() {
        match a[i].cmp(&b[j]) {
            std::cmp::Ordering::Less => i += 1,
            std::cmp::Ordering::Greater => j += 1,
            std::cmp::Ordering::Equal => {
                out.push(a[i].clone());
                i += 1;
                j += 1;
            }
        }
    }
    out
}

fn is_subset(needles: &[String], haystack_sorted: &[String]) -> bool {
    let mut i = 0usize;
    let mut j = 0usize;
    while i < needles.len() && j < haystack_sorted.len() {
        match needles[i].cmp(&haystack_sorted[j]) {
            std::cmp::Ordering::Less => return false,
            std::cmp::Ordering::Greater => j += 1,
            std::cmp::Ordering::Equal => {
                i += 1;
                j += 1;
            }
        }
    }
    i == needles.len()
}

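/// Byte offsets of newline characters in a file, used to translate byte offsets into 1-based line/column positions.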
struct LineIndex {
    newlines: Vec<usize>,
}

impl LineIndex {
    fn new(content: &str) -> Self {
        let mut newlines = Vec::new();
        for (i, b) in content.as_bytes().iter().enumerate() {
            if *b == b'\n' {
                newlines.push(i);
            }
        }
        Self { newlines }
    }

    fn line_col(&self, byte_offset: usize) -> (usize, usize) {
        let line_idx = match self.newlines.binary_search(&byte_offset) {
            Ok(i) => i + 1,
            Err(i) => i,
        };

        let line = line_idx + 1;
        let last_nl = if line_idx == 0 {
            None
        } else {
            self.newlines.get(line_idx - 1).copied()
        };

        let col0 = match last_nl {
            Some(nl) => byte_offset.saturating_sub(nl + 1),
            None => byte_offset,
        };
        (line, col0 + 1)
    }
}