1use normalize_facts_rules_interpret as interpret;
4pub use normalize_rules_config::{RuleOverride, RulesConfig, SarifTool};
5use normalize_syntax_rules::{self, DebugFlags};
6use serde::{Deserialize, Serialize};
7use std::collections::{HashMap, HashSet};
8use std::path::{Path, PathBuf};
9use std::sync::{Mutex, OnceLock};
10
/// Wrapper that lets a `CachedRuleEngine` live in the global, cross-thread
/// `ENGINE_CACHE` below.
struct SendableEngine(interpret::CachedRuleEngine);

// SAFETY: NOTE(review) — this asserts `CachedRuleEngine` may be moved across
// threads even though it is not `Send` on its own. All access goes through the
// `ENGINE_CACHE` mutex, so at most one thread uses an engine at a time, but
// confirm the non-`Send` interior (e.g. `Rc`/raw pointers) tolerates being
// used/dropped on a thread other than the one that created it.
unsafe impl Send for SendableEngine {}

/// Process-wide cache of per-rule engines for incremental fact-rule runs,
/// keyed by `"<root>::<rule id>"` (see `engine_cache_key`). Initialised
/// lazily via `engine_cache()`.
static ENGINE_CACHE: OnceLock<Mutex<HashMap<String, SendableEngine>>> = OnceLock::new();
36
37fn engine_cache() -> &'static Mutex<HashMap<String, SendableEngine>> {
38 ENGINE_CACHE.get_or_init(|| Mutex::new(HashMap::new()))
39}
40
/// Build the `ENGINE_CACHE` key for a (project root, rule id) pair:
/// `"<root>::<rule id>"`, rendering the root lossily as UTF-8.
fn engine_cache_key(root: &Path, rule_id: &str) -> String {
    let mut key = root.to_string_lossy().into_owned();
    key.push_str("::");
    key.push_str(rule_id);
    key
}
45
/// Kind filter used when listing/selecting rules.
///
/// Serialized in lowercase (`"all"`, `"syntax"`, …), matching the strings
/// accepted by the `FromStr` impl and produced by the `Display` impl below.
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum RuleKind {
    /// No filtering — include every rule kind (the default).
    #[default]
    All,
    /// Syntax rules loaded via `normalize_syntax_rules`.
    Syntax,
    /// Fact rules loaded via the `interpret` (facts) crate.
    Fact,
    /// Rules compiled into the binary (`normalize_native_rules::NATIVE_RULES`).
    Native,
    /// Rules sourced from external SARIF tool output (see `SarifTool`).
    Sarif,
}
59
60impl std::fmt::Display for RuleKind {
61 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
62 match self {
63 Self::All => f.write_str("all"),
64 Self::Syntax => f.write_str("syntax"),
65 Self::Fact => f.write_str("fact"),
66 Self::Native => f.write_str("native"),
67 Self::Sarif => f.write_str("sarif"),
68 }
69 }
70}
71
72impl std::str::FromStr for RuleKind {
73 type Err = String;
74 fn from_str(s: &str) -> Result<Self, Self::Err> {
75 match s {
76 "all" => Ok(Self::All),
77 "syntax" => Ok(Self::Syntax),
78 "fact" => Ok(Self::Fact),
79 "native" => Ok(Self::Native),
80 "sarif" => Ok(Self::Sarif),
81 _ => Err(format!(
82 "unknown rule type: {s}; valid: all, syntax, fact, native, sarif"
83 )),
84 }
85 }
86}
87
/// One entry in the rules lock file: provenance and integrity data for a
/// single locked rule.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct RuleLockEntry {
    // Where the rule came from — presumably a URL or registry id; confirm
    // against the code that writes lock entries (not visible here).
    source: String,
    // Hash of the rule content when it was locked, for change detection.
    content_hash: String,
    // When the rule was added (string-formatted timestamp, format TBD).
    added: String,
}
95
/// On-disk lock file mapping rule id → [`RuleLockEntry`], stored as TOML
/// (see `load`/`save` below).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
struct RulesLock {
    rules: HashMap<String, RuleLockEntry>,
}
101
102impl RulesLock {
103 fn load(path: &Path) -> Self {
104 if !path.exists() {
105 return Self::default();
106 }
107 std::fs::read_to_string(path)
108 .ok()
109 .and_then(|content| toml::from_str(&content).ok())
110 .unwrap_or_default()
111 }
112
113 fn save(&self, path: &Path) -> std::io::Result<()> {
114 let content = toml::to_string_pretty(self)
115 .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?;
116 std::fs::write(path, content)
117 }
118}
119
/// Aggregated configuration handed to the rules subcommands in this module.
#[derive(Clone, Debug)]
pub struct RulesRunConfig {
    /// User-defined tag groups (`[rule-tags]`): group name → member rule ids
    /// and/or further tag names, expanded recursively by `expand_tag`.
    pub rule_tags: HashMap<String, Vec<String>>,
    /// Per-rule overrides and global rule settings.
    pub rules: RulesConfig,
    /// File-walk settings (see `normalize_rules_config::WalkConfig`).
    pub walk: normalize_rules_config::WalkConfig,
}
131
132fn tag_color(tag: &str) -> nu_ansi_term::Color {
141 use nu_ansi_term::Color;
142 const PALETTE: &[Color] = &[
147 Color::Cyan, Color::Green, Color::Magenta, Color::Fixed(80), Color::Fixed(111), Color::Fixed(141), Color::Fixed(78), Color::Fixed(117), Color::Fixed(183), Color::Fixed(159), ];
158 let mut hash = 0xcbf29ce484222325u64;
160 for byte in tag.bytes() {
161 hash ^= u64::from(byte);
162 hash = hash.wrapping_mul(0x100000001b3);
163 }
164 PALETTE[(hash as usize) % PALETTE.len()]
165}
166
167fn paint_tag(tag: &str, use_colors: bool) -> String {
169 if use_colors {
170 tag_color(tag).paint(tag).to_string()
171 } else {
172 format!("[{}]", tag)
173 }
174}
175
176fn paint_severity(severity: &str, use_colors: bool) -> String {
178 if !use_colors {
179 return severity.to_string();
180 }
181 match severity.trim() {
185 "error" => nu_ansi_term::Color::Red.paint(severity).to_string(),
186 "warning" => nu_ansi_term::Color::Yellow.paint(severity).to_string(),
187 "info" => nu_ansi_term::Color::Blue.paint(severity).to_string(),
188 _ => nu_ansi_term::Color::DarkGray.paint(severity).to_string(),
189 }
190}
191
192fn paint_tags(tags: &[String], use_colors: bool) -> String {
194 tags.iter()
195 .map(|t| paint_tag(t, use_colors))
196 .collect::<Vec<_>>()
197 .join(" ")
198}
199
200fn expand_tag<'a>(
213 tag: &str,
214 rule_tags: &'a HashMap<String, Vec<String>>,
215 all_rules: &'a [UnifiedRule],
216 visited: &mut HashSet<String>,
217) -> HashSet<&'a str> {
218 if !visited.insert(tag.to_string()) {
219 return HashSet::new();
221 }
222
223 let mut ids: HashSet<&'a str> = HashSet::new();
224
225 for r in all_rules {
227 if r.tags.iter().any(|t| t == tag) {
228 ids.insert(r.id.as_str());
229 }
230 }
231
232 if let Some(members) = rule_tags.get(tag) {
234 for member in members {
235 if all_rules.iter().any(|r| r.id == *member) {
237 ids.insert(member.as_str());
238 } else {
239 ids.extend(expand_tag(member, rule_tags, all_rules, visited));
241 }
242 }
243 }
244
245 ids
246}
247
/// One row of the structured rules listing (`normalize rules list`).
#[derive(Debug, Clone, Serialize, schemars::JsonSchema)]
pub struct RuleEntry {
    /// Rule identifier.
    pub id: String,
    /// "syntax", "fact", or "native".
    pub rule_type: String,
    /// Effective severity after config overrides.
    pub severity: String,
    /// "builtin" or "project".
    pub source: String,
    /// Short human-readable description shown under the entry.
    pub message: String,
    /// Effective enabled state after config overrides.
    pub enabled: bool,
    /// Tags carried by the rule (builtin plus any config-added ones).
    pub tags: Vec<String>,
    /// Whether the rule is part of the recommended set.
    pub recommended: bool,
}
266
/// Full output of `normalize rules list`: the (already filtered and sorted)
/// rules plus per-kind counts for the summary header.
#[derive(Debug, Clone, Serialize, schemars::JsonSchema)]
pub struct RulesListReport {
    pub rules: Vec<RuleEntry>,
    /// Total number of listed rules (== `rules.len()` at build time).
    pub total: usize,
    pub syntax_count: usize,
    pub fact_count: usize,
    pub native_count: usize,
    pub disabled_count: usize,
}
277
impl normalize_output::OutputFormatter for RulesListReport {
    /// Plain (uncolored) listing: summary header, a two-line entry per rule,
    /// and a footer explaining how to configure rules.
    fn format_text(&self) -> String {
        let mut out = String::new();
        if self.rules.is_empty() {
            return "No rules found.\n".to_string();
        }
        // Per-kind breakdown for the header; kinds with zero rules are omitted.
        let breakdown = {
            let mut parts = Vec::new();
            if self.syntax_count > 0 {
                parts.push(format!("{} syntax", self.syntax_count));
            }
            if self.fact_count > 0 {
                parts.push(format!("{} fact", self.fact_count));
            }
            if self.native_count > 0 {
                parts.push(format!("{} native", self.native_count));
            }
            parts.join(", ")
        };
        if self.disabled_count > 0 {
            out.push_str(&format!(
                "{} rules ({}) — {} disabled\n\n",
                self.total, breakdown, self.disabled_count
            ));
        } else {
            out.push_str(&format!("{} rules ({})\n\n", self.total, breakdown));
        }
        for r in &self.rules {
            // Fixed-width columns: [type], id, severity, on/off marker, source, tags.
            let type_col = format!("{:<8}", format!("[{}]", r.rule_type));
            let sev_col = format!("{:<8}", r.severity);
            let state_col = if r.enabled { " " } else { "off" };
            let tags_str = if r.tags.is_empty() {
                String::new()
            } else {
                format!(
                    " {}",
                    r.tags
                        .iter()
                        .map(|t| format!("[{t}]"))
                        .collect::<Vec<_>>()
                        .join(" ")
                )
            };
            out.push_str(&format!(
                " {} {:<30} {} {} {:<7}{}\n",
                type_col, r.id, sev_col, state_col, r.source, tags_str
            ));
            // Second line: the rule's message, indented under its entry.
            out.push_str(&format!(" {}\n", r.message));
        }
        out.push_str("\nConfigure: [rules.\"<id>\"] in .normalize/config.toml\n");
        out.push_str(" severity, enabled, allow — or: normalize rules enable/disable <id>\n");
        out.push_str(" Global patterns: [rules] global-allow = [\"**/fixtures/**\"]\n");
        out.push_str(" Custom tag groups: [rule-tags] my-group = [\"tag1\", \"tag2\"]\n");
        out
    }

    /// ANSI-colored variant with the same layout as `format_text`, plus a
    /// dimmed column-header row and dimmed styling for disabled rules.
    fn format_pretty(&self) -> String {
        use nu_ansi_term::{Color, Style};

        let mut out = String::new();
        if self.rules.is_empty() {
            return "No rules found.\n".to_string();
        }
        // Same per-kind breakdown as format_text.
        let breakdown = {
            let mut parts = Vec::new();
            if self.syntax_count > 0 {
                parts.push(format!("{} syntax", self.syntax_count));
            }
            if self.fact_count > 0 {
                parts.push(format!("{} fact", self.fact_count));
            }
            if self.native_count > 0 {
                parts.push(format!("{} native", self.native_count));
            }
            parts.join(", ")
        };
        let header = if self.disabled_count > 0 {
            format!(
                "{} rules ({}) — {} disabled",
                Color::White.bold().paint(self.total.to_string()),
                breakdown,
                Color::DarkGray.paint(self.disabled_count.to_string())
            )
        } else {
            format!(
                "{} rules ({})",
                Color::White.bold().paint(self.total.to_string()),
                breakdown
            )
        };
        out.push_str(&format!("{header}\n\n"));

        // Dimmed column-header row (ST = enabled state).
        let gray = Color::DarkGray;
        out.push_str(&format!(
            "{}\n",
            gray.paint(format!(
                " {:<6} {:<30} {:<8} ST {:<7} TAGS",
                "TYPE", "ID", "SEVERITY", "SOURCE"
            ))
        ));

        for r in &self.rules {
            let type_col = paint_rule_type(&r.rule_type);
            let sev_col = paint_severity(&format!("{:<8}", r.severity), true);
            // ● = enabled, ○ (dimmed) = disabled.
            let state_col = if r.enabled {
                Style::new().paint(" ● ").to_string()
            } else {
                Color::DarkGray.paint(" ○ ").to_string()
            };
            let tags_str = if r.tags.is_empty() {
                String::new()
            } else {
                format!(" {}", paint_tags(&r.tags, true))
            };
            // Pad before painting so ANSI escape codes don't break alignment.
            let id_padded = format!("{:<30}", r.id);
            let id_col = if r.enabled {
                id_padded
            } else {
                Color::DarkGray.paint(id_padded).to_string()
            };
            out.push_str(&format!(
                " {type_col} {id_col} {sev_col} {state_col} {:<7}{tags_str}\n",
                r.source
            ));
            let desc = if r.enabled {
                Color::DarkGray.paint(&r.message).to_string()
            } else {
                Color::DarkGray.dimmed().paint(&r.message).to_string()
            };
            out.push_str(&format!(" {desc}\n"));
        }
        let dim = Color::DarkGray;
        out.push('\n');
        out.push_str(
            &dim.paint("Configure: [rules.\"<id>\"] in .normalize/config.toml\n")
                .to_string(),
        );
        out.push_str(
            &dim.paint(" severity, enabled, allow — or: normalize rules enable/disable <id>\n")
                .to_string(),
        );
        out.push_str(
            &dim.paint(" Global patterns: [rules] global-allow = [\"**/fixtures/**\"]\n")
                .to_string(),
        );
        out.push_str(
            &dim.paint(" Custom tag groups: [rule-tags] my-group = [\"tag1\", \"tag2\"]\n")
                .to_string(),
        );
        out
    }
}
432
/// Structured report for `normalize rules info <id>` (one rule's metadata).
#[derive(Debug, Clone, Serialize, schemars::JsonSchema)]
pub struct RuleInfoReport {
    pub id: String,
    /// "syntax" or "fact" (set by `show_rule_structured`).
    pub rule_type: String,
    pub severity: String,
    pub enabled: bool,
    /// True for rules shipped with the tool, false for project-local ones.
    pub builtin: bool,
    pub tags: Vec<String>,
    /// Languages the rule applies to; empty for fact rules.
    pub languages: Vec<String>,
    pub message: String,
    /// Auto-fix replacement text; `Some("")` means "delete the match".
    /// Only syntax rules carry a fix (fact rules always get `None`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub fix: Option<String>,
    /// Markdown documentation block, when the rule provides one.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// Allow-list patterns (files the rule is skipped for), as strings.
    pub allow: Vec<String>,
}
464
465impl normalize_output::OutputFormatter for RuleInfoReport {
466 fn format_text(&self) -> String {
467 let mut out = String::new();
468 out.push_str(&format!("{} [{}]\n", self.id, self.rule_type));
469 out.push_str(&format!(" severity: {}\n", self.severity));
470 out.push_str(&format!(" enabled: {}\n", self.enabled));
471 if !self.tags.is_empty() {
472 out.push_str(&format!(" tags: {}\n", self.tags.join(", ")));
473 }
474 if !self.languages.is_empty() {
475 out.push_str(&format!(" langs: {}\n", self.languages.join(", ")));
476 }
477 if !self.allow.is_empty() {
478 out.push_str(&format!(" allow: {}\n", self.allow.join(" ")));
479 }
480 if let Some(ref fix) = self.fix {
481 if fix.is_empty() {
482 out.push_str(" fix: (delete match)\n");
483 } else {
484 out.push_str(&format!(" fix: {}\n", fix));
485 }
486 }
487 out.push_str(&format!(" message: {}\n", self.message));
488 if let Some(ref doc) = self.description {
489 out.push('\n');
490 out.push_str(doc);
491 out.push('\n');
492 } else {
493 out.push('\n');
494 out.push_str(
495 "(no documentation — add a markdown comment block after the frontmatter)\n",
496 );
497 }
498 out
499 }
500}
501
/// One tag in the structured tags listing.
#[derive(Debug, Clone, Serialize, schemars::JsonSchema)]
pub struct TagEntry {
    /// Tag name.
    pub tag: String,
    /// Where the tag is defined: "builtin", "user-defined", or "builtin+user".
    pub source: String,
    /// Number of rules covered by the tag (== `rules.len()` at build time).
    pub count: usize,
    /// Ids of the rules covered by the tag.
    pub rules: Vec<String>,
}
514
/// Structured output of `normalize rules tags`: all known tags, sorted by
/// name (built from a `BTreeMap` in `list_tags_structured`).
#[derive(Debug, Clone, Serialize, schemars::JsonSchema)]
pub struct RulesTagsReport {
    pub tags: Vec<TagEntry>,
}
520
521impl normalize_output::OutputFormatter for RulesTagsReport {
522 fn format_text(&self) -> String {
523 if self.tags.is_empty() {
524 return "No tags found.\n".to_string();
525 }
526 let mut out = String::new();
527 for entry in &self.tags {
528 if entry.rules.is_empty() || entry.count == entry.rules.len() {
529 out.push_str(&format!(
531 "{:20} [{}] {} rule{}\n",
532 entry.tag,
533 entry.source,
534 entry.count,
535 if entry.count == 1 { "" } else { "s" }
536 ));
537 } else {
538 out.push_str(&format!(
540 "{:20} [{}] {}\n",
541 entry.tag,
542 entry.source,
543 entry.rules.join(" ")
544 ));
545 }
546 }
547 out
548 }
549}
550
551fn paint_rule_type(rule_type: &str) -> String {
554 use nu_ansi_term::Color;
555 let col = match rule_type {
556 "syntax" => Color::Cyan,
557 "fact" => Color::Blue,
558 "native" => Color::Green,
559 _ => Color::DarkGray,
560 };
561 col.paint(format!("{:<6}", rule_type)).to_string()
563}
564
/// Internal, kind-agnostic view of a rule, used to list/filter syntax, fact,
/// and native rules uniformly (see `build_unified_rules`, `build_list_report`).
struct UnifiedRule {
    id: String,
    /// "syntax" | "fact" | "native".
    rule_type: &'static str,
    severity: String,
    /// "builtin" | "project".
    source: &'static str,
    message: String,
    enabled: bool,
    tags: Vec<String>,
    recommended: bool,
}
576
/// Filters applied by `build_list_report` when listing rules.
pub struct ListFilters<'a> {
    /// Restrict to a single rule kind, or `RuleKind::All` for everything.
    pub type_filter: &'a RuleKind,
    /// Keep only rules covered by this tag (expanded via `expand_tag`).
    pub tag: Option<&'a str>,
    /// Keep only enabled rules.
    pub enabled: bool,
    /// Keep only disabled rules.
    pub disabled: bool,
}
590
/// Build the structured listing for `normalize rules list`.
///
/// Gathers syntax, fact, and native rules (subject to `filters.type_filter`),
/// applies the tag / enabled / disabled filters, sorts by kind then id, and
/// returns the report together with per-kind counts.
pub fn build_list_report(
    root: &Path,
    filters: &ListFilters<'_>,
    config: &RulesRunConfig,
) -> RulesListReport {
    let mut all_rules: Vec<UnifiedRule> = Vec::new();

    // Syntax rules, loaded from disk (builtin or project-local).
    if matches!(filters.type_filter, RuleKind::All | RuleKind::Syntax) {
        let syntax_rules = normalize_syntax_rules::load_all_rules(root, &config.rules);
        for r in &syntax_rules {
            let source = if r.builtin { "builtin" } else { "project" };
            all_rules.push(UnifiedRule {
                id: r.id.clone(),
                rule_type: "syntax",
                severity: r.severity.to_string(),
                source,
                message: r.message.clone(),
                enabled: r.enabled,
                tags: r.tags.clone(),
                recommended: r.recommended,
            });
        }
    }

    // Fact rules.
    if matches!(filters.type_filter, RuleKind::All | RuleKind::Fact) {
        let fact_rules = interpret::load_all_rules(root, &config.rules);
        for r in &fact_rules {
            let source = if r.builtin { "builtin" } else { "project" };
            all_rules.push(UnifiedRule {
                id: r.id.clone(),
                rule_type: "fact",
                severity: r.severity.to_string(),
                source,
                message: r.message.clone(),
                enabled: r.enabled,
                tags: r.tags.clone(),
                recommended: r.recommended,
            });
        }
    }

    // Native (compiled-in) rules; severity/enabled/tags can be overridden
    // from config, with the descriptor's defaults as fallback.
    if matches!(filters.type_filter, RuleKind::All | RuleKind::Native) {
        for desc in normalize_native_rules::NATIVE_RULES {
            let override_ = config.rules.rules.get(desc.id);
            let severity = override_
                .and_then(|o| o.severity.as_deref())
                .unwrap_or(desc.default_severity)
                .to_string();
            let enabled = override_
                .and_then(|o| o.enabled)
                .unwrap_or(desc.default_enabled);
            let mut tags: Vec<String> = desc.tags.iter().map(|t| t.to_string()).collect();
            if let Some(o) = override_ {
                tags.extend(o.tags.iter().cloned());
            }
            all_rules.push(UnifiedRule {
                id: desc.id.to_string(),
                rule_type: "native",
                severity,
                source: "builtin",
                message: desc.message.to_string(),
                enabled,
                tags,
                recommended: false,
            });
        }
    }

    // Tag filter: expand the tag (and user-defined groups) into rule ids,
    // then keep only matching rules.
    if let Some(tag) = filters.tag {
        let rule_tags = &config.rule_tags;
        let mut visited = HashSet::new();
        let matching_ids: HashSet<String> = expand_tag(tag, rule_tags, &all_rules, &mut visited)
            .into_iter()
            .map(|s| s.to_string())
            .collect();
        all_rules.retain(|r| matching_ids.contains(&r.id));
    }
    if filters.enabled {
        all_rules.retain(|r| r.enabled);
    }
    if filters.disabled {
        all_rules.retain(|r| !r.enabled);
    }

    // Stable presentation order: by kind, then by id within a kind.
    all_rules.sort_by(|a, b| a.rule_type.cmp(b.rule_type).then(a.id.cmp(&b.id)));

    // Counts are taken after filtering, so they reflect what is displayed.
    let syntax_count = all_rules.iter().filter(|r| r.rule_type == "syntax").count();
    let fact_count = all_rules.iter().filter(|r| r.rule_type == "fact").count();
    let native_count = all_rules.iter().filter(|r| r.rule_type == "native").count();
    let disabled_count = all_rules.iter().filter(|r| !r.enabled).count();
    let total = all_rules.len();

    let rules = all_rules
        .into_iter()
        .map(|r| RuleEntry {
            id: r.id,
            rule_type: r.rule_type.to_string(),
            severity: r.severity,
            source: r.source.to_string(),
            message: r.message,
            enabled: r.enabled,
            tags: r.tags,
            recommended: r.recommended,
        })
        .collect();

    RulesListReport {
        rules,
        total,
        syntax_count,
        fact_count,
        native_count,
        disabled_count,
    }
}
712
713fn build_unified_rules(
718 syntax_rules: &[normalize_syntax_rules::Rule],
719 fact_rules: &[interpret::FactsRule],
720) -> Vec<UnifiedRule> {
721 syntax_rules
722 .iter()
723 .map(|r| UnifiedRule {
724 id: r.id.clone(),
725 rule_type: "syntax",
726 severity: r.severity.to_string(),
727 source: if r.builtin { "builtin" } else { "project" },
728 message: r.message.clone(),
729 enabled: r.enabled,
730 tags: r.tags.clone(),
731 recommended: r.recommended,
732 })
733 .chain(fact_rules.iter().map(|r| UnifiedRule {
734 id: r.id.clone(),
735 rule_type: "fact",
736 severity: r.severity.to_string(),
737 source: if r.builtin { "builtin" } else { "project" },
738 message: r.message.clone(),
739 enabled: r.enabled,
740 tags: r.tags.clone(),
741 recommended: r.recommended,
742 }))
743 .collect()
744}
745
746pub fn enable_disable(
747 root: &Path,
748 id_or_tag: &str,
749 enable: bool,
750 dry_run: bool,
751 config: &RulesRunConfig,
752) -> Result<String, String> {
753 let syntax_rules = normalize_syntax_rules::load_all_rules(root, &config.rules);
755 let fact_rules = interpret::load_all_rules(root, &config.rules);
756 let all_unified = build_unified_rules(&syntax_rules, &fact_rules);
757
758 let rule_tags = &config.rule_tags;
760 let matched_ids: HashSet<&str> = {
761 if all_unified.iter().any(|r| r.id == id_or_tag) {
762 std::iter::once(id_or_tag).collect()
764 } else {
765 let mut visited = HashSet::new();
766 expand_tag(id_or_tag, rule_tags, &all_unified, &mut visited)
767 }
768 };
769
770 let matched_syntax: Vec<&normalize_syntax_rules::Rule> = syntax_rules
771 .iter()
772 .filter(|r| matched_ids.contains(r.id.as_str()))
773 .collect();
774 let matched_fact: Vec<&interpret::FactsRule> = fact_rules
775 .iter()
776 .filter(|r| matched_ids.contains(r.id.as_str()))
777 .collect();
778
779 if matched_syntax.is_empty() && matched_fact.is_empty() {
780 return Err(format!(
781 "No rules found matching '{}' (not a rule ID or tag)",
782 id_or_tag
783 ));
784 }
785
786 let verb = if enable { "enable" } else { "disable" };
787 let config_path = root.join(".normalize").join("config.toml");
788
789 let changes_syntax: Vec<&str> = matched_syntax
792 .iter()
793 .filter(|r| r.enabled != enable)
794 .map(|r| r.id.as_str())
795 .collect();
796 let changes_fact: Vec<&str> = matched_fact
797 .iter()
798 .filter(|r| r.enabled != enable)
799 .map(|r| r.id.as_str())
800 .collect();
801
802 let already_syntax: Vec<&str> = matched_syntax
804 .iter()
805 .filter(|r| r.enabled == enable)
806 .map(|r| r.id.as_str())
807 .collect();
808 let already_fact: Vec<&str> = matched_fact
809 .iter()
810 .filter(|r| r.enabled == enable)
811 .map(|r| r.id.as_str())
812 .collect();
813
814 let mut out = String::new();
815
816 for id in &already_syntax {
817 out.push_str(&format!("{}: already {}d (no change)\n", id, verb));
818 }
819 for id in &already_fact {
820 out.push_str(&format!("{}: already {}d (no change)\n", id, verb));
821 }
822
823 if changes_syntax.is_empty() && changes_fact.is_empty() {
824 return Ok(out);
825 }
826
827 for id in &changes_syntax {
828 if dry_run {
829 out.push_str(&format!("[dry-run] would {} {}\n", verb, id));
830 } else {
831 out.push_str(&format!("{}d {}\n", verb, id));
832 }
833 }
834 for id in &changes_fact {
835 if dry_run {
836 out.push_str(&format!("[dry-run] would {} {}\n", verb, id));
837 } else {
838 out.push_str(&format!("{}d {}\n", verb, id));
839 }
840 }
841
842 if dry_run {
843 return Ok(out);
844 }
845
846 let content = std::fs::read_to_string(&config_path).unwrap_or_default();
848 let mut doc: toml_edit::DocumentMut = content.parse().unwrap_or_else(|e| {
849 tracing::warn!("failed to parse existing config, using defaults: {}", e);
850 toml_edit::DocumentMut::default()
851 });
852
853 if !doc.contains_key("rules") {
857 let mut t = toml_edit::Table::new();
858 t.set_implicit(true);
859 doc["rules"] = toml_edit::Item::Table(t);
860 } else if doc["rules"].is_inline_table() {
861 return Err("Cannot update rules config: the existing 'rules' entry in \
862 .normalize/config.toml is an inline table (e.g. `rules = {...}`). \
863 Convert it to a [rules] section first."
864 .to_string());
865 }
866
867 {
869 let rules_table = doc["rules"]
870 .as_table_mut()
871 .ok_or_else(|| "'rules' is not a TOML table".to_string())?;
872 if !rules_table.contains_key("rule") {
873 let mut t = toml_edit::Table::new();
874 t.set_implicit(true);
875 rules_table.insert("rule", toml_edit::Item::Table(t));
876 }
877 }
878
879 if !changes_syntax.is_empty() {
881 let rule_table = doc["rules"]["rule"]
882 .as_table_mut()
883 .ok_or_else(|| "'rules.rule' is not a TOML table".to_string())?;
884 for id in &changes_syntax {
885 if !rule_table.contains_key(id) {
886 rule_table[id] = toml_edit::Item::Table(toml_edit::Table::new());
887 }
888 rule_table[id]["enabled"] = toml_edit::value(enable);
889 }
890 }
891
892 if !changes_fact.is_empty() {
894 let rule_table = doc["rules"]["rule"]
895 .as_table_mut()
896 .ok_or_else(|| "'rules.rule' is not a TOML table".to_string())?;
897 for id in &changes_fact {
898 if !rule_table.contains_key(id) {
899 rule_table[id] = toml_edit::Item::Table(toml_edit::Table::new());
900 }
901 rule_table[id]["enabled"] = toml_edit::value(enable);
902 }
903 }
904
905 if let Some(parent) = config_path.parent() {
907 std::fs::create_dir_all(parent)
908 .map_err(|e| format!("Failed to create config directory: {e}"))?;
909 }
910
911 std::fs::write(&config_path, doc.to_string())
912 .map_err(|e| format!("Failed to write config: {e}"))?;
913
914 Ok(out)
915}
916
/// Render the full text description of one rule (`normalize rules show <id>`).
///
/// Looks the id up among syntax rules first, then fact rules; when both carry
/// the id, the syntax rule wins. Ends with a config snippet showing the
/// rule's current overrides (or an example). Errors when no rule matches.
pub fn show_rule(
    root: &Path,
    id: &str,
    use_colors: bool,
    config: &RulesRunConfig,
) -> Result<String, String> {
    let syntax_rules = normalize_syntax_rules::load_all_rules(root, &config.rules);
    let fact_rules = interpret::load_all_rules(root, &config.rules);

    let found_syntax = syntax_rules.iter().find(|r| r.id == id);
    let found_fact = fact_rules.iter().find(|r| r.id == id);

    let mut out = String::new();

    match (found_syntax, found_fact) {
        // Syntax rule branch (takes precedence over a fact rule of the same id).
        (Some(r), _) => {
            out.push_str(&format!("{} [syntax]\n", r.id));
            out.push_str(&format!(
                " severity: {}\n",
                paint_severity(&r.severity.to_string(), use_colors)
            ));
            out.push_str(&format!(" enabled: {}\n", r.enabled));
            if !r.tags.is_empty() {
                out.push_str(&format!(
                    " tags: {}\n",
                    paint_tags(&r.tags, use_colors)
                ));
            }
            if !r.languages.is_empty() {
                out.push_str(&format!(" langs: {}\n", r.languages.join(", ")));
            }
            if !r.allow.is_empty() {
                out.push_str(&format!(
                    " allow: {}\n",
                    r.allow
                        .iter()
                        .map(|p| p.as_str())
                        .collect::<Vec<_>>()
                        .join(" ")
                ));
            }
            // An empty fix string means the fix deletes the matched text.
            if let Some(ref fix) = r.fix {
                if fix.is_empty() {
                    out.push_str(" fix: (delete match)\n");
                } else {
                    out.push_str(&format!(" fix: {}\n", fix));
                }
            }
            out.push_str(&format!(" message: {}\n", r.message));
            if let Some(ref doc) = r.doc {
                out.push('\n');
                out.push_str(doc);
                out.push('\n');
            } else {
                out.push('\n');
                out.push_str(
                    "(no documentation — add a markdown comment block after the frontmatter)\n",
                );
            }
            out.push('\n');
            out.push_str(&format_config_snippet(&r.id, config.rules.rules.get(&r.id)));
        }
        // Fact rule branch — same layout, minus languages and fix (fact rules
        // have neither).
        (_, Some(r)) => {
            out.push_str(&format!("{} [fact]\n", r.id));
            out.push_str(&format!(
                " severity: {}\n",
                paint_severity(&r.severity.to_string(), use_colors)
            ));
            out.push_str(&format!(" enabled: {}\n", r.enabled));
            if !r.tags.is_empty() {
                out.push_str(&format!(
                    " tags: {}\n",
                    paint_tags(&r.tags, use_colors)
                ));
            }
            if !r.allow.is_empty() {
                out.push_str(&format!(
                    " allow: {}\n",
                    r.allow
                        .iter()
                        .map(|p| p.as_str())
                        .collect::<Vec<_>>()
                        .join(" ")
                ));
            }
            out.push_str(&format!(" message: {}\n", r.message));
            if let Some(ref doc) = r.doc {
                out.push('\n');
                out.push_str(doc);
                out.push('\n');
            } else {
                out.push('\n');
                out.push_str(
                    "(no documentation — add a markdown comment block after the frontmatter)\n",
                );
            }
            out.push('\n');
            out.push_str(&format_config_snippet(&r.id, config.rules.rules.get(&r.id)));
        }
        _ => return Err(format!("Rule not found: {}", id)),
    }

    Ok(out)
}
1027
1028fn format_config_snippet(
1029 id: &str,
1030 override_: Option<&normalize_rules_config::RuleOverride>,
1031) -> String {
1032 let mut out = String::new();
1033 out.push_str("Configuration (.normalize/config.toml):\n");
1034 if let Some(o) = override_ {
1035 out.push_str(&format!(" [rules.\"{id}\"]\n"));
1036 if let Some(ref sev) = o.severity {
1037 out.push_str(&format!(" severity = \"{sev}\"\n"));
1038 }
1039 if let Some(enabled) = o.enabled {
1040 out.push_str(&format!(" enabled = {enabled}\n"));
1041 }
1042 if !o.allow.is_empty() {
1043 let patterns = o
1044 .allow
1045 .iter()
1046 .map(|p| format!("\"{p}\""))
1047 .collect::<Vec<_>>()
1048 .join(", ");
1049 out.push_str(&format!(" allow = [{patterns}]\n"));
1050 }
1051 } else {
1052 out.push_str(" # No overrides set. Example:\n");
1053 out.push_str(&format!(" [rules.\"{id}\"]\n"));
1054 out.push_str(" severity = \"error\" # error | warning | info | hint\n");
1055 out.push_str(" enabled = false # disable this rule\n");
1056 out.push_str(" allow = [\"**/tests/**\"] # skip matching files\n");
1057 }
1058 out.push('\n');
1059 out.push_str(&format!(" # Or use: normalize rules enable {id}\n"));
1060 out.push_str(&format!(" # normalize rules disable {id}\n"));
1061 out
1062}
1063
/// Render the text tag listing (`normalize rules tags`).
///
/// Collects tags carried by syntax/fact rules ("builtin") and user-defined
/// `[rule-tags]` groups ("user-defined"; "builtin+user" when a name appears
/// in both), optionally filtered to one tag. With `show_rules` each line
/// lists rule ids instead of a count.
pub fn list_tags(
    root: &Path,
    show_rules: bool,
    tag_filter: Option<&str>,
    use_colors: bool,
    config: &RulesRunConfig,
) -> Result<String, String> {
    let syntax_rules = normalize_syntax_rules::load_all_rules(root, &config.rules);
    let fact_rules = interpret::load_all_rules(root, &config.rules);

    let all_unified = build_unified_rules(&syntax_rules, &fact_rules);

    // tag -> (source label, rule ids); BTreeMap keeps output sorted by tag.
    let mut tag_map: std::collections::BTreeMap<String, (String, Vec<String>)> =
        std::collections::BTreeMap::new();

    // Tags carried directly on syntax and fact rules.
    for r in &syntax_rules {
        for tag in &r.tags {
            tag_map
                .entry(tag.clone())
                .or_insert_with(|| ("builtin".to_string(), Vec::new()))
                .1
                .push(r.id.clone());
        }
    }
    for r in &fact_rules {
        for tag in &r.tags {
            tag_map
                .entry(tag.clone())
                .or_insert_with(|| ("builtin".to_string(), Vec::new()))
                .1
                .push(r.id.clone());
        }
    }

    // User-defined tag groups, expanded (recursively) to rule ids; ids
    // already present for the tag are not duplicated.
    let rule_tags = &config.rule_tags;
    for tag_name in rule_tags.keys() {
        let entry = tag_map
            .entry(tag_name.clone())
            .or_insert_with(|| ("user-defined".to_string(), Vec::new()));
        if entry.0 == "builtin" {
            entry.0 = "builtin+user".to_string();
        }
        let mut visited = HashSet::new();
        let resolved = expand_tag(tag_name, rule_tags, &all_unified, &mut visited);
        for id in resolved {
            if !entry.1.contains(&id.to_string()) {
                entry.1.push(id.to_string());
            }
        }
    }

    if let Some(t) = tag_filter {
        tag_map.retain(|k, _| k == t);
    }

    let mut out = String::new();

    if tag_map.is_empty() {
        out.push_str("No tags found.\n");
        return Ok(out);
    }

    for (tag, (origin, ids)) in &tag_map {
        let count = ids.len();
        let tag_display = if use_colors {
            tag_color(tag).paint(tag.as_str()).to_string()
        } else {
            tag.clone()
        };
        if show_rules {
            let ids_str: Vec<&str> = ids.iter().map(|s| s.as_str()).collect();
            out.push_str(&format!(
                "{:20} [{}] {}\n",
                tag_display,
                origin,
                ids_str.join(" ")
            ));
        } else {
            out.push_str(&format!(
                "{:20} [{}] {} rule{}\n",
                tag_display,
                origin,
                count,
                if count == 1 { "" } else { "s" }
            ));
        }
    }

    Ok(out)
}
1167
/// Structured (JSON-friendly) variant of `show_rule`.
///
/// Looks the id up among syntax rules first, then fact rules; when the id
/// exists in both, the syntax rule wins (same precedence as `show_rule`).
/// Errors when no rule matches.
pub fn show_rule_structured(
    root: &Path,
    id: &str,
    config: &RulesRunConfig,
) -> Result<RuleInfoReport, String> {
    let syntax_rules = normalize_syntax_rules::load_all_rules(root, &config.rules);
    let fact_rules = interpret::load_all_rules(root, &config.rules);

    let found_syntax = syntax_rules.iter().find(|r| r.id == id);
    let found_fact = fact_rules.iter().find(|r| r.id == id);

    match (found_syntax, found_fact) {
        (Some(r), _) => Ok(RuleInfoReport {
            id: r.id.clone(),
            rule_type: "syntax".to_string(),
            severity: r.severity.to_string(),
            enabled: r.enabled,
            builtin: r.builtin,
            tags: r.tags.clone(),
            languages: r.languages.clone(),
            message: r.message.clone(),
            fix: r.fix.clone(),
            description: r.doc.clone(),
            allow: r.allow.iter().map(|p| p.as_str().to_string()).collect(),
        }),
        // Fact rules carry no language list or auto-fix.
        (_, Some(r)) => Ok(RuleInfoReport {
            id: r.id.clone(),
            rule_type: "fact".to_string(),
            severity: r.severity.to_string(),
            enabled: r.enabled,
            builtin: r.builtin,
            tags: r.tags.clone(),
            languages: Vec::new(),
            message: r.message.clone(),
            fix: None,
            description: r.doc.clone(),
            allow: r.allow.iter().map(|p| p.as_str().to_string()).collect(),
        }),
        _ => Err(format!("Rule not found: {}", id)),
    }
}
1215
/// Structured variant of `list_tags`: every tag (builtin rule tags plus
/// user-defined `[rule-tags]` groups), optionally filtered to one tag.
/// Shares the same collection logic as `list_tags`.
pub fn list_tags_structured(
    root: &Path,
    tag_filter: Option<&str>,
    config: &RulesRunConfig,
) -> Result<RulesTagsReport, String> {
    let syntax_rules = normalize_syntax_rules::load_all_rules(root, &config.rules);
    let fact_rules = interpret::load_all_rules(root, &config.rules);

    let all_unified = build_unified_rules(&syntax_rules, &fact_rules);

    // tag -> (source label, rule ids); BTreeMap keeps output sorted by tag.
    let mut tag_map: std::collections::BTreeMap<String, (String, Vec<String>)> =
        std::collections::BTreeMap::new();

    // Tags carried directly on syntax and fact rules.
    for r in &syntax_rules {
        for tag in &r.tags {
            tag_map
                .entry(tag.clone())
                .or_insert_with(|| ("builtin".to_string(), Vec::new()))
                .1
                .push(r.id.clone());
        }
    }
    for r in &fact_rules {
        for tag in &r.tags {
            tag_map
                .entry(tag.clone())
                .or_insert_with(|| ("builtin".to_string(), Vec::new()))
                .1
                .push(r.id.clone());
        }
    }

    // User-defined tag groups, expanded (recursively) to rule ids; a name
    // used both as builtin and user tag is labelled "builtin+user".
    let rule_tags = &config.rule_tags;
    for tag_name in rule_tags.keys() {
        let entry = tag_map
            .entry(tag_name.clone())
            .or_insert_with(|| ("user-defined".to_string(), Vec::new()));
        if entry.0 == "builtin" {
            entry.0 = "builtin+user".to_string();
        }
        let mut visited = HashSet::new();
        let resolved = expand_tag(tag_name, rule_tags, &all_unified, &mut visited);
        for id in resolved {
            if !entry.1.contains(&id.to_string()) {
                entry.1.push(id.to_string());
            }
        }
    }

    if let Some(t) = tag_filter {
        tag_map.retain(|k, _| k == t);
    }

    let tags = tag_map
        .into_iter()
        .map(|(tag, (source, rules))| {
            let count = rules.len();
            TagEntry {
                tag,
                source,
                count,
                rules,
            }
        })
        .collect();

    Ok(RulesTagsReport { tags })
}
1286
/// Run the enabled fact rules over the project and return their diagnostics.
///
/// Convenience wrapper around [`collect_fact_diagnostics_incremental`] with
/// `changed_files = None` (i.e. the non-incremental path).
pub async fn collect_fact_diagnostics(
    root: &Path,
    config: &RulesConfig,
    filter_ids: Option<&HashSet<String>>,
    filter_rule: Option<&str>,
) -> Vec<normalize_facts_rules_api::Diagnostic> {
    collect_fact_diagnostics_incremental(root, config, filter_ids, filter_rule, None).await
}
1295
/// Runs enabled fact rules against `root`, optionally incrementally.
///
/// Rules are filtered by `enabled`, then by `filter_ids` / `filter_rule`
/// when given. When `changed_files` is `Some`, each rule runs through a
/// per-(root, rule) engine held in the process-wide `ENGINE_CACHE`,
/// restricted to the changed paths; when `None`, all rules run as a single
/// batch from scratch.
///
/// NOTE(review): per-rule `allow` globs and severity overrides are applied
/// here only on the incremental path — presumably `run_rules_batch`
/// handles them internally; confirm before relying on parity.
pub async fn collect_fact_diagnostics_incremental(
    root: &Path,
    config: &RulesConfig,
    filter_ids: Option<&HashSet<String>>,
    filter_rule: Option<&str>,
    changed_files: Option<&[PathBuf]>,
) -> Vec<normalize_facts_rules_api::Diagnostic> {
    let all_rules_unfiltered = interpret::load_all_rules(root, config);
    let all_rules: Vec<_> = all_rules_unfiltered
        .into_iter()
        .filter(|r| r.enabled)
        .filter(|r| filter_ids.is_none_or(|ids| ids.contains(&r.id)))
        .filter(|r| filter_rule.is_none_or(|id| r.id == id))
        .collect();

    if all_rules.is_empty() {
        return Vec::new();
    }

    // Fact rules evaluate against the indexed relations; if they cannot be
    // built, degrade to "no diagnostics" rather than failing the run.
    let relations = match ensure_relations(root).await {
        Ok(r) => r,
        Err(e) => {
            tracing::warn!("failed to build relations for fact rules: {}", e);
            return Vec::new();
        }
    };

    let mut all_diagnostics: Vec<normalize_facts_rules_api::Diagnostic> = Vec::new();

    if let Some(changed) = changed_files {
        // Non-UTF-8 paths are dropped (they cannot be passed to the engine).
        let changed_strs: Vec<&str> = changed
            .iter()
            .map(|p| p.to_str().unwrap_or(""))
            .filter(|s| !s.is_empty())
            .collect();

        // The cache mutex is held for the whole incremental loop; `unwrap`
        // panics if a previous holder panicked (lock poisoning).
        let mut cache = engine_cache().lock().unwrap();

        for rule in &all_rules {
            let cache_key = engine_cache_key(root, &rule.id);
            // Check the engine *out* of the cache so it can be mutated,
            // then check it back in below (on both success and failure).
            let mut cached_engine: Option<interpret::CachedRuleEngine> =
                cache.remove(&cache_key).map(|s| s.0);

            let mut diagnostics = match interpret::run_rule_with_cache(
                &mut cached_engine,
                rule,
                &relations,
                &changed_strs,
            ) {
                Ok(d) => d,
                Err(e) => {
                    tracing::warn!(rule_id = %rule.id, "incremental fact rule failed: {}", e);
                    // Return the engine to the cache even when the rule
                    // failed, so later runs can still reuse it.
                    if let Some(engine) = cached_engine {
                        cache.insert(cache_key, SendableEngine(engine));
                    }
                    continue;
                }
            };

            if let Some(engine) = cached_engine {
                cache.insert(cache_key, SendableEngine(engine));
            }

            // Drop diagnostics whose file (or message, when there is no
            // location) matches one of the rule's allow globs.
            if !rule.allow.is_empty() {
                diagnostics.retain(|d| {
                    let match_str = match d.location.as_ref() {
                        Some(loc) => loc.file.as_str(),
                        None => d.message.as_str(),
                    };
                    !rule.allow.iter().any(|p| p.matches(match_str))
                });
            }
            use normalize_facts_rules_api::DiagnosticLevel;
            use normalize_rules_config::Severity;
            // Apply the configured severity: Error promotes everything;
            // Info/Hint downgrade warnings to hints; Warning is a no-op.
            match rule.severity {
                Severity::Error => {
                    for d in &mut diagnostics {
                        d.level = DiagnosticLevel::Error;
                    }
                }
                Severity::Info | Severity::Hint => {
                    for d in &mut diagnostics {
                        if d.level == DiagnosticLevel::Warning {
                            d.level = DiagnosticLevel::Hint;
                        }
                    }
                }
                Severity::Warning => {}
            }

            all_diagnostics.extend(diagnostics);
        }
    } else {
        // Full run: evaluate all rules in one batch; failure degrades to
        // an empty result with a warning.
        let rule_refs: Vec<&interpret::FactsRule> = all_rules.iter().collect();
        all_diagnostics = match interpret::run_rules_batch(&rule_refs, &relations) {
            Ok(diagnostics) => diagnostics,
            Err(e) => {
                tracing::warn!("fact rules batch failed: {}", e);
                Vec::new()
            }
        };
    }

    // Honor inline allow-comments in source files on both paths.
    interpret::filter_inline_allowed(&mut all_diagnostics, root);
    all_diagnostics
}
1420
1421pub fn apply_native_rules_config(
1425 report: &mut normalize_output::diagnostics::DiagnosticsReport,
1426 config: &RulesConfig,
1427) {
1428 use normalize_output::diagnostics::Severity;
1429 report.issues.retain_mut(|issue| {
1430 let Some(override_) = config.rules.get(&issue.rule_id) else {
1431 return true;
1432 };
1433 if override_.enabled == Some(false) {
1435 return false;
1436 }
1437 if !override_.allow.is_empty() {
1439 let patterns: Vec<glob::Pattern> = override_
1440 .allow
1441 .iter()
1442 .filter_map(|p| glob::Pattern::new(p).ok())
1443 .collect();
1444 if patterns.iter().any(|p| p.matches(&issue.file)) {
1445 return false;
1446 }
1447 }
1448 if let Some(sev_str) = &override_.severity {
1449 issue.severity = match sev_str.as_str() {
1450 "error" => Severity::Error,
1451 "warning" => Severity::Warning,
1452 "info" => Severity::Info,
1453 "hint" => Severity::Hint,
1454 _ => issue.severity,
1455 };
1456 }
1457 true
1458 });
1459}
1460
/// Attempts to serve a rules run from the background daemon's cache.
///
/// Returns `None` on any failure (no socket, connect/IO error, protocol
/// mismatch) so the caller falls back to running the engines in-process.
///
/// Wire protocol: the request is a 0x01 marker byte followed by one line
/// of JSON; the response is a 5-byte header (0x01 marker + little-endian
/// u32 payload length) followed by an rkyv-serialized `Vec<Issue>`.
#[cfg(unix)]
pub fn try_rules_via_daemon(
    root: &Path,
    filter_ids: Option<&HashSet<String>>,
    filter_rule: Option<&str>,
    engine: Option<&str>,
    filter_files: Option<&[String]>,
) -> Option<Vec<normalize_output::diagnostics::Issue>> {
    use std::io::{Read, Write};
    use std::os::unix::net::UnixStream;
    use std::time::Duration;

    let socket_path = dirs::config_dir()
        .unwrap_or_else(|| std::path::PathBuf::from("."))
        .join("normalize")
        .join("daemon.sock");

    if !socket_path.exists() {
        return None;
    }

    let mut stream = UnixStream::connect(&socket_path).ok()?;
    // Short read timeout: a busy or stale daemon must not block the CLI.
    stream
        .set_read_timeout(Some(Duration::from_millis(500)))
        .ok();
    stream.set_write_timeout(Some(Duration::from_secs(5))).ok();

    let filter_ids_vec: Option<Vec<String>> = filter_ids.map(|ids| ids.iter().cloned().collect());
    let filter_files_vec: Option<Vec<String>> = filter_files.map(|fs| fs.to_vec());

    let request = serde_json::json!({
        "cmd": "run_rules",
        "root": root,
        "filter_ids": filter_ids_vec,
        "filter_rule": filter_rule,
        "engine": engine,
        "filter_files": filter_files_vec,
    });
    let json = serde_json::to_string(&request).ok()?;

    // 0x01 marker byte, then the JSON request terminated by a newline.
    stream.write_all(&[0x01]).ok()?;
    stream.write_all(json.as_bytes()).ok()?;
    stream.write_all(b"\n").ok()?;

    // Response header: marker byte + u32 LE payload length.
    let mut hdr = [0u8; 5];
    stream.read_exact(&mut hdr).ok()?;
    if hdr[0] != 0x01 {
        return None;
    }
    let len = u32::from_le_bytes([hdr[1], hdr[2], hdr[3], hdr[4]]) as usize;

    // rkyv deserialization requires a 16-byte-aligned input buffer.
    let mut aligned = rkyv::util::AlignedVec::<16>::with_capacity(len);
    aligned.resize(len, 0);
    stream.read_exact(&mut aligned[..]).ok()?;

    let issues =
        rkyv::from_bytes::<Vec<normalize_output::diagnostics::Issue>, rkyv::rancor::Error>(
            &aligned,
        )
        .ok()?;

    tracing::info!(
        root = ?root,
        issues = issues.len(),
        engine = ?engine,
        "rules served from daemon cache (rkyv binary)"
    );
    Some(issues)
}
1549
/// Stub for non-Unix platforms: the daemon speaks over a Unix domain
/// socket, so there is never a daemon cache to consult here.
#[cfg(not(unix))]
pub fn try_rules_via_daemon(
    _root: &Path,
    _filter_ids: Option<&HashSet<String>>,
    _filter_rule: Option<&str>,
    _engine: Option<&str>,
    _filter_files: Option<&[String]>,
) -> Option<Vec<normalize_output::diagnostics::Issue>> {
    None
}
1560
/// Runs the selected rule engines against `root` and returns a combined
/// diagnostics report.
///
/// Order of operations:
/// 1. Expand `filter_tag` into a concrete rule-id set when the tag is
///    declared in the config's `rule_tags`.
/// 2. Try to serve syntax/fact/native results from the background daemon's
///    cache (the daemon does not cover SARIF).
/// 3. On a daemon miss, run the syntax and fact engines in-process.
/// 4. SARIF tools always run in-process and are merged at the end.
#[allow(clippy::too_many_arguments)]
pub fn run_rules_report(
    root: &Path,
    project_root: &Path,
    filter_rule: Option<&str>,
    filter_tag: Option<&str>,
    engine: &RuleKind,
    debug: &[String],
    config: &RulesRunConfig,
    files: Option<&[std::path::PathBuf]>,
    path_filter: &normalize_rules_config::PathFilter,
) -> normalize_output::diagnostics::DiagnosticsReport {
    use normalize_output::diagnostics::DiagnosticsReport;

    let mut report = DiagnosticsReport::new();

    // Resolve a user-declared tag into explicit rule ids (recursively, via
    // expand_tag). Tags not declared in the config fall through unchanged
    // and are interpreted by the engines themselves.
    let rule_tags = &config.rule_tags;
    let filter_ids: Option<HashSet<String>> = filter_tag.and_then(|tag| {
        if rule_tags.contains_key(tag) {
            let syntax_rules = normalize_syntax_rules::load_all_rules(root, &config.rules);
            let fact_rules = interpret::load_all_rules(root, &config.rules);
            let all_unified = build_unified_rules(&syntax_rules, &fact_rules);
            let mut visited = HashSet::new();
            let ids = expand_tag(tag, rule_tags, &all_unified, &mut visited);
            Some(ids.iter().map(|s| s.to_string()).collect())
        } else {
            None
        }
    });
    // Once the tag expanded into ids, don't also pass the raw tag down.
    let effective_tag = if filter_ids.is_some() {
        None
    } else {
        filter_tag
    };

    // Engine name understood by the daemon protocol; `All` means no filter.
    let daemon_engine = match engine {
        RuleKind::Syntax => Some("syntax"),
        RuleKind::Fact => Some("fact"),
        RuleKind::Native => Some("native"),
        RuleKind::All => None, _ => None, };
    let daemon_covers_request = matches!(
        engine,
        RuleKind::All | RuleKind::Syntax | RuleKind::Fact | RuleKind::Native
    );
    // Normalize file filters to project-root-relative paths; an absolute
    // path outside the project aborts the whole filter (yields None).
    let filter_files_rel: Option<Vec<String>> = files.and_then(|fs| {
        let mut out = Vec::with_capacity(fs.len());
        for f in fs {
            let rel = if f.is_absolute() {
                f.strip_prefix(project_root).ok()?.to_path_buf()
            } else {
                f.clone()
            };
            out.push(rel.to_string_lossy().into_owned());
        }
        Some(out)
    });
    let daemon_start = std::time::Instant::now();
    let daemon_result = if daemon_covers_request {
        try_rules_via_daemon(
            project_root,
            filter_ids.as_ref(),
            filter_rule,
            daemon_engine,
            filter_files_rel.as_deref(),
        )
    } else {
        None
    };

    if let Some(daemon_issues) = daemon_result {
        eprintln!("[timings] daemon-cache: {:.1?}", daemon_start.elapsed());
        for issue in daemon_issues {
            report.issues.push(issue);
        }
        // Record which engines the daemon result stands in for.
        if matches!(engine, RuleKind::All | RuleKind::Syntax) {
            report.sources_run.push("syntax-rules".into());
        }
        if matches!(engine, RuleKind::All | RuleKind::Fact) {
            report.sources_run.push("fact-rules".into());
        }
        if matches!(engine, RuleKind::All | RuleKind::Native) {
            report.sources_run.push("native".into());
        }
        report.daemon_cached = true;
    } else {
        // Daemon unavailable or not covering: run engines in-process.
        if matches!(engine, RuleKind::All | RuleKind::Syntax) {
            let debug_flags = DebugFlags::from_args(debug);
            let findings = crate::cmd_rules::run_syntax_rules(
                root,
                project_root,
                filter_rule,
                effective_tag,
                filter_ids.as_ref(),
                &config.rules,
                &debug_flags,
                files,
                path_filter,
                &config.walk,
            );
            let unique_files: HashSet<&std::path::Path> =
                findings.iter().map(|f| f.file.as_path()).collect();
            report.files_checked = report.files_checked.max(unique_files.len());
            for f in &findings {
                report.issues.push(finding_to_issue(f, root));
            }
            report.sources_run.push("syntax-rules".into());
        }

        if matches!(engine, RuleKind::All | RuleKind::Fact) {
            let rt = tokio::runtime::Runtime::new().unwrap_or_else(|e| {
                tracing::warn!("failed to create tokio runtime: {}", e);
                panic!("failed to create tokio runtime: {}", e)
            });
            // Run the fact engine on a dedicated thread with a 64 MiB
            // stack rather than on the caller's stack.
            let diagnostics = std::thread::Builder::new()
                .stack_size(64 * 1024 * 1024) .spawn({
                    let project_root = project_root.to_path_buf();
                    let rules = config.rules.clone();
                    let filter_ids = filter_ids.clone();
                    let filter_rule = filter_rule.map(|s| s.to_string());
                    move || {
                        rt.block_on(collect_fact_diagnostics(
                            &project_root,
                            &rules,
                            filter_ids.as_ref(),
                            filter_rule.as_deref(),
                        ))
                    }
                })
                .expect("failed to spawn fact engine thread")
                .join()
                .expect("fact engine thread panicked");
            // Drop diagnostics for files excluded by ignore rules or the
            // config's global allow-globs.
            let allowed_files = build_gitignore_allowed_set(project_root, &config.walk);

            let global_allow: Vec<glob::Pattern> = config
                .rules
                .global_allow
                .iter()
                .filter_map(|s| glob::Pattern::new(s).ok())
                .collect();
            for d in &diagnostics {
                // Diagnostics without a location match on their message.
                let file = match &d.location {
                    Some(loc) => loc.file.as_str(),
                    None => d.message.as_str(),
                };
                if !allowed_files.contains(file) {
                    continue;
                }
                if global_allow.is_empty() || !global_allow.iter().any(|p| p.matches(file)) {
                    report.issues.push(abi_diagnostic_to_issue(d));
                }
            }
            report.sources_run.push("fact-rules".into());
        }
    }

    if matches!(engine, RuleKind::All | RuleKind::Sarif) {
        let sarif_report = run_sarif_tools(root, &config.rules.sarif_tools);
        report.merge(sarif_report);
    }

    report.sort();
    report
}
1753
1754fn sarif_watch_mtime(root: &Path, patterns: &[String]) -> Option<u64> {
1758 let mut max_mtime: Option<u64> = None;
1759 for pattern in patterns {
1760 let full_pattern = root.join(pattern);
1761 let pattern_str = full_pattern.to_string_lossy();
1762 if let Ok(paths) = glob::glob(&pattern_str) {
1763 for entry in paths.flatten() {
1764 let mtime = normalize_native_rules::cache_file_mtime_nanos(&entry);
1765 if mtime > 0 {
1766 max_mtime = Some(max_mtime.map_or(mtime, |prev| prev.max(mtime)));
1767 }
1768 }
1769 }
1770 }
1771 max_mtime
1772}
1773
1774pub fn run_sarif_tools(
1778 root: &Path,
1779 tools: &[SarifTool],
1780) -> normalize_output::diagnostics::DiagnosticsReport {
1781 use normalize_output::diagnostics::{DiagnosticsReport, Issue, Severity};
1782
1783 let mut report = DiagnosticsReport::new();
1784 let root_str = root.to_string_lossy();
1785
1786 for tool in tools {
1787 if tool.command.is_empty() {
1788 continue;
1789 }
1790
1791 enum CacheDecision {
1794 Run,
1796 Hit,
1798 Miss(Box<normalize_native_rules::FindingsCache>, u64),
1800 }
1801
1802 let cache_decision = if tool.watch.is_empty() {
1803 CacheDecision::Run
1804 } else {
1805 match sarif_watch_mtime(root, &tool.watch) {
1806 None => CacheDecision::Run,
1807 Some(max_mtime) => {
1808 let cache = normalize_native_rules::FindingsCache::open(root);
1809 let cache_path = format!("sarif:{}", tool.name);
1810 if let Some(json) = cache.get(&cache_path, max_mtime, "", "sarif") {
1811 if let Ok(issues) = serde_json::from_str::<Vec<Issue>>(&json) {
1812 for issue in issues {
1813 let source = issue.source.clone();
1814 if !report.sources_run.contains(&source) {
1815 report.sources_run.push(source);
1816 }
1817 report.issues.push(issue);
1818 }
1819 }
1820 CacheDecision::Hit
1821 } else {
1822 CacheDecision::Miss(Box::new(cache), max_mtime)
1823 }
1824 }
1825 }
1826 };
1827
1828 if matches!(cache_decision, CacheDecision::Hit) {
1829 continue;
1830 }
1831
1832 let issues_start = report.issues.len();
1833
1834 let args: Vec<String> = tool
1835 .command
1836 .iter()
1837 .map(|a| a.replace("{root}", &root_str))
1838 .collect();
1839
1840 let output = std::process::Command::new(&args[0])
1841 .args(&args[1..])
1842 .current_dir(root)
1843 .output();
1844
1845 let stdout = match output {
1846 Ok(o) => String::from_utf8_lossy(&o.stdout).into_owned(),
1847 Err(e) => {
1848 let msg = format!("failed to run: {e}");
1849 eprintln!("normalize: SARIF tool '{}' {}", tool.name, msg);
1850 report
1851 .tool_errors
1852 .push(normalize_output::diagnostics::ToolFailure {
1853 tool: tool.name.clone(),
1854 message: msg,
1855 });
1856 continue;
1857 }
1858 };
1859
1860 let sarif: serde_json::Value = match serde_json::from_str(&stdout) {
1861 Ok(v) => v,
1862 Err(e) => {
1863 let msg = format!("did not emit valid JSON: {e}");
1864 eprintln!("normalize: SARIF tool '{}' {}", tool.name, msg);
1865 report
1866 .tool_errors
1867 .push(normalize_output::diagnostics::ToolFailure {
1868 tool: tool.name.clone(),
1869 message: msg,
1870 });
1871 continue;
1872 }
1873 };
1874
1875 let runs = match sarif.get("runs").and_then(|v| v.as_array()) {
1876 Some(r) => r,
1877 None => {
1878 let msg = "output missing 'runs' array".to_string();
1879 eprintln!("normalize: SARIF tool '{}' {}", tool.name, msg);
1880 report
1881 .tool_errors
1882 .push(normalize_output::diagnostics::ToolFailure {
1883 tool: tool.name.clone(),
1884 message: msg,
1885 });
1886 continue;
1887 }
1888 };
1889
1890 for run in runs {
1891 let driver_name = run
1892 .pointer("/tool/driver/name")
1893 .and_then(|v| v.as_str())
1894 .unwrap_or(&tool.name);
1895 let source = format!("sarif:{}", driver_name);
1896
1897 let results = run.get("results").and_then(|v| v.as_array());
1898 let Some(results) = results else { continue };
1899
1900 for result in results {
1901 let rule_id = result
1902 .get("ruleId")
1903 .and_then(|v| v.as_str())
1904 .unwrap_or("unknown")
1905 .to_string();
1906 let message = result
1907 .pointer("/message/text")
1908 .and_then(|v| v.as_str())
1909 .unwrap_or("")
1910 .to_string();
1911 let level = result
1912 .get("level")
1913 .and_then(|v| v.as_str())
1914 .unwrap_or("warning");
1915 let severity = match level {
1916 "error" => Severity::Error,
1917 "warning" => Severity::Warning,
1918 "note" | "none" => Severity::Info,
1919 _ => Severity::Warning,
1920 };
1921
1922 let loc = result.pointer("/locations/0/physicalLocation");
1924 let file = loc
1925 .and_then(|l| l.pointer("/artifactLocation/uri"))
1926 .and_then(|v| v.as_str())
1927 .unwrap_or("")
1928 .to_string();
1929 let line = loc
1930 .and_then(|l| l.pointer("/region/startLine"))
1931 .and_then(|v| v.as_u64())
1932 .map(|n| n as usize);
1933 let column = loc
1934 .and_then(|l| l.pointer("/region/startColumn"))
1935 .and_then(|v| v.as_u64())
1936 .map(|n| n as usize);
1937
1938 report.issues.push(Issue {
1939 file,
1940 line,
1941 column,
1942 end_line: None,
1943 end_column: None,
1944 rule_id,
1945 message,
1946 severity,
1947 source: source.clone(),
1948 related: vec![],
1949 suggestion: None,
1950 });
1951 }
1952
1953 report.sources_run.push(source);
1954 }
1955
1956 if let CacheDecision::Miss(cache, max_mtime) = cache_decision {
1957 let cache_path = format!("sarif:{}", tool.name);
1958 let tool_issues = &report.issues[issues_start..];
1959 if let Ok(json) = serde_json::to_string(tool_issues) {
1960 cache.put(&cache_path, max_mtime, "", "sarif", &json);
1961 }
1962 }
1963 }
1964
1965 report
1966}
1967
/// Loads fact relations from the on-disk index, building the index first
/// when the initial load fails.
///
/// On failure, the file index and call graph are (re)built under the
/// normalize data directory, then the load is retried exactly once.
async fn ensure_relations(root: &Path) -> Result<normalize_facts_rules_api::Relations, String> {
    match build_relations_from_index(root).await {
        Ok(r) => Ok(r),
        Err(_) => {
            tracing::info!("Facts index not found. Building...");
            let normalize_dir = get_normalize_dir(root);
            let db_path = normalize_dir.join("index.sqlite");
            let mut idx = normalize_facts::FileIndex::open(&db_path, root)
                .await
                .map_err(|e| format!("Failed to open index: {}", e))?;
            let count = idx
                .refresh()
                .await
                .map_err(|e| format!("Failed to index files: {}", e))?;
            tracing::info!("Indexed {} files.", count);
            let stats = idx
                .refresh_call_graph()
                .await
                .map_err(|e| format!("Failed to index call graph: {}", e))?;
            tracing::info!(
                "Indexed {} symbols, {} calls, {} imports.",
                stats.symbols,
                stats.calls,
                stats.imports
            );
            build_relations_from_index(root).await
        }
    }
}
1998
1999fn get_normalize_dir(root: &Path) -> std::path::PathBuf {
2001 if let Ok(index_dir) = std::env::var("NORMALIZE_INDEX_DIR") {
2002 let path = std::path::PathBuf::from(&index_dir);
2003 if path.is_absolute() {
2004 return path;
2005 }
2006 let data_home = std::env::var("XDG_DATA_HOME")
2008 .map(std::path::PathBuf::from)
2009 .unwrap_or_else(|_| {
2010 dirs::home_dir()
2011 .unwrap_or_else(|| std::path::PathBuf::from("."))
2012 .join(".local/share")
2013 });
2014 return data_home.join("normalize").join(&index_dir);
2015 }
2016 root.join(".normalize")
2017}
2018
/// Loads all indexed facts from the SQLite index and materializes them as
/// in-memory `Relations` for the fact rule engine.
///
/// Fails with a descriptive string when the index cannot be opened or any
/// fact table cannot be read. This function does not build or refresh the
/// index — see `ensure_relations` for that.
pub async fn build_relations_from_index(
    root: &Path,
) -> Result<normalize_facts_rules_api::Relations, String> {
    use normalize_facts_rules_api::Relations;

    let normalize_dir = get_normalize_dir(root);
    let db_path = normalize_dir.join("index.sqlite");
    let idx = normalize_facts::FileIndex::open(&db_path, root)
        .await
        .map_err(|e| format!("Failed to open index: {}", e))?;

    let mut relations = Relations::new();

    // Symbol facts: definition site, span, visibility, parent, impl flag.
    let symbols = idx
        .all_symbols_with_details()
        .await
        .map_err(|e| format!("Failed to get symbols: {}", e))?;

    for (file, name, kind, start_line, end_line, parent, visibility, is_impl) in &symbols {
        relations.add_symbol(file, name, kind, *start_line as u32);
        relations.add_symbol_range(file, name, *start_line as u32, *end_line as u32);
        relations.add_visibility(file, name, visibility);
        if let Some(parent_name) = parent {
            relations.add_parent(file, name, parent_name);
        }
        if *is_impl {
            relations.add_is_impl(file, name);
        }
    }

    // Symbol attribute facts.
    let attrs = idx
        .all_symbol_attributes()
        .await
        .map_err(|e| format!("Failed to get symbol attributes: {}", e))?;

    for (file, name, attribute) in &attrs {
        relations.add_attribute(file, name, attribute);
    }

    // Interface-implementation facts.
    let implements = idx
        .all_symbol_implements()
        .await
        .map_err(|e| format!("Failed to get symbol implements: {}", e))?;

    for (file, name, interface) in &implements {
        relations.add_implements(file, name, interface);
    }

    // Type -> method ownership facts.
    let type_methods = idx
        .all_type_methods()
        .await
        .map_err(|e| format!("Failed to get type methods: {}", e))?;

    for (file, type_name, method_name) in &type_methods {
        relations.add_type_method(file, type_name, method_name);
    }

    // Import facts; the import's line number is not carried over.
    let imports = idx
        .all_imports()
        .await
        .map_err(|e| format!("Failed to get imports: {}", e))?;

    for (file, module, name, _line) in imports {
        relations.add_import(&file, &module, &name);
    }

    // Call edges, with the receiver qualifier when the index has one.
    let calls = idx
        .all_calls_with_qualifiers()
        .await
        .map_err(|e| format!("Failed to get calls: {}", e))?;

    for (file, caller, callee, qualifier, line) in &calls {
        relations.add_call(file, caller, callee, *line);
        if let Some(qual) = qualifier {
            relations.add_qualifier(file, caller, callee, qual);
        }
    }

    // Control-flow-graph edges and per-block effects.
    let cfg_edges = idx
        .all_cfg_edges()
        .await
        .map_err(|e| format!("Failed to get CFG edges: {}", e))?;

    for (file, func, func_line, from, to, kind, exception_type) in &cfg_edges {
        relations.add_cfg_edge(file, func, *func_line, *from, *to, kind, exception_type);
    }

    let cfg_effects = idx
        .all_cfg_effects()
        .await
        .map_err(|e| format!("Failed to get CFG effects: {}", e))?;

    for (file, func, func_line, block, kind, line, label) in &cfg_effects {
        relations.add_cfg_effect(file, func, *func_line, *block, kind, *line, label);
    }

    Ok(relations)
}
2126
2127fn rules_dir(global: bool) -> Option<PathBuf> {
2132 if global {
2133 dirs::config_dir().map(|d| d.join("normalize").join("rules"))
2134 } else {
2135 Some(PathBuf::from(".normalize").join("rules"))
2136 }
2137}
2138
2139fn lock_file_path(global: bool) -> Option<PathBuf> {
2140 if global {
2141 dirs::config_dir().map(|d| d.join("normalize").join("rules.lock"))
2142 } else {
2143 Some(PathBuf::from(".normalize").join("rules.lock"))
2144 }
2145}
2146
/// Picks the on-disk extension for an imported rule from its source URL:
/// URLs ending in `.dl` are saved as `.dl`, everything else as `.scm`.
fn detect_extension(url: &str) -> &'static str {
    match url.strip_suffix(".dl") {
        Some(_) => "dl",
        None => "scm",
    }
}
2151
/// Downloads a rule from `url` and installs it into the local or global
/// rules directory, recording its provenance in the lock file.
///
/// The downloaded content must carry TOML frontmatter with an `id` field;
/// the rule is saved as `<rules_dir>/<id>.<ext>` where the extension is
/// inferred from the URL (`.dl` vs `.scm`). Lock-file write failure is a
/// warning, not an error, since the rule itself is already installed.
pub fn add_rule(url: &str, global: bool) -> Result<(), String> {
    let rules_dir =
        rules_dir(global).ok_or_else(|| "Could not determine rules directory".to_string())?;

    std::fs::create_dir_all(&rules_dir)
        .map_err(|e| format!("Failed to create rules directory: {e}"))?;

    let content = download_url(url).map_err(|e| format!("Failed to download rule: {e}"))?;

    // The rule id names the file on disk and keys the lock entry.
    let rule_id = extract_rule_id(&content).ok_or_else(|| {
        "Could not extract rule ID from downloaded content. Rule must have TOML frontmatter with 'id' field".to_string()
    })?;

    let ext = detect_extension(url);
    let rule_path = rules_dir.join(format!("{}.{}", rule_id, ext));
    std::fs::write(&rule_path, &content).map_err(|e| format!("Failed to save rule: {e}"))?;

    let lock_path =
        lock_file_path(global).ok_or_else(|| "Could not determine lock file path".to_string())?;

    // Record source URL, content hash, and install date so `update_rules`
    // can re-fetch this rule later.
    let mut lock = RulesLock::load(&lock_path);
    lock.rules.insert(
        rule_id.clone(),
        RuleLockEntry {
            source: url.to_string(),
            content_hash: content_hash(&content),
            added: chrono::Utc::now().format("%Y-%m-%d").to_string(),
        },
    );

    if let Err(e) = lock.save(&lock_path) {
        eprintln!("Warning: Failed to update lock file: {}", e);
    }

    println!("Added rule '{}' from {}", rule_id, url);
    println!("Saved to: {}", rule_path.display());

    Ok(())
}
2191
2192pub fn update_rules(rule_id: Option<&str>) -> Result<(), String> {
2193 let mut updated = Vec::new();
2194 let mut errors: Vec<(String, String)> = Vec::new();
2195
2196 for global in [false, true] {
2197 if let (Some(lock_path), Some(rules_dir)) = (lock_file_path(global), rules_dir(global)) {
2198 let lock = RulesLock::load(&lock_path);
2199 for (id, entry) in &lock.rules {
2200 if rule_id.is_some() && rule_id != Some(id.as_str()) {
2201 continue;
2202 }
2203 match download_url(&entry.source) {
2204 Ok(content) => {
2205 let ext = detect_extension(&entry.source);
2206 let path = rules_dir.join(format!("{}.{}", id, ext));
2207 if let Err(e) = std::fs::write(&path, &content) {
2208 errors.push((id.clone(), e.to_string()));
2209 } else {
2210 updated.push(id.clone());
2211 }
2212 }
2213 Err(e) => {
2214 errors.push((id.clone(), e.to_string()));
2215 }
2216 }
2217 }
2218 }
2219 }
2220
2221 if updated.is_empty() && errors.is_empty() {
2222 println!("No imported rules to update.");
2223 } else {
2224 for id in &updated {
2225 println!("Updated: {}", id);
2226 }
2227 for (id, err) in &errors {
2228 eprintln!("Failed to update {}: {}", id, err);
2229 }
2230 }
2231
2232 if errors.is_empty() {
2233 Ok(())
2234 } else {
2235 Err(format!("{} rule(s) failed to update", errors.len()))
2236 }
2237}
2238
2239pub fn remove_rule(rule_id: &str) -> Result<(), String> {
2240 let mut removed = false;
2241
2242 for global in [false, true] {
2243 if removed {
2244 break;
2245 }
2246 if let (Some(lock_path), Some(rules_dir)) = (lock_file_path(global), rules_dir(global)) {
2247 let mut lock = RulesLock::load(&lock_path);
2248 if lock.rules.remove(rule_id).is_some() {
2249 let _ = lock.save(&lock_path);
2250 for ext in ["scm", "dl"] {
2252 let rule_path = rules_dir.join(format!("{}.{}", rule_id, ext));
2253 let _ = std::fs::remove_file(&rule_path);
2254 }
2255 removed = true;
2256 }
2257 }
2258 }
2259
2260 if removed {
2261 println!("Removed rule '{}'", rule_id);
2262 Ok(())
2263 } else {
2264 Err(format!("Rule '{}' not found in lock file", rule_id))
2265 }
2266}
2267
2268fn download_url(url: &str) -> Result<String, String> {
2273 let response = ureq::get(url)
2274 .call()
2275 .map_err(|e| format!("HTTP request failed: {}", e))?;
2276
2277 if response.status() != 200 {
2278 return Err(format!(
2279 "HTTP {}: {}",
2280 response.status(),
2281 response.status_text()
2282 ));
2283 }
2284
2285 response
2286 .into_string()
2287 .map_err(|e| format!("Failed to read response: {}", e))
2288}
2289
2290fn extract_rule_id(content: &str) -> Option<String> {
2291 let lines: Vec<&str> = content.lines().collect();
2292
2293 let mut in_frontmatter = false;
2294 let mut toml_lines = Vec::new();
2295
2296 for line in lines {
2297 let trimmed = line.trim();
2298 if trimmed == "# ---" {
2299 if in_frontmatter {
2300 break;
2301 }
2302 in_frontmatter = true;
2303 continue;
2304 }
2305 if in_frontmatter {
2306 if let Some(rest) = trimmed.strip_prefix("# ") {
2307 toml_lines.push(rest);
2308 } else if let Some(rest) = trimmed.strip_prefix('#') {
2309 toml_lines.push(rest);
2310 }
2311 }
2312 }
2313
2314 if toml_lines.is_empty() {
2315 return None;
2316 }
2317
2318 let toml_content = toml_lines.join("\n");
2319 let table: toml::Table = toml_content.parse().ok()?;
2320 table.get("id")?.as_str().map(|s| s.to_string())
2321}
2322
/// Hashes rule content for the lock file.
///
/// Uses 64-bit FNV-1a rather than `std::collections::hash_map::DefaultHasher`:
/// the std docs explicitly allow `DefaultHasher`'s output to change between
/// Rust releases, and this hash is persisted in `rules.lock`, so it must
/// stay reproducible across builds. (In the visible code the hash is only
/// written, never compared, so changing the algorithm is safe here —
/// NOTE(review): confirm no external consumer compares stored hashes.)
fn content_hash(content: &str) -> String {
    // FNV-1a 64-bit parameters.
    const FNV_OFFSET_BASIS: u64 = 0xcbf2_9ce4_8422_2325;
    const FNV_PRIME: u64 = 0x0000_0100_0000_01b3;

    let mut hash = FNV_OFFSET_BASIS;
    for byte in content.bytes() {
        hash ^= u64::from(byte);
        hash = hash.wrapping_mul(FNV_PRIME);
    }
    format!("{:016x}", hash)
}
2331
2332pub fn finding_to_issue(
2338 f: &normalize_syntax_rules::Finding,
2339 root: &std::path::Path,
2340) -> normalize_output::diagnostics::Issue {
2341 use normalize_output::diagnostics::Issue;
2342 let effective_root;
2344 let root = if root.is_file() {
2345 effective_root = root.parent().unwrap_or(root).to_path_buf();
2346 &effective_root
2347 } else {
2348 root
2349 };
2350 let rel_path = f.file.strip_prefix(root).unwrap_or(&f.file);
2351 Issue {
2352 file: rel_path.to_string_lossy().to_string(),
2353 line: Some(f.start_line),
2354 column: Some(f.start_col),
2355 end_line: Some(f.end_line),
2356 end_column: Some(f.end_col),
2357 rule_id: f.rule_id.clone(),
2358 message: f.message.clone(),
2359 severity: syntax_severity(f.severity),
2360 source: "syntax-rules".into(),
2361 related: Vec::new(),
2362 suggestion: f.fix.clone(),
2363 }
2364}
2365
/// Maps a syntax-rule severity onto the shared diagnostics severity enum
/// (a one-to-one, name-for-name mapping).
fn syntax_severity(s: normalize_syntax_rules::Severity) -> normalize_output::diagnostics::Severity {
    use normalize_output::diagnostics::Severity;
    match s {
        normalize_syntax_rules::Severity::Error => Severity::Error,
        normalize_syntax_rules::Severity::Warning => Severity::Warning,
        normalize_syntax_rules::Severity::Info => Severity::Info,
        normalize_syntax_rules::Severity::Hint => Severity::Hint,
    }
}
2376
2377pub fn abi_diagnostic_to_issue(
2379 d: &normalize_facts_rules_api::Diagnostic,
2380) -> normalize_output::diagnostics::Issue {
2381 use normalize_output::diagnostics::{Issue, RelatedLocation};
2382
2383 let (file, line, column) = match &d.location {
2384 Some(loc) => (
2385 loc.file.to_string(),
2386 Some(loc.line as usize),
2387 loc.column.map(|c| c as usize),
2388 ),
2389 None => (String::new(), None, None),
2390 };
2391
2392 let related = d
2393 .related
2394 .iter()
2395 .map(|loc| RelatedLocation {
2396 file: loc.file.to_string(),
2397 line: Some(loc.line as usize),
2398 message: None,
2399 })
2400 .collect();
2401
2402 let suggestion = d.suggestion.clone();
2403
2404 Issue {
2405 file,
2406 line,
2407 column,
2408 end_line: None,
2409 end_column: None,
2410 rule_id: d.rule_id.to_string(),
2411 message: d.message.to_string(),
2412 severity: abi_level(d.level),
2413 source: "fact-rules".into(),
2414 related,
2415 suggestion,
2416 }
2417}
2418
/// Maps a fact-rule diagnostic level onto the shared diagnostics severity
/// enum (Hint/Warning/Error map one-to-one; there is no Info level).
fn abi_level(
    level: normalize_facts_rules_api::DiagnosticLevel,
) -> normalize_output::diagnostics::Severity {
    use normalize_output::diagnostics::Severity;
    match level {
        normalize_facts_rules_api::DiagnosticLevel::Hint => Severity::Hint,
        normalize_facts_rules_api::DiagnosticLevel::Warning => Severity::Warning,
        normalize_facts_rules_api::DiagnosticLevel::Error => Severity::Error,
    }
}
2430
/// Formats a fact-rule diagnostic for display; delegates to the shared
/// loader implementation.
pub fn format_diagnostic(diag: &normalize_facts_rules_api::Diagnostic, use_colors: bool) -> String {
    crate::loader::format_diagnostic(diag, use_colors)
}
2435
/// Walks `root` honoring the configured ignore files and exclude globs,
/// returning the set of root-relative path strings that fact rules may
/// report on.
///
/// `.gitignore` handling (project, global, and `.git/info/exclude`) is
/// toggled as a group by whether `.gitignore` appears in the configured
/// ignore files; any other configured ignore file is added individually
/// when it exists on disk.
fn build_gitignore_allowed_set(
    root: &Path,
    walk_config: &normalize_rules_config::WalkConfig,
) -> HashSet<String> {
    let ignore_files = walk_config.ignore_files();
    let has_gitignore = ignore_files.contains(&".gitignore");
    let mut builder = ignore::WalkBuilder::new(root);
    // Hidden files are walked; only ignore rules decide exclusion.
    builder
        .hidden(false)
        .git_ignore(has_gitignore)
        .git_global(has_gitignore)
        .git_exclude(has_gitignore);
    for file in &ignore_files {
        if *file != ".gitignore" {
            let ignore_path = root.join(file);
            if ignore_path.exists() {
                builder.add_ignore(ignore_path);
            }
        }
    }
    // Apply the config's exclude globs against each entry's root-relative
    // path (and its parent directories).
    let excludes = walk_config.compiled_excludes(root);
    let root_owned = root.to_path_buf();
    builder.filter_entry(move |e| {
        let path = e.path();
        let rel = path.strip_prefix(&root_owned).unwrap_or(path);
        if rel.as_os_str().is_empty() {
            // The walk root itself is always kept.
            return true;
        }
        let is_dir = e.file_type().is_some_and(|ft| ft.is_dir());
        !excludes
            .matched_path_or_any_parents(rel, is_dir)
            .is_ignore()
    });
    let mut allowed = HashSet::new();
    for entry in builder.build().flatten() {
        if let Ok(rel) = entry.path().strip_prefix(root) {
            allowed.insert(rel.to_string_lossy().into_owned());
        }
    }
    allowed
}