1use crate::config::{MatchMode, RuleConfig, Severity, WikiConfig};
2use crate::error::WikiError;
3use crate::link_index::LinkIndex;
4use crate::resolve;
5use crate::wiki::Wiki;
6
/// Restricts which severities a lint run executes (see `should_show`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SeverityFilter {
    /// Run every check whose configured severity is not `Off`.
    All,
    /// Run only checks configured as `Severity::Error`.
    ErrorOnly,
    /// Run only checks configured as `Severity::Warn`.
    WarnOnly,
}
17
/// Running totals of lint findings, bucketed by severity.
struct LintResult {
    errors: usize,
    warnings: usize,
}
22
23impl LintResult {
24 fn new() -> Self {
25 Self {
26 errors: 0,
27 warnings: 0,
28 }
29 }
30
31 fn tally(&mut self, count: usize, severity: Severity) {
32 match severity {
33 Severity::Error => self.errors += count,
34 Severity::Warn => self.warnings += count,
35 Severity::Off => {}
36 }
37 }
38}
39
40fn should_show(severity: Severity, filter: SeverityFilter) -> bool {
41 match (severity, filter) {
42 (Severity::Off, _) => false,
43 (_, SeverityFilter::All) => true,
44 (Severity::Error, SeverityFilter::ErrorOnly) => true,
45 (Severity::Warn, SeverityFilter::WarnOnly) => true,
46 _ => false,
47 }
48}
49
/// Run every enabled check and configured rule over `wiki`, printing
/// findings to stderr.
///
/// `filter` restricts which severities execute at all: checks the filter
/// excludes are skipped entirely, not merely silenced.
///
/// Returns the number of error-severity findings; warnings are reported
/// and summarized but do not contribute to the returned count.
pub fn lint(wiki: &Wiki, filter: SeverityFilter) -> Result<usize, WikiError> {
    let mut result = LintResult::new();
    let config = wiki.config();

    if should_show(config.checks.broken_links, filter) {
        let count = run_broken_links(wiki)?;
        result.tally(count, config.checks.broken_links);
    }

    // The link index is only built when the orphan check will actually run.
    if should_show(config.checks.orphan_pages, filter) {
        let link_index = LinkIndex::build(wiki)?;
        let count = run_orphan_pages(wiki, &link_index);
        result.tally(count, config.checks.orphan_pages);
    }

    if should_show(config.checks.index_coverage, filter)
        && let Some(index_path) = wiki.index_path()
    {
        if index_path.is_file() {
            let count = run_index_coverage(wiki, &index_path)?;
            result.tally(count, config.checks.index_coverage);
        } else {
            // A configured-but-missing index file is a soft failure: warn
            // and continue rather than aborting the whole lint run.
            eprintln!(
                "warn: index file '{}' not found, skipping index coverage",
                index_path.display()
            );
        }
    }

    for rule in &config.rules {
        let severity = rule.severity();
        if !should_show(severity, filter) {
            continue;
        }
        let count = run_rule(wiki, rule)?;
        result.tally(count, severity);
    }

    // One-line summary, printed only when something was found.
    if result.errors > 0 || result.warnings > 0 {
        let mut parts = Vec::new();
        if result.errors > 0 {
            parts.push(format!("{} error(s)", result.errors));
        }
        if result.warnings > 0 {
            parts.push(format!("{} warning(s)", result.warnings));
        }
        eprintln!("{}", parts.join(", "));
    }

    Ok(result.errors)
}
104
/// Dispatch one configured rule to its runner, forwarding the rule's own
/// severity so the runner can label its output. The caller has already
/// filtered out severities that should not run.
fn run_rule(wiki: &Wiki, rule: &RuleConfig) -> Result<usize, WikiError> {
    match rule {
        RuleConfig::RequiredSections {
            dirs,
            sections,
            severity,
            ..
        } => run_required_sections(wiki, dirs, sections, *severity),
        RuleConfig::RequiredFrontmatter {
            dirs,
            fields,
            severity,
            ..
        } => run_required_frontmatter(wiki, dirs, fields, *severity),
        RuleConfig::MirrorParity {
            left,
            right,
            severity,
            ..
        } => run_mirror_parity(wiki, left, right, *severity),
        RuleConfig::CitationPattern {
            name,
            dirs,
            pattern,
            match_in,
            match_mode,
            severity,
        } => run_citation_pattern(wiki, name, dirs, pattern, match_in, *match_mode, *severity),
    }
}
135
136fn run_broken_links(wiki: &Wiki) -> Result<usize, WikiError> {
137 let severity = wiki.config().checks.broken_links;
138 let mut count = 0;
139 for file_path in wiki.all_scannable_files() {
140 let broken = resolve::find_broken_links(wiki, &file_path)?;
141 let source = wiki.source(&file_path)?;
142 let rel_path = wiki.rel_path(&file_path);
143 for (wl, reason) in &broken {
144 let ref_text = &source[wl.byte_range.clone()];
145 eprintln!(
146 "{severity}[broken-link]: {} in {}",
147 ref_text.trim(),
148 rel_path.display(),
149 );
150 eprintln!(" -> {reason}");
151 count += 1;
152 }
153 }
154 Ok(count)
155}
156
157fn run_orphan_pages(wiki: &Wiki, link_index: &LinkIndex) -> usize {
158 let orphans = link_index.orphans(wiki);
159 for page_id in &orphans {
160 if let Some(entry) = wiki.get(page_id) {
161 eprintln!(
162 "error[orphan]: {} has no inbound wikilinks",
163 entry.rel_path.display(),
164 );
165 }
166 }
167 orphans.len()
168}
169
170fn run_index_coverage(wiki: &Wiki, index_path: &std::path::Path) -> Result<usize, WikiError> {
171 let index_wikilinks = wiki.wikilinks(index_path)?;
172 let referenced: std::collections::HashSet<&str> =
173 index_wikilinks.iter().map(|wl| wl.page.as_str()).collect();
174
175 let mut count = 0;
176 for (page_id, entry) in wiki.pages() {
177 if !referenced.contains(page_id.as_str()) {
178 eprintln!(
179 "error[not-in-index]: {} is not listed in index",
180 entry.rel_path.display(),
181 );
182 count += 1;
183 }
184 }
185 Ok(count)
186}
187
188fn run_required_sections(
189 wiki: &Wiki,
190 dirs: &[String],
191 sections: &[String],
192 severity: Severity,
193) -> Result<usize, WikiError> {
194 let mut count = 0;
195 for entry in wiki.pages().values() {
196 if !WikiConfig::matches_dirs(&entry.rel_path, dirs) {
197 continue;
198 }
199 let file_path = wiki.entry_path(entry);
200 let headings = wiki.headings(&file_path)?;
201 for required in sections {
202 if !headings
203 .iter()
204 .any(|h| h.text.eq_ignore_ascii_case(required))
205 {
206 eprintln!(
207 "{severity}[missing-section]: {} is missing '## {required}'",
208 entry.rel_path.display(),
209 );
210 count += 1;
211 }
212 }
213 }
214 Ok(count)
215}
216
217fn run_required_frontmatter(
218 wiki: &Wiki,
219 dirs: &[String],
220 fields: &[String],
221 severity: Severity,
222) -> Result<usize, WikiError> {
223 let mut count = 0;
224 for entry in wiki.pages().values() {
225 if !WikiConfig::matches_dirs(&entry.rel_path, dirs) {
226 continue;
227 }
228 let file_path = wiki.entry_path(entry);
229 match wiki.frontmatter(&file_path)? {
230 Ok(Some(fm)) => {
231 for field in fields {
232 if !fm.has_field(field) {
233 eprintln!(
234 "{severity}[missing-frontmatter]: {} is missing '{field}'",
235 entry.rel_path.display(),
236 );
237 count += 1;
238 }
239 }
240 }
241 Ok(None) => {
242 eprintln!(
243 "{severity}[no-frontmatter]: {} has no frontmatter",
244 entry.rel_path.display(),
245 );
246 count += 1;
247 }
248 Err(e) => {
249 eprintln!(
250 "{severity}[bad-frontmatter]: {}: {e}",
251 entry.rel_path.display(),
252 );
253 count += 1;
254 }
255 }
256 }
257 Ok(count)
258}
259
260fn run_mirror_parity(
261 wiki: &Wiki,
262 left: &str,
263 right: &str,
264 severity: Severity,
265) -> Result<usize, WikiError> {
266 let mut count = 0;
267
268 let left_dir = wiki.root().path().join(left);
269 let right_dir = wiki.root().path().join(right);
270
271 let left_stems = collect_md_stems(&left_dir)?;
272 let right_stems = collect_md_stems(&right_dir)?;
273
274 for stem in &left_stems {
275 if !right_stems.contains(stem) {
276 eprintln!("{severity}[missing-mirror]: {left}/{stem}.md has no {right}/{stem}.md",);
277 count += 1;
278 }
279 }
280 for stem in &right_stems {
281 if !left_stems.contains(stem) {
282 eprintln!("{severity}[missing-mirror]: {right}/{stem}.md has no {left}/{stem}.md",);
283 count += 1;
284 }
285 }
286
287 Ok(count)
288}
289
290fn collect_md_stems(dir: &std::path::Path) -> Result<std::collections::HashSet<String>, WikiError> {
291 let mut stems = std::collections::HashSet::new();
292 if !dir.is_dir() {
293 return Ok(stems);
294 }
295 for entry in ignore::WalkBuilder::new(dir).hidden(false).build() {
296 let entry = entry.map_err(|e| WikiError::Walk {
297 path: dir.to_path_buf(),
298 source: e,
299 })?;
300 let path = entry.path();
301 if super::is_markdown_file(path)
302 && let Some(stem) = path.file_stem().and_then(|s| s.to_str())
303 {
304 stems.insert(stem.to_owned());
305 }
306 }
307 Ok(stems)
308}
309
/// Pre-scanned data for a citation rule's match directory, so the
/// directory is walked once rather than once per reference.
enum MatchDirCache {
    // Full text of every markdown file (used for `MatchMode::Content`).
    Contents(Vec<String>),
    // Lowercased filename stems (used for `MatchMode::Filename`).
    Stems(std::collections::HashSet<String>),
}
316
317impl MatchDirCache {
318 fn load(dir: &std::path::Path, mode: MatchMode) -> Result<Self, WikiError> {
319 if !dir.is_dir() {
320 return Ok(match mode {
321 MatchMode::Content => Self::Contents(Vec::new()),
322 MatchMode::Filename => Self::Stems(std::collections::HashSet::new()),
323 });
324 }
325 let mut contents = Vec::new();
326 let mut stems = std::collections::HashSet::new();
327 for entry in ignore::WalkBuilder::new(dir).hidden(false).build() {
328 let entry = entry.map_err(|e| WikiError::Walk {
329 path: dir.to_path_buf(),
330 source: e,
331 })?;
332 let path = entry.path();
333 if !super::is_markdown_file(path) {
334 continue;
335 }
336 match mode {
337 MatchMode::Content => {
338 let content =
339 std::fs::read_to_string(path).map_err(|e| WikiError::ReadFile {
340 path: path.to_path_buf(),
341 source: e,
342 })?;
343 contents.push(content);
344 }
345 MatchMode::Filename => {
346 if let Some(stem) = path.file_stem().and_then(|s| s.to_str()) {
347 stems.insert(stem.to_lowercase());
348 }
349 }
350 }
351 }
352 Ok(match mode {
353 MatchMode::Content => Self::Contents(contents),
354 MatchMode::Filename => Self::Stems(stems),
355 })
356 }
357
358 fn contains(&self, needle: &str) -> bool {
359 match self {
360 Self::Contents(pages) => pages.iter().any(|c| c.contains(needle)),
361 Self::Stems(stems) => stems.contains(&needle.to_lowercase()),
362 }
363 }
364}
365
366#[allow(clippy::too_many_arguments)]
367fn run_citation_pattern(
368 wiki: &Wiki,
369 name: &str,
370 dirs: &[String],
371 pattern: &str,
372 match_in: &str,
373 match_mode: MatchMode,
374 severity: Severity,
375) -> Result<usize, WikiError> {
376 let regex = regex_lite::Regex::new(pattern).expect("regex pre-validated at config load");
377
378 let match_dir = wiki.root().path().join(match_in);
379 let cache = MatchDirCache::load(&match_dir, match_mode)?;
380
381 let mut count = 0;
382
383 for entry in wiki.pages().values() {
384 if !WikiConfig::matches_dirs(&entry.rel_path, dirs) {
385 continue;
386 }
387 let file_path = wiki.entry_path(entry);
388 let source = wiki.source(&file_path)?;
389
390 for cap in regex.captures_iter(source) {
391 let Some(id) = cap.name("id").map(|m| m.as_str()) else {
392 continue;
393 };
394
395 if !cache.contains(id) {
396 eprintln!(
397 "{severity}[{name}]: {} references '{id}' but no matching page in {match_in}",
398 entry.rel_path.display(),
399 );
400 count += 1;
401 }
402 }
403 }
404
405 Ok(count)
406}