use lazy_regex::{lazy_regex, Lazy};
use mdbook::{
    book::{Book, Chapter},
    errors::Error,
    preprocess::{CmdPreprocessor, Preprocessor, PreprocessorContext},
    BookItem,
};
use regex::{Captures, Regex};
use std::{collections::HashMap, io};

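/// Matches Obsidian-style wikilinks, i.e. `[[Page Name]]` or `[[Page Name|Display Title]]`.
/// The target goes into the `link` capture group and the optional display text into `title`.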
static WIKILINK_REGEX: Lazy<Regex> =
    lazy_regex!(r"\[\[(?P<link>[^\]\|]+)(?:\|(?P<title>[^\]]+))?\]\]");

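/// Drives the mdbook preprocessor protocol: parses the `(context, book)` JSON pair
/// from stdin, warns if the mdbook version this plugin was built against differs
/// from the one invoking it, runs the preprocessor, and writes the processed book
/// back to stdout as JSON.
///
/// A sketch of how a preprocessor binary would typically wire this up (the
/// `supports` handling below is illustrative and not provided by this crate):
///
/// ```ignore
/// fn main() {
///     // mdbook probes renderer support with `<preprocessor> supports <renderer>`;
///     // exiting with 0 accepts all renderers.
///     if std::env::args().nth(1).as_deref() == Some("supports") {
///         std::process::exit(0);
///     }
///     if let Err(e) = handle_preprocessing(WikiLinks) {
///         eprintln!("{}", e);
///         std::process::exit(1);
///     }
/// }
/// ```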
pub fn handle_preprocessing(pre: impl Preprocessor) -> Result<(), Error> {
    let (ctx, book) = CmdPreprocessor::parse_input(io::stdin())?;

    if ctx.mdbook_version != mdbook::MDBOOK_VERSION {
        eprintln!(
            "Warning: The {} plugin was built against version {} of mdbook, \
             but we're being called from version {}",
            pre.name(),
            mdbook::MDBOOK_VERSION,
            ctx.mdbook_version
        );
    }

    let processed_book = pre.run(&ctx, book)?;
    serde_json::to_writer(io::stdout(), &processed_book)?;

    Ok(())
}

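/// Returns the chapter behind a book item, or `None` for non-chapter items.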
fn chapter(it: &BookItem) -> Option<&Chapter> {
    if let BookItem::Chapter(ch) = it {
        Some(ch)
    } else {
        None
    }
}

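/// An mdbook preprocessor that rewrites `[[wikilinks]]` into regular markdown links,
/// resolving targets by chapter title.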
pub struct WikiLinks;

impl Preprocessor for WikiLinks {
    fn name(&self) -> &str {
        "wikilink-preprocessor"
    }

    fn run(&self, _ctx: &PreprocessorContext, mut book: Book) -> Result<Book, Error> {
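        // First pass: map each chapter title to its source path so wikilink
        // targets can later be resolved by name. Draft chapters without a path
        // are skipped; duplicate titles keep the last path seen.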
        let mut chapters_map = HashMap::new();
        for chapter in book.iter().filter_map(chapter) {
            let key = chapter.name.clone();
            if chapter.path.is_none() {
                continue;
            }
            if chapters_map.contains_key(&key) {
                eprintln!("duplicate page title found: {} at {:?}", key, chapter.path);
            }
            chapters_map.insert(key, chapter.path.as_ref().unwrap().clone());
        }

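        // Second pass: rewrite every wikilink in each chapter's content.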
        book.for_each_mut(|it| {
            if let BookItem::Chapter(chapter) = it {
                chapter.content = WIKILINK_REGEX
                    .replace_all(&chapter.content, |it: &Captures| -> String {
                        let key = it.name("link").unwrap().as_str().trim();
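                        // Unknown target: emit the raw text as a highlighted
                        // "missing link" span rather than a broken link.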
                        if !chapters_map.contains_key(key) {
                            return (maud::html! {
                                span.missing-link style="color:darkred;" {
                                   (key)
                                }
                            })
                            .into_string();
                        }
                        // Use the explicit `|title|` when given, otherwise fall back
                        // to the link target itself.
                        let title = it
                            .name("title")
                            .map(|x| x.as_str().trim().to_string())
                            .unwrap_or_else(|| key.to_string());
                        // Path to the target chapter, relative to the directory of
                        // the chapter currently being processed.
                        let diff_path = pathdiff::diff_paths(
                            chapters_map.get(key).unwrap(),
                            chapter.path.as_ref().unwrap().parent().unwrap(),
                        )
                        .unwrap();

                        format!(
                            "[{}](<{}>)",
                            title,
                            escape_special_chars(&diff_path.to_string_lossy())
                        )
                    })
                    .to_string();
            }
        });

        Ok(book)
    }
}

/// Escape special characters so the path can be used as a URL in a markdown link.
fn escape_special_chars(text: &str) -> String {
    text.replace(' ', "%20")
        .replace('<', "%3C")
        .replace('>', "%3E")
        .replace('?', "%3F")
}

#[cfg(test)]
mod tests {
    use crate::WIKILINK_REGEX;

    #[test]
    fn extract_link_regex() {
        let cases = [
            ("[[Link]]", "Link"),
            ("[[🪴 Sowing<Your>Garden]]", "🪴 Sowing<Your>Garden"),
            (
                "[[/Templates/🪴 Sowing<Your>Garden]]",
                "/Templates/🪴 Sowing<Your>Garden",
            ),
        ];

        for (case, expected) in &cases {
            let got = WIKILINK_REGEX
                .captures(case)
                .unwrap()
                .name("link")
                .unwrap()
                .as_str();
            assert_eq!(got.trim(), *expected);
        }
    }

    #[test]
    fn extract_title_regex() {
        let cases = [
            ("[[Link | My New Link]]", "My New Link"),
            ("[[🪴 Sowing<Your>Garden | 🪴 Emoji Link]]", "🪴 Emoji Link"),
            ("[[🪴 Sowing<Your>Garden | 🪴/Emoji/Link]]", "🪴/Emoji/Link"),
        ];

        for (case, expected) in &cases {
            let got = WIKILINK_REGEX
                .captures(case)
                .unwrap()
                .name("title")
                .unwrap()
                .as_str();
            assert_eq!(got.trim(), *expected);
        }
    }
}