#![warn(clippy::redundant_pub_crate)]

use match_all::MatchAll;
use matcher::MatchTokensBuilder;
use proc_macro2::TokenStream;
use syn::Result;
use token_entry::{FindAllStringBuilder, ParseStreamEx, Source, TokenStringBuilder};

pub use matcher::Matcher;
pub use transcriber::Transcriber;

#[macro_use]
mod utils;

pub mod match_all;
mod matcher;
mod text;
mod token_entry;
mod transcriber;

/// A pair of a [`Matcher`] and a [`Transcriber`] that defines a conversion rule.
#[derive(Clone, Debug)]
pub struct Rule {
    from: Matcher,
    to: Transcriber,
    nest: bool,
}

impl Rule {
    /// Creates a new `Rule` from a [`Matcher`] and a [`Transcriber`].
    ///
    /// Returns an error if the meta-variables of the `Matcher` and the `Transcriber` do not match.
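    ///
    /// A minimal sketch of the intended usage; the second pair is expected to fail
    /// because `$x` is not bound by the matcher:
    ///
    /// ```rust
    /// use macro_rules_rt::{Matcher, Rule, Transcriber};
    ///
    /// let from: Matcher = "a + $e:expr".parse()?;
    /// let to: Transcriber = "b + $e".parse()?;
    /// let _rule = Rule::new(from, to)?;
    ///
    /// // `$x` is not bound by the matcher, so constructing this rule fails.
    /// let from: Matcher = "a + $e:expr".parse()?;
    /// let to: Transcriber = "b + $x".parse()?;
    /// assert!(Rule::new(from, to).is_err());
    /// # Ok::<(), syn::Error>(())
    /// ```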
    pub fn new(from: Matcher, mut to: Transcriber) -> Result<Self> {
        to.attach(&from.0)?;
        Ok(Rule {
            from,
            to,
            nest: false,
        })
    }
    /// Specifies whether to also apply the rule inside ranges matched by meta-variables (the default is `false`).
    ///
    /// If `false`, only the outermost matched range is replaced.
    ///
    /// If `true`, further substitutions are made within the ranges matched by meta-variables such as `$e:expr`.
    ///
    /// ```rust
    /// use macro_rules_rt::Rule;
    ///
    /// let from = "a + $e:expr".parse()?;
    /// let to   = "b + $e".parse()?;
    /// let input = "a + a + x";
    /// let rule = Rule::new(from, to)?;
    /// let r_nest_no = rule.clone().replace_all(input)?;
    /// let r_nest_yes = rule.nest(true).replace_all(input)?;
    /// assert_eq!(r_nest_no,  "b + a + x");
    /// assert_eq!(r_nest_yes, "b + b + x");
    /// # Ok::<(), syn::Error>(())
    /// ```
    pub fn nest(self, yes: bool) -> Self {
        Self { nest: yes, ..self }
    }

    /// Replaces all non-overlapping matches in `input` with the provided transcriber.
    ///
    /// Unlike converting the `str` to a `TokenStream` and then calling [`Rule::replace_all_tokens`],
    /// the original string is preserved as much as possible.
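    ///
    /// A minimal sketch, assuming text outside the matched range (here the
    /// surrounding `let` statement) is copied to the output verbatim:
    ///
    /// ```rust
    /// use macro_rules_rt::Rule;
    ///
    /// let rule = Rule::new("a + $e:expr".parse()?, "b + $e".parse()?)?;
    /// // Only the matched part is rewritten; the rest of the string is kept as-is.
    /// assert_eq!(rule.replace_all("let x = a + 1;")?, "let x = b + 1;");
    /// # Ok::<(), syn::Error>(())
    /// ```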
    pub fn replace_all(&self, input: &str) -> Result<String> {
        let (source, input) = Source::from_str(input)?;
        let mut b = TokenStringBuilder::new(&source);
        self.from
            .find_all(input, 0)
            .apply_string(self, &mut FindAllStringBuilder::new(&mut b, 0));
        Ok(b.s)
    }

    /// Replaces all non-overlapping matches in `input` with the provided transcriber.
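    ///
    /// A minimal sketch; token streams carry no original formatting, so the result
    /// is compared after normalizing both sides through `TokenStream::to_string`:
    ///
    /// ```rust
    /// use macro_rules_rt::Rule;
    /// use proc_macro2::TokenStream;
    ///
    /// let rule = Rule::new("a + $e:expr".parse()?, "b + $e".parse()?)?;
    /// let input: TokenStream = "a + 1".parse().unwrap();
    /// let expected: TokenStream = "b + 1".parse().unwrap();
    /// assert_eq!(rule.replace_all_tokens(input).to_string(), expected.to_string());
    /// # Ok::<(), syn::Error>(())
    /// ```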
    pub fn replace_all_tokens(&self, input: TokenStream) -> TokenStream {
        self.from
            .find_all(input.clone(), 0)
            .apply_tokens(&mut 0, input, self)
    }

    /// Finds all non-overlapping matches in `input` and returns detailed information about each match and its replacement.
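    ///
    /// A minimal sketch that only shows how to obtain the result; the structure of
    /// `MatchAll` itself is not inspected here:
    ///
    /// ```rust
    /// use macro_rules_rt::Rule;
    ///
    /// let rule = Rule::new("a + $e:expr".parse()?, "b + $e".parse()?)?;
    /// let input = "a + 1";
    /// let _m = rule.match_all(input)?;
    /// # Ok::<(), syn::Error>(())
    /// ```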
    pub fn match_all<'a>(&'a self, input: &'a str) -> Result<MatchAll<'a>> {
        let (source, input) = Source::from_str(input)?;
        Ok(self.from.match_all(source, input, self))
    }

    /// If the entire `input` matches the entire `from`, performs the conversion. Otherwise, returns an error.
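    ///
    /// A minimal sketch of the whole-input contract:
    ///
    /// ```rust
    /// use macro_rules_rt::Rule;
    ///
    /// let rule = Rule::new("a + $e:expr".parse()?, "b + $e".parse()?)?;
    /// // The whole input matches the matcher, so the conversion succeeds.
    /// assert_eq!(rule.apply("a + 1")?, "b + 1");
    /// // `x * 2` does not match `a + $e:expr`, so an error is returned.
    /// assert!(rule.apply("x * 2").is_err());
    /// # Ok::<(), syn::Error>(())
    /// ```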
    pub fn apply(&self, input: &str) -> Result<String> {
        let (source, input) = Source::from_str(input)?;
        ParseStreamEx::parse_from_tokens(input, 0, |input: &mut ParseStreamEx| {
            let m = self.from.try_match(input)?;
            let mut b = TokenStringBuilder::new(&source);
            self.to.apply_string(&m, self, usize::MAX, &mut b);
            Ok(b.s)
        })
    }

    /// If the entire `input` matches the entire `from`, performs the conversion. Otherwise, returns an error.
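    ///
    /// A minimal sketch mirroring [`Rule::apply`], but operating on `TokenStream`s:
    ///
    /// ```rust
    /// use macro_rules_rt::Rule;
    /// use proc_macro2::TokenStream;
    ///
    /// let rule = Rule::new("a + $e:expr".parse()?, "b + $e".parse()?)?;
    /// let input: TokenStream = "a + 1".parse().unwrap();
    /// let expected: TokenStream = "b + 1".parse().unwrap();
    /// assert_eq!(rule.apply_tokens(input)?.to_string(), expected.to_string());
    /// # Ok::<(), syn::Error>(())
    /// ```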
    pub fn apply_tokens(&self, input: TokenStream) -> Result<TokenStream> {
        ParseStreamEx::parse_from_tokens(input, 0, |input: &mut ParseStreamEx| {
            self.apply_tokens_parser(input)
        })
    }
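    // Matches `input` against `from` and transcribes the result into a fresh `TokenStream`.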
    fn apply_tokens_parser(&self, input: &mut ParseStreamEx) -> Result<TokenStream> {
        let m = self.from.try_match(input)?;
        let mut tokens = TokenStream::new();
        let mut b = MatchTokensBuilder {
            tokens: &mut tokens,
            rule: self,
            tes_len: usize::MAX,
        };
        self.to.apply_tokens_to(&m, &mut b);
        Ok(tokens)
    }
}