#![warn(clippy::redundant_pub_crate)]

use match_all::MatchAll;
use matcher::MatchTokensBuilder;
use proc_macro2::TokenStream;
use syn::Result;
use token_entry::{FindAllStringBuilder, ParseStreamEx, Source, TokenStringBuilder};

pub use matcher::Matcher;
pub use transcriber::Transcriber;

#[macro_use]
mod utils;

pub mod match_all;
mod matcher;
mod text;
mod token_entry;
mod transcriber;
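
/// A `macro_rules`-style conversion rule: a pattern to search for (`from`)
/// and a template to transcribe each match into (`to`).
///
/// A minimal usage sketch, assuming `Matcher` and `Transcriber` implement
/// `FromStr` with `macro_rules`-style syntax (their definitions live in the
/// `matcher` and `transcriber` modules, not in this file):
///
/// ```ignore
/// let from: Matcher = "$e:literal".parse()?;  // pattern to find
/// let to: Transcriber = "$e + 1".parse()?;    // replacement template
/// let rule = Rule::new(from, to)?;
/// let rewritten = rule.replace_all("f(1, 2)")?;
/// ```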
#[derive(Clone, Debug)]
pub struct Rule {
    from: Matcher,
    to: Transcriber,
    nest: bool,
}

impl Rule {
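    /// Creates a rule from a matcher (the pattern to find) and a transcriber
    /// (the replacement template).
    ///
    /// Returns an error if the transcriber cannot be attached to the
    /// matcher's pattern.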
    pub fn new(from: Matcher, mut to: Transcriber) -> Result<Self> {
        to.attach(&from.0)?;
        Ok(Rule {
            from,
            to,
            nest: false,
        })
    }
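
    /// Sets the `nest` flag and returns the updated rule (builder style).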
    pub fn nest(self, yes: bool) -> Self {
        Self { nest: yes, ..self }
    }
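
    /// Replaces every match of the pattern in `input` and returns the result
    /// as a string.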
    pub fn replace_all(&self, input: &str) -> Result<String> {
        let (source, input) = Source::from_str(input)?;
        let mut b = TokenStringBuilder::new(&source);
        self.from
            .find_all(input, 0)
            .apply_string(self, &mut FindAllStringBuilder::new(&mut b, 0));
        Ok(b.s)
    }
    pub fn replace_all_tokens(&self, input: TokenStream) -> TokenStream {
        self.from
            .find_all(input.clone(), 0)
            .apply_tokens(&mut 0, input, self)
    }
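
    /// Finds every match of the pattern in `input` and returns them as a
    /// [`MatchAll`].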
    pub fn match_all<'a>(&'a self, input: &'a str) -> Result<MatchAll<'a>> {
        let (source, input) = Source::from_str(input)?;
        Ok(self.from.match_all(source, input, self))
    }
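
    /// Matches the pattern against `input` and transcribes it to a string.
    ///
    /// Unlike [`replace_all`](Self::replace_all), this returns an error when
    /// the input does not match the pattern.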
    pub fn apply(&self, input: &str) -> Result<String> {
        let (source, input) = Source::from_str(input)?;
        ParseStreamEx::parse_from_tokens(input, 0, |input: &mut ParseStreamEx| {
            let m = self.from.try_match(input)?;
            let mut b = TokenStringBuilder::new(&source);
            self.to.apply_string(&m, self, usize::MAX, &mut b);
            Ok(b.s)
        })
    }
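
    /// Matches the pattern against `input` and transcribes it to a token
    /// stream, returning an error when the input does not match.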
    pub fn apply_tokens(&self, input: TokenStream) -> Result<TokenStream> {
        ParseStreamEx::parse_from_tokens(input, 0, |input: &mut ParseStreamEx| {
            self.apply_tokens_parser(input)
        })
    }
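
    // Matches the pattern on the parse stream and transcribes it into a new
    // token stream (shared logic used by `apply_tokens`).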
    fn apply_tokens_parser(&self, input: &mut ParseStreamEx) -> Result<TokenStream> {
        let m = self.from.try_match(input)?;
        let mut tokens = TokenStream::new();
        let mut b = MatchTokensBuilder {
            tokens: &mut tokens,
            rule: self,
            tes_len: usize::MAX,
        };
        self.to.apply_tokens_to(&m, &mut b);
        Ok(tokens)
    }
}