1use core::marker::PhantomData;
2
3use alloc::{boxed::Box, vec::Vec};
4
5use crate::{
6 tokenizers::{AsDigits, Many, Opt, Or, Puntuated, Sliced, Spanned, Until},
7 AsSlice, Buffer, Error, Item, Reader, Span, Tokenizer,
8};
9
10pub trait TokenizerExt<'input, B>: Tokenizer<'input, B>
11where
12 B: Buffer<'input>,
13{
14 fn map_ok<F, U>(self, func: F) -> MapOk<Self, F, B>
15 where
16 F: Fn(Self::Token) -> U,
17 Self: Sized,
18 {
19 MapOk {
20 tokenizer: self,
21 func,
22 ph: PhantomData,
23 }
24 }
25
26 fn map_err<F, U>(self, func: F) -> MapErr<Self, F, B>
27 where
28 F: Fn(usize, &B) -> U,
29 U: Into<Box<dyn core::error::Error + Send + Sync>>,
30 Self: Sized,
31 {
32 MapErr {
33 tokenizer: self,
34 func,
35 ph: PhantomData,
36 }
37 }
38
39 fn repeat(self, count: i32) -> Repeat<Self, B>
40 where
41 Self: Sized,
42 {
43 Repeat {
44 tokenizer: self,
45 count,
46 ph: PhantomData,
47 }
48 }
49
50 fn many(self) -> Many<Self, B>
51 where
52 Self: Sized,
53 {
54 Many::new(self)
55 }
56
57 fn until<U>(self, until: U) -> Until<Self, U, B>
58 where
59 Self: Sized,
60 U: Tokenizer<'input, B>,
61 {
62 Until::new(self, until)
63 }
64
65 fn or<T>(self, other: T) -> Or<Self, T, B>
66 where
67 Self: Sized,
68 T: Tokenizer<'input, B>,
69 {
70 Or::new(self, other)
71 }
72
73 fn optional(self) -> Opt<Self, B>
74 where
75 Self: Sized,
76 {
77 Opt::new(self)
78 }
79
80 fn spanned(self) -> Spanned<Self, B>
81 where
82 Self: Sized,
83 {
84 Spanned::new(self)
85 }
86
87 fn into_integer(self, base: u32) -> IntoInteger<Self, B>
88 where
89 Self: Sized,
90 Self::Token: AsDigits,
91 {
92 IntoInteger {
93 tokenizer: self,
94 base,
95 ph: PhantomData,
96 }
97 }
98
99 fn punctuated<P>(self, punct: P) -> Puntuated<Self, P>
100 where
101 Self: Sized,
102 P: Tokenizer<'input, B>,
103 {
104 Puntuated::new(self, punct)
105 }
106
107 fn slice(self) -> Sliced<Self, B>
108 where
109 Self: Sized,
110 B: Buffer<'input>,
111 B::Source: AsSlice<'input>,
112 {
113 Sliced::new(self)
114 }
115
116 fn parse(&self, reader: &mut Reader<'_, 'input, B>) -> Result<Self::Token, Error>
117 where
118 Self: Sized,
119 {
120 reader.parse(self)
121 }
122}
123
// Blanket implementation: every `Tokenizer` automatically gains the
// `TokenizerExt` combinators; all methods use their default bodies.
impl<'input, T, B> TokenizerExt<'input, B> for T
where
    B: Buffer<'input>,
    T: Tokenizer<'input, B>,
{
}
130
/// Adapter returned by [`TokenizerExt::map_ok`]: applies `func` to the
/// inner tokenizer's successful token.
pub struct MapOk<T, F, B> {
    tokenizer: T,
    func: F,
    // Ties the adapter to buffer type `B` without storing one
    // (`fn(&B)` keeps the struct Send/Sync-neutral w.r.t. `B`).
    ph: PhantomData<fn(&B)>,
}
136
137impl<'input, T, F, U, B> Tokenizer<'input, B> for MapOk<T, F, B>
138where
139 B: Buffer<'input>,
140 T: Tokenizer<'input, B>,
141 F: Fn(T::Token) -> U,
142{
143 type Token = U;
144
145 fn eat(&self, reader: &mut crate::Reader<'_, 'input, B>) -> Result<(), crate::Error> {
146 self.tokenizer.eat(reader)
147 }
148
149 fn peek(&self, reader: &mut crate::Reader<'_, 'input, B>) -> bool {
150 self.tokenizer.peek(reader)
151 }
152
153 fn to_token(
154 &self,
155 reader: &mut crate::Reader<'_, 'input, B>,
156 ) -> Result<Self::Token, crate::Error> {
157 match self.tokenizer.to_token(reader) {
158 Ok(ret) => Ok((self.func)(ret)),
159 Err(err) => Err(err),
160 }
161 }
162}
163
/// Adapter returned by [`TokenizerExt::map_err`]: rebuilds the inner
/// tokenizer's error via `func`, which receives the failure position and
/// a reference to the buffer.
pub struct MapErr<T, F, B> {
    tokenizer: T,
    func: F,
    // Ties the adapter to buffer type `B` without storing one.
    ph: PhantomData<fn(&B)>,
}
169
170impl<'input, T, F, U, B> Tokenizer<'input, B> for MapErr<T, F, B>
171where
172 B: Buffer<'input>,
173 T: Tokenizer<'input, B>,
174 F: Fn(usize, &B) -> U,
175 U: Into<Box<dyn core::error::Error + Send + Sync>>,
176{
177 type Token = T::Token;
178
179 fn eat(&self, reader: &mut crate::Reader<'_, 'input, B>) -> Result<(), crate::Error> {
180 self.tokenizer
181 .eat(reader)
182 .map_err(|err| Error::new(err.position(), (self.func)(err.position(), reader.buffer())))
183 }
184
185 fn peek(&self, reader: &mut crate::Reader<'_, 'input, B>) -> bool {
186 self.tokenizer.peek(reader)
187 }
188
189 fn to_token(
190 &self,
191 reader: &mut crate::Reader<'_, 'input, B>,
192 ) -> Result<Self::Token, crate::Error> {
193 self.tokenizer
194 .to_token(reader)
195 .map_err(|err| Error::new(err.position(), (self.func)(err.position(), reader.buffer())))
196 }
197}
198
/// Adapter returned by [`TokenizerExt::into_integer`]: accumulates the
/// inner token's digits into an `i128` using `base` as the radix.
pub struct IntoInteger<T, B> {
    tokenizer: T,
    // Radix used when accumulating digits (e.g. 10 or 16).
    base: u32,
    // Ties the adapter to buffer type `B` without storing one.
    ph: PhantomData<fn(&B)>,
}
204
205impl<'input, T, B> Tokenizer<'input, B> for IntoInteger<T, B>
206where
207 B: Buffer<'input>,
208 T: Tokenizer<'input, B>,
209 T::Token: AsDigits,
210{
211 type Token = Item<i128>;
212
213 fn eat(&self, reader: &mut crate::Reader<'_, 'input, B>) -> Result<(), crate::Error> {
214 self.tokenizer.eat(reader)
215 }
216
217 fn peek(&self, reader: &mut crate::Reader<'_, 'input, B>) -> bool {
218 self.tokenizer.peek(reader)
219 }
220
221 fn to_token(
222 &self,
223 reader: &mut crate::Reader<'_, 'input, B>,
224 ) -> Result<Self::Token, crate::Error> {
225 let start = reader.position();
226 let digits = self.tokenizer.to_token(reader)?;
227 let end = reader.position();
228 let mut val = 0i128;
229
230 for digit in digits.digits() {
231 val = (self.base as i128) * val + (digit as i128);
232 }
233
234 Ok(Item::new(Span::new(start, end), val))
235 }
236}
237
/// Adapter returned by [`TokenizerExt::repeat`]: runs the inner tokenizer
/// `count` times and collects every produced token.
pub struct Repeat<T, B> {
    tokenizer: T,
    // Number of repetitions to perform.
    // NOTE(review): signed type — callers could pass a non-positive count;
    // the impl must handle that case.
    count: i32,
    // Ties the adapter to buffer type `B` without storing one.
    ph: PhantomData<fn(&B)>,
}
243
244impl<'input, T, B> Tokenizer<'input, B> for Repeat<T, B>
245where
246 B: Buffer<'input>,
247 T: Tokenizer<'input, B>,
248{
249 type Token = Item<Vec<T::Token>>;
250
251 fn eat(&self, reader: &mut crate::Reader<'_, 'input, B>) -> Result<(), crate::Error> {
252 let mut count = 0;
253 loop {
254 self.tokenizer.eat(reader)?;
255 count += 1;
256 if count == self.count as usize {
257 break;
258 }
259 }
260
261 Ok(())
262 }
263
264 fn peek(&self, reader: &mut crate::Reader<'_, 'input, B>) -> bool {
265 self.tokenizer.peek(reader)
266 }
267
268 fn to_token(
269 &self,
270 reader: &mut crate::Reader<'_, 'input, B>,
271 ) -> Result<Self::Token, crate::Error> {
272 let start = reader.position();
273
274 let mut output = Vec::with_capacity(self.count as _);
275 loop {
276 let next = self.tokenizer.parse(reader)?;
277 output.push(next);
278 if output.len() == self.count as usize {
279 break;
280 }
281 }
282
283 let end = reader.position();
284
285 Ok(Item::new(Span::new(start, end), output))
286 }
287}