binator/utils/
mod.rs

1#![doc = include_str!("readme.md")]
2
3//! Utils combinator
4//!
//! [Utils] trait contains everything you want to know
6
7use core::{
8  fmt::{
9    Debug,
10    Display,
11  },
12  ops::{
13    BitOr,
14    FromResidual,
15    Try,
16  },
17};
18
19use crate::{
20  Contexting,
21  Parse,
22  Parsed,
23  Streaming,
24};
25
26mod span;
27pub use span::*;
28mod opt;
29pub use opt::*;
30mod and;
31pub use and::*;
32mod and_then;
33pub use and_then::*;
34mod and_drop;
35pub use and_drop::*;
36mod drop_and;
37pub use drop_and::*;
38mod or;
39pub use or::*;
40mod not;
41pub use not::*;
42mod peek;
43pub use peek::*;
44mod map;
45pub use map::*;
46mod to;
47pub use to::*;
48mod try_map;
49pub use try_map::*;
50mod drop;
51pub use drop::*;
52// mod drop_last;
53// pub use drop_last::*;
54// mod drop_first;
55// pub use drop_first::*;
56
57mod filter;
58pub use filter::*;
59mod filter_map;
60pub use filter_map::*;
61
62mod fold_bounds;
63pub use fold_bounds::*;
64mod try_fold_bounds;
65pub use try_fold_bounds::*;
66mod try_fold_iter;
67pub use try_fold_iter::*;
68mod fold_until;
69pub use fold_until::*;
70mod try_fold_until;
71pub use try_fold_until::*;
72mod fill;
73pub use fill::*;
74
75mod enumerate;
76pub use enumerate::*;
77mod limit;
78pub use limit::*;
79
80mod add_atom;
81pub use add_atom::*;
82
83mod acc;
84pub use acc::*;
85mod try_acc;
86pub use try_acc::*;
87mod extend;
88pub use extend::*;
89mod try_extend;
90pub use try_extend::*;
91mod push;
92pub use push::*;
93mod try_push;
94pub use try_push::*;
95
/// Atom for most utils combinators. Describes why a combinator from this
/// module produced a Failure; stored in the [Contexting] Context.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum UtilsAtom<Stream> {
  /// When a combinator like fold didn't reach the minimum number of Tokens
  /// asked for
  MinNotReach {
    /// The number of Tokens found
    i: usize,
    /// The number of Tokens requested
    min: usize,
  },
  /// When a combinator like fold_until fails before until is reached
  UntilNotReach,
  //  IterEndNotReach,
  /// When the max combinator reached the max allowed.
  // Stand alone ?
  Max(usize),
  /// When the filter combinator returns Failure because the filter refused
  /// the Token
  Filter,
  /// When the Span combinator calls diff on the stream but it returns an
  /// Error. If you encounter this, it either means the two streams are not
  /// the same or you rewound the stream to a previous point of the original
  /// stream
  // missing success token
  Diff {
    /// The original stream
    stream: Stream,
    /// The stream returned by the success parser
    stream_success: Stream,
  },
}
125
126impl<Stream> Display for UtilsAtom<Stream> {
127  fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
128    match self {
129      UtilsAtom::MinNotReach { i, min } => write!(f, "MinNotReach: {} < {}", i, min),
130      UtilsAtom::UntilNotReach => write!(f, "UntilNotReach"),
131      //      UtilsAtom::IterEndNotReach => write!(f, "IterEndNotReach"),
132      UtilsAtom::Max(n) => write!(f, "Max {}", n),
133      UtilsAtom::Filter { .. } => write!(f, "Filter"),
134      UtilsAtom::Diff { .. } => write!(f, "Diff"),
135    }
136  }
137}
138
/// Extends the [Parse] trait with combinators. Every method is a thin
/// constructor that delegates to the free function of the same name.
pub trait Utils<Stream, Context>: Sized + Parse<Stream, Context>
where
  Stream: Streaming,
{
  /// and_then will call the underlying parser; if successful it will call the
  /// function in parameter and give it the produced Token. The function must
  /// return a new parser that will be called to produce the Token returned
  /// by the and_then parser.
  fn and_then<OtherParser, F>(self, f: F) -> AndThen<Self, F>
  where
    OtherParser: Parse<Stream, Context>,
    F: Fn(Self::Token) -> OtherParser,
  {
    and_then(self, f)
  }

  /// The and combinator will call the underlying parser, and if successful the
  /// parser given in parameter. If the second parser is also successful, the
  /// and combinator will return a tuple that contains the two Tokens produced.
  fn and<OtherParser, OtherToken>(self, other: OtherParser) -> And<Self, OtherParser>
  where
    OtherParser: Parse<Stream, Context, Token = OtherToken>,
  {
    and(self, other)
  }

  /// Same as the and combinator but it will drop the second Token instead,
  /// returning only the first Token from the inner parser.
  fn and_drop<OtherParser, OtherToken>(self, other: OtherParser) -> AndDrop<Self, OtherParser>
  where
    OtherParser: Parse<Stream, Context, Token = OtherToken>,
  {
    and_drop(self, other)
  }

  /// Same as the and combinator but it will drop the underlying Token instead,
  /// returning only the second Token from the parser in parameter.
  fn drop_and<OtherParser, OtherToken>(self, other: OtherParser) -> DropAnd<Self, OtherParser>
  where
    OtherParser: Parse<Stream, Context, Token = OtherToken>,
  {
    drop_and(self, other)
  }

  /// Calls the underlying parser but drops the Token if successful. This can
  /// be considered a shortcut for the toilet closure: `.map(|_| ())`.
  fn drop(self) -> Drop<Self> {
    drop(self)
  }

  /// Will call the underlying parser N times to fill an array of size N and
  /// return [Token; N] if successful
  fn fill<const N: usize>(self) -> Fill<Self, N>
  where
    Context: Contexting<UtilsAtom<Stream>>,
  {
    fill(self)
  }

  // consider removing: too specific, how to replace ?
  /// This combinator takes an Iterator; it will call the inner parser
  /// as many times as the iterator length. The Item produced and the Token
  /// produced will be given to the function F along with the accumulator
  /// produced by the Init function. The Init and F functions can return any
  /// type that implements Try. The Token produced is the last accumulator
  /// value.
  fn try_fold_iter<IntoIter, Init, Acc, Ret, F>(
    self, iter: IntoIter, init: Init, f: F,
  ) -> TryFoldIter<Self, IntoIter, Init, F>
  where
    Context: Contexting<UtilsAtom<Stream>>,
    IntoIter: IntoIterator + Clone,
    Init: Fn() -> Ret,
    F: Fn(Acc, Self::Token, IntoIter::Item) -> Ret,
    Ret: Try<Output = Acc>,
    Parsed<Acc, Stream, Context>: FromResidual<Ret::Residual>,
  {
    try_fold_iter(self, iter, init, f)
  }

  /// This combinator will call the inner parser as long as the until
  /// parser is not successful; each Token produced will be fed to the F
  /// function along with the accumulator.
  ///
  /// The Token produced by fold_until is a tuple of the last
  /// value of the accumulator and the Token from the until parser.
  fn fold_until<TokenUntil, Acc, Until, Init, F>(
    self, until: Until, init: Init, f: F,
  ) -> FoldUntil<Self, Until, Init, F>
  where
    Context: Contexting<UtilsAtom<Stream>>,
    Until: Parse<Stream, Context, Token = TokenUntil>,
    Init: FnMut() -> Acc,
    F: FnMut(Acc, Self::Token) -> Acc,
  {
    fold_until(self, until, init, f)
  }

  /// The same as fold_until but can be used with types that implement Try
  // NOTE(review): the `Parser` type parameter appears unused here; removing
  // it would break callers using explicit turbofish — confirm before cleanup.
  fn try_fold_until<TokenUntil, Acc, Parser, Until, Init, Ret, F>(
    self, until: Until, init: Init, f: F,
  ) -> TryFoldUntil<Self, Until, Init, F>
  where
    Context: Contexting<UtilsAtom<Stream>>,
    Until: Parse<Stream, Context, Token = TokenUntil>,
    Init: Fn() -> Ret,
    F: Fn(Acc, Self::Token) -> Ret,
    Ret: Try<Output = Acc>,
    Parsed<(Acc, Until::Token), Stream, Context>: FromResidual<Ret::Residual>,
  {
    try_fold_until(self, until, init, f)
  }

  /// Main fold combinator; the behavior depends on the Bounds argument.
  /// This combinator is implemented for Range and usize. The number of
  /// iterations depends on the type and the value used for the Bounds
  /// argument.
  ///
  /// | Type                      | Value | Min | Max |
  /// |:--------------------------|:------|:----|:----|
  /// | `Range<usize>`            | 2..4  | 2   | 4   |
  /// | `RangeInclusive<usize>`   | 2..=4 | 2   | 5   |
  /// | `RangeFrom<usize>`        | 4..   | 4   | ∞   |
  /// | `RangeTo<usize>`          | ..4   | 0   | 4   |
  /// | `RangeToInclusive<usize>` | ..=4  | 0   | 5   |
  /// | `RangeFull`               | ..    | 0   | ∞   |
  /// | `usize`                   | 4     | 4   | 4   |
  ///
  /// If the minimum value is not respected, it will return a Failure. Then,
  /// until the inner parser returns a Failure or the maximum value is reached,
  /// it will continue iterating. Then it will return a Success with the last
  /// value of the accumulator. This offers a great number of possibilities for
  /// your parser with only one combinator.
  fn fold_bounds<Bounds, Acc, Init, F>(
    self, bounds: Bounds, init: Init, f: F,
  ) -> FoldBounds<Self, Bounds, Init, F>
  where
    Context: Contexting<UtilsAtom<Stream>>,
    Init: FnMut() -> Acc,
    F: FnMut(Acc, Self::Token) -> Acc,
    Bounds: FoldBoundsParse,
    Acc: Debug,
  {
    fold_bounds(self, bounds, init, f)
  }

  /// Same as fold_bounds but F and Init can return a type that implements Try
  fn try_fold_bounds<Bounds, Acc, Init, Ret, F>(
    self, bounds: Bounds, init: Init, f: F,
  ) -> TryFoldBounds<Self, Bounds, Init, F>
  where
    Context: Contexting<UtilsAtom<Stream>>,
    Init: Fn() -> Ret,
    F: Fn(Acc, Self::Token) -> Ret,
    Ret: Try<Output = Acc>,
    Parsed<Acc, Stream, Context>: FromResidual<Ret::Residual>,
    Bounds: TryFoldBoundsParse,
    Acc: Debug,
  {
    try_fold_bounds(self, bounds, init, f)
  }

  /// If the underlying parser is not successful it will call
  /// F and add the Atom produced to the Context
  fn add_atom<F, Atom>(self, f: F) -> AddAtom<Self, F>
  where
    F: Fn() -> Atom,
    Context: Contexting<Atom>,
  {
    add_atom(self, f)
  }

  /// If the underlying parser is successful it will call F
  /// with the Token produced and return a new Success with
  /// the Token returned by F.
  fn map<F, OtherToken>(self, f: F) -> Map<Self, F>
  where
    F: Fn(Self::Token) -> OtherToken,
  {
    map(self, f)
  }

  /// Only allows the Success path if F returns true
  fn filter<F>(self, f: F) -> Filter<Self, F>
  where
    F: Fn(&Self::Token) -> bool,
    Context: Contexting<UtilsAtom<Stream>>,
  {
    filter(self, f)
  }

  /// Merge of the map and filter combinators; only returns Success if
  /// F returns Some.
  fn filter_map<F, OtherToken>(self, f: F) -> FilterMap<Self, F>
  where
    F: Fn(Self::Token) -> Option<OtherToken>,
    OtherToken: Clone,
    Context: Contexting<UtilsAtom<Stream>>,
  {
    filter_map(self, f)
  }

  /// If the underlying parser is successful it will drop the Token and replace
  /// it with the token in argument. This is mostly an unconditional .map(),
  /// useful to avoid the closure. (Can be used in Slice parsers where map is
  /// not)
  fn to<OtherToken>(self, t: OtherToken) -> To<Self, OtherToken>
  where
    OtherToken: Clone,
  {
    to(self, t)
  }

  /// Evil combinator: it will turn Success into Failure and
  /// Failure into Success but will not touch Error. This combinator
  /// should probably never be used.
  fn not(self) -> Not<Self>
  where
    Stream: Clone,
    Context: Contexting<NotAtom<Self::Token, Stream>>,
  {
    not(self)
  }

  /// Makes a parser optional, allowing failure. Returns Some(Token)
  /// in case of Success of the underlying parser and None in case of Failure.
  /// This parser can't fail.
  fn opt(self) -> Optional<Self>
  where
    Stream: Clone,
  {
    opt(self)
  }

  /// Very much like Iterator .enumerate(). It will add a counter to every
  /// Token produced. Returns a tuple `(counter, Token)`.
  fn enumerate(self) -> Enumerate<Self> {
    enumerate(self)
  }

  // evil ? should we remove ?
  /// Very much like Iterator .take(): it will only allow n calls to the inner
  /// parser before returning Failure. This parser uses internal state; be
  /// aware that it must be recreated to be reset.
  fn limit(self, n: usize) -> Limit<Self>
  where
    Context: Contexting<UtilsAtom<Stream>>,
  {
    limit(self, n)
  }

  /// Allows branching: or will call the inner parser and if not successful
  /// it will call the second parser. or can be chained many times, allowing
  /// multiple branches.
  fn or<OtherParser>(self, b: OtherParser) -> Or<Self, OtherParser>
  where
    Stream: Clone,
    OtherParser: Parse<Stream, Context>,
    Context: BitOr,
  {
    or(self, b)
  }

  /// peek allows not consuming the Stream while still returning the Token it
  /// would have produced. Be aware that the same input will then be parsed
  /// more than once, so use it deliberately.
  fn peek(self) -> Peek<Self>
  where
    Stream: Clone,
  {
    peek(self)
  }

  /// span allows saving the Stream consumed by the underlying parser in the
  /// form of a Span. This is very useful for errors or to avoid fully
  /// tokenizing an input. Be aware that Span can contain a lifetime since it's
  /// linked to the Stream implementation. For example, a Stream of slice u8
  /// will contain a lifetime.
  fn span(self) -> Span<Self>
  where
    Context: Contexting<UtilsAtom<Stream>>,
  {
    span(self)
  }

  /// Same as .filter_map() but expects an Atom in case of Failure.
  fn try_map<OtherToken, F, Ret>(self, f: F) -> TryMap<Self, F>
  where
    F: Fn(Self::Token) -> Ret,
    Ret: Try<Output = OtherToken>,
    Parsed<OtherToken, Stream, Context>: FromResidual<Ret::Residual>,
  {
    try_map(self, f)
  }
}
433
// Blanket implementation: anything that implements [Parse] over a [Streaming]
// stream automatically gets every [Utils] combinator (all methods have
// default bodies, so there is nothing to implement here).
impl<T, Stream, Context> Utils<Stream, Context> for T
where
  Stream: Streaming,
  Self: Parse<Stream, Context>,
{
}