1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
// Re-export every item from `enums` at this module's root so callers can
// name tokens without the extra `enums::` path segment.
pub use enums::*;

pub mod enums;
pub mod traits;
// The `nom`-based lexer grammar is only compiled when the "nom" feature is on.
#[cfg(feature = "nom")]
mod grammar;

// `token` is the crate-internal entry point into the grammar.
#[cfg(feature = "nom")]
pub(crate) use grammar::token;
// Tests exercise the individual grammar parsers too, not just `token`,
// so the full grammar surface is re-exported for test builds only.
#[cfg(all(feature = "nom", test))]
pub(crate) use grammar::*;

/// Unit tests for the lexer's `token` entry point; built only when both the
/// test harness and the "nom" feature are active.
#[cfg(all(test, feature = "nom"))]
#[allow(clippy::unwrap_used)]
mod tests {
    use std::fmt::Debug;

    use nom::Finish;
    use rstest::rstest;

    #[allow(unused_imports)]
    use crate::lexer::{token, Ignore::*, Token::Ignore};
    use crate::TokenizationResult;

    /// Drives `token` through each `#[case]`: tokenization must succeed,
    /// produce `expected`, and leave exactly `expected_rest` unconsumed
    /// (`None` means the whole input is eaten).
    #[rstest]
    #[case::ignore_comment(token("// hello world!"), Ignore(Comment("// hello world!")), None)]
    #[case::ignore_comment_another_line(
        token("//hello world!\nthis is not comment"),
        Ignore(Comment("//hello world!")),
        Some("\nthis is not comment")
    )]
    #[case::ignore_multiline_comment(
        token("/* this is\nmultiline comment */"),
        Ignore(MultilineComment("/* this is\nmultiline comment */")),
        None
    )]
    #[case::ignore_multiline_comment_with_rest(
        token("/* this is\nmultiline comment */ this is not"),
        Ignore(MultilineComment("/* this is\nmultiline comment */")),
        Some(" this is not")
    )]
    #[case::ignore_newline(token("\n\n\n"), Ignore(Newline), Some("\n\n"))]
    #[case::ignore_whitespace(token("   \t"), Ignore(Whitespace), None)]
    fn test_parser<T: PartialEq + Debug>(
        #[case] result: TokenizationResult<T>,
        #[case] expected: T,
        #[case] expected_rest: Option<&'static str>,
    ) {
        // `Finish` converts the streaming nom result into a plain Result;
        // any parse failure here is a test bug, so unwrap is acceptable.
        let (remaining, parsed) = result.finish().unwrap();

        assert_eq!(parsed, expected);
        assert_eq!(remaining, expected_rest.unwrap_or(""));
    }
}