kodept_parse/lexer/
mod.rs

1use cfg_if::cfg_if;
2
3pub use enums::*;
4
5pub mod enums;
6pub mod traits;
7
8cfg_if! {
9	if #[cfg(all(feature = "peg", not(feature = "trace")))] {
10        pub type DefaultLexer = PegLexer<false>;
11    } else if #[cfg(feature = "pest")] {
12        pub type DefaultLexer = PestLexer;
13    } else if #[cfg(feature = "nom")] {
14        pub type DefaultLexer = NomLexer;
15    } else {
16        compilation_error!("Either feature `peg` or `nom` or `pest` must be enabled for this crate")
17    }
18}
19
// Re-export each backend's lexer under a uniform name, gated on its feature.
// These are the concrete types `DefaultLexer` above resolves to.

// nom-based lexer backend.
#[cfg(feature = "nom")]
pub type NomLexer = crate::nom::Lexer;
// peg-based lexer backend; `TRACE` selects the tracing variant
// (see the `trace` feature handling in the `cfg_if!` block).
#[cfg(feature = "peg")]
pub type PegLexer<const TRACE: bool> = crate::peg::Lexer<TRACE>;
// pest-based lexer backend.
#[cfg(feature = "pest")]
pub type PestLexer = crate::pest::Lexer;
26
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use crate::common::TokenProducer;
    use crate::lexer::{DefaultLexer, Ignore::*, Token, Token::*};
    use rstest::rstest;
    use std::fmt::Debug;

    // Table-driven check that the default lexer produces the expected first
    // token and consumes exactly the expected number of bytes, for the
    // "ignorable" token kinds (comments, newlines, whitespace).
    #[rstest]
    #[case::ignore_comment("// hello world!", Comment("// hello world!"), None)]
    // NOTE(review): the case below is disabled — presumably a known failure
    // where a line comment followed by a newline mis-lexes; confirm and either
    // fix the lexer or record the reason here before deleting.
    // #[case::ignore_comment_another_line(
    //     "//hello world!\nthis is not comment",
    //     Comment("//hello world!"),
    //     Some("\nthis is not comment")
    // )]
    #[case::ignore_multiline_comment(
        "/* this is\nmultiline comment */",
        MultilineComment("/* this is\nmultiline comment */"),
        None
    )]
    #[case::ignore_multiline_comment_with_rest(
        "/* this is\nmultiline comment */ this is not",
        MultilineComment("/* this is\nmultiline comment */"),
        Some(" this is not")
    )]
    #[case::ignore_newline("\n\n\n", Ignore(Newline), Some("\n\n"))]
    #[case::ignore_whitespace("   \t", Ignore(Whitespace), None)]
    fn test_parser<T: PartialEq + Debug + Into<Token<'static>>>(
        #[case] input: &'static str,
        #[case] expected: T,
        #[case] expected_rest: Option<&'static str>,
    ) {
        // Lex a single token from offset 0; unwrap is acceptable in tests.
        let data = DefaultLexer::new().parse_token(input, 0).unwrap();
        // Everything past the token's span length is the unconsumed remainder.
        // assumes `span.point.length` is a byte length into `input` — TODO confirm
        let rest = &input[data.span.point.length as usize..];

        assert_eq!(data.token, expected.into());
        // `None` means the token consumed the whole input.
        assert_eq!(rest, expected_rest.unwrap_or(""));
    }
}
65}