1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
//! This crate provides several utilities for creating regex-based lexers. A lexer uses [`RegexSet`]s
//! from the `regex` crate for maximum efficiency. Use this when you want to spin up a lexer quickly.
//!
//! Here is a quick example to get you started:
//! ```rust
//! use relex::*;
//!
//! #[derive(Debug, Clone, PartialEq)]
//! enum MyToken {
//!   Whitespace,
//!   ID,
//!   Float,
//!   Eof,
//!   Unrecognized,
//! }
//! impl TokenKind for MyToken {
//!   fn eof() -> Self { MyToken::Eof }
//!   fn unrecognized() -> Self { MyToken::Unrecognized }
//! }
//!
//! let lex = LexerBuilder::new()
//!   .token(Rule::new(MyToken::Whitespace, r"\s+").unwrap().skip(true))
//!   .token(Rule::new(MyToken::ID, r"[A-Za-z]+").unwrap())
//!   .token(Rule::new(MyToken::Float, r"(\d+)(?:\.(\d+))?").unwrap().capture(true)) // this one captures groups
//!                                                                                  // because it calls `.capture(true)`
//!   .build();
//!
//! let mut scanner = lex.iter(" abc", 0);
//! let token = scanner.next().unwrap();
//! assert_eq!(token.kind, MyToken::ID);
//! assert_eq!(token.text, "abc");
//! assert_eq!(token.skipped[0].kind, MyToken::Whitespace);
//! ```

// The submodules below are private; each one's public items are re-exported
// at the crate root via `pub use ...::*`, so downstream code can bring the
// whole API into scope with a single `use relex::*`.
mod lexer;
pub use lexer::*;

mod rule;
pub use rule::*;

mod token;
pub use token::*;