//! A lexer that reads Lua code and produces tokens.
//! This crate provides two different lexers:
//!  * `FastLexer`: skips all whitespace tokens
//!  * `FullLexer`: produces every token, including whitespace
// `tokenizer` is declared first on purpose: `#[macro_use]` exposes its
// macros to the sibling modules declared textually after it.
#[macro_use]
mod tokenizer;

mod fast_tokenizer;
mod full_tokenizer;
mod lexer;
mod token;
mod utils;

// Flat public API of the crate, re-exported at the root.
pub use fast_tokenizer::{FastLexer, FastTokenizer};
pub use full_tokenizer::{FullLexer, FullTokenizer};
pub use lexer::{Lexer, LexerError, LexerErrorType};
pub use token::{Token, TokenType};