Crate regex_lexer
A regex-based lexer (tokenizer).
Example: building a lexer for simple arithmetic expressions (note that `build()` returns a `Result`, hence the `?`):

```rust
use regex_lexer::LexerBuilder;

#[derive(Debug, PartialEq, Eq)]
enum Token {
    Num(u32),
    Add,
    Sub,
    Mul,
    Div,
    Open,
    Close,
}

let lexer = LexerBuilder::new()
    .token(r"[0-9]+", |tok| Some(Token::Num(tok.parse().unwrap())))
    .token(r"\+", |_| Some(Token::Add))
    .token(r"-", |_| Some(Token::Sub))
    .token(r"\*", |_| Some(Token::Mul))
    .token(r"/", |_| Some(Token::Div))
    .token(r"\(", |_| Some(Token::Open))
    .token(r"\)", |_| Some(Token::Close))
    .token(r"\s+", |_| None) // skip whitespace
    .build()?;

let source = "(1 + 2) * 3";
assert_eq!(
    lexer.tokens(source).collect::<Vec<_>>(),
    vec![
        Token::Open,
        Token::Num(1),
        Token::Add,
        Token::Num(2),
        Token::Close,
        Token::Mul,
        Token::Num(3),
    ],
);
```
Structs
- `Lexer` — A regex-based lexer.
- `LexerBuilder` — Builder struct for `Lexer`.
- `Tokens` — The iterator type returned by `Lexer::tokens`.