Expand description

A small library for creating regex-based lexers.

Example


use reglex::{RuleList, RuleInput, rule_list, lex};

/// A token produced by the example lexer.
#[derive(Debug, PartialEq)]
enum Token {
    /// The literal keyword `kw`.
    Keyword,
    /// An unsigned integer literal.
    Number(u64),
    /// A `#`-prefixed lowercase word, stored without the leading `#`.
    Hashtag(String),
    /// An opening brace `{`.
    Left,
    /// A closing brace `}`.
    Right,
}

/// Tokenizes `input` into a `Vec<Token>`.
///
/// On failure returns `Err(pos)` — judging by the example below, `pos` is
/// the byte offset at which no rule matched (TODO confirm against `lex` docs).
fn lexer(input: &str) -> Result<Vec<Token>, usize> {
    // Each rule maps a regex to a closure: return Some(token) to emit a
    // token, or None to consume the matched text silently (whitespace).
    // NOTE(review): rule order likely determines precedence between
    // overlapping matches — confirm before reordering.
    let regexes: RuleList<Token> = rule_list! [
        r"kw" => |_| Some(Token::Keyword),
        // s[0] is the whole match (RuleInput aliases regex::Captures).
        r"\d+" => |s: RuleInput| Some(Token::Number(s[0].parse().unwrap())),
        r"\{" => |_| Some(Token::Left),
        r"\}" => |_| Some(Token::Right),
        // s[1] is the first capture group: the word after '#'.
        r"#([a-z]+)" => |s: RuleInput| Some(Token::Hashtag(s[1].to_string())),
        // Skip whitespace: None emits no token.
        r"\s" => |_| None,
    ];

    lex(&regexes, input)
}

fn main() {
    // A well-formed input lexes to the full token sequence, in order.
    let expected = vec![
        Token::Keyword,
        Token::Left,
        Token::Number(12),
        Token::Hashtag("hello".to_string()),
        Token::Number(53),
        Token::Right,
    ];
    assert_eq!(lexer("kw  { 12 #hello 53 }"), Ok(expected));

    // Unmatchable text ("ERROR!" starts at byte 3) makes lexing fail
    // with the position of the failure.
    assert_eq!(lexer("kw ERROR! { 12 #hello 53 }"), Err(3));
}

Re-exports

pub use regex;

Macros

`rule_list` — Generates a `RuleList`.

Functions

`lex` — The lexing function.

Same as lex, but returns tuples of tokens and their positions in the input string.

Type Definitions

`RuleInput` — The input to lexing rules. Alias to `regex::Captures`.

`RuleList` — A list of lexing rules. Can be generated by the `rule_list` macro.