sentience-tokenize 0.2.3

A tiny, zero-dependency Rust tokenizer (identifiers, numbers, strings, parentheses, operators, keywords).
Documentation
use sentience_tokenize::{tokenize, LineMap};

/// Demo: tokenize a small source snippet and print each token's kind
/// together with its line/column span, resolved via `LineMap`.
fn main() {
    let src = r#"// demo
let rule greet(name) = "hi, " + name
"#;
    let map = LineMap::new(src);

    // Bail out early on a lex error; otherwise keep the token list.
    let toks = match tokenize(src) {
        Ok(toks) => toks,
        Err(err) => {
            eprintln!("error: {}", err);
            return;
        }
    };

    for tok in toks {
        // Convert byte offsets into human-readable 1-based positions.
        let (start_line, start_col) = map.to_line_col(tok.span.start);
        let (end_line, end_col) = map.to_line_col(tok.span.end);
        println!(
            "{:?} @{}:{}..{}:{}",
            tok.kind, start_line, start_col, end_line, end_col
        );
    }
}