// alkale 2.0.0 — a simple LL(1) lexer library for Rust.
// Documentation: see the alkale crate docs (docs.rs).
//! This is a simple [brainfuck](https://en.wikipedia.org/wiki/Brainfuck) lexer.
//! This is intended to show how to use a [LexerContext] at the very, very basic level.

#![allow(dead_code)]

use alkale::{map_single_char_token, FinalizedLexerResult, LexerResult, SourceCodeScanner};

/// Represents the eight brainfuck tokens, one per command character
/// recognized by [tokenize] (`+`, `-`, `>`, `<`, `[`, `]`, `.`, `,`).
#[derive(Debug, Clone, Copy)]
enum BFTokenType {
    /// `+` — increment the current cell.
    Increment,
    /// `-` — decrement the current cell.
    Decrement,
    /// `>` — move the data pointer right.
    MoveRight,
    /// `<` — move the data pointer left.
    MoveLeft,
    /// `[` — begin a loop.
    BeginWhile,
    /// `]` — end a loop.
    EndWhile,
    /// `,` — read one byte of input.
    ReadIO,
    /// `.` — write one byte of output.
    WriteIO,
}

/// Tokenizes a string according to brainfuck grammar.
fn tokenize(source: &str) -> FinalizedLexerResult<BFTokenType> {
    use BFTokenType::{
        BeginWhile, Decrement, EndWhile, Increment, MoveLeft, MoveRight, ReadIO, WriteIO,
    };

    // Create the reader context
    let context = SourceCodeScanner::new(source);
    let mut result = LexerResult::new();

    // Repeat this code while there are more characters in the source code.
    while context.has_next() {
        // Attempt to map these characters to their respective tokens.
        map_single_char_token!(&context, &mut result,
            '+' => Increment,
            '-' => Decrement,
            '>' => MoveRight,
            '<' => MoveLeft,
            '[' => BeginWhile,
            ']' => EndWhile,
            '.' => WriteIO,
            ',' => ReadIO,
        );

        context.skip();
    }

    // Return the result
    result.finalize()
}

/// Entry point: lexes a sample brainfuck program and pretty-prints the result.
fn main() {
    // A small brainfuck program used as demo input; non-command characters
    // (whitespace, newlines) are ignored by the lexer.
    let source = r#"
    ++++++++[>++++[>
    ++>+++>+++>+<<<<
    -]>+>+>->>+[<]<-
    ]>>.>---.+++++++
    ..+++.>>.<-.<.++
    +.------.-------
    -.>>+.>++.
    "#;

    // Lex and dump the tokens with debug pretty-printing.
    println!("{:#?}", tokenize(source));
}