#![allow(dead_code)]
use alkale::{map_single_char_token, FinalizedLexerResult, LexerResult, SourceCodeScanner};
/// The eight Brainfuck instructions, one variant per instruction character
/// (see the `tokenize` mapping: `+ - > < [ ] . ,`).
///
/// `PartialEq`/`Eq`/`Hash` are derived in addition to the original
/// `Debug`/`Clone`/`Copy` so tokens can be compared and used as map/set
/// keys — a zero-cost, backward-compatible widening for a fieldless enum.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum BFTokenType {
    /// `+` — increment the current cell.
    Increment,
    /// `-` — decrement the current cell.
    Decrement,
    /// `>` — move the data pointer one cell right.
    MoveRight,
    /// `<` — move the data pointer one cell left.
    MoveLeft,
    /// `[` — begin a while-nonzero loop.
    BeginWhile,
    /// `]` — end a while-nonzero loop.
    EndWhile,
    /// `,` — read one byte from input.
    ReadIO,
    /// `.` — write the current cell to output.
    WriteIO,
}
fn tokenize(source: &str) -> FinalizedLexerResult<BFTokenType> {
use BFTokenType::{
BeginWhile, Decrement, EndWhile, Increment, MoveLeft, MoveRight, ReadIO, WriteIO,
};
let context = SourceCodeScanner::new(source);
let mut result = LexerResult::new();
while context.has_next() {
map_single_char_token!(&context, &mut result,
'+' => Increment,
'-' => Decrement,
'>' => MoveRight,
'<' => MoveLeft,
'[' => BeginWhile,
']' => EndWhile,
'.' => WriteIO,
',' => ReadIO,
);
context.skip();
}
result.finalize()
}
/// Entry point: lexes a sample Brainfuck program and pretty-prints the
/// resulting token stream via the tokens' `Debug` implementation.
fn main() {
    // Sample program text (byte-for-byte; non-instruction characters such
    // as the surrounding newlines are skipped by the lexer).
    let source = r#"
++++++++[>++++[>
++>+++>+++>+<<<<
-]>+>+>->>+[<]<-
]>>.>---.+++++++
..+++.>>.<-.<.++
+.------.-------
-.>>+.>++.
"#;

    let tokens = tokenize(source);
    println!("{:#?}", tokens);
}