use crate::token::*;
use std::iter::Peekable;
/// Consumes a maximal run of digit / '.' characters from the stream and
/// returns it as a `Token::Num`.
///
/// The first character that cannot belong to a numeric literal is left in
/// the stream for the caller.
///
/// # Panics
/// Panics if the collected lexeme is not a valid `f64` (e.g. "1.2.3" or a
/// lone "."), since the tokenizer has no error channel.
fn get_literal<T: Iterator<Item = char>>(it: &mut Peekable<T>) -> Token {
    let mut lexeme = String::new();
    while let Some(&c) = it.peek() {
        match c {
            '0'..='9' | '.' => {
                lexeme.push(c);
                it.next();
            }
            _ => break,
        }
    }
    Token::Num(
        lexeme
            .parse::<f64>()
            // `unwrap_or_else` builds the panic message only on failure;
            // `expect(&format!(...))` allocated it on every call
            // (Clippy `expect_fun_call`).
            .unwrap_or_else(|_| panic!("Could not parse '{}' as an f64!", lexeme)),
    )
}
/// Consumes a maximal run of ASCII-alphabetic characters and returns
/// `Token::Fn` if the name is immediately followed by '(' (a call), or
/// `Token::Var` otherwise.
///
/// The '(' itself is NOT consumed: it is left in the stream so that
/// `tokenize` emits the `Token::LP` for it.
fn get_ident<T: Iterator<Item = char>>(it: &mut Peekable<T>) -> Token {
    let mut lexeme = String::new();
    let mut is_fn = false;
    while let Some(&c) = it.peek() {
        match c {
            'a'..='z' | 'A'..='Z' => {
                lexeme.push(c);
                // BUG FIX: the original never advanced the iterator, so
                // `peek` returned the same character forever (infinite loop).
                it.next();
            }
            '(' => {
                // BUG FIX: this branch was `todo!()`. A '(' directly after
                // the name marks a function identifier.
                is_fn = true;
                break;
            }
            _ => break,
        }
    }
    match is_fn {
        true => Token::Fn(lexeme),
        false => Token::Var(lexeme),
    }
}
/// Splits `expr` into a flat `Vec<Token>`.
///
/// `is_sub` tracks whether the previous meaningful token can end an operand
/// (number, identifier, ')', or postfix '!'); it disambiguates '-' between
/// binary subtraction and unary negation. Unrecognized characters
/// (whitespace, etc.) are silently skipped.
pub fn tokenize(expr: &str) -> Vec<Token> {
    // Upper bound: at most one token per input character.
    let mut tokens = Vec::with_capacity(expr.len());
    let mut stream = expr.chars().peekable();
    let mut is_sub: bool = false;
    while let Some(&c) = stream.peek() {
        match c {
            '0'..='9' => {
                tokens.push(get_literal(&mut stream));
                is_sub = true;
            }
            'a'..='z' | 'A'..='Z' => {
                tokens.push(get_ident(&mut stream));
                is_sub = true;
            }
            '(' => {
                tokens.push(Token::LP);
                // After '(' a '-' is unary again, e.g. "(-3".
                is_sub = false;
                stream.next();
            }
            ')' => {
                tokens.push(Token::RP);
                // BUG FIX: ')' closes an operand, so a following '-' is
                // binary subtraction, e.g. "(1+2)-3".
                is_sub = true;
                stream.next();
            }
            '-' => {
                if is_sub {
                    tokens.push(Token::Bin(Op::SUB));
                    is_sub = false;
                } else {
                    tokens.push(Token::Una(Op::NEG));
                }
                stream.next();
            }
            '+' | '*' | '/' | '^' => {
                let op = Op::from_char(c).expect("Could not parse operator");
                tokens.push(Token::Bin(op));
                is_sub = false;
                stream.next();
            }
            '!' => {
                tokens.push(Token::Una(Op::FAC));
                // BUG FIX: postfix '!' ends an operand, so '-' in "3!-2"
                // must be binary subtraction.
                is_sub = true;
                stream.next();
            }
            _ => {
                // Skip whitespace and any other unrecognized character.
                stream.next();
            }
        }
    }
    tokens
}