use std::collections::HashSet;
use std::iter::Peekable;
use std::str::Chars;
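
/// A lexical token produced by scanning a SQL query string.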
#[derive(Debug, Clone, PartialEq)]
pub enum Token {
    Identifier(String),
    Keyword(String),
    Number(String),
    String(String),
    Comma,
    Whitespace,
    Eq,
    Neq,
    Lt,
    Gt,
    LtEq,
    GtEq,
    Plus,
    Minus,
    Mult,
    Div,
    LParen,
    RParen,
    Period,
}
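
/// Error returned when the tokenizer encounters input it cannot handle.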
#[derive(Debug)]
pub struct TokenizerError(String);
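
/// Reserved words that are emitted as `Token::Keyword` rather than
/// `Token::Identifier`.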
static KEYWORDS: &[&str] = &[
    "SELECT", "FROM", "WHERE", "LIMIT", "ORDER", "GROUP", "BY", "HAVING", "UNION", "ALL",
    "INSERT", "UPDATE", "DELETE", "IN", "NOT", "NULL", "SET", "CREATE", "EXTERNAL", "TABLE",
    "ASC", "DESC", "AND", "OR", "AS", "VARCHAR", "FLOAT", "DOUBLE", "INT", "INTEGER",
];
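
/// A simple hand-written SQL tokenizer that converts a query string into a
/// vector of tokens.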
pub struct Tokenizer {
    keywords: HashSet<String>,
    pub query: String,
}
impl Tokenizer {
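    /// Create a tokenizer for the given query string.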
    pub fn new(query: &str) -> Self {
        Tokenizer {
            keywords: KEYWORDS.iter().map(|k| k.to_string()).collect(),
            query: query.to_string(),
        }
    }
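
    /// Tokenize the whole query, dropping whitespace tokens from the result.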
    pub fn tokenize(&mut self) -> Result<Vec<Token>, TokenizerError> {
        let mut peekable = self.query.chars().peekable();
        let mut tokens: Vec<Token> = vec![];
        while let Some(token) = self.next_token(&mut peekable)? {
            tokens.push(token);
        }
        Ok(tokens
            .into_iter()
            .filter(|t| *t != Token::Whitespace)
            .collect())
    }
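
    /// Scan the next token from the character stream; returns `Ok(None)`
    /// once the input is exhausted.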
    fn next_token(&self, chars: &mut Peekable<Chars<'_>>) -> Result<Option<Token>, TokenizerError> {
        match chars.peek() {
            Some(&ch) => match ch {
                ' ' | '\t' | '\n' => {
                    chars.next();
                    Ok(Some(Token::Whitespace))
                }
                'a'..='z' | 'A'..='Z' | '_' | '@' => {
                    let mut s = String::new();
                    while let Some(&ch) = chars.peek() {
                        match ch {
                            // '@' must also be accepted here; otherwise a leading
                            // '@' is never consumed and the tokenizer loops forever.
                            'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '@' => {
                                chars.next();
                                s.push(ch);
                            }
                            _ => break,
                        }
                    }
                    // Keyword matching is case-sensitive: only upper-case
                    // spellings such as SELECT are recognized as keywords.
                    if self.keywords.contains(&s) {
                        Ok(Some(Token::Keyword(s)))
                    } else {
                        Ok(Some(Token::Identifier(s)))
                    }
                }
                '\'' => {
                    // Single-quoted string literal. There is no escape handling,
                    // and an unterminated literal runs to the end of the input.
                    let mut s = String::new();
                    chars.next();
                    while let Some(&ch) = chars.peek() {
                        match ch {
                            '\'' => {
                                chars.next();
                                break;
                            }
                            _ => {
                                chars.next();
                                s.push(ch);
                            }
                        }
                    }
                    Ok(Some(Token::String(s)))
                }
                '0'..='9' => {
                    // Numeric literal. Note this accepts any number of '.'
                    // characters, so "1.2.3" is tokenized as a single number.
                    let mut s = String::new();
                    while let Some(&ch) = chars.peek() {
                        match ch {
                            '0'..='9' | '.' => {
                                chars.next();
                                s.push(ch);
                            }
                            _ => break,
                        }
                    }
                    Ok(Some(Token::Number(s)))
                }
                ',' => {
                    chars.next();
                    Ok(Some(Token::Comma))
                }
                '(' => {
                    chars.next();
                    Ok(Some(Token::LParen))
                }
                ')' => {
                    chars.next();
                    Ok(Some(Token::RParen))
                }
                '+' => {
                    chars.next();
                    Ok(Some(Token::Plus))
                }
                '-' => {
                    chars.next();
                    Ok(Some(Token::Minus))
                }
                '*' => {
                    chars.next();
                    Ok(Some(Token::Mult))
                }
                '/' => {
                    chars.next();
                    Ok(Some(Token::Div))
                }
                '=' => {
                    chars.next();
                    Ok(Some(Token::Eq))
                }
                '.' => {
                    chars.next();
                    Ok(Some(Token::Period))
                }
                '!' => {
                    chars.next();
                    match chars.peek() {
                        Some(&ch) => match ch {
                            '=' => {
                                chars.next();
                                Ok(Some(Token::Neq))
                            }
                            _ => Err(TokenizerError(format!(
                                "expected '=' after '!', found '{}'",
                                ch
                            ))),
                        },
                        None => Err(TokenizerError(String::from(
                            "unexpected end of input after '!'",
                        ))),
                    }
                }
                '<' => {
                    chars.next();
                    match chars.peek() {
                        Some(&ch) => match ch {
                            '=' => {
                                chars.next();
                                Ok(Some(Token::LtEq))
                            }
                            // '<>' is the standard SQL spelling of not-equal.
                            '>' => {
                                chars.next();
                                Ok(Some(Token::Neq))
                            }
                            _ => Ok(Some(Token::Lt)),
                        },
                        None => Ok(Some(Token::Lt)),
                    }
                }
                '>' => {
                    chars.next();
                    match chars.peek() {
                        Some(&ch) => match ch {
                            '=' => {
                                chars.next();
                                Ok(Some(Token::GtEq))
                            }
                            _ => Ok(Some(Token::Gt)),
                        },
                        None => Ok(Some(Token::Gt)),
                    }
                }
                _ => Err(TokenizerError(format!(
                    "unhandled char '{}' in tokenizer",
                    ch
                ))),
            },
            None => Ok(None),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenize_select_1() {
        let sql = String::from("SELECT 1");
        let mut tokenizer = Tokenizer::new(&sql);
        let tokens = tokenizer.tokenize().unwrap();
        let expected = vec![
            Token::Keyword(String::from("SELECT")),
            Token::Number(String::from("1")),
        ];
        compare(expected, tokens);
    }

    #[test]
    fn tokenize_scalar_function() {
        let sql = String::from("SELECT sqrt(1)");
        let mut tokenizer = Tokenizer::new(&sql);
        let tokens = tokenizer.tokenize().unwrap();
        let expected = vec![
            Token::Keyword(String::from("SELECT")),
            Token::Identifier(String::from("sqrt")),
            Token::LParen,
            Token::Number(String::from("1")),
            Token::RParen,
        ];
        compare(expected, tokens);
    }

    #[test]
    fn tokenize_simple_select() {
        let sql = String::from("SELECT * FROM customer WHERE id = 1 LIMIT 5");
        let mut tokenizer = Tokenizer::new(&sql);
        let tokens = tokenizer.tokenize().unwrap();
        let expected = vec![
            Token::Keyword(String::from("SELECT")),
            Token::Mult,
            Token::Keyword(String::from("FROM")),
            Token::Identifier(String::from("customer")),
            Token::Keyword(String::from("WHERE")),
            Token::Identifier(String::from("id")),
            Token::Eq,
            Token::Number(String::from("1")),
            Token::Keyword(String::from("LIMIT")),
            Token::Number(String::from("5")),
        ];
        compare(expected, tokens);
    }

    #[test]
    fn tokenize_string_predicate() {
        let sql = String::from("SELECT * FROM customer WHERE salary != 'Not Provided'");
        let mut tokenizer = Tokenizer::new(&sql);
        let tokens = tokenizer.tokenize().unwrap();
        let expected = vec![
            Token::Keyword(String::from("SELECT")),
            Token::Mult,
            Token::Keyword(String::from("FROM")),
            Token::Identifier(String::from("customer")),
            Token::Keyword(String::from("WHERE")),
            Token::Identifier(String::from("salary")),
            Token::Neq,
            Token::String(String::from("Not Provided")),
        ];
        compare(expected, tokens);
    }
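
    // Additional check (not in the original suite): the two-character
    // comparison operators, including the '<>' spelling of not-equal.
    #[test]
    fn tokenize_comparison_operators() {
        let sql = String::from("SELECT * FROM customer WHERE id <= 5 AND id <> 3");
        let mut tokenizer = Tokenizer::new(&sql);
        let tokens = tokenizer.tokenize().unwrap();
        let expected = vec![
            Token::Keyword(String::from("SELECT")),
            Token::Mult,
            Token::Keyword(String::from("FROM")),
            Token::Identifier(String::from("customer")),
            Token::Keyword(String::from("WHERE")),
            Token::Identifier(String::from("id")),
            Token::LtEq,
            Token::Number(String::from("5")),
            Token::Keyword(String::from("AND")),
            Token::Identifier(String::from("id")),
            Token::Neq,
            Token::Number(String::from("3")),
        ];
        compare(expected, tokens);
    }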

    fn compare(expected: Vec<Token>, actual: Vec<Token>) {
        assert_eq!(expected, actual);
    }
}