use itertools::Itertools;
use crate::{
objects::Expression,
out::{ErrorType, EvalResult},
token::{
tokentype::{IdentifierType, TokenType},
Token, TokenStream,
},
value::Value,
};
/// A node of the expression tree; alias so parser code reads in tree terms.
pub type Node = Expression;
/// The parsed expression tree: a thin newtype around its root node.
#[derive(Debug)]
pub struct Tree(pub Node);
/// Parses a token stream into an expression [`Tree`].
///
/// Validates bracket pairing first, then indexes all expression tokens by
/// (depth, precedence, position) and recursively builds the tree over the
/// whole stream range.
///
/// # Errors
/// Propagates bracket-validation and node-construction errors.
pub fn build_tree(stream: TokenStream) -> EvalResult<Tree> {
    check_brackets(&stream)?;
    let mut node_infos = sort_node_tokens(&stream)?;
    let full_range = (0, stream.len());
    let root = create_node(&mut node_infos, &stream, None, full_range)?;
    Ok(Tree(root))
}
/// Verifies that brackets in `stream` are balanced.
///
/// # Errors
/// - [`ErrorType::InvalidClosingBracket`] if a closing bracket appears before
///   a matching opening bracket (depth would go negative).
/// - [`ErrorType::MissingClosingBracket`] if some opening bracket is never
///   closed (non-zero depth at end of stream).
fn check_brackets(stream: &TokenStream) -> EvalResult<()> {
    // Running nesting depth; must never dip below zero and must end at zero.
    let mut depth = 0;
    for token in stream {
        match token.r#type {
            TokenType::OpeningBracket => depth += 1,
            TokenType::ClosingBracket => {
                depth -= 1;
                if depth < 0 {
                    return Err(ErrorType::InvalidClosingBracket);
                }
            }
            _ => (),
        }
    }
    // Tail expression instead of `return` inside a match arm (needless_return).
    if depth == 0 {
        Ok(())
    } else {
        Err(ErrorType::MissingClosingBracket)
    }
}
/// An expression token together with the ordering data the parser sorts by.
#[derive(Clone, PartialEq, Debug)]
struct TokenInfo {
    // The token itself (cloned out of the stream).
    pub token: Token,
    // Absolute index of the token in the original stream.
    pub position: usize,
    // Bracket-nesting depth at which the token appears.
    pub depth: u16,
    // Operator precedence as reported by `TokenType::precedence`.
    pub precedence: u16,
}
/// Collects every expression token with its position, bracket depth and
/// precedence, sorted so the best tree-root candidate comes first:
/// shallowest depth, then lowest precedence, then right-most position
/// (right-most so that operators of equal precedence associate left).
///
/// # Errors
/// Propagates `TokenType::precedence` failures.
fn sort_node_tokens(stream: &TokenStream) -> EvalResult<Vec<TokenInfo>> {
    let mut sorted = vec![];
    let mut depth = 0;
    for (position, token) in stream.iter().enumerate() {
        if token.r#type == TokenType::OpeningBracket {
            depth += 1;
        } else if token.r#type == TokenType::ClosingBracket {
            depth -= 1;
        } else if token.r#type.is_expression() {
            let precedence = token.r#type.precedence()?;
            sorted.push(TokenInfo {
                token: token.clone(),
                position,
                depth,
                precedence,
            });
        }
    }
    // `Reverse(position)` replaces `-(position as i16)`: the cast silently
    // truncated/overflowed for streams with more than i16::MAX tokens,
    // corrupting the sort order. Reverse gives the same "larger position
    // first" tie-break without any overflow.
    sorted.sort_by_key(|v| (v.depth, v.precedence, std::cmp::Reverse(v.position)));
    Ok(sorted)
}
/// Recursively builds the expression node rooted at one token.
///
/// `sorted_node_tokens` is the shared, pre-sorted worklist produced by
/// `sort_node_tokens`; each call removes the token it consumes so recursive
/// calls cannot pick it again. `position` selects a specific stream position
/// (`Some`) or the front of the worklist, i.e. the overall best root
/// candidate (`None`). `range` is the half-open stream span `[start, end)`
/// this node may draw operands from.
///
/// # Errors
/// Internal errors for inconsistent worklist state, plus whatever the
/// operator/identifier builders report.
fn create_node(
    sorted_node_tokens: &mut Vec<TokenInfo>,
    stream: &TokenStream,
    position: Option<usize>,
    range: (usize, usize),
) -> EvalResult<Node> {
    // Map the requested stream position to an index in the worklist.
    let index = match position {
        Some(value) => {
            match sorted_node_tokens.iter().position(|x| x.position == value) {
                Some(position) => position,
                None => {
                    return Err(ErrorType::InternalError {
                        message: String::from("trying to remove non-existing token"),
                    });
                }
            }
        }
        None => 0,
    };
    // Empty worklist: an empty top-level expression evaluates to literal 0.
    // NOTE(review): the `position != None` arm looks unreachable — a Some
    // position on an empty list already errored in the match above.
    if sorted_node_tokens.len() == 0 {
        if position == None {
            return Ok(Node::Literal(Value::Int(0)));
        } else {
            return Err(ErrorType::InternalError {
                message: String::from("trying to remove non-existing token"),
            });
        }
    }
    // Consume the chosen token; operand searches below must not see it.
    let token_info = sorted_node_tokens.remove(index);
    // Tokens that can be both binary and unary (presumably `-`): try the
    // binary form first and fall back to unary if no left operand exists.
    if token_info.token.r#type.is_binary_operator() && token_info.token.r#type.is_unary_operator() {
        match build_binary_operator(sorted_node_tokens, stream, &token_info, range) {
            Ok(node) => return Ok(node),
            Err(_) => {
                return Ok(build_unary_operator(
                    sorted_node_tokens,
                    stream,
                    &token_info,
                    range,
                )?)
            }
        }
    } else if token_info.token.r#type.is_binary_operator() {
        return Ok(build_binary_operator(
            sorted_node_tokens,
            stream,
            &token_info,
            range,
        )?);
    } else if token_info.token.r#type.is_unary_operator() {
        return Ok(build_unary_operator(
            sorted_node_tokens,
            stream,
            &token_info,
            range,
        )?);
    } else if token_info.token.r#type.is_union_operator() {
        return Ok(build_union_operator(
            sorted_node_tokens,
            stream,
            &token_info,
            range,
        )?);
    } else {
        // Not an operator: leaf nodes (literal, variable, function call).
        match token_info.token.r#type {
            TokenType::Literal => {
                return Ok(Node::Literal(Value::from_string(token_info.token.value)?))
            }
            TokenType::Identifier(i_type) => {
                let val = &token_info.token.value;
                match i_type {
                    IdentifierType::Var => Ok(Node::Var(val.clone())),
                    IdentifierType::Function => Ok(Node::Func(
                        val.clone(),
                        get_function_parameters(sorted_node_tokens, stream, &token_info)?,
                    )),
                    IdentifierType::Unknown => Err(ErrorType::UnknownToken { token: val.clone() }),
                }
            }
            _ => Err(ErrorType::InternalError {
                message: format!("token `{}` is not a valid node", token_info.token),
            }),
        }
    }
}
/// Builds a unary-operator node whose operand is the best expression found
/// to the right of the operator, within `range`.
///
/// # Errors
/// [`ErrorType::MissingOperatorArgument`] if no operand exists to the right.
fn build_unary_operator(
    sorted_node_tokens: &mut Vec<TokenInfo>,
    stream: &TokenStream,
    token_info: &TokenInfo,
    range: (usize, usize),
) -> EvalResult<Node> {
    // Operand must come from the span strictly after the operator.
    let operand_range = (token_info.position + 1, range.1);
    let operand = get_lowest_precedence_node_in_range(sorted_node_tokens, stream, operand_range)?
        .ok_or(ErrorType::MissingOperatorArgument {
            token: token_info.token.r#type,
        })?;
    Ok(Node::Unary(token_info.token.r#type, Box::new(operand)))
}
/// Builds a binary-operator node: the left operand comes from the span before
/// the operator, the right operand from the span after it, both within `range`.
///
/// # Errors
/// [`ErrorType::MissingOperatorArgument`] if either side has no expression.
fn build_binary_operator(
    sorted_node_tokens: &mut Vec<TokenInfo>,
    stream: &TokenStream,
    token_info: &TokenInfo,
    range: (usize, usize),
) -> EvalResult<Node> {
    let missing = || ErrorType::MissingOperatorArgument {
        token: token_info.token.r#type,
    };
    // Left operand: everything from the range start up to the operator.
    let left_range = (range.0, token_info.position);
    let left = get_lowest_precedence_node_in_range(sorted_node_tokens, stream, left_range)?
        .ok_or_else(missing)?;
    // Right operand: everything after the operator up to the range end.
    let right_range = (token_info.position + 1, range.1);
    let right = get_lowest_precedence_node_in_range(sorted_node_tokens, stream, right_range)?
        .ok_or_else(missing)?;
    Ok(Node::Binary(
        Box::new(left),
        token_info.token.r#type,
        Box::new(right),
    ))
}
/// Finds the best root candidate (shallowest depth, lowest precedence,
/// right-most position) among the remaining worklist tokens inside the
/// half-open stream span `[range.0, range.1)` and builds its node.
///
/// Returns `Ok(None)` when no expression token lies in the span.
///
/// # Errors
/// Propagates `create_node` failures.
fn get_lowest_precedence_node_in_range(
    sorted_node_tokens: &mut Vec<TokenInfo>,
    stream: &TokenStream,
    range: (usize, usize),
) -> EvalResult<Option<Node>> {
    // Only the winner's stream position is needed, so extract it and drop the
    // borrow before the recursive call mutates the worklist (this also avoids
    // the previous full clone of every candidate).
    // `Reverse(position)` replaces `-(position as i16)`, whose cast silently
    // overflowed for positions beyond i16::MAX and corrupted the tie-break.
    let best_position = sorted_node_tokens
        .iter()
        .filter(|x| x.position >= range.0 && x.position < range.1)
        .min_by_key(|x| (x.depth, x.precedence, std::cmp::Reverse(x.position)))
        .map(|x| x.position);
    match best_position {
        None => Ok(None),
        Some(position) => Ok(Some(create_node(
            sorted_node_tokens,
            stream,
            Some(position),
            range,
        )?)),
    }
}
/// Parses the parameter list of a function call: the function token must be
/// immediately followed by an opening bracket; the bracket's contents become
/// the parameters. A union node yields multiple parameters, anything else a
/// single one.
///
/// # Errors
/// [`ErrorType::MissingFunctionParameters`] when no bracket follows the
/// function name or the brackets hold no expression; bracket-matching errors
/// are propagated.
fn get_function_parameters(
    sorted_node_tokens: &mut Vec<TokenInfo>,
    stream: &TokenStream,
    func_token: &TokenInfo,
) -> EvalResult<Vec<Box<Node>>> {
    let func_pos = func_token.position;
    let missing = || ErrorType::MissingFunctionParameters {
        func_name: func_token.token.value.clone(),
    };
    // Guard: the very next token must exist and be an opening bracket.
    let bracket_follows = func_pos + 1 < stream.len()
        && stream[func_pos + 1].r#type == TokenType::OpeningBracket;
    if !bracket_follows {
        return Err(missing());
    }
    // Build the expression spanning the bracket's interior.
    let closing = get_corresponding_closing_bracket(stream, func_pos + 1)?;
    let content_node =
        get_lowest_precedence_node_in_range(sorted_node_tokens, stream, (func_pos + 1, closing))?
            .ok_or_else(missing)?;
    // A union expands to the full parameter list; any other node is a single
    // parameter.
    Ok(match content_node {
        Expression::Union(nodes) => nodes,
        other => vec![Box::new(other)],
    })
}
/// Returns the stream position of the closing bracket matching the opening
/// bracket at `opening_bracket_pos`.
///
/// # Errors
/// [`ErrorType::MissingClosingBracket`] if the bracket is never closed.
fn get_corresponding_closing_bracket(
    stream: &TokenStream,
    opening_bracket_pos: usize,
) -> EvalResult<usize> {
    // Depth of brackets opened *after* the one we are matching.
    let mut inner_depth = 0u32;
    for (index, token) in stream.iter().enumerate().skip(opening_bracket_pos + 1) {
        if token.r#type == TokenType::ClosingBracket {
            if inner_depth == 0 {
                // Fixed: the previous `index.try_into().unwrap()` was a
                // usize-to-usize no-op that only added a spurious panic path.
                return Ok(index);
            }
            inner_depth -= 1;
        } else if token.r#type == TokenType::OpeningBracket {
            inner_depth += 1;
        }
    }
    Err(ErrorType::MissingClosingBracket)
}
/// Builds an n-ary union node (presumably comma-separated function
/// parameters) from all same-type union operators at the same bracket depth
/// inside `range`.
///
/// # Errors
/// [`ErrorType::EmptyUnion`] if any operand span between two operators holds
/// no expression; recursive node-building errors are propagated.
fn build_union_operator(
    sorted_node_tokens: &mut Vec<TokenInfo>,
    stream: &TokenStream,
    token_info: &TokenInfo,
    range: (usize, usize),
) -> EvalResult<Node> {
    // Gather the triggering operator plus all sibling operators of the same
    // type and depth within the range, removing the siblings from the shared
    // worklist so the recursive calls below cannot consume them as nodes.
    // NOTE(review): the removal loop is O(n^2) remove-by-search; positions
    // are unique, so each `elem` matches exactly one worklist entry.
    let mut union_operators = [vec![token_info.clone()], {
        let vec = sorted_node_tokens
            .iter()
            .filter(|x| {
                x.token.r#type == token_info.token.r#type
                    && x.depth == token_info.depth
                    && x.position >= range.0
                    && x.position < range.1
            })
            .cloned()
            .collect_vec();
        let mut index = 0;
        while index < vec.len() {
            let elem = &vec[index];
            sorted_node_tokens.remove(sorted_node_tokens.iter().position(|x| *x == *elem).unwrap());
            index += 1;
        }
        vec
    }]
    .concat();
    // Process operators left to right regardless of worklist order.
    union_operators.sort_by(|a, b| a.position.cmp(&b.position));
    // Slice `range` into one operand span per gap between operators.
    // NOTE(review): the first span starts at range.0 + 1, which assumes
    // range.0 is an opening bracket's position (as in the function-parameter
    // call path) — TODO confirm this holds for every caller.
    let mut ranges: Vec<(usize, usize)> = vec![];
    let mut pos = range.0;
    for elem in union_operators {
        ranges.push((pos + 1, elem.position));
        pos = elem.position;
    }
    ranges.push((pos + 1, range.1));
    // Build one node per span; an empty span means a dangling separator.
    let nodes = {
        let mut vec = vec![];
        for r in ranges {
            vec.push(Box::new(
                match get_lowest_precedence_node_in_range(sorted_node_tokens, stream, r)? {
                    Some(node) => node,
                    None => return Err(ErrorType::EmptyUnion),
                },
            ))
        }
        vec
    };
    Ok(Node::Union(nodes))
}