use crate::lexer::token_iter::TokenIter;
use crate::token::{TerminalToken, Token, TokenType};
use crate::Flavor;
use std::collections::VecDeque;

mod comment;
mod error;
mod identifier;
mod literal;
mod parse_str;
mod symbol;
mod token_iter;

pub use self::error::{LexerError, LexerErrorType};

/// A [`Token`] paired with structural information about its place in the
/// token stream.
#[derive(Debug, Clone, PartialEq)]
pub struct TokenItem<'s> {
    /// The token itself.
    pub token: Token<'s>,

    /// If this token opens a delimiter pair, the index of the matching
    /// closing token in the output.
    pub close_index: Option<usize>,
}

/// Returns the closing token type that matches an opening token type, or
/// `None` if `opening` does not open a delimiter pair.
fn closing_token(opening: TokenType) -> Option<TokenType> {
    match opening {
        TokenType::Terminal(TerminalToken::OpenBrace) => {
            Some(TokenType::Terminal(TerminalToken::CloseBrace))
        }
        TokenType::Terminal(TerminalToken::OpenSquare) => {
            Some(TokenType::Terminal(TerminalToken::CloseSquare))
        }
        TokenType::Terminal(TerminalToken::OpenBracket) => {
            Some(TokenType::Terminal(TerminalToken::CloseBracket))
        }
        TokenType::Terminal(TerminalToken::OpenAttributes) => {
            Some(TokenType::Terminal(TerminalToken::CloseAttributes))
        }
        _ => None,
    }
}

/// An open delimiter that has not yet been matched: the index of its opening
/// token in the output and the token type that will close it.
struct Layer<'s> {
    open_index: usize,
    close_ty: TokenType<'s>,
}

/// Tokenizes an input string, pairing opening and closing delimiters as it
/// goes.
///
/// Returns an error if the input contains an invalid token or an opening
/// delimiter that is never closed.
pub fn tokenize(val: &str, flavor: Flavor) -> Result<Vec<TokenItem>, LexerError> {
    let mut items = Vec::<TokenItem>::new();
    let mut layers = VecDeque::<Layer>::new();

    for maybe_token in TokenIter::new(val, flavor) {
        let token = maybe_token?;
        let token_index = items.len();

        // If this token closes the innermost open layer, record the match on
        // the opening token and pop the layer.
        if let Some(top_layer) = layers.back() {
            if top_layer.close_ty == token.ty {
                items[top_layer.open_index].close_index = Some(token_index);
                layers.pop_back();
            }
        }

        // If this token opens a new delimiter pair, start tracking it.
        if let Some(close_ty) = closing_token(token.ty) {
            layers.push_back(Layer {
                open_index: token_index,
                close_ty,
            });
        }

        items.push(TokenItem {
            token,
            close_index: None,
        });
    }

    // Any layer still open at this point means its opening delimiter was
    // never matched before the end of the input.
    match layers.back() {
        None => Ok(items),
        Some(layer) => {
            let open_token = &items[layer.open_index].token;
            Err(LexerError::new(
                LexerErrorType::UnmatchedOpener {
                    open: open_token.ty,
                    close: layer.close_ty,
                },
                open_token.range.clone(),
            ))
        }
    }
}
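
// A minimal sketch of how the pairing behaves, assuming `Flavor` has a
// `SquirrelRespawn` variant (an assumption here; substitute whichever variant
// the crate actually defines). For input like `{}`, the opening brace's
// `close_index` should point at the matching close brace, and a `{` that is
// never closed should surface as an `UnmatchedOpener` error.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn braces_are_paired() {
        // `Flavor::SquirrelRespawn` is an assumed variant name.
        let items = tokenize("{}", Flavor::SquirrelRespawn).unwrap();
        assert_eq!(
            items[0].token.ty,
            TokenType::Terminal(TerminalToken::OpenBrace)
        );
        // The `{` at index 0 is closed by the `}` at index 1.
        assert_eq!(items[0].close_index, Some(1));
        // Closing tokens never carry a `close_index` of their own.
        assert_eq!(items[1].close_index, None);
    }

    #[test]
    fn unmatched_opener_is_an_error() {
        // An opener with no matching closer should be rejected.
        assert!(tokenize("{", Flavor::SquirrelRespawn).is_err());
    }
}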