//! Lexer entry point: turns source text into a list of [`TokenItem`]s and
//! records, for each opening delimiter, the index of its matching closer.
use crate::lexer::token_iter::TokenIter;
use crate::token::{TerminalToken, Token, TokenType};
use crate::Flavor;
use std::collections::VecDeque;
mod comment;
mod error;
mod identifier;
mod literal;
mod parse_str;
mod symbol;
mod token_iter;
pub use self::error::{LexerError, LexerErrorType};
/// A token produced by [`tokenize`], augmented with delimiter-pairing data.
#[derive(Debug, Clone, PartialEq)]
pub struct TokenItem<'s> {
    /// The lexed token itself.
    pub token: Token<'s>,
    /// If this token opens a delimited region (`{`, `[`, `(`, attribute
    /// opener), the index into the output `Vec<TokenItem>` of the matching
    /// closing token; `None` for all other tokens.
    pub close_index: Option<usize>,
}
/// Returns the token type that closes `opening`, or `None` if `opening`
/// does not start a delimited region.
fn closing_token(opening: TokenType) -> Option<TokenType> {
    // Only terminal tokens can act as delimiters.
    let terminal = match opening {
        TokenType::Terminal(t) => t,
        _ => return None,
    };
    let close = match terminal {
        TerminalToken::OpenBrace => TerminalToken::CloseBrace,
        TerminalToken::OpenSquare => TerminalToken::CloseSquare,
        TerminalToken::OpenBracket => TerminalToken::CloseBracket,
        TerminalToken::OpenAttributes => TerminalToken::CloseAttributes,
        _ => return None,
    };
    Some(TokenType::Terminal(close))
}
/// A delimiter that has been opened but not yet closed, kept on the
/// stack inside [`tokenize`] until its closer is seen.
struct Layer<'s> {
    /// Index of the opening token in the output `Vec<TokenItem>`.
    open_index: usize,
    /// The token type that will close this layer.
    close_ty: TokenType<'s>,
}
/// Tokenizes `val`, pairing each opening delimiter with the index of its
/// matching closer.
///
/// # Errors
///
/// Returns a [`LexerError`] if the underlying token stream yields an error,
/// or an `UnmatchedOpener` error (reporting the innermost unclosed
/// delimiter) if any opener is never closed.
pub fn tokenize(val: &str, flavor: Flavor) -> Result<Vec<TokenItem>, LexerError> {
    let mut items: Vec<TokenItem> = Vec::new();
    // Stack of delimiters that are currently open; the back is the innermost.
    let mut open_layers: VecDeque<Layer> = VecDeque::new();
    for maybe_token in TokenIter::new(val, flavor) {
        let token = maybe_token?;
        let index = items.len();
        // Does this token close the innermost open delimiter?
        let closes_top = open_layers
            .back()
            .map_or(false, |layer| layer.close_ty == token.ty);
        if closes_top {
            let layer = open_layers
                .pop_back()
                .expect("stack is non-empty: just checked via back()");
            items[layer.open_index].close_index = Some(index);
        }
        // If this token opens a delimiter, remember it until its closer appears.
        if let Some(close_ty) = closing_token(token.ty) {
            open_layers.push_back(Layer {
                open_index: index,
                close_ty,
            });
        }
        items.push(TokenItem {
            token,
            close_index: None,
        });
    }
    // Anything left on the stack is an opener that was never closed; report
    // the innermost one.
    if let Some(layer) = open_layers.back() {
        let open_token = &items[layer.open_index].token;
        Err(LexerError::new(
            LexerErrorType::UnmatchedOpener {
                open: open_token.ty,
                close: layer.close_ty,
            },
            open_token.range.clone(),
        ))
    } else {
        Ok(items)
    }
}