1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
use span::{TextPos, Span};
use token::Token;
/// An owned buffer of tokens, presumably produced by a lexer over source
/// text with lifetime `'s`, plus the text position just past the last token.
#[derive(Debug, Clone, PartialEq)]
pub struct TokensBuf<'s> {
    // The tokens in source order.
    pub tokens: Vec<Token<'s>>,
    // Position in the source text immediately after the final token
    // (used as the end position when slicing runs to the buffer's end).
    pub end: TextPos,
}
/// A borrowed view (`'t`) of a slice of tokens over source text (`'s`),
/// plus the text position just past the last token in the view.
/// `Copy` because it is only a slice reference and a position.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct TokensRef<'t, 's: 't> {
    // Borrowed slice of the underlying token storage.
    pub tokens: &'t [Token<'s>],
    // Position in the source text immediately after the final token
    // of this view.
    pub end: TextPos,
}
impl<'s> TokensBuf<'s> {
    /// Borrows the whole owned buffer as a `TokensRef` view.
    ///
    /// BUG FIX: the original returned `TokensRef<'s, 't>`, swapping the
    /// declared parameter order `TokensRef<'t, 's: 't>`. That requires
    /// `'t: 's` while `&'t self` only provides `'s: 't`, forcing the two
    /// lifetimes to be equal (and failing to compile for any shorter
    /// borrow). Returning `TokensRef<'t, 's>` matches the declaration and
    /// the `impl<'t, 's: 't> TokensRef<'t, 's>` block below.
    pub fn borrow<'t>(&'t self) -> TokensRef<'t, 's> {
        TokensRef {
            tokens: &self.tokens[..],
            end: self.end,
        }
    }
}
impl<'t, 's: 't> TokensRef<'t, 's> {
    /// Returns the sub-view covering tokens `[start_index, end_index)`.
    ///
    /// The resulting `end` position is the start of the first token
    /// *excluded* from the range (`tokens[end_index]`), or this view's own
    /// `end` when the range runs to the last token.
    ///
    /// # Panics
    /// Panics if `start_index > end_index` or `end_index > tokens.len()`.
    pub fn range(&self, start_index: usize, end_index: usize) -> TokensRef<'t, 's> {
        TokensRef {
            tokens: &self.tokens[start_index..end_index],
            // BUG FIX: was `get(end_index + 1)`, which skipped the first
            // excluded token and reported a too-late end position. The
            // token at `end_index` is the first one past the slice, so its
            // start is where this range ends — the same convention
            // `split_around` and `trim_whitespace` use below.
            end: match self.tokens.get(end_index) {
                Some(token) => token.start,
                None => self.end,
            },
        }
    }
    /// Returns the sub-view from `start_index` to the end of this view.
    ///
    /// # Panics
    /// Panics if `start_index > tokens.len()`.
    pub fn range_from(&self, start_index: usize) -> TokensRef<'t, 's> {
        TokensRef {
            tokens: &self.tokens[start_index..],
            end: self.end,
        }
    }
    /// Splits this view around the token at `index`, dropping that token:
    /// returns (`tokens[..index]`, `tokens[index + 1..]`). The left view
    /// ends where the dropped token starts; the right view keeps `end`.
    ///
    /// # Panics
    /// Panics if `index >= tokens.len()`.
    pub fn split_around(&self, index: usize) -> (TokensRef<'t, 's>, TokensRef<'t, 's>) {
        let left = TokensRef {
            tokens: &self.tokens[..index],
            end: self.tokens[index].start,
        };
        let right = TokensRef {
            tokens: &self.tokens[(index + 1)..],
            end: self.end,
        };
        (left, right)
    }
    /// Returns a sub-view with leading and trailing whitespace tokens
    /// removed. If every token is whitespace (or there are none), the
    /// result is an empty view that keeps this view's `end` position.
    pub fn trim_whitespace(&self) -> TokensRef<'t, 's> {
        // First non-whitespace token; all-whitespace input short-circuits.
        let start_index = match self.tokens.iter().position(|t| !t.is_whitespace()) {
            Some(index) => index,
            None => {
                return TokensRef {
                    tokens: &[],
                    end: self.end,
                }
            }
        };
        // A non-whitespace token exists (found above), so rposition
        // cannot fail. `+ 1` makes the bound exclusive.
        let end_index = self
            .tokens
            .iter()
            .rposition(|t| !t.is_whitespace())
            .expect("non-whitespace token found by forward scan")
            + 1;
        // End position: start of the first trimmed trailing token, or the
        // view's own end when nothing was trimmed from the back.
        let end_pos = if end_index == self.tokens.len() {
            self.end
        } else {
            self.tokens[end_index].start
        };
        TokensRef {
            tokens: &self.tokens[start_index..end_index],
            end: end_pos,
        }
    }
    /// Returns the source span covered by this view: from the start of the
    /// first token to `end`. An empty view yields the empty span
    /// `end..end`.
    pub fn span(&self) -> Span {
        Span {
            start: self.tokens.first().map_or(self.end, |t| t.start),
            end: self.end,
        }
    }
}