1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
use range::Range;
use read_token;
use std::rc::Rc;
use {
DebugId,
MetaData,
ParseError,
ParseResult,
Tokenizer,
TokenizerState,
};
/// Matches a specific token text in the input, optionally recording the
/// match as a boolean meta-data property.
#[derive(Clone)]
pub struct Token {
/// The token text to match against the input.
pub text: Rc<String>,
/// When `true`, the boolean stored under `property` is negated.
pub inverted: bool,
/// Optional property name; when set, a `MetaData::Bool` is recorded on match.
pub property: Option<Rc<String>>,
/// Identifier included in `ExpectedToken` parse errors for debugging.
pub debug_id: DebugId,
}
impl Token {
    /// Parses this token at `offset` in `chars`.
    ///
    /// On a match, returns the matched range and the resulting state:
    /// when `property` is set, a `MetaData::Bool` (value negated when
    /// `inverted` is `true`) is recorded via `tokenizer.data` and the
    /// new state is returned; otherwise the input state is cloned.
    ///
    /// On a mismatch, returns an `ExpectedToken` error carrying an
    /// empty range at `offset` and this token's `debug_id`.
    pub fn parse(
        &self,
        tokenizer: &mut Tokenizer,
        state: &TokenizerState,
        chars: &[char],
        offset: usize
    ) -> ParseResult<TokenizerState> {
        match read_token::token(&self.text, chars, offset) {
            Some(range) => {
                let new_state = match self.property {
                    // Record the match as a boolean property.
                    Some(ref name) => tokenizer.data(
                        MetaData::Bool(name.clone(), !self.inverted),
                        state,
                        range
                    ),
                    // Nothing to record; the state is unchanged.
                    None => state.clone(),
                };
                Ok((range, new_state, None))
            }
            None => Err((
                Range::new(offset, 0),
                ParseError::ExpectedToken(self.text.clone(), self.debug_id)
            )),
        }
    }
}
#[cfg(test)]
mod tests {
use super::super::*;
use std::rc::Rc;
use range::Range;
#[test]
fn expected_token() {
    // Input that does not begin with the token we are looking for.
    let input: Vec<char> = ")".chars().collect();
    let open_paren = Token {
        debug_id: 0,
        text: Rc::new("(".into()),
        inverted: false,
        property: None
    };
    let mut tokenizer = Tokenizer::new();
    let state = TokenizerState::new();
    let result = open_paren.parse(&mut tokenizer, &state, &input, 0);
    // A mismatch yields an empty range at the offset plus the
    // expected text and debug id of the failing token.
    let expected = Err((
        Range::new(0, 0),
        ParseError::ExpectedToken(Rc::new("(".into()), 0)
    ));
    assert_eq!(result, expected);
}
#[test]
fn successful() {
    let source = "fn foo()";
    let input: Vec<char> = source.chars().collect();

    // A token without a property leaves the state untouched and
    // records no meta data.
    let fn_keyword = Token {
        debug_id: 0,
        text: Rc::new("fn ".into()),
        inverted: false,
        property: None
    };
    let mut tokenizer = Tokenizer::new();
    let state = TokenizerState::new();
    let result = fn_keyword.parse(&mut tokenizer, &state, &input, 0);
    assert_eq!(result, Ok((Range::new(0, 3), state, None)));
    assert_eq!(tokenizer.tokens.len(), 0);

    // A token with a property stores `true` when not inverted.
    let mut tokenizer = Tokenizer::new();
    let prop: Rc<String> = Rc::new("has_arguments".into());
    let open_paren = Token {
        debug_id: 0,
        text: Rc::new("(".into()),
        inverted: false,
        property: Some(prop.clone())
    };
    let state = TokenizerState::new();
    let result = open_paren.parse(&mut tokenizer, &state, &input[6..], 6);
    assert_eq!(result, Ok((Range::new(6, 1), TokenizerState(1), None)));
    assert_eq!(tokenizer.tokens.len(), 1);
    assert_eq!(&tokenizer.tokens[0].1,
               &MetaData::Bool(prop.clone(), true));

    // An inverted token with a property stores `false` instead.
    let mut tokenizer = Tokenizer::new();
    let prop: Rc<String> = Rc::new("has_no_arguments".into());
    let open_paren = Token {
        debug_id: 0,
        text: Rc::new("(".into()),
        inverted: true,
        property: Some(prop.clone())
    };
    let state = TokenizerState::new();
    let result = open_paren.parse(&mut tokenizer, &state, &input[6..], 6);
    assert_eq!(result, Ok((Range::new(6, 1), TokenizerState(1), None)));
    assert_eq!(tokenizer.tokens.len(), 1);
    assert_eq!(&tokenizer.tokens[0].1,
               &MetaData::Bool(prop.clone(), false));
}
}