serial_unit_testing/parser/mod.rs

use std::fs;
use std::io::{BufReader, Read};
use regex::Regex;
use crate::tests::{TestCase, TestSuite, TestCaseSettings, TestSuiteSettings};
use crate::utils::TextFormat;

mod error;
mod token;
mod string_util;
mod char_util;
mod lexer;
mod finite_state_machine;
mod options;

use self::lexer::Lexer;
use self::token::{Token, TokenType};
use self::error::Error;
use self::finite_state_machine::FiniteStateMachine;
use self::options::{set_test_option, set_group_option};

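/// Parses the given file into a list of test suites, using the default test
/// case settings for every test.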
pub fn parse_file(file: &mut fs::File) -> Result<Vec<TestSuite>, Error> {
    parse_file_with_default_settings(file, Default::default())
}

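/// Reads the whole file into memory, runs the lexer over its contents and
/// analyses the resulting tokens. The given settings are used as the defaults
/// for every parsed test case.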
pub fn parse_file_with_default_settings(file: &mut fs::File, default_test_settings: TestCaseSettings) -> Result<Vec<TestSuite>, Error> {
    let mut reader = BufReader::new(file);
    let mut content = String::new();

    if reader.read_to_string(&mut content).is_err() {
        return Err(Error::ReadFileError);
    }

    let mut lexer = Lexer::new(content);
    let tokens = lexer.get_tokens();

    analyse_tokens(tokens, default_test_settings)
}

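/// Splits the token stream into logical lines and analyses each line either as
/// a test group header or as a single test, depending on its first token.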
fn analyse_tokens(tokens: Vec<Token>, default_test_settings: TestCaseSettings) -> Result<Vec<TestSuite>, Error> {
    let mut lines: Vec<Vec<Token>> = Vec::new();
    let mut line: Vec<Token> = Vec::new();

    for token in tokens {
        if token.token_type == TokenType::Illegal {
            return Err(Error::IllegalToken(token.value, token.line, token.column));
        }

        if token.token_type == TokenType::Newline {
            if !line.is_empty() {
                lines.push(line);

                line = Vec::new();
            }

            continue;
        }

        line.push(token);
    }

    let mut test_suites: Vec<TestSuite> = Vec::new();

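    // State machine that validates group header lines: an opening group
    // parenthesis, an optional group identifier, any number of option/value
    // pairs and the closing group parenthesis. State 4 is the only accepting
    // state.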
    let group_state_machine = FiniteStateMachine::new(1, vec![4], |state, token| -> u32 {
        match state {
            1 if token.token_type == TokenType::LeftGroupParenthesis => 2,
            2 if token.token_type == TokenType::Identifier => 3,
            2 if token.token_type == TokenType::ContentSeparator => 5,
            3 if token.token_type == TokenType::RightGroupParenthesis => 4,
            3 if token.token_type == TokenType::ContentSeparator => 5,
            5 if token.token_type == TokenType::Identifier => 6,
            6 if token.token_type == TokenType::OptionSeparator => 7,
            7 if token.token_type == TokenType::Identifier => 3,
            _ => 0
        }
    });

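    // State machine that validates test lines: an optional parenthesised
    // name/options prefix, the input content (with an optional format
    // specifier), a direction separator and the expected output (again with an
    // optional format specifier). State 9 is the only accepting state.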
    let test_state_machine = FiniteStateMachine::new(1, vec![9], |state, token| -> u32 {
        match state {
            1 if token.token_type == TokenType::LeftTestParenthesis => 2,
            1 if token.token_type == TokenType::FormatSpecifier => 5,
            1 if token.token_type == TokenType::Content => 5,
            2 if token.token_type == TokenType::Identifier => 3,
            2 if token.token_type == TokenType::ContentSeparator => 10,
            3 if token.token_type == TokenType::RightTestParenthesis => 4,
            3 if token.token_type == TokenType::ContentSeparator => 10,
            4 if token.token_type == TokenType::FormatSpecifier => 5,
            4 if token.token_type == TokenType::Content => 6,
            5 if token.token_type == TokenType::Content => 6,
            6 if token.token_type == TokenType::DirectionSeparator => 7,
            7 if token.token_type == TokenType::FormatSpecifier => 8,
            7 if token.token_type == TokenType::Content => 9,
            8 if token.token_type == TokenType::Content => 9,
            10 if token.token_type == TokenType::Identifier => 11,
            11 if token.token_type == TokenType::OptionSeparator => 12,
            11 if token.token_type == TokenType::ContentSeparator => 10,
            11 if token.token_type == TokenType::RightTestParenthesis => 4,
            12 if token.token_type == TokenType::Identifier => 3,
            _ => 0
        }
    });

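    // Dispatch every line to the matching analysis step based on its first
    // token.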
    for line in lines {
        let first_token: &Token = line.first().unwrap();

        if first_token.token_type == TokenType::LeftGroupParenthesis {
            match analyse_test_group(&line, &group_state_machine, default_test_settings.clone()) {
                Ok(test_suite) => test_suites.push(test_suite),
                Err(err) => return Err(err)
            };

            continue;
        }

        if first_token.token_type == TokenType::LeftTestParenthesis || first_token.token_type == TokenType::FormatSpecifier || first_token.token_type == TokenType::Content {
            match analyse_test(&line, &test_state_machine) {
                Ok(test) => {
                    // Tests that appear before any group header go into an
                    // implicit, unnamed test suite.
                    if test_suites.is_empty() {
                        test_suites.push(TestSuite::new(String::new()));
                    }

                    let test_suite: &mut TestSuite = test_suites.last_mut().unwrap();
                    test_suite.push(test);
                }
                Err(err) => return Err(err)
            };

            continue;
        }

        return Err(Error::InvalidLineStart(first_token.line, first_token.column));
    }

    Ok(test_suites)
}

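/// Validates a group header line against the group state machine and builds a
/// `TestSuite` from its name and options. The options may override both the
/// suite settings and the default settings for the tests it contains.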
fn analyse_test_group(tokens: &Vec<Token>, state_machine: &FiniteStateMachine, default_test_settings: TestCaseSettings) -> Result<TestSuite, Error> {
    let result = state_machine.run(&tokens);

    if let Err((state, token)) = result {
        return match state {
            2 => Err(Error::MissingGroupIdentifier(token.line, token.column)),
            3 => Err(Error::MissingClosingParenthesis("]".to_string(), token.line, token.column)),
            5 => Err(Error::MissingOptionIdentifier(token.line, token.column)),
            6 => Err(Error::MissingOptionSeparator(token.line, token.column)),
            7 => Err(Error::MissingOptionValue(token.line, token.column)),
            _ => Err(Error::UnknownError(token.line, token.column))
        };
    }

    let mut index = 1;

    let name = if tokens[1].token_type == TokenType::Identifier {
        index += 1;

        tokens[1].value.clone()
    } else {
        String::new()
    };

    let mut settings = TestSuiteSettings::default();
    let mut test_settings = default_test_settings;

    analyse_group_options(&tokens[index..], &mut settings, &mut test_settings)?;

    let test_suite = TestSuite::new_with_settings(name, settings, test_settings);

    Ok(test_suite)
}

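/// Validates a test line against the test state machine and builds a
/// `TestCase` from its name, options, input and expected output. The expected
/// output has to be a valid regular expression.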
fn analyse_test(tokens: &Vec<Token>, state_machine: &FiniteStateMachine) -> Result<TestCase, Error> {
    let result = state_machine.run(&tokens);

    if let Err((state, token)) = result {
        return match state {
            2 => Err(Error::MissingTestIdentifier(token.line, token.column)),
            3 => Err(Error::MissingClosingParenthesis(")".to_string(), token.line, token.column)),
            4 | 5 => Err(Error::MissingContent("input".to_string(), token.line, token.column)),
            6 => Err(Error::MissingDirectionSeparator(token.line, token.column)),
            7 | 8 => Err(Error::MissingContent("output".to_string(), token.line, token.column)),
            10 => Err(Error::MissingOptionIdentifier(token.line, token.column)),
            11 => Err(Error::MissingOptionSeparator(token.line, token.column)),
            12 => Err(Error::MissingOptionValue(token.line, token.column)),
            _ => Err(Error::UnknownError(token.line, token.column))
        };
    }

    let mut name = String::new();
    let input: String;
    let output: String;
    let mut settings = TestCaseSettings::default();
    let mut input_format: Option<TextFormat> = None;
    let mut output_format: Option<TextFormat> = None;

    let mut index = 0;

    if tokens[index].token_type == TokenType::LeftTestParenthesis {
        if tokens[index + 1].token_type == TokenType::Identifier {
            name = tokens[index + 1].value.clone();
            index += 1;
        }

        index += 1;

        index += analyse_test_options(&tokens[index..], &mut settings)?;
    }

    if tokens[index].token_type == TokenType::FormatSpecifier {
        input_format = Some(get_text_format(&tokens[index])?);
        index += 1;
    }

    input = tokens[index].value.clone();

    // Skip the input content and the direction separator.
    index += 2;

    if tokens[index].token_type == TokenType::FormatSpecifier {
        output_format = Some(get_text_format(&tokens[index])?);
        index += 1;
    }

    output = tokens[index].value.clone();
    if Regex::new(&output).is_err() {
        return Err(Error::InvalidOutputContent(output, tokens[index].line, tokens[index].column));
    }

    let mut test = TestCase::new(name, input, output);
    test.settings = settings;

    if let Some(format) = input_format {
        test.input_format = format;
    }

    if let Some(format) = output_format {
        test.output_format = format;
    }

    Ok(test)
}

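/// Applies the option tokens of a test line to the given test case settings
/// and returns how many tokens the caller has to skip, including the closing
/// test parenthesis.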
fn analyse_test_options(tokens: &[Token], settings: &mut TestCaseSettings) -> Result<usize, Error> {
    let mut index = 0;

    while tokens[index].token_type == TokenType::ContentSeparator {
        // Find the end of the current option, which is either the next
        // content separator or the closing test parenthesis.
        let mut option_length = 1;
        while tokens[index + option_length].token_type != TokenType::ContentSeparator && tokens[index + option_length].token_type != TokenType::RightTestParenthesis {
            option_length += 1;
        }

        let offset = set_test_option(&tokens[index + 1 .. index + option_length], settings)?;

        index += 2 + offset;
    }

    Ok(index + 1)
}

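/// Applies the option tokens of a group header line. Every option is first
/// tried as a test option (updating the default test case settings) and, if it
/// is not a known test option, as a group option (updating the suite
/// settings). Returns how many tokens the caller has to skip.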
fn analyse_group_options(tokens: &[Token], settings: &mut TestSuiteSettings, test_settings: &mut TestCaseSettings) -> Result<usize, Error> {
    let mut index = 0;

    while tokens[index].token_type == TokenType::ContentSeparator {
        // Find the end of the current option, which is either the next
        // content separator or the closing group parenthesis.
        let mut option_length = 1;
        while tokens[index + option_length].token_type != TokenType::ContentSeparator && tokens[index + option_length].token_type != TokenType::RightGroupParenthesis {
            option_length += 1;
        }

        let offset = match set_test_option(&tokens[index + 1 .. index + option_length], test_settings) {
            Ok(offset) => offset,
            Err(err) => {
                match err {
                    Error::UnknownTestOption(_, _, _) => set_group_option(&tokens[index + 1 .. index + option_length], settings)?,
                    _ => return Err(err)
                }
            },
        };

        index += 2 + offset;
    }

    Ok(index + 1)
}

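/// Maps a format specifier token ("b", "o", "d" or "h") to the corresponding
/// text format.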
fn get_text_format(token: &Token) -> Result<TextFormat, Error> {
    match token.value.as_str() {
        "b" => Ok(TextFormat::Binary),
        "o" => Ok(TextFormat::Octal),
        "d" => Ok(TextFormat::Decimal),
        "h" => Ok(TextFormat::Hex),
        _ => Err(Error::UnknownError(token.line, token.column))
    }
}