// tergo_parser/pre_parsing_hooks.rs
use tokenizer::tokens::CommentedToken;
use tokenizer::tokens::Token;
/// Strips comment tokens out of the token stream, attaching them to the
/// tokens they annotate instead.
///
/// * Runs of `Token::Comment` (and the blank lines inside a run, which are
///   recorded as empty-string comments) are collected into the
///   `leading_comments` of the first non-comment, non-newline token that
///   follows the run.
/// * A `Token::InlineComment` is attached as the `inline_comment` of the
///   token immediately preceding it.
///
/// Returns references to the surviving tokens, in their original order, with
/// comment tokens and the newlines absorbed by comment runs filtered out.
pub fn pre_parse<'a>(tokens: &'a mut [CommentedToken<'a>]) -> Vec<&'a CommentedToken<'a>> {
    let mut it = 0;
    let mut tokens_without_comments = vec![];
    while it < tokens.len() {
        if let Token::Comment(comment) = tokens[it].token {
            let mut comments = vec![];
            // A blank line (two consecutive newlines) directly before the
            // comment is preserved as an empty leading comment.
            if it > 1
                && matches!(tokens[it - 1].token, Token::Newline)
                && matches!(tokens[it - 2].token, Token::Newline)
            {
                comments.push("");
            }
            comments.push(comment);
            // Drop the newline tokens that directly precede the comment;
            // the comment run stands in for them in the output.
            while let Some(&last_added) = tokens_without_comments.last() {
                let last_token: &CommentedToken<'a> = &tokens[last_added];
                if matches!(last_token.token, Token::Newline) {
                    tokens_without_comments.pop();
                } else {
                    break;
                }
            }
            // Restore exactly one newline before the comment run. Guard on
            // the previous token actually being a newline: a non-newline
            // token was never popped above, and pushing its index again
            // would duplicate it in the output.
            if it > 0 && matches!(tokens[it - 1].token, Token::Newline) {
                tokens_without_comments.push(it - 1);
            }
            it += 1;
            // Consume the rest of the comment block: further comment lines
            // and the newlines separating them (blank lines inside the block
            // become empty comments). Stop at the first token of any other
            // kind. Bounds-checked so a comment block that runs to the end
            // of the slice cannot index out of bounds.
            while it < tokens.len() {
                match tokens[it].token {
                    Token::Newline => {
                        if matches!(tokens[it - 1].token, Token::Newline) {
                            comments.push("");
                        }
                    }
                    Token::Comment(comment) => comments.push(comment),
                    _ => break,
                }
                it += 1;
            }
            // Attach the collected comments to the token that follows the
            // block. If the stream ended inside the block (no trailing EOF
            // sentinel), there is no token to attach them to — stop rather
            // than panic.
            if it >= tokens.len() {
                break;
            }
            tokens[it].leading_comments = Some(comments);
            tokens_without_comments.push(it);
        } else if let Token::InlineComment(comment) = tokens[it].token {
            // Attach to the preceding token. An inline comment can only
            // meaningfully follow another token; guard `it == 0` so
            // malformed input cannot underflow/panic.
            if it > 0 {
                tokens[it - 1].inline_comment = Some(comment);
            }
        } else {
            tokens_without_comments.push(it);
        }
        it += 1;
    }
    tokens_without_comments
        .into_iter()
        .map(|id| &tokens[id])
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    use tokenizer::tokens::commented_tokens;

    /// A standalone comment followed by a symbol with an inline comment:
    /// only the symbol should survive, carrying both comments.
    #[test]
    fn test_pre_parse() {
        let mut tokens = commented_tokens![
            Token::Comment("Comment"),
            Token::Newline,
            Token::Symbol("7"),
            Token::InlineComment("Inline comment")
        ];
        let commented_tokens = pre_parse(&mut tokens);

        // Comment and newline tokens are filtered out, leaving the symbol.
        assert_eq!(
            commented_tokens.len(),
            1,
            "expected exactly one token after pre-parsing"
        );
        let res_token = commented_tokens[0];
        // The standalone comment becomes the symbol's leading comment...
        assert_eq!(
            res_token.leading_comments,
            Some(vec!["Comment"]),
            "the leading comments do not match"
        );
        // ...and the inline comment is attached to the same token.
        assert_eq!(res_token.inline_comment, Some("Inline comment"));
    }
}