use super::*;
use df_ls_diagnostics::{DMExtraInfo, DiagnosticsInfo};
/// State machine phases for tokenizing a single `[TOKEN]`.
///
/// Each bracket/body step either succeeds (`…Found` / `…Correct`) or fails
/// (`…Error`); later steps only run after the preceding step succeeded.
// `Clone, Copy` added: a fieldless phase enum is trivially copyable, which
// avoids borrow friction when the phase is compared/reassigned repeatedly.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TokenizeTokenPhase {
    /// The opening `[` was matched.
    OpenBracketFound,
    /// No opening `[` could be matched (or EoF was reached first).
    OpenBracketError,
    /// The token body parsed without a tokenizer-ending error.
    TokenBodyCorrect,
    /// The token body ended with a tokenizer-ending error.
    TokenBodyError,
    /// The closing `]` was matched.
    CloseBracketFound,
    /// The closing `]` is missing; a zero-width `]` node is injected.
    CloseBracketError,
}
/// Tokenize one complete token: `[` + body + `]`.
///
/// Creates a `token` node, attaches it to the tree root, and parses the open
/// bracket, the body, and the close bracket in sequence. Each later step only
/// runs if the previous one succeeded (tracked via `TokenizeTokenPhase`).
/// When the body fails or the `]` is missing, a synthetic zero-width `]` node
/// is inserted and a `missing_end_bracket` diagnostic is reported so the tree
/// stays well-formed.
///
/// # Errors
/// Returns the FIRST tokenizer-ending reason encountered (e.g. unexpected
/// EoF), even if recovery allowed later steps to run.
pub(crate) fn tokenize_token(
    tok_help: &mut TokenizerHelper,
    regex_list: &RegexList,
    diagnostics: &mut DiagnosticsInfo,
) -> TokenizerResult {
    let mut current_phase;
    // Only the first end-reason is reported; later errors during recovery are
    // ignored. `get_or_insert` keeps an existing value in place.
    let mut first_tokenizer_end_reason: Option<TokenizerEnd> = None;
    let token = tok_help.create_start_tsnode("token", Some("token"));
    let token_id = token.id;
    let token_open_bracket_result =
        tokenize_open_bracket(tok_help, regex_list, diagnostics, token_id);
    current_phase = match token_open_bracket_result {
        Ok(open_bracket) => {
            // The token node is only attached to the tree once we know a `[`
            // actually starts a token here.
            tok_help.add_node_to_tree(token, ROOT_ID);
            tok_help.add_node_to_tree(open_bracket, token_id);
            TokenizeTokenPhase::OpenBracketFound
        }
        Err(err) => {
            first_tokenizer_end_reason.get_or_insert(err);
            TokenizeTokenPhase::OpenBracketError
        }
    };
    if current_phase == TokenizeTokenPhase::OpenBracketFound {
        let token_body_result =
            token_body::tokenize_token_body(tok_help, regex_list, diagnostics, token_id);
        current_phase = if let Err(err) = token_body_result {
            first_tokenizer_end_reason.get_or_insert(err);
            TokenizeTokenPhase::TokenBodyError
        } else {
            TokenizeTokenPhase::TokenBodyCorrect
        };
    }
    if current_phase == TokenizeTokenPhase::TokenBodyCorrect {
        // A space, `[`, or newline directly after the body means the `]` is
        // missing (a new token or line starts instead).
        if tok_help.check_if_next_char_matches_any_of(&[' ', '[', '\n', '\r']) {
            current_phase = TokenizeTokenPhase::CloseBracketError;
        } else {
            // Fixed mojibake: `®ex_list` (mis-encoded `&r`) -> `&regex_list`.
            let close_bracket =
                tok_help.get_next_match(&regex_list.token_close_bracket, "]", None, false, true);
            current_phase = match close_bracket {
                TokenMatchStatus::Ok(result) => {
                    tok_help.add_node_to_tree(result, token_id);
                    TokenizeTokenPhase::CloseBracketFound
                }
                TokenMatchStatus::OkWithPrefixFound(prefix, result) => {
                    // Unexpected text before `]` is reported but the bracket
                    // still closes the token.
                    utils::handle_prefix(prefix, diagnostics);
                    tok_help.add_node_to_tree(result, token_id);
                    TokenizeTokenPhase::CloseBracketFound
                }
                TokenMatchStatus::NoMatch => TokenizeTokenPhase::CloseBracketError,
                TokenMatchStatus::EoF => {
                    first_tokenizer_end_reason.get_or_insert(TokenizerEnd::UnexpectedEoF);
                    TokenizeTokenPhase::CloseBracketError
                }
            }
        }
    }
    if current_phase == TokenizeTokenPhase::TokenBodyError
        || current_phase == TokenizeTokenPhase::CloseBracketError
    {
        // Recovery: insert a zero-width `]` node so the tree stays closed,
        // and report the missing bracket at that position.
        let close_bracket = tok_help.create_start_tsnode("]", None);
        diagnostics.add_message(
            DMExtraInfo::new(close_bracket.get_range()),
            "missing_end_bracket",
        );
        tok_help.add_node_to_tree(close_bracket, token_id);
    }
    tok_help.set_end_point_for(token_id);
    // Ok(()) unless a tokenizer-ending reason was recorded along the way.
    first_tokenizer_end_reason.map_or(Ok(()), Err)
}
/// Match the opening `[` of a token.
///
/// Returns the matched `[` node on success.
///
/// # Errors
/// Returns `TokenizerEnd::ExpectedEof` when the input ends before a `[` is
/// found — at token start, EoF is a normal way for tokenizing to stop.
///
/// # Panics
/// Panics on a prefix before `[` or on non-`[` content: the caller is
/// expected to have consumed comments/whitespace first, so either case is a
/// tokenizer bug, not a user input error.
fn tokenize_open_bracket(
    tok_help: &mut TokenizerHelper,
    regex_list: &RegexList,
    _diagnostics: &mut DiagnosticsInfo,
    _token_id: u64,
) -> Result<DataNode, TokenizerEnd> {
    // Fixed mojibake: `®ex_list` (mis-encoded `&r`) -> `&regex_list`.
    let open_bracket =
        tok_help.get_next_match(&regex_list.token_open_bracket, "[", None, false, true);
    match open_bracket {
        TokenMatchStatus::Ok(result) => Ok(result),
        TokenMatchStatus::OkWithPrefixFound(prefix, _result) => {
            panic!(
                "Tokenizer error: found `[` with prefix. Prefix is: `{:?}`",
                prefix
            );
        }
        TokenMatchStatus::EoF => Err(TokenizerEnd::ExpectedEof),
        TokenMatchStatus::NoMatch => {
            let point = tok_help.get_point();
            panic!(
                "There is more in the file but it is not a comment, \
                but also not a `[`. Location: {:?}",
                point
            )
        }
    }
}