//! Part of `df_ls_lexical_analysis`, the lexical analysis crate of a language
//! server for Dwarf Fortress RAW files.

use super::*;
use df_ls_diagnostics::{DMExtraInfo, DiagnosticsInfo};

/// The different phases the parser can be in while tokenizing a token.
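/// Happy path: `OpenBracketFound` -> `TokenBodyCorrect` -> `CloseBracketFound`.
/// Any `*Error` phase skips the remaining steps; a body or close bracket error
/// additionally triggers the `missing_end_bracket` recovery in `tokenize_token`.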
#[derive(Debug, PartialEq, Eq)]
enum TokenizeTokenPhase {
    OpenBracketFound,
    OpenBracketError,

    TokenBodyCorrect,
    TokenBodyError,

    CloseBracketFound,
    CloseBracketError,
}

/// Tokenize one token:
/// starting with `[` and everything after it, up to and including the next `]`.
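/// For example, given the raw text `[CREATURE:DWARF]`, this consumes the
/// opening `[`, the body `CREATURE:DWARF`, and the closing `]`.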
pub(crate) fn tokenize_token(
    tok_help: &mut TokenizerHelper,
    regex_list: &RegexList,
    diagnostics: &mut DiagnosticsInfo,
) -> TokenizerResult {
    // Keep track of whether we have had an error so far.
    let mut current_phase;
    let mut first_tokenizer_end_reason: Option<TokenizerEnd> = None;
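    // Only the first failure reason is recorded; later errors do not overwrite it.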
    // start token
    let token = tok_help.create_start_tsnode("token", Some("token"));
    let token_id = token.id;

    // `[`
    let token_open_bracket_result =
        tokenize_open_bracket(tok_help, regex_list, diagnostics, token_id);

    current_phase = match token_open_bracket_result {
        Ok(open_bracket) => {
            // No error, so add the `token` node to the tree.
            tok_help.add_node_to_tree(token, ROOT_ID);
            // Add the open bracket node because it is not empty.
            tok_help.add_node_to_tree(open_bracket, token_id);
            TokenizeTokenPhase::OpenBracketFound
        }
        Err(err) => {
            if first_tokenizer_end_reason.is_none() {
                first_tokenizer_end_reason = Some(err);
            }
            TokenizeTokenPhase::OpenBracketError
        }
    };

    // Token Body
    if current_phase == TokenizeTokenPhase::OpenBracketFound {
        // Tokenize body, and always close/add token.
        let token_body_result =
            token_body::tokenize_token_body(tok_help, regex_list, diagnostics, token_id);

        current_phase = if let Err(err) = token_body_result {
            if first_tokenizer_end_reason.is_none() {
                first_tokenizer_end_reason = Some(err);
            }
            TokenizeTokenPhase::TokenBodyError
        } else {
            TokenizeTokenPhase::TokenBodyCorrect
        };
    }

    // `]`
    if current_phase == TokenizeTokenPhase::TokenBodyCorrect {
        // Now make sure the token is closed correctly.
        // If the next character is ` `, `[`, `\n` or `\r`, the closing bracket is missing.
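        // e.g. in `[CREATURE:DWARF [BODY:...` the space after the token body
        // means the first token was never closed.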
        if tok_help.check_if_next_char_matches_any_of(&[' ', '[', '\n', '\r']) {
            current_phase = TokenizeTokenPhase::CloseBracketError;
        } else {
            // `]`
            let close_bracket =
                tok_help.get_next_match(&regex_list.token_close_bracket, "]", None, false, true);

            current_phase = match close_bracket {
                TokenMatchStatus::Ok(result) => {
                    tok_help.add_node_to_tree(result, token_id);
                    TokenizeTokenPhase::CloseBracketFound
                }
                TokenMatchStatus::OkWithPrefixFound(prefix, result) => {
                    utils::handle_prefix(prefix, diagnostics);
                    tok_help.add_node_to_tree(result, token_id);
                    TokenizeTokenPhase::CloseBracketFound
                }
                TokenMatchStatus::NoMatch => TokenizeTokenPhase::CloseBracketError,
                TokenMatchStatus::EoF => {
                    // This is currently unreachable because of `tokenize_token_arguments`,
                    // but might become reachable in the future.
                    if first_tokenizer_end_reason.is_none() {
                        first_tokenizer_end_reason = Some(TokenizerEnd::UnexpectedEoF);
                    }
                    TokenizeTokenPhase::CloseBracketError
                }
            }
        }
    }

    if current_phase == TokenizeTokenPhase::TokenBodyError
        || current_phase == TokenizeTokenPhase::CloseBracketError
    {
        // The token body failed or the closing bracket is missing.
        // Recover by adding a placeholder `]` node and reporting a diagnostic.
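        // e.g. `[CREATURE:DWARF` followed by a newline still yields a complete
        // `token` subtree, plus a `missing_end_bracket` diagnostic.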
        let close_bracket = tok_help.create_start_tsnode("]", None);
        diagnostics.add_message(
            // No extra template data needed
            DMExtraInfo::new(close_bracket.get_range()),
            "missing_end_bracket",
        );
        tok_help.add_node_to_tree(close_bracket, token_id);
    }

    // end token
    tok_help.set_end_point_for(token_id);
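
    // Resulting subtree for a well-formed `[CREATURE:DWARF]` (sketch):
    //   token
    //   ├── `[`
    //   ├── ...body nodes from `token_body::tokenize_token_body`...
    //   └── `]`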

    match first_tokenizer_end_reason {
        Some(err) => Err(err),
        None => Ok(()),
    }
}

/// Check for an open bracket (`[`).
/// Returns `Ok` if found, or `Err` if EoF is reached.
/// Panics if other characters are found (this should have been checked beforehand).
fn tokenize_open_bracket(
    tok_help: &mut TokenizerHelper,
    regex_list: &RegexList,
    _diagnostics: &mut DiagnosticsInfo,
    _token_id: u64,
) -> Result<DataNode, TokenizerEnd> {
    // This node only needs to be added if it is not empty.
    // `[`
    let open_bracket =
        tok_help.get_next_match(&regex_list.token_open_bracket, "[", None, false, true);
    match open_bracket {
        TokenMatchStatus::Ok(result) => {
            // Return the node; the caller adds it because it is not empty.
            Ok(result)
        }
        TokenMatchStatus::OkWithPrefixFound(prefix, _result) => {
            panic!(
                "Tokenizer error: found `[` with prefix. Prefix is: `{:?}`",
                prefix
            );
        }
        TokenMatchStatus::EoF => {
            // This is valid, so no error message is needed.
            // EoF was found, so no further checking of the file is needed.
            Err(TokenizerEnd::ExpectedEof)
        }
        TokenMatchStatus::NoMatch => {
            let point = tok_help.get_point();
            panic!(
                "There is more in the file but it is not a comment, \
                but also not a `[`. Location: {:?}",
                point
            )
        }
    }
}