//! df_ls_lexical_analysis 0.3.0-rc.1
//!
//! A language server for Dwarf Fortress RAW files.
//! Documentation
use super::*;
use df_ls_diagnostics::DiagnosticsInfo;

/// Tokenize all arguments, everything after the token name.
/// This includes the `:` after the token name.
pub(crate) fn tokenize_token_arguments(
    tok_help: &mut TokenizerHelper,
    regex_list: &RegexList,
    diagnostics: &mut DiagnosticsInfo,
    token_id: u64,
) -> TokenizerResult {
    // Local helper: append a `token_argument_empty` node under `parent_id`.
    fn push_empty_argument(helper: &mut TokenizerHelper, parent_id: u64) {
        let empty_node =
            helper.create_start_tsnode("token_argument_empty", Some("token_argument_empty"));
        helper.add_node_to_tree(empty_node, parent_id);
    }

    // Create the `token_arguments` node up front, but only attach it to the
    // tree once the first separator proves the argument list is non-empty.
    let arguments_node = tok_help.create_start_tsnode("token_arguments", Some("token_arguments"));
    let arguments_id = arguments_node.id;
    let mut attached = false;

    // Consume one `:`-prefixed argument per iteration.
    loop {
        // Bail out before consuming anything if the token is about to close.
        if tok_help.check_if_next_char_matches_any_of(&[']', '\n', '\r']) {
            break;
        }

        // Every argument is introduced by a `:` separator.
        match tok_help.get_next_match(&regex_list.token_separator, ":", None, true, true) {
            TokenMatchStatus::Ok(separator_node) => {
                if !attached {
                    // First separator seen: the list is non-empty, attach it.
                    attached = true;
                    tok_help.add_node_to_tree(arguments_node.clone(), token_id);
                }
                tok_help.add_node_to_tree(separator_node, arguments_id);
            }
            TokenMatchStatus::OkWithPrefixFound(_prefix, _result) => {
                unreachable!("Match is optional");
            }
            // No separator found: the argument list ends here.
            TokenMatchStatus::NoMatch => break,
            TokenMatchStatus::EoF => {
                // Close the `token_arguments` node, but only if it was
                // ever attached to the tree.
                if attached {
                    tok_help.set_end_point_for(arguments_id);
                }
                return TokenizerResult::Err(TokenizerEnd::UnexpectedEoF);
            }
        }

        // A separator immediately followed by a closing char means the last
        // argument is empty; record it and stop.
        if tok_help.check_if_next_char_matches_any_of(&['[', ']', '\n', '\r']) {
            push_empty_argument(tok_help, arguments_id);
            break;
        }

        // A separator immediately followed by another separator means this
        // argument is empty; record it and move on to the next one.
        if tok_help.check_if_next_char_match(':') {
            push_empty_argument(tok_help, arguments_id);
            continue;
        }

        // Tokenize the actual value of the argument.
        let argument_result = token_argument::tokenize_token_argument(
            tok_help,
            regex_list,
            diagnostics,
            arguments_id,
            true,
        );
        if argument_result.is_err() {
            // Reading the value failed (most likely EoF): record an empty
            // argument, close the node and abort.
            push_empty_argument(tok_help, arguments_id);
            tok_help.set_end_point_for(arguments_id);
            return TokenizerResult::Err(TokenizerEnd::UnexpectedEoF);
        }
    }
    // All arguments consumed; close the `token_arguments` node.
    tok_help.set_end_point_for(arguments_id);

    Ok(())
}