//! `df_ls_lexical_analysis` 0.3.0-rc.1
//!
//! A language server for Dwarf Fortress RAW files.
//! (Documentation)
use super::*;
use df_ls_diagnostics::DiagnosticsInfo;

/// Only call this function when we know the cursor is at start of Pipe Argument.
/// For this to happen at least one pipe character must be present.
/// Tokenize the pipe-separated argument list of a token.
///
/// Only call this function when we know the cursor is at the start of a
/// Pipe Argument. For this to happen at least one pipe character must be
/// present.
pub(crate) fn tokenize_token_argument_pipe_arguments(
    tok_help: &mut TokenizerHelper,
    regex_list: &RegexList,
    diagnostics: &mut DiagnosticsInfo,
    token_arguments_id: u64,
) -> TokenizerResult {
    // Mirrors the structure of `tokenize_token_arguments`.
    // Open the `token_argument_pipe_arguments` node.
    let pipe_args_node = tok_help.create_start_tsnode(
        "token_argument_pipe_arguments",
        Some("token_argument_pipe_arguments"),
    );
    let pipe_args_id = pipe_args_node.id;
    tok_help.add_node_to_tree(pipe_args_node, token_arguments_id);

    // `true` while the previous iteration did not consume a real argument,
    // so a separator (or the end of the list) implies an empty argument slot.
    let mut may_insert_empty = true;

    // Consume pipe arguments until the list ends.
    loop {
        // Stop before characters that terminate the argument list.
        if tok_help.check_if_next_char_matches_any_of(&[':', ']', '\n', '\r']) {
            break;
        }

        // Optional separator `|`.
        match tok_help.get_next_match(&regex_list.token_pipe_separator, "|", None, true, true) {
            TokenMatchStatus::Ok(sep_node) => {
                if may_insert_empty {
                    // Two separators in a row: record an empty argument slot.
                    let empty_node = tok_help
                        .create_start_tsnode("token_argument_empty", Some("token_argument_empty"));
                    tok_help.add_node_to_tree(empty_node, pipe_args_id);
                }
                tok_help.add_node_to_tree(sep_node, pipe_args_id);
                may_insert_empty = true;
            }
            TokenMatchStatus::OkWithPrefixFound(_prefix, _result) => {
                unreachable!("Match is optional");
            }
            TokenMatchStatus::NoMatch => {
                // The separator is optional; nothing to do, keep checking.
            }
            TokenMatchStatus::EoF => {
                // Close the node before bailing out on end-of-file.
                tok_help.set_end_point_for(pipe_args_id);
                return TokenizerResult::Err(TokenizerEnd::UnexpectedEoF);
            }
        }

        // Stop before characters that terminate the argument list
        // (here `[` ends it too).
        if tok_help.check_if_next_char_matches_any_of(&[':', '[', ']', '\n', '\r']) {
            if may_insert_empty {
                // Trailing separator: record a final empty argument slot.
                let empty_node = tok_help
                    .create_start_tsnode("token_argument_empty", Some("token_argument_empty"));
                tok_help.add_node_to_tree(empty_node, pipe_args_id);
            }
            break;
        }

        // Another `|` right away means this slot is empty; restart the loop.
        if tok_help.check_if_next_char_match('|') {
            continue;
        }

        // Tokenize the actual value of this argument.
        let argument_result = token_argument::tokenize_token_argument(
            tok_help,
            regex_list,
            diagnostics,
            pipe_args_id,
            false,
        );
        if argument_result.is_err() {
            // Finding the argument returned an error (most likely EoF):
            // record an empty argument slot, close the node, and report EoF.
            let empty_node = tok_help
                .create_start_tsnode("token_argument_empty", Some("token_argument_empty"));
            tok_help.add_node_to_tree(empty_node, pipe_args_id);
            tok_help.set_end_point_for(pipe_args_id);
            return TokenizerResult::Err(TokenizerEnd::UnexpectedEoF);
        }
        may_insert_empty = false;
    }
    // Close the `token_argument_pipe_arguments` node.
    tok_help.set_end_point_for(pipe_args_id);

    Ok(())
}