df_ls_lexical_analysis 0.3.0-rc.1

A language server for Dwarf Fortress RAW files
use super::*;
use df_ls_diagnostics::DiagnosticsInfo;

/// Tokenize the body of a token, i.e. everything between the `[` and `]`:
/// first the token name, then any arguments that follow it.
pub(crate) fn tokenize_token_body(
    tok_help: &mut TokenizerHelper,
    regex_list: &RegexList,
    diagnostics: &mut DiagnosticsInfo,
    token_id: u64,
) -> TokenizerResult {
    // Token Name
    token_name::tokenize_token_name(tok_help, regex_list, diagnostics, token_id)?;

    // Token Arguments
    token_arguments::tokenize_token_arguments(tok_help, regex_list, diagnostics, token_id)
}
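
For context, Dwarf Fortress RAW tokens are written as `[NAME:ARG1:ARG2:...]`, so the body handled above is a colon-separated list whose first element is the token name and whose remaining elements are its arguments. The sketch below is a standalone illustration of that split only; `split_token_body` is a hypothetical helper and is not part of this crate's tokenizer, which tracks positions and diagnostics instead of working on plain strings.

```rust
/// Minimal standalone sketch (not the crate's real tokenizer): split a RAW
/// token body such as "CREATURE:DWARF" into its name and its arguments.
fn split_token_body(body: &str) -> (&str, Vec<&str>) {
    let mut parts = body.split(':');
    // The first segment is the token name; everything after it is an argument.
    let name = parts.next().unwrap_or("");
    let arguments: Vec<&str> = parts.collect();
    (name, arguments)
}

fn main() {
    // `[CREATURE:DWARF]` has the body "CREATURE:DWARF".
    let (name, arguments) = split_token_body("CREATURE:DWARF");
    assert_eq!(name, "CREATURE");
    assert_eq!(arguments, vec!["DWARF"]);
}
```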