use super::*;
use df_ls_diagnostics::{DMExtraInfo, DiagnosticsInfo, Position, Range};
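/// Tokenize the header line at the start of the file: optional leading
/// whitespace, the header token (attached to the tree under `ROOT_ID`), and
/// the newline that terminates the header line. Diagnostics are emitted when
/// the header or the newline is missing.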
pub(crate) fn tokenize_header(
    tok_help: &mut TokenizerHelper,
    regex_list: &RegexList,
    diagnostics: &mut DiagnosticsInfo,
) -> TokenizerResult {
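    // Skip any optional whitespace before the header; the match result is not used.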
    let _optional_line_space =
        tok_help.get_next_match(&regex_list.line_space, "", None, true, true);
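    // Match the header token itself.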
    let header =
        tok_help.get_next_match(&regex_list.header, "header", Some("header"), false, true);
    match header {
        TokenMatchStatus::Ok(result) => {
            tok_help.add_node_to_tree(result, ROOT_ID);
        }
        TokenMatchStatus::OkWithPrefixFound(prefix, result) => {
            utils::handle_prefix(prefix, diagnostics);
            tok_help.add_node_to_tree(result, ROOT_ID);
        }
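        // Any other outcome means the header is missing; report a zero-width
        // diagnostic at the current cursor position.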
        _ => {
            diagnostics.add_message(
                DMExtraInfo::new(Range {
                    start: Position {
                        line: tok_help.get_point().row as u32,
                        character: tok_help.get_point().column as u32,
                    },
                    end: Position {
                        line: tok_help.get_point().row as u32,
                        character: tok_help.get_point().column as u32,
                    },
                }),
                "missing_header",
            );
        }
    }
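    // The header line must be terminated by a newline.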
    let newline = tok_help.get_next_match(&regex_list.newline, "", None, false, true);
    match newline {
        TokenMatchStatus::Ok(_result) => TokenizerResult::Ok(()),
        TokenMatchStatus::OkWithPrefixFound(prefix, _result) => {
            utils::handle_prefix(prefix, diagnostics);
            TokenizerResult::Ok(())
        }
        TokenMatchStatus::EoF => TokenizerResult::Err(TokenizerEnd::ExpectedEof),
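        // Anything else means the newline is missing; report a zero-width
        // diagnostic but still return Ok.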
        _ => {
            diagnostics.add_message(
                DMExtraInfo::new(Range {
                    start: Position {
                        line: tok_help.get_point().row as u32,
                        character: tok_help.get_point().column as u32,
                    },
                    end: Position {
                        line: tok_help.get_point().row as u32,
                        character: tok_help.get_point().column as u32,
                    },
                }),
                "missing_newline",
            );
            TokenizerResult::Ok(())
        }
    }
}